##// END OF EJS Templates
pull-requests: properly save merge failure metadata. Before this change...
marcink -
r4471:0186d5e2 default
parent child Browse files
Show More
@@ -1,5672 +1,5689 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import uuid
33 33 import warnings
34 34 import ipaddress
35 35 import functools
36 36 import traceback
37 37 import collections
38 38
39 39 from sqlalchemy import (
40 40 or_, and_, not_, func, cast, TypeDecorator, event,
41 41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 43 Text, Float, PickleType, BigInteger)
44 44 from sqlalchemy.sql.expression import true, false, case
45 45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 46 from sqlalchemy.orm import (
47 47 relationship, joinedload, class_mapper, validates, aliased)
48 48 from sqlalchemy.ext.declarative import declared_attr
49 49 from sqlalchemy.ext.hybrid import hybrid_property
50 50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 51 from sqlalchemy.dialects.mysql import LONGTEXT
52 52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 53 from pyramid import compat
54 54 from pyramid.threadlocal import get_current_request
55 55 from webhelpers2.text import remove_formatting
56 56
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
59 59 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
60 60 from rhodecode.lib.utils2 import (
61 61 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
62 62 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
63 63 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
64 64 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
65 65 JsonRaw
66 66 from rhodecode.lib.ext_json import json
67 67 from rhodecode.lib.caching_query import FromCache
68 68 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
69 69 from rhodecode.lib.encrypt2 import Encryptor
70 70 from rhodecode.lib.exceptions import (
71 71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 72 from rhodecode.model.meta import Base, Session
73 73
74 74 URL_SEP = '/'
75 75 log = logging.getLogger(__name__)
76 76
77 77 # =============================================================================
78 78 # BASE CLASSES
79 79 # =============================================================================
80 80
# this is propagated from .ini file rhodecode.encrypted_values.secret or
# beaker.session.secret if first is not set.
# and initialized at environment.py
ENCRYPTION_KEY = None

# used to sort permissions by types, '#' used here is not allowed to be in
# usernames, and it's very early in sorted string.printable table.
# NOTE: longer prefix sorts first, so admin < write < read < none in
# ascending sort order (admin entries come out on top).
PERMISSION_TYPE_SORT = {
    'admin': '####',
    'write': '###',
    'read': '##',
    'none': '#',
}
94 94
95 95
def display_user_sort(obj):
    """
    Sort key used to order permission entries in the .permissions()
    functions of Repository, RepoGroup and UserGroup. The default user is
    always placed in front of all other resources.
    """

    if obj.username == User.DEFAULT_USER:
        return '#####'

    perm_suffix = obj.permission.split('.')[-1]
    sort_prefix = PERMISSION_TYPE_SORT.get(perm_suffix, '')

    # NOTE(dan): inactive duplicates goes last
    dup_marker = '9' if getattr(obj, 'duplicate_perm', None) else '1'
    return sort_prefix + dup_marker + obj.username
112 112
113 113
def display_user_group_sort(obj):
    """
    Sort key used to order user-group permission entries in the
    .permissions() functions of Repository, RepoGroup and UserGroup.
    """

    perm_suffix = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_suffix, '') + obj.users_group_name
123 123
124 124
def _hash_key(k):
    # stable sha1 digest of the key; used to build safe SQL-cache keys
    return sha1_safe(k)
127 127
128 128
def in_filter_generator(qry, items, limit=500):
    """
    Splits IN() into multiple with OR
    e.g.::
        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()

    :param qry: column/attribute exposing an ``.in_()`` method
    :param items: values for the IN() clause
    :param limit: maximum number of values per IN() chunk
    :returns: list of ``qry.in_(chunk)`` expressions, meant to be OR-ed
    """
    if not items:
        # empty list will cause empty query which might cause security issues
        # this can lead to hidden unpleasant results
        items = [-1]

    # NOTE: `range` instead of py2-only `xrange`; identical behavior in
    # python2 for these small step counts, and python3-compatible
    return [
        qry.in_(items[chunk: chunk + limit])
        for chunk in range(0, len(items), limit)
    ]
150 150
151 151
# default table arguments shared by every model table; `extend_existing`
# permits re-declaration (tests/migrations), the rest configures the
# MySQL engine/charset and SQLite autoincrement behavior
base_table_args = {
    'extend_existing': True,
    'mysql_engine': 'InnoDB',
    'mysql_charset': 'utf8',
    'sqlite_autoincrement': True
}
158 158
159 159
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        """
        Setter for storing value
        """
        import rhodecode
        if not value:
            return value

        # protect against double encrypting if values is already encrypted
        if value.startswith('enc$aes$') \
                or value.startswith('enc$aes_hmac$') \
                or value.startswith('enc2$'):
            raise ValueError('value needs to be in unencrypted format, '
                             'ie. not starting with enc$ or enc2$')

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        if algo == 'aes':
            return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).encrypt(value)
        else:
            # BUG FIX: the ValueError was previously constructed but never
            # raised, silently storing None for unknown algorithms
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))

    def process_result_value(self, value, dialect):
        """
        Getter for retrieving value
        """

        import rhodecode
        if not value:
            return value

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
        if algo == 'aes':
            decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).decrypt(value)
        else:
            # BUG FIX: the ValueError was previously constructed but never
            # raised, which then fell through to `return decrypted_data`
            # with an unbound local -> NameError
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
        return decrypted_data
212 212
213 213
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        # only keys matching actual mapped columns are applied
        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        # fresh query bound to the current thread-local Session
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        # returns None for a falsy id_ instead of querying
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by primary key or raise HTTP 404 for bad/missing ids."""
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        # marks the object for deletion; the caller must commit the session
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """
        Look up an instance of `cls` already present in the session's
        identity map by attribute value, avoiding a database round-trip.
        Returns None when not found or when the match is ambiguous.
        """
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            # ambiguous match: log loudly, fall through to return None
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
318 318
319 319
class RhodeCodeSetting(Base, BaseModel):
    """
    Global application settings stored as typed key/value rows; values are
    converted on read according to the declared settings type, with
    optional transparent encryption via the '.encrypted' type suffix.
    """
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # converters applied on read, keyed by the declared settings type
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # stored values must already be unicode (py2); the value setter
        # guarantees this via safe_unicode()
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # strip a possible '.encrypted' suffix to get the base type
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # validate against the base type only, allowing e.g. 'unicode.encrypted'
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        # all settings whose name starts with the given prefix
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
404 404
405 405
class RhodeCodeUi(Base, BaseModel):
    """
    VCS ui/hook configuration entries stored as section/key/value rows.
    """
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    # hooks shipped with RhodeCode itself (as opposed to custom ones)
    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
453 453
454 454
class RepoRhodeCodeSetting(Base, BaseModel):
    """
    Per-repository override of global RhodeCodeSetting entries.
    """
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # stored values must already be unicode (py2); the value setter
        # guarantees this via safe_unicode()
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        # convert the raw stored string using the declared settings type
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # NOTE: unlike RhodeCodeSetting, no '.encrypted' type suffix here
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
528 528
529 529
class RepoRhodeCodeUi(Base, BaseModel):
    """
    Per-repository override of global RhodeCodeUi entries.
    """
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
560 560
561 561
562 562 class User(Base, BaseModel):
563 563 __tablename__ = 'users'
564 564 __table_args__ = (
565 565 UniqueConstraint('username'), UniqueConstraint('email'),
566 566 Index('u_username_idx', 'username'),
567 567 Index('u_email_idx', 'email'),
568 568 base_table_args
569 569 )
570 570
571 571 DEFAULT_USER = 'default'
572 572 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
573 573 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
574 574
575 575 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
576 576 username = Column("username", String(255), nullable=True, unique=None, default=None)
577 577 password = Column("password", String(255), nullable=True, unique=None, default=None)
578 578 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
579 579 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
580 580 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
581 581 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
582 582 _email = Column("email", String(255), nullable=True, unique=None, default=None)
583 583 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
584 584 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
585 585 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
586 586
587 587 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
588 588 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
589 589 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
590 590 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
591 591 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
592 592 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
593 593
594 594 user_log = relationship('UserLog')
595 595 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
596 596
597 597 repositories = relationship('Repository')
598 598 repository_groups = relationship('RepoGroup')
599 599 user_groups = relationship('UserGroup')
600 600
601 601 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
602 602 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
603 603
604 604 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
605 605 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
606 606 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
607 607
608 608 group_member = relationship('UserGroupMember', cascade='all')
609 609
610 610 notifications = relationship('UserNotification', cascade='all')
611 611 # notifications assigned to this user
612 612 user_created_notifications = relationship('Notification', cascade='all')
613 613 # comments created by this user
614 614 user_comments = relationship('ChangesetComment', cascade='all')
615 615 # user profile extra info
616 616 user_emails = relationship('UserEmailMap', cascade='all')
617 617 user_ip_map = relationship('UserIpMap', cascade='all')
618 618 user_auth_tokens = relationship('UserApiKeys', cascade='all')
619 619 user_ssh_keys = relationship('UserSshKeys', cascade='all')
620 620
621 621 # gists
622 622 user_gists = relationship('Gist', cascade='all')
623 623 # user pull requests
624 624 user_pull_requests = relationship('PullRequest', cascade='all')
625 625
626 626 # external identities
627 627 external_identities = relationship(
628 628 'ExternalIdentity',
629 629 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
630 630 cascade='all')
631 631 # review rules
632 632 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
633 633
634 634 # artifacts owned
635 635 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')
636 636
637 637 # no cascade, set NULL
638 638 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')
639 639
    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.user_id, self.username)

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are stored lower-cased; empty values normalize to None
        self._email = val.lower() if val else None
651 651
    @hybrid_property
    def first_name(self):
        # HTML-escaped on read, since this is user-provided data
        from rhodecode.lib import helpers as h
        if self.name:
            return h.escape(self.name)
        return self.name

    @hybrid_property
    def last_name(self):
        # HTML-escaped on read, since this is user-provided data
        from rhodecode.lib import helpers as h
        if self.lastname:
            return h.escape(self.lastname)
        return self.lastname
665 665
    @hybrid_property
    def api_key(self):
        """
        Fetch if exist an auth-token with role ALL connected to this user
        """
        # only non-expired tokens qualify; expires == -1 means "never"
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        return user_auth_token

    @api_key.setter
    def api_key(self, val):
        # don't allow to set API key this is deprecated for now
        self._api_key = None
685 685
    @property
    def reviewer_pull_requests(self):
        # pull requests where this user is a reviewer; the PR is eagerly
        # loaded to avoid N+1 queries when callers access it
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()

    @property
    def firstname(self):
        # alias for future
        return self.name
697 697
    @property
    def emails(self):
        # primary email first, then extra emails in creation order
        other = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc()) \
            .all()
        return [self.email] + [x.email for x in other]

    def emails_cached(self):
        # same as `emails`, but backed by the short SQL cache region
        emails = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc())

        emails = emails.options(
            FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id))
        )

        return [self.email] + [x.email for x in emails]
716 716
    @property
    def auth_tokens(self):
        # raw api_key strings of all of this user's tokens
        auth_tokens = self.get_auth_tokens()
        return [x.api_key for x in auth_tokens]

    def get_auth_tokens(self):
        # all tokens for this user, oldest first
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()

    @LazyProperty
    def feed_token(self):
        return self.get_feed_token()

    def get_feed_token(self, cache=True):
        # token with the FEED role; returns a sentinel string when none
        # exists so callers always get a usable value
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        return 'NO_FEED_TOKEN_AVAILABLE'

    @LazyProperty
    def artifact_token(self):
        return self.get_artifact_token()

    def get_artifact_token(self, cache=True):
        # token with the ARTIFACT_DOWNLOAD role; mirrors get_feed_token()
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
        if cache:
            artifacts_tokens = artifacts_tokens.options(
                FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        return 'NO_ARTIFACT_TOKEN_AVAILABLE'
761 761
762 762 @classmethod
763 763 def get(cls, user_id, cache=False):
764 764 if not user_id:
765 765 return
766 766
767 767 user = cls.query()
768 768 if cache:
769 769 user = user.options(
770 770 FromCache("sql_cache_short", "get_users_%s" % user_id))
771 771 return user.get(user_id)
772 772
    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        # all currently valid (non-expired) tokens for `user`; optionally
        # narrowed to `role`, while ROLE_ALL tokens always qualify
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()
782 782
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check `auth_token` against this user's valid (non-expired) tokens.

        Returns True when a token matches and, for repo-scoped tokens,
        its scope equals `scope_repo_id`; otherwise False.
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        # a ROLE_ALL token is accepted regardless of the requested roles
        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # split stored tokens into plaintext ones and hashed/encrypted ones
        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                # repo-scoped token: only valid for the exact repository
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False
839 839
    @property
    def ip_addresses(self):
        # allowed IPs/ranges configured for this user
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]

    @property
    def username_and_name(self):
        return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
848 848
849 849 @property
850 850 def username_or_name_or_email(self):
851 851 full_name = self.full_name if self.full_name is not ' ' else None
852 852 return self.username or full_name or self.email
853 853
    @property
    def full_name(self):
        # NOTE: when both name parts are empty this yields a single space
        return '%s %s' % (self.first_name, self.last_name)

    @property
    def full_name_or_username(self):
        # falls back to username unless BOTH name parts are set
        return ('%s %s' % (self.first_name, self.last_name)
                if (self.first_name and self.last_name) else self.username)

    @property
    def full_contact(self):
        return '%s %s <%s>' % (self.first_name, self.last_name, self.email)

    @property
    def short_contact(self):
        return '%s %s' % (self.first_name, self.last_name)
870 870
    @property
    def is_admin(self):
        """Whether this account carries the super-admin flag (``admin`` column)."""
        return self.admin
874 874
    @property
    def language(self):
        """Preferred UI language stored in the ``user_data`` JSON blob, or None."""
        return self.user_data.get('language')
878 878
879 879 def AuthUser(self, **kwargs):
880 880 """
881 881 Returns instance of AuthUser for this user
882 882 """
883 883 from rhodecode.lib.auth import AuthUser
884 884 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
885 885
886 886 @hybrid_property
887 887 def user_data(self):
888 888 if not self._user_data:
889 889 return {}
890 890
891 891 try:
892 892 return json.loads(self._user_data)
893 893 except TypeError:
894 894 return {}
895 895
896 896 @user_data.setter
897 897 def user_data(self, val):
898 898 if not isinstance(val, dict):
899 899 raise Exception('user_data must be dict, got %s' % type(val))
900 900 try:
901 901 self._user_data = json.dumps(val)
902 902 except Exception:
903 903 log.error(traceback.format_exc())
904 904
905 905 @classmethod
906 906 def get_by_username(cls, username, case_insensitive=False,
907 907 cache=False, identity_cache=False):
908 908 session = Session()
909 909
910 910 if case_insensitive:
911 911 q = cls.query().filter(
912 912 func.lower(cls.username) == func.lower(username))
913 913 else:
914 914 q = cls.query().filter(cls.username == username)
915 915
916 916 if cache:
917 917 if identity_cache:
918 918 val = cls.identity_cache(session, 'username', username)
919 919 if val:
920 920 return val
921 921 else:
922 922 cache_key = "get_user_by_name_%s" % _hash_key(username)
923 923 q = q.options(
924 924 FromCache("sql_cache_short", cache_key))
925 925
926 926 return q.scalar()
927 927
928 928 @classmethod
929 929 def get_by_auth_token(cls, auth_token, cache=False):
930 930 q = UserApiKeys.query()\
931 931 .filter(UserApiKeys.api_key == auth_token)\
932 932 .filter(or_(UserApiKeys.expires == -1,
933 933 UserApiKeys.expires >= time.time()))
934 934 if cache:
935 935 q = q.options(
936 936 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
937 937
938 938 match = q.first()
939 939 if match:
940 940 return match.user
941 941
942 942 @classmethod
943 943 def get_by_email(cls, email, case_insensitive=False, cache=False):
944 944
945 945 if case_insensitive:
946 946 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
947 947
948 948 else:
949 949 q = cls.query().filter(cls.email == email)
950 950
951 951 email_key = _hash_key(email)
952 952 if cache:
953 953 q = q.options(
954 954 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
955 955
956 956 ret = q.scalar()
957 957 if ret is None:
958 958 q = UserEmailMap.query()
959 959 # try fetching in alternate email map
960 960 if case_insensitive:
961 961 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
962 962 else:
963 963 q = q.filter(UserEmailMap.email == email)
964 964 q = q.options(joinedload(UserEmailMap.user))
965 965 if cache:
966 966 q = q.options(
967 967 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
968 968 ret = getattr(q.scalar(), 'user', None)
969 969
970 970 return ret
971 971
972 972 @classmethod
973 973 def get_from_cs_author(cls, author):
974 974 """
975 975 Tries to get User objects out of commit author string
976 976
977 977 :param author:
978 978 """
979 979 from rhodecode.lib.helpers import email, author_name
980 980 # Valid email in the attribute passed, see if they're in the system
981 981 _email = email(author)
982 982 if _email:
983 983 user = cls.get_by_email(_email, case_insensitive=True)
984 984 if user:
985 985 return user
986 986 # Maybe we can match by username?
987 987 _author = author_name(author)
988 988 user = cls.get_by_username(_author, case_insensitive=True)
989 989 if user:
990 990 return user
991 991
992 992 def update_userdata(self, **kwargs):
993 993 usr = self
994 994 old = usr.user_data
995 995 old.update(**kwargs)
996 996 usr.user_data = old
997 997 Session().add(usr)
998 998 log.debug('updated userdata with %s', kwargs)
999 999
    def update_lastlogin(self):
        """Set ``last_login`` to now and stage this user in the session."""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)
1005 1005
    def update_password(self, new_password):
        """Store ``new_password`` hashed with the configured crypto backend."""
        from rhodecode.lib.auth import get_crypt_password

        self.password = get_crypt_password(new_password)
        Session().add(self)
1011 1011
1012 1012 @classmethod
1013 1013 def get_first_super_admin(cls):
1014 1014 user = User.query()\
1015 1015 .filter(User.admin == true()) \
1016 1016 .order_by(User.user_id.asc()) \
1017 1017 .first()
1018 1018
1019 1019 if user is None:
1020 1020 raise Exception('FATAL: Missing administrative account!')
1021 1021 return user
1022 1022
1023 1023 @classmethod
1024 1024 def get_all_super_admins(cls, only_active=False):
1025 1025 """
1026 1026 Returns all admin accounts sorted by username
1027 1027 """
1028 1028 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1029 1029 if only_active:
1030 1030 qry = qry.filter(User.active == true())
1031 1031 return qry.all()
1032 1032
1033 1033 @classmethod
1034 1034 def get_all_user_ids(cls, only_active=True):
1035 1035 """
1036 1036 Returns all users IDs
1037 1037 """
1038 1038 qry = Session().query(User.user_id)
1039 1039
1040 1040 if only_active:
1041 1041 qry = qry.filter(User.active == true())
1042 1042 return [x.user_id for x in qry]
1043 1043
1044 1044 @classmethod
1045 1045 def get_default_user(cls, cache=False, refresh=False):
1046 1046 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1047 1047 if user is None:
1048 1048 raise Exception('FATAL: Missing default account!')
1049 1049 if refresh:
1050 1050 # The default user might be based on outdated state which
1051 1051 # has been loaded from the cache.
1052 1052 # A call to refresh() ensures that the
1053 1053 # latest state from the database is used.
1054 1054 Session().refresh(user)
1055 1055 return user
1056 1056
    @classmethod
    def get_default_user_id(cls):
        """Return the id of the default (anonymous) user kept in the
        global application CONFIG."""
        import rhodecode
        return rhodecode.CONFIG['default_user_id']
1061 1061
    def _get_default_perms(self, user, suffix=''):
        # helper: extract the 'default' permission entries out of the given
        # user's permission rows via the PermissionModel
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)
1065 1065
    def get_default_perms(self, suffix=''):
        # public wrapper applying the helper to this user
        return self._get_default_perms(self, suffix)
1068 1068
1069 1069 def get_api_data(self, include_secrets=False, details='full'):
1070 1070 """
1071 1071 Common function for generating user related data for API
1072 1072
1073 1073 :param include_secrets: By default secrets in the API data will be replaced
1074 1074 by a placeholder value to prevent exposing this data by accident. In case
1075 1075 this data shall be exposed, set this flag to ``True``.
1076 1076
1077 1077 :param details: details can be 'basic|full' basic gives only a subset of
1078 1078 the available user information that includes user_id, name and emails.
1079 1079 """
1080 1080 user = self
1081 1081 user_data = self.user_data
1082 1082 data = {
1083 1083 'user_id': user.user_id,
1084 1084 'username': user.username,
1085 1085 'firstname': user.name,
1086 1086 'lastname': user.lastname,
1087 1087 'description': user.description,
1088 1088 'email': user.email,
1089 1089 'emails': user.emails,
1090 1090 }
1091 1091 if details == 'basic':
1092 1092 return data
1093 1093
1094 1094 auth_token_length = 40
1095 1095 auth_token_replacement = '*' * auth_token_length
1096 1096
1097 1097 extras = {
1098 1098 'auth_tokens': [auth_token_replacement],
1099 1099 'active': user.active,
1100 1100 'admin': user.admin,
1101 1101 'extern_type': user.extern_type,
1102 1102 'extern_name': user.extern_name,
1103 1103 'last_login': user.last_login,
1104 1104 'last_activity': user.last_activity,
1105 1105 'ip_addresses': user.ip_addresses,
1106 1106 'language': user_data.get('language')
1107 1107 }
1108 1108 data.update(extras)
1109 1109
1110 1110 if include_secrets:
1111 1111 data['auth_tokens'] = user.auth_tokens
1112 1112 return data
1113 1113
    def __json__(self):
        """Serializable dict: display helpers merged with the full API data."""
        data = {
            'full_name': self.full_name,
            'full_name_or_username': self.full_name_or_username,
            'short_contact': self.short_contact,
            'full_contact': self.full_contact,
        }
        data.update(self.get_api_data())
        return data
1123 1123
1124 1124
class UserApiKeys(Base, BaseModel):
    """Authentication token attached to a user.

    A token carries a role (what it may be used for), an expiry timestamp
    and an optional scope restricting it to one repository or repository
    group.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_HTTP = 'token_role_http'
    ROLE_FEED = 'token_role_feed'
    ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
    # The last one is ignored in the list as we only
    # use it for one action, and cannot be created by users
    ROLE_PASSWORD_RESET = 'token_password_reset'

    ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # -1 means the token never expires; otherwise a unix timestamp
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        """Serializable dict; note the raw token is exposed here —
        callers wanting obfuscation should use get_api_data()."""
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        """API representation; the token value is obfuscated unless
        ``include_secrets`` is True."""
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe for template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        """True when the token's expiry timestamp has passed (-1 = never)."""
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # short translated label for a role; unknown roles pass through
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
        }.get(role, role)

    @classmethod
    def _get_role_description(cls, role):
        # long translated description for a role; unknown roles pass through
        return {
            cls.ROLE_ALL: _('Token for all actions.'),
            cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
                             'login using `api_access_controllers_whitelist` functionality.'),
            cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
                            'Requires auth_token authentication plugin to be active. <br/>'
                            'Such Token should be used then instead of a password to '
                            'interact with a repository, and additionally can be '
                            'limited to single repository using repo scope.'),
            cls.ROLE_API: _('Token limited to api calls.'),
            cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
        }.get(role, role)

    @property
    def role_humanized(self):
        """Translated short label for this token's role."""
        return self._get_role_name(self.role)

    def _get_scope(self):
        # repo scope wins over repo-group scope; no scope means global
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        """Human-readable description of the token's scope."""
        return self._get_scope()

    @property
    def token_obfuscated(self):
        """First four characters of the token followed by ``****``;
        None when no token is set."""
        if self.api_key:
            return self.api_key[:4] + "****"
1245 1245
1246 1246
class UserEmailMap(Base, BaseModel):
    """Additional (alternate) email address attached to a user account.

    Emails stored here must not collide with any primary ``User.email``;
    values are normalized to lowercase on assignment.
    """
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        """Reject emails that already exist as a primary user email.

        :raises AttributeError: when ``email`` is taken in the users table
        """
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # fix: message previously read "is present is user table"
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        """Lower-cased alternate email (backed by the ``email`` column)."""
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lowercase; falsy values collapse to None
        self._email = val.lower() if val else None
1276 1276
1277 1277
class UserIpMap(Base, BaseModel):
    """Per-user IP whitelist entry (single address or CIDR range)."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe for template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        # expand an address or CIDR into its [network, broadcast] bounds;
        # strict=False accepts host bits set in the network part
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        """Serializable dict with the raw address and its expanded range."""
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
1312 1312
1313 1313
class UserSshKeys(Base, BaseModel):
    """Public SSH key registered for a user; the fingerprint is unique
    across the whole instance."""
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # timestamp of the last successful use of this key, None if never used
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        """Serializable dict; the key material itself is not exposed."""
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        """API representation — same as __json__()."""
        data = self.__json__()
        return data
1348 1348
1349 1349
class UserLog(Base, BaseModel):
    """Audit-log entry recording a user action, optionally tied to a
    repository.

    User and repository references use ``ondelete='SET NULL'`` so log rows
    survive deletion of their subjects; the denormalized ``username`` /
    ``repository_name`` columns keep the entry readable afterwards.
    """
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # schema versions of the action_data payload
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        """Serializable dict of the audit entry (without the JSON payloads)."""
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        """Alias for the primary key, used by the audit-log views."""
        return self.user_log_id

    @property
    def action_as_day(self):
        """The calendar day (``datetime.date``) the action happened on."""
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1398 1398
1399 1399
class UserGroup(Base, BaseModel):
    """Named group of users, usable as a permission and review-rule target.

    ``group_data`` is a JSON blob column; groups synced from an external
    authentication source carry an ``extern_type`` key there.
    """
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data

    members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    # NOTE(review): the trailing space inside 'UserGroupUserGroupToPerm ' is
    # harmless — relationship() strings are evaluated as Python expressions —
    # but looks accidental; confirm before "fixing"
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")

    @classmethod
    def _load_group_data(cls, column):
        """Decode the JSON ``group_data`` column; {} on empty or
        undecodable (TypeError) input."""
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe for template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        """Deserialized ``group_data`` JSON blob as a dict."""
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        # SQL-side expression: expose the raw column for queries
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        """Serialize ``val`` into the JSON column; errors are logged only."""
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        # 'extern_type' marks groups synced from an external auth source
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        """External auth source this group is synced from, or None."""
        return self._load_sync(self.group_data)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Fetch a user group by name, or None.

        :param cache: use the short SQL cache for the query
        :param case_insensitive: compare names lower-cased
        """
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a user group by primary key; None for falsy ids."""
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups

        Returns combined rows for the owner, super-admins, directly
        granted users and (optionally) members of permitted user groups.

        NOTE(review): when called with ``with_owner=False`` the later
        ``owner_row[0]`` accesses raise IndexError — presumably callers
        always keep the default; verify before relying on that flag.
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """User groups granted a permission on this group, sorted for
        display; with ``with_members`` each entry carries its member dicts."""
        q = UserGroupUserGroupToPerm.query()\
            .filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.user_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.user_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        # helper: extract the 'default' permission entries of this group
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        # public wrapper applying the helper to this group
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.

        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1605 1605
1606 1606
class UserGroupMember(Base, BaseModel):
    """Association row linking a ``User`` to a ``UserGroup``."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # NOTE(review): defaults are empty strings rather than None —
        # presumably kept for legacy callers; confirm before changing
        self.users_group_id = gr_id
        self.user_id = u_id
1623 1623
1624 1624
class RepositoryField(Base, BaseModel):
    """Custom extra field (key/label/value/description) attached to a
    repository; one value per (repository, key) pair."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'), # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """Field key with the form prefix applied, e.g. ``ex_my_key``."""
        # use the class-level PREFIX constant instead of a duplicated
        # 'ex_' literal, keeping it in sync with un_prefix_key()
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form PREFIX from ``key`` when present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for (``repo``, ``key``) or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1661 1661
1662 1662
class Repository(Base, BaseModel):
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # clone URI templates rendered by clone_url()
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # lifecycle states stored in repo_state
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # reasons recorded in the lock tuple (see `locked` property)
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # backing column for the `repo_name` hybrid property
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    # sha1 of repo_name, maintained by the repo_name setter
    repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # backing column for the `landing_rev` hybrid, stored as '<type>:<rev>'
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # backing column for `locked`, stored as '<user_id>:<timestamp>:<reason>'
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")

    # no cascade, set NULL
    artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
1772 1772
    def __unicode__(self):
        # Python 2 repr used in logs: <Repository('<id>:<name>')>
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
                                   safe_unicode(self.repo_name))

    @hybrid_property
    def description_safe(self):
        """Repository description, HTML-escaped for safe template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)
1781 1781
1782 1782 @hybrid_property
1783 1783 def landing_rev(self):
1784 1784 # always should return [rev_type, rev], e.g ['branch', 'master']
1785 1785 if self._landing_revision:
1786 1786 _rev_info = self._landing_revision.split(':')
1787 1787 if len(_rev_info) < 2:
1788 1788 _rev_info.insert(0, 'rev')
1789 1789 return [_rev_info[0], _rev_info[1]]
1790 1790 return [None, None]
1791 1791
    @property
    def landing_ref_type(self):
        """The type half of landing_rev, e.g. 'branch'."""
        return self.landing_rev[0]

    @property
    def landing_ref_name(self):
        """The name half of landing_rev, e.g. 'master'."""
        return self.landing_rev[1]
1799 1799
1800 1800 @landing_rev.setter
1801 1801 def landing_rev(self, val):
1802 1802 if ':' not in val:
1803 1803 raise ValueError('value must be delimited with `:` and consist '
1804 1804 'of <rev_type>:<rev>, got %s instead' % val)
1805 1805 self._landing_revision = val
1806 1806
1807 1807 @hybrid_property
1808 1808 def locked(self):
1809 1809 if self._locked:
1810 1810 user_id, timelocked, reason = self._locked.split(':')
1811 1811 lock_values = int(user_id), timelocked, reason
1812 1812 else:
1813 1813 lock_values = [None, None, None]
1814 1814 return lock_values
1815 1815
1816 1816 @locked.setter
1817 1817 def locked(self, val):
1818 1818 if val and isinstance(val, (list, tuple)):
1819 1819 self._locked = ':'.join(map(str, val))
1820 1820 else:
1821 1821 self._locked = None
1822 1822
1823 1823 @classmethod
1824 1824 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1825 1825 from rhodecode.lib.vcs.backends.base import EmptyCommit
1826 1826 dummy = EmptyCommit().__json__()
1827 1827 if not changeset_cache_raw:
1828 1828 dummy['source_repo_id'] = repo_id
1829 1829 return json.loads(json.dumps(dummy))
1830 1830
1831 1831 try:
1832 1832 return json.loads(changeset_cache_raw)
1833 1833 except TypeError:
1834 1834 return dummy
1835 1835 except Exception:
1836 1836 log.error(traceback.format_exc())
1837 1837 return dummy
1838 1838
    @hybrid_property
    def changeset_cache(self):
        """Decoded last-commit cache dict (see _load_changeset_cache)."""
        return self._load_changeset_cache(self.repo_id, self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        # best-effort: serialization failures are logged, not raised
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
1849 1849
    @hybrid_property
    def repo_name(self):
        """Full repository name including any group path."""
        return self._repo_name

    @repo_name.setter
    def repo_name(self, value):
        # keep the indexed sha1 hash in sync with the name
        self._repo_name = value
        self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1858 1858
1859 1859 @classmethod
1860 1860 def normalize_repo_name(cls, repo_name):
1861 1861 """
1862 1862 Normalizes os specific repo_name to the format internally stored inside
1863 1863 database using URL_SEP
1864 1864
1865 1865 :param cls:
1866 1866 :param repo_name:
1867 1867 """
1868 1868 return cls.NAME_SEP.join(repo_name.split(os.sep))
1869 1869
    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        """
        Look up a Repository by exact name.

        :param cache: use the short SQL cache region for the query
        :param identity_cache: prefer the session identity-map cache; falls
            through to a plain query when nothing is cached
        """
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
1886 1886
1887 1887 @classmethod
1888 1888 def get_by_id_or_repo_name(cls, repoid):
1889 1889 if isinstance(repoid, (int, long)):
1890 1890 try:
1891 1891 repo = cls.get(repoid)
1892 1892 except ValueError:
1893 1893 repo = None
1894 1894 else:
1895 1895 repo = cls.get_by_repo_name(repoid)
1896 1896 return repo
1897 1897
1898 1898 @classmethod
1899 1899 def get_by_full_path(cls, repo_full_path):
1900 1900 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1901 1901 repo_name = cls.normalize_repo_name(repo_name)
1902 1902 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1903 1903
    @classmethod
    def get_repo_forks(cls, repo_id):
        """Query of repositories forked from ``repo_id``."""
        return cls.query().filter(Repository.fork_id == repo_id)

    @classmethod
    def base_path(cls):
        """
        Returns base path where all repos are stored

        :param cls:
        """
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1919 1919
1920 1920 @classmethod
1921 1921 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1922 1922 case_insensitive=True, archived=False):
1923 1923 q = Repository.query()
1924 1924
1925 1925 if not archived:
1926 1926 q = q.filter(Repository.archived.isnot(true()))
1927 1927
1928 1928 if not isinstance(user_id, Optional):
1929 1929 q = q.filter(Repository.user_id == user_id)
1930 1930
1931 1931 if not isinstance(group_id, Optional):
1932 1932 q = q.filter(Repository.group_id == group_id)
1933 1933
1934 1934 if case_insensitive:
1935 1935 q = q.order_by(func.lower(Repository.repo_name))
1936 1936 else:
1937 1937 q = q.order_by(Repository.repo_name)
1938 1938
1939 1939 return q.all()
1940 1940
    @property
    def repo_uid(self):
        """Stable id-based identifier, e.g. '_42' (used in by-id clone URLs)."""
        return '_{}'.format(self.repo_id)

    @property
    def forks(self):
        """
        Return forks of this repo
        """
        return Repository.get_repo_forks(self.repo_id)

    @property
    def parent(self):
        """
        Returns fork parent
        """
        return self.fork

    @property
    def just_name(self):
        """Last path segment of the repo name, without group prefixes."""
        return self.repo_name.split(self.NAME_SEP)[-1]
1962 1962
1963 1963 @property
1964 1964 def groups_with_parents(self):
1965 1965 groups = []
1966 1966 if self.group is None:
1967 1967 return groups
1968 1968
1969 1969 cur_gr = self.group
1970 1970 groups.insert(0, cur_gr)
1971 1971 while 1:
1972 1972 gr = getattr(cur_gr, 'parent_group', None)
1973 1973 cur_gr = cur_gr.parent_group
1974 1974 if gr is None:
1975 1975 break
1976 1976 groups.insert(0, gr)
1977 1977
1978 1978 return groups
1979 1979
    @property
    def groups_and_repo(self):
        """Tuple of (parent group chain, this repository)."""
        return self.groups_with_parents, self

    @LazyProperty
    def repo_path(self):
        """
        Returns base full path for that repository means where it actually
        exists on a filesystem
        """
        q = Session().query(RhodeCodeUi).filter(
            RhodeCodeUi.ui_key == self.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1994 1994
1995 1995 @property
1996 1996 def repo_full_path(self):
1997 1997 p = [self.repo_path]
1998 1998 # we need to split the name by / since this is how we store the
1999 1999 # names in the database, but that eventually needs to be converted
2000 2000 # into a valid system path
2001 2001 p += self.repo_name.split(self.NAME_SEP)
2002 2002 return os.path.join(*map(safe_unicode, p))
2003 2003
    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo
        """
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        return CacheKey.query()\
            .filter(CacheKey.cache_args == invalidation_namespace)\
            .order_by(CacheKey.cache_key)\
            .all()
2015 2015
2016 2016 @property
2017 2017 def cached_diffs_relative_dir(self):
2018 2018 """
2019 2019 Return a relative to the repository store path of cached diffs
2020 2020 used for safe display for users, who shouldn't know the absolute store
2021 2021 path
2022 2022 """
2023 2023 return os.path.join(
2024 2024 os.path.dirname(self.repo_name),
2025 2025 self.cached_diffs_dir.split(os.path.sep)[-1])
2026 2026
2027 2027 @property
2028 2028 def cached_diffs_dir(self):
2029 2029 path = self.repo_full_path
2030 2030 return os.path.join(
2031 2031 os.path.dirname(path),
2032 2032 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
2033 2033
2034 2034 def cached_diffs(self):
2035 2035 diff_cache_dir = self.cached_diffs_dir
2036 2036 if os.path.isdir(diff_cache_dir):
2037 2037 return os.listdir(diff_cache_dir)
2038 2038 return []
2039 2039
2040 2040 def shadow_repos(self):
2041 2041 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
2042 2042 return [
2043 2043 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2044 2044 if x.startswith(shadow_repos_pattern)]
2045 2045
2046 2046 def get_new_name(self, repo_name):
2047 2047 """
2048 2048 returns new full repository name based on assigned group and new new
2049 2049
2050 2050 :param group_name:
2051 2051 """
2052 2052 path_prefix = self.group.full_path_splitted if self.group else []
2053 2053 return self.NAME_SEP.join(path_prefix + [repo_name])
2054 2054
    @property
    def _config(self):
        """
        Returns db based config object.
        """
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False, repo=self)
2062 2062
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Return permission rows for this repository: super-admin rows, the
        owner row, explicit user permissions, and (optionally) rows expanded
        from user-group memberships. Rows are AttributeDict copies, not live
        ORM objects.

        :param with_admins: include super-admin rows
        :param with_owner: include the owner row
        :param expand_from_user_groups: also emit one row per user-group member
        """
        _admin_perm = 'repository.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            usr.permission_id = None
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    usr.permission_id = None
                    super_admin_rows.append(usr)

        q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
        q = q.options(joinedload(UserRepoToPerm.repository),
                      joinedload(UserRepoToPerm.user),
                      joinedload(UserRepoToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            # also check if this permission is maybe used by branch_permissions
            if _usr.branch_perm_entry:
                usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]

            usr.permission = _usr.permission.permission_name
            usr.permission_id = _usr.repo_to_perm_id
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
2129 2129
    def permission_user_groups(self, with_members=True):
        """
        Return user-group permission rows for this repository as
        AttributeDict copies, sorted for display.

        :param with_members: attach each group's member dicts as ``members``
        """
        q = UserGroupRepoToPerm.query()\
            .filter(UserGroupRepoToPerm.repository == self)
        q = q.options(joinedload(UserGroupRepoToPerm.repository),
                      joinedload(UserGroupRepoToPerm.users_group),
                      joinedload(UserGroupRepoToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows
2148 2148
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # unpack the (user_id, timestamp, reason) lock tuple for the payload
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            # expose extra fields with their 'ex_' prefixed keys
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2197 2197
    @classmethod
    def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
        """Lock ``repo`` for ``user_id`` and commit; defaults to now/automatic."""
        if not lock_time:
            lock_time = time.time()
        if not lock_reason:
            lock_reason = cls.LOCK_AUTOMATIC
        repo.locked = [user_id, lock_time, lock_reason]
        Session().add(repo)
        Session().commit()

    @classmethod
    def unlock(cls, repo):
        """Clear the lock on ``repo`` and commit."""
        repo.locked = None
        Session().add(repo)
        Session().commit()

    @classmethod
    def getlock(cls, repo):
        """Return ``repo``'s lock tuple (see the `locked` property)."""
        return repo.locked
2217 2217
2218 2218 def is_user_lock(self, user_id):
2219 2219 if self.lock[0]:
2220 2220 lock_user_id = safe_int(self.lock[0])
2221 2221 user_id = safe_int(user_id)
2222 2222 # both are ints, and they are equal
2223 2223 return all([lock_user_id, user_id]) and lock_user_id == user_id
2224 2224
2225 2225 return False
2226 2226
2227 2227 def get_locking_state(self, action, user_id, only_when_enabled=True):
2228 2228 """
2229 2229 Checks locking on this repository, if locking is enabled and lock is
2230 2230 present returns a tuple of make_lock, locked, locked_by.
2231 2231 make_lock can have 3 states None (do nothing) True, make lock
2232 2232 False release lock, This value is later propagated to hooks, which
2233 2233 do the locking. Think about this as signals passed to hooks what to do.
2234 2234
2235 2235 """
2236 2236 # TODO: johbo: This is part of the business logic and should be moved
2237 2237 # into the RepositoryModel.
2238 2238
2239 2239 if action not in ('push', 'pull'):
2240 2240 raise ValueError("Invalid action value: %s" % repr(action))
2241 2241
2242 2242 # defines if locked error should be thrown to user
2243 2243 currently_locked = False
2244 2244 # defines if new lock should be made, tri-state
2245 2245 make_lock = None
2246 2246 repo = self
2247 2247 user = User.get(user_id)
2248 2248
2249 2249 lock_info = repo.locked
2250 2250
2251 2251 if repo and (repo.enable_locking or not only_when_enabled):
2252 2252 if action == 'push':
2253 2253 # check if it's already locked !, if it is compare users
2254 2254 locked_by_user_id = lock_info[0]
2255 2255 if user.user_id == locked_by_user_id:
2256 2256 log.debug(
2257 2257 'Got `push` action from user %s, now unlocking', user)
2258 2258 # unlock if we have push from user who locked
2259 2259 make_lock = False
2260 2260 else:
2261 2261 # we're not the same user who locked, ban with
2262 2262 # code defined in settings (default is 423 HTTP Locked) !
2263 2263 log.debug('Repo %s is currently locked by %s', repo, user)
2264 2264 currently_locked = True
2265 2265 elif action == 'pull':
2266 2266 # [0] user [1] date
2267 2267 if lock_info[0] and lock_info[1]:
2268 2268 log.debug('Repo %s is currently locked by %s', repo, user)
2269 2269 currently_locked = True
2270 2270 else:
2271 2271 log.debug('Setting lock on repo %s by %s', repo, user)
2272 2272 make_lock = True
2273 2273
2274 2274 else:
2275 2275 log.debug('Repository %s do not have locking enabled', repo)
2276 2276
2277 2277 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2278 2278 make_lock, currently_locked, lock_info)
2279 2279
2280 2280 from rhodecode.lib.auth import HasRepoPermissionAny
2281 2281 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2282 2282 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2283 2283 # if we don't have at least write permission we cannot make a lock
2284 2284 log.debug('lock state reset back to FALSE due to lack '
2285 2285 'of at least read permission')
2286 2286 make_lock = False
2287 2287
2288 2288 return make_lock, currently_locked, lock_info
2289 2289
    @property
    def last_commit_cache_update_diff(self):
        """Seconds elapsed since the commit cache was last refreshed."""
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @classmethod
    def _load_commit_change(cls, last_commit_cache):
        """Parse the cached commit 'date'; epoch-zero on missing/bad data."""
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = last_commit_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_commit_change(self):
        """Datetime of the last cached commit."""
        return self._load_commit_change(self.changeset_cache)

    @property
    def last_db_change(self):
        """Datetime of the last database-side update of this row."""
        return self.updated_on
2311 2311
2312 2312 @property
2313 2313 def clone_uri_hidden(self):
2314 2314 clone_uri = self.clone_uri
2315 2315 if clone_uri:
2316 2316 import urlobject
2317 2317 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2318 2318 if url_obj.password:
2319 2319 clone_uri = url_obj.with_password('*****')
2320 2320 return clone_uri
2321 2321
2322 2322 @property
2323 2323 def push_uri_hidden(self):
2324 2324 push_uri = self.push_uri
2325 2325 if push_uri:
2326 2326 import urlobject
2327 2327 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2328 2328 if url_obj.password:
2329 2329 push_uri = url_obj.with_password('*****')
2330 2330 return push_uri
2331 2331
    def clone_url(self, **override):
        """
        Render this repo's clone URL from the configured template.

        Recognized override keys: ``with_id`` (use the by-id template),
        ``uri_tmpl`` (explicit template), ``ssh`` (use the SSH template);
        remaining overrides are passed through to get_clone_url().
        """
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        request = get_current_request()
        if not uri_tmpl:
            # prefer per-request cached config; fall back to a settings query
            if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
                rc_config = request.call_context.rc_config
            else:
                rc_config = SettingsModel().get_all_settings(cache=True)

            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH

            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id,
                             repo_type=self.repo_type,
                             **override)
2371 2371
    def set_state(self, state):
        """Set repo_state (one of the STATE_* constants); caller commits."""
        self.repo_state = state
        Session().add(self)
    #==========================================================================
    # SCM PROPERTIES
    #==========================================================================
2378 2378
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False):
        """Safe commit lookup by id or index on the backing scm instance."""
        return get_commit_safe(
            self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable)
2383 2383
2384 2384 def get_changeset(self, rev=None, pre_load=None):
2385 2385 warnings.warn("Use get_commit", DeprecationWarning)
2386 2386 commit_id = None
2387 2387 commit_idx = None
2388 2388 if isinstance(rev, compat.string_types):
2389 2389 commit_id = rev
2390 2390 else:
2391 2391 commit_idx = rev
2392 2392 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2393 2393 pre_load=pre_load)
2394 2394
2395 2395 def get_landing_commit(self):
2396 2396 """
2397 2397 Returns landing commit, or if that doesn't exist returns the tip
2398 2398 """
2399 2399 _rev_type, _rev = self.landing_rev
2400 2400 commit = self.get_commit(_rev)
2401 2401 if isinstance(commit, EmptyCommit):
2402 2402 return self.get_commit()
2403 2403 return commit
2404 2404
    def flush_commit_cache(self):
        """Invalidate the commit cache (write a sentinel), then recompute it."""
        self.update_commit_cache(cs_cache={'raw_id':'0'})
        self.update_commit_cache()
2408 2408
2409 2409 def update_commit_cache(self, cs_cache=None, config=None):
2410 2410 """
2411 2411 Update cache of last commit for repository
2412 2412 cache_keys should be::
2413 2413
2414 2414 source_repo_id
2415 2415 short_id
2416 2416 raw_id
2417 2417 revision
2418 2418 parents
2419 2419 message
2420 2420 date
2421 2421 author
2422 2422 updated_on
2423 2423
2424 2424 """
2425 2425 from rhodecode.lib.vcs.backends.base import BaseChangeset
2426 2426 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2427 2427 empty_date = datetime.datetime.fromtimestamp(0)
2428 2428
2429 2429 if cs_cache is None:
2430 2430 # use no-cache version here
2431 2431 try:
2432 2432 scm_repo = self.scm_instance(cache=False, config=config)
2433 2433 except VCSError:
2434 2434 scm_repo = None
2435 2435 empty = scm_repo is None or scm_repo.is_empty()
2436 2436
2437 2437 if not empty:
2438 2438 cs_cache = scm_repo.get_commit(
2439 2439 pre_load=["author", "date", "message", "parents", "branch"])
2440 2440 else:
2441 2441 cs_cache = EmptyCommit()
2442 2442
2443 2443 if isinstance(cs_cache, BaseChangeset):
2444 2444 cs_cache = cs_cache.__json__()
2445 2445
2446 2446 def is_outdated(new_cs_cache):
2447 2447 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2448 2448 new_cs_cache['revision'] != self.changeset_cache['revision']):
2449 2449 return True
2450 2450 return False
2451 2451
2452 2452 # check if we have maybe already latest cached revision
2453 2453 if is_outdated(cs_cache) or not self.changeset_cache:
2454 2454 _current_datetime = datetime.datetime.utcnow()
2455 2455 last_change = cs_cache.get('date') or _current_datetime
2456 2456 # we check if last update is newer than the new value
2457 2457 # if yes, we use the current timestamp instead. Imagine you get
2458 2458 # old commit pushed 1y ago, we'd set last update 1y to ago.
2459 2459 last_change_timestamp = datetime_to_time(last_change)
2460 2460 current_timestamp = datetime_to_time(last_change)
2461 2461 if last_change_timestamp > current_timestamp and not empty:
2462 2462 cs_cache['date'] = _current_datetime
2463 2463
2464 2464 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2465 2465 cs_cache['updated_on'] = time.time()
2466 2466 self.changeset_cache = cs_cache
2467 2467 self.updated_on = last_change
2468 2468 Session().add(self)
2469 2469 Session().commit()
2470 2470
2471 2471 else:
2472 2472 if empty:
2473 2473 cs_cache = EmptyCommit().__json__()
2474 2474 else:
2475 2475 cs_cache = self.changeset_cache
2476 2476
2477 2477 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2478 2478
2479 2479 cs_cache['updated_on'] = time.time()
2480 2480 self.changeset_cache = cs_cache
2481 2481 self.updated_on = _date_latest
2482 2482 Session().add(self)
2483 2483 Session().commit()
2484 2484
2485 2485 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2486 2486 self.repo_name, cs_cache, _date_latest)
2487 2487
    @property
    def tip(self):
        """Tip commit of the repository."""
        return self.get_commit('tip')

    @property
    def author(self):
        """Author of the tip commit."""
        return self.tip.author

    @property
    def last_change(self):
        """Last-change datetime reported by the scm backend."""
        return self.scm_instance().last_change
2499 2499
2500 2500 def get_comments(self, revisions=None):
2501 2501 """
2502 2502 Returns comments for this repository grouped by revisions
2503 2503
2504 2504 :param revisions: filter query by revisions only
2505 2505 """
2506 2506 cmts = ChangesetComment.query()\
2507 2507 .filter(ChangesetComment.repo == self)
2508 2508 if revisions:
2509 2509 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2510 2510 grouped = collections.defaultdict(list)
2511 2511 for cmt in cmts.all():
2512 2512 grouped[cmt.revision].append(cmt)
2513 2513 return grouped
2514 2514
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository, grouped by revision as
        ``{revision: [status, status_label, pr_id, pr_target_repo_name]}``

        :param revisions: list of revisions to get statuses for
        """
        # only root (version 0) statuses are considered
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # explicit statuses override the implicit under-review defaults above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
2556 2556
2557 2557 # ==========================================================================
2558 2558 # SCM CACHE INSTANCE
2559 2559 # ==========================================================================
2560 2560
2561 2561 def scm_instance(self, **kwargs):
2562 2562 import rhodecode
2563 2563
2564 2564 # Passing a config will not hit the cache currently only used
2565 2565 # for repo2dbmapper
2566 2566 config = kwargs.pop('config', None)
2567 2567 cache = kwargs.pop('cache', None)
2568 2568 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2569 2569 if vcs_full_cache is not None:
2570 2570 # allows override global config
2571 2571 full_cache = vcs_full_cache
2572 2572 else:
2573 2573 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2574 2574 # if cache is NOT defined use default global, else we have a full
2575 2575 # control over cache behaviour
2576 2576 if cache is None and full_cache and not config:
2577 2577 log.debug('Initializing pure cached instance for %s', self.repo_path)
2578 2578 return self._get_instance_cached()
2579 2579
2580 2580 # cache here is sent to the "vcs server"
2581 2581 return self._get_instance(cache=bool(cache), config=config)
2582 2582
    def _get_instance_cached(self):
        """
        Return a vcs backend instance through the long-term repo cache,
        refreshing it when an invalidation signal is pending.
        """
        from rhodecode.lib import rc_cache

        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id, context_id, _cache_state_uid):
            # all three arguments participate in the cache key; only the
            # state uid is actually used to build the instance
            return self._get_instance(repo_state_uid=_cache_state_uid)

        # we must use thread scoped cache here,
        # because each thread of gevent needs its own not shared connection and cache
        # we also alter `args` so the cache key is individual for every green thread.
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.cache_data['cache_state_uid']
            args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)

            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

            log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
            return instance
2613 2613
2614 2614 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2615 2615 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2616 2616 self.repo_type, self.repo_path, cache)
2617 2617 config = config or self._config
2618 2618 custom_wire = {
2619 2619 'cache': cache, # controls the vcs.remote cache
2620 2620 'repo_state_uid': repo_state_uid
2621 2621 }
2622 2622 repo = get_vcs_instance(
2623 2623 repo_path=safe_str(self.repo_full_path),
2624 2624 config=config,
2625 2625 with_wire=custom_wire,
2626 2626 create=False,
2627 2627 _vcs_alias=self.repo_type)
2628 2628 if repo is not None:
2629 2629 repo.count() # cache rebuild
2630 2630 return repo
2631 2631
2632 2632 def get_shadow_repository_path(self, workspace_id):
2633 2633 from rhodecode.lib.vcs.backends.base import BaseRepository
2634 2634 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2635 2635 self.repo_full_path, self.repo_id, workspace_id)
2636 2636 return shadow_repo_path
2637 2637
2638 2638 def __json__(self):
2639 2639 return {'landing_rev': self.landing_rev}
2640 2640
2641 2641 def get_dict(self):
2642 2642
2643 2643 # Since we transformed `repo_name` to a hybrid property, we need to
2644 2644 # keep compatibility with the code which uses `repo_name` field.
2645 2645
2646 2646 result = super(Repository, self).get_dict()
2647 2647 result['repo_name'] = result.pop('_repo_name', None)
2648 2648 return result
2649 2649
2650 2650
class RepoGroup(Base, BaseModel):
    """
    ORM model of a repository group (`groups` table).  Groups nest via
    ``group_parent_id`` and aggregate a commit cache over their contents.
    """
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # raw column behind the `group_name` hybrid property
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    # normalized lookup hash, maintained by the `group_name` setter
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration', cascade="all, delete-orphan")

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')
2681 2681
2682 2682 def __init__(self, group_name='', parent_group=None):
2683 2683 self.group_name = group_name
2684 2684 self.parent_group = parent_group
2685 2685
2686 2686 def __unicode__(self):
2687 2687 return u"<%s('id:%s:%s')>" % (
2688 2688 self.__class__.__name__, self.group_id, self.group_name)
2689 2689
    @hybrid_property
    def group_name(self):
        # public accessor for the raw ``_group_name`` column
        return self._group_name

    @group_name.setter
    def group_name(self, value):
        # keep the lookup hash column in sync with the name
        self._group_name = value
        self.group_name_hash = self.hash_repo_group_name(value)
2698 2698
    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        """
        Deserialize a raw changeset-cache JSON blob; fall back to an
        EmptyCommit-shaped dict when the blob is missing or unreadable.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            # round-trip through JSON so the fallback carries the same value
            # types a real deserialized cache entry would have
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy
2714 2714
    @hybrid_property
    def changeset_cache(self):
        # repo_id is passed as '' here; the stored blob carries its own
        # `source_repo_id` when one was recorded
        return self._load_changeset_cache('', self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            # never fail a save just because the cache could not serialize
            log.error(traceback.format_exc())
2725 2725
2726 2726 @validates('group_parent_id')
2727 2727 def validate_group_parent_id(self, key, val):
2728 2728 """
2729 2729 Check cycle references for a parent group to self
2730 2730 """
2731 2731 if self.group_id and val:
2732 2732 assert val != self.group_id
2733 2733
2734 2734 return val
2735 2735
    @hybrid_property
    def description_safe(self):
        """HTML-escaped group description, safe for rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)
2740 2740
2741 2741 @classmethod
2742 2742 def hash_repo_group_name(cls, repo_group_name):
2743 2743 val = remove_formatting(repo_group_name)
2744 2744 val = safe_str(val).lower()
2745 2745 chars = []
2746 2746 for c in val:
2747 2747 if c not in string.ascii_letters:
2748 2748 c = str(ord(c))
2749 2749 chars.append(c)
2750 2750
2751 2751 return ''.join(chars)
2752 2752
    @classmethod
    def _generate_choice(cls, repo_group):
        """Build a single (group_id, label) select2 choice for *repo_group*."""
        from webhelpers2.html import literal as _literal
        # label is the full path joined by the choices separator
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)
2758 2758
2759 2759 @classmethod
2760 2760 def groups_choices(cls, groups=None, show_empty_group=True):
2761 2761 if not groups:
2762 2762 groups = cls.query().all()
2763 2763
2764 2764 repo_groups = []
2765 2765 if show_empty_group:
2766 2766 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2767 2767
2768 2768 repo_groups.extend([cls._generate_choice(x) for x in groups])
2769 2769
2770 2770 repo_groups = sorted(
2771 2771 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2772 2772 return repo_groups
2773 2773
    @classmethod
    def url_sep(cls):
        # separator used between nested group names in paths/URLs
        return URL_SEP
2777 2777
2778 2778 @classmethod
2779 2779 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2780 2780 if case_insensitive:
2781 2781 gr = cls.query().filter(func.lower(cls.group_name)
2782 2782 == func.lower(group_name))
2783 2783 else:
2784 2784 gr = cls.query().filter(cls.group_name == group_name)
2785 2785 if cache:
2786 2786 name_key = _hash_key(group_name)
2787 2787 gr = gr.options(
2788 2788 FromCache("sql_cache_short", "get_group_%s" % name_key))
2789 2789 return gr.scalar()
2790 2790
2791 2791 @classmethod
2792 2792 def get_user_personal_repo_group(cls, user_id):
2793 2793 user = User.get(user_id)
2794 2794 if user.username == User.DEFAULT_USER:
2795 2795 return None
2796 2796
2797 2797 return cls.query()\
2798 2798 .filter(cls.personal == true()) \
2799 2799 .filter(cls.user == user) \
2800 2800 .order_by(cls.group_id.asc()) \
2801 2801 .first()
2802 2802
2803 2803 @classmethod
2804 2804 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2805 2805 case_insensitive=True):
2806 2806 q = RepoGroup.query()
2807 2807
2808 2808 if not isinstance(user_id, Optional):
2809 2809 q = q.filter(RepoGroup.user_id == user_id)
2810 2810
2811 2811 if not isinstance(group_id, Optional):
2812 2812 q = q.filter(RepoGroup.group_parent_id == group_id)
2813 2813
2814 2814 if case_insensitive:
2815 2815 q = q.order_by(func.lower(RepoGroup.group_name))
2816 2816 else:
2817 2817 q = q.order_by(RepoGroup.group_name)
2818 2818 return q.all()
2819 2819
    @property
    def parents(self, parents_recursion_limit=10):
        """
        Chain of parent groups, outermost first.

        NOTE(review): since this is a property, callers can never pass
        `parents_recursion_limit`; the limit is effectively always 10.
        """
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            # peek one level up before advancing; `gr` is inserted only if
            # it exists and the limit was not reached
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups
2842 2842
    @property
    def last_commit_cache_update_diff(self):
        """Seconds elapsed since the commit cache was last refreshed."""
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @classmethod
    def _load_commit_change(cls, last_commit_cache):
        """Parse the ``date`` entry of a commit-cache dict; epoch on failure."""
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = last_commit_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_commit_change(self):
        """Datetime of the newest cached commit known to this group."""
        return self._load_commit_change(self.changeset_cache)

    @property
    def last_db_change(self):
        """Datetime this database row was last updated."""
        return self.updated_on

    @property
    def children(self):
        """Query over the direct child groups of this group."""
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        """Last path segment of the full group name."""
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        """Full group name, including all parent groups."""
        return self.group_name

    @property
    def full_path_splitted(self):
        """Full group name split into its path segments."""
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        """Query over repositories directly in this group, ordered by name."""
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)
2886 2886
2887 2887 @property
2888 2888 def repositories_recursive_count(self):
2889 2889 cnt = self.repositories.count()
2890 2890
2891 2891 def children_count(group):
2892 2892 cnt = 0
2893 2893 for child in group.children:
2894 2894 cnt += child.repositories.count()
2895 2895 cnt += children_count(child)
2896 2896 return cnt
2897 2897
2898 2898 return cnt + children_count(self)
2899 2899
2900 2900 def _recursive_objects(self, include_repos=True, include_groups=True):
2901 2901 all_ = []
2902 2902
2903 2903 def _get_members(root_gr):
2904 2904 if include_repos:
2905 2905 for r in root_gr.repositories:
2906 2906 all_.append(r)
2907 2907 childs = root_gr.children.all()
2908 2908 if childs:
2909 2909 for gr in childs:
2910 2910 if include_groups:
2911 2911 all_.append(gr)
2912 2912 _get_members(gr)
2913 2913
2914 2914 root_group = []
2915 2915 if include_groups:
2916 2916 root_group = [self]
2917 2917
2918 2918 _get_members(self)
2919 2919 return root_group + all_
2920 2920
    def recursive_groups_and_repos(self):
        """
        Recursively return all groups, with the repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all child groups of this group, including children of children
        """
        return self._recursive_objects(include_repos=False)

    def recursive_repos(self):
        """
        Returns all child repositories of this group, recursively
        """
        return self._recursive_objects(include_groups=False)
2938 2938
2939 2939 def get_new_name(self, group_name):
2940 2940 """
2941 2941 returns new full group name based on parent and new name
2942 2942
2943 2943 :param group_name:
2944 2944 """
2945 2945 path_prefix = (self.parent_group.full_path_splitted if
2946 2946 self.parent_group else [])
2947 2947 return RepoGroup.url_sep().join(path_prefix + [group_name])
2948 2948
    def update_commit_cache(self, config=None):
        """
        Update cache of last commit for newest repository inside this repository group.
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)

        def repo_groups_and_repos(root_gr):
            # direct repositories and direct child groups; child groups carry
            # their own already-aggregated changeset caches
            for _repo in root_gr.repositories:
                yield _repo
            for child_group in root_gr.children.all():
                yield child_group

        # pick the changeset cache with the newest `date` among all members
        latest_repo_cs_cache = {}
        for obj in repo_groups_and_repos(self):
            repo_cs_cache = obj.changeset_cache
            date_latest = latest_repo_cs_cache.get('date', empty_date)
            date_current = repo_cs_cache.get('date', empty_date)
            current_timestamp = datetime_to_time(parse_datetime(date_latest))
            if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                latest_repo_cs_cache = repo_cs_cache
                if hasattr(obj, 'repo_id'):
                    # a repository: record it as the source
                    latest_repo_cs_cache['source_repo_id'] = obj.repo_id
                else:
                    # a child group: propagate the source recorded in its cache
                    latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')

        _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)

        latest_repo_cs_cache['updated_on'] = time.time()
        self.changeset_cache = latest_repo_cs_cache
        self.updated_on = _date_latest
        Session().add(self)
        Session().commit()

        log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
                  self.group_name, latest_repo_cs_cache, _date_latest)
2996 2996
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repository groups

        :param with_admins: include a row for every super-admin
        :param with_owner: include a row for the group owner
        :param expand_from_user_groups: also expand user-group permissions
            into individual member rows

        NOTE(review): with ``with_owner=False`` the ``owner_row[0]`` lookups
        below raise IndexError as soon as admins or user permissions exist —
        confirm callers always pass ``with_owner=True`` in that case.
        """
        _admin_perm = 'group.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
3056 3056
3057 3057 def permission_user_groups(self, with_members=False):
3058 3058 q = UserGroupRepoGroupToPerm.query()\
3059 3059 .filter(UserGroupRepoGroupToPerm.group == self)
3060 3060 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3061 3061 joinedload(UserGroupRepoGroupToPerm.users_group),
3062 3062 joinedload(UserGroupRepoGroupToPerm.permission),)
3063 3063
3064 3064 perm_rows = []
3065 3065 for _user_group in q.all():
3066 3066 entry = AttributeDict(_user_group.users_group.get_dict())
3067 3067 entry.permission = _user_group.permission.permission_name
3068 3068 if with_members:
3069 3069 entry.members = [x.user.get_dict()
3070 3070 for x in _user_group.users_group.members]
3071 3071 perm_rows.append(entry)
3072 3072
3073 3073 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3074 3074 return perm_rows
3075 3075
3076 3076 def get_api_data(self):
3077 3077 """
3078 3078 Common function for generating api data
3079 3079
3080 3080 """
3081 3081 group = self
3082 3082 data = {
3083 3083 'group_id': group.group_id,
3084 3084 'group_name': group.group_name,
3085 3085 'group_description': group.description_safe,
3086 3086 'parent_group': group.parent_group.group_name if group.parent_group else None,
3087 3087 'repositories': [x.repo_name for x in group.repositories],
3088 3088 'owner': group.user.username,
3089 3089 }
3090 3090 return data
3091 3091
3092 3092 def get_dict(self):
3093 3093 # Since we transformed `group_name` to a hybrid property, we need to
3094 3094 # keep compatibility with the code which uses `group_name` field.
3095 3095 result = super(RepoGroup, self).get_dict()
3096 3096 result['group_name'] = result.pop('_group_name', None)
3097 3097 return result
3098 3098
3099 3099
class Permission(Base, BaseModel):
    """
    ORM model of the `permissions` table, plus the catalogue of permission
    names, system defaults, sort weights, and query helpers that resolve a
    user's direct and user-group-derived permissions.
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    # all known permission names with their translated labels
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch, for backward compat we need same value as before so forced pushed
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3219 3219
3220 3220 def __unicode__(self):
3221 3221 return u"<%s('%s:%s')>" % (
3222 3222 self.__class__.__name__, self.permission_id, self.permission_name
3223 3223 )
3224 3224
    @classmethod
    def get_by_key(cls, key):
        """Return the Permission row whose name equals *key*, or None."""
        return cls.query().filter(cls.permission_name == key).scalar()
3228 3228
    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        """
        Direct (per-user) repository permissions for *user_id* as
        (UserRepoToPerm, Repository, Permission) tuples; optionally narrowed
        to a single repository.
        """
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()
3238 3238
    @classmethod
    def get_default_repo_branch_perms(cls, user_id, repo_id=None):
        """
        Direct (per-user) branch permission rules for *user_id*, ordered by
        rule order; optionally narrowed to a single repository.
        """
        q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
            .join(
                Permission,
                UserToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserRepoToPerm,
                UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
            .filter(UserRepoToPerm.user_id == user_id)

        if repo_id:
            q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserToRepoBranchPermission.rule_order).all()
3253 3253
    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Repository permissions granted to *user_id* through active user
        groups; optionally narrowed to a single repository.
        """
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()
3277 3277
    @classmethod
    def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Branch permission rules granted to *user_id* through active user
        groups, ordered by rule order; optionally narrowed to one repository.
        """
        q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
            .join(
                Permission,
                UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserGroupRepoToPerm,
                UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())

        if repo_id:
            q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3300 3300
3301 3301 @classmethod
3302 3302 def get_default_group_perms(cls, user_id, repo_group_id=None):
3303 3303 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3304 3304 .join(
3305 3305 Permission,
3306 3306 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3307 3307 .join(
3308 3308 RepoGroup,
3309 3309 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3310 3310 .filter(UserRepoGroupToPerm.user_id == user_id)
3311 3311 if repo_group_id:
3312 3312 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3313 3313 return q.all()
3314 3314
3315 3315 @classmethod
3316 3316 def get_default_group_perms_from_user_group(
3317 3317 cls, user_id, repo_group_id=None):
3318 3318 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3319 3319 .join(
3320 3320 Permission,
3321 3321 UserGroupRepoGroupToPerm.permission_id ==
3322 3322 Permission.permission_id)\
3323 3323 .join(
3324 3324 RepoGroup,
3325 3325 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3326 3326 .join(
3327 3327 UserGroup,
3328 3328 UserGroupRepoGroupToPerm.users_group_id ==
3329 3329 UserGroup.users_group_id)\
3330 3330 .join(
3331 3331 UserGroupMember,
3332 3332 UserGroupRepoGroupToPerm.users_group_id ==
3333 3333 UserGroupMember.users_group_id)\
3334 3334 .filter(
3335 3335 UserGroupMember.user_id == user_id,
3336 3336 UserGroup.users_group_active == true())
3337 3337 if repo_group_id:
3338 3338 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3339 3339 return q.all()
3340 3340
3341 3341 @classmethod
3342 3342 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3343 3343 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3344 3344 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3345 3345 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3346 3346 .filter(UserUserGroupToPerm.user_id == user_id)
3347 3347 if user_group_id:
3348 3348 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3349 3349 return q.all()
3350 3350
3351 3351 @classmethod
3352 3352 def get_default_user_group_perms_from_user_group(
3353 3353 cls, user_id, user_group_id=None):
3354 3354 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3355 3355 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3356 3356 .join(
3357 3357 Permission,
3358 3358 UserGroupUserGroupToPerm.permission_id ==
3359 3359 Permission.permission_id)\
3360 3360 .join(
3361 3361 TargetUserGroup,
3362 3362 UserGroupUserGroupToPerm.target_user_group_id ==
3363 3363 TargetUserGroup.users_group_id)\
3364 3364 .join(
3365 3365 UserGroup,
3366 3366 UserGroupUserGroupToPerm.user_group_id ==
3367 3367 UserGroup.users_group_id)\
3368 3368 .join(
3369 3369 UserGroupMember,
3370 3370 UserGroupUserGroupToPerm.user_group_id ==
3371 3371 UserGroupMember.users_group_id)\
3372 3372 .filter(
3373 3373 UserGroupMember.user_id == user_id,
3374 3374 UserGroup.users_group_active == true())
3375 3375 if user_group_id:
3376 3376 q = q.filter(
3377 3377 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3378 3378
3379 3379 return q.all()
3380 3380
3381 3381
class UserRepoToPerm(Base, BaseModel):
    """
    Direct permission grant: one user -> one repository -> one permission.
    """
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    # branch rules attached to this grant; removed together with the grant
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')

    @classmethod
    def create(cls, user, repository, permission):
        """Add a new grant to the session (not committed) and return it."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
3411 3411
3412 3412
class UserUserGroupToPerm(Base, BaseModel):
    """
    Direct permission grant: one user -> one user group -> one permission.
    """
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Add a new grant to the session (not committed) and return it."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
3440 3440
3441 3441
class UserToPerm(Base, BaseModel):
    """
    Global (non-object) permission assigned directly to a user.
    """
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
3458 3458
3459 3459
class UserGroupRepoToPerm(Base, BaseModel):
    """
    Permission grant: one user group -> one repository -> one permission.
    """
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')
    # branch rules attached to this grant; deleted together with the grant
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Add a new grant to the session (not committed) and return it."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3488 3488
3489 3489
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Permission grant: one user group -> another user group -> one permission.
    A group cannot hold a permission on itself (see CheckConstraint).
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # explicit primaryjoins required because both FKs point at users_groups
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Add a new grant to the session (not committed) and return it."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3518 3518
3519 3519
class UserGroupToPerm(Base, BaseModel):
    """
    Global (non-object) permission assigned to a user group.
    """
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
3533 3533
3534 3534
class UserRepoGroupToPerm(Base, BaseModel):
    """
    Permission grant: one user -> one repository group -> one permission.
    """
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Add a new grant to the session (not committed) and return it."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
3559 3559
3560 3560
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Permission grant: one user group -> one repository group -> one permission.
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        # NOTE(review): unlike sibling perm tables the unique key omits
        # permission_id, i.e. only one permission per (group, repo-group)
        # pair can exist -- presumably intentional; verify before changing.
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Add a new grant to the session (not committed) and return it."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3588 3588
3589 3589
class Statistics(Base, BaseModel):
    """
    Pre-computed commit statistics for a repository; one row per repository
    (repository_id is unique). The activity/language payloads are stored as
    serialized JSON blobs.
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data

    repository = relationship('Repository', single_parent=True)
3604 3604
3605 3605
class UserFollowing(Base, BaseModel):
    """
    A user following either a repository or another user. Exactly which one
    is followed is determined by whichever of the two nullable FK columns
    (follows_repo_id / follows_user_id) is set.
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # explicit primaryjoins: users table is referenced by both user_id and
    # follows_user_id
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of followings that target repository *repo_id*."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3628 3628
3629 3629
class CacheKey(Base, BaseModel):
    """
    Database-backed cache invalidation records. Each row tracks one cache
    key, a grouping namespace (cache_args) and a state uid that changes on
    every invalidation so all workers can detect stale caches.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
    SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None):
        """
        :param cache_key: full key identifying the cached entry
        :param cache_args: namespace used to group keys for bulk invalidation
        :param cache_state_uid: shared state uid; generated when not given
        """
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around the cache_args namespace marker
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        """
        Return a new state uid: deterministic (uuid5) when *based_on* is
        given, otherwise random (uuid4).
        """
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                # deactivate and rotate the state uid so other workers notice
                qry.update({"cache_active": False,
                            "cache_state_uid": cls.generate_new_state_uid()})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the CacheKey row for *cache_key*, or None if missing."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        """Return {cache_key: CacheKey} for all rows in *namespace*."""
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
3737 3737
3738 3738
class ChangesetComment(Base, BaseModel):
    """
    A comment attached either to a single commit (via `revision`) or to a
    pull request (via `pull_request_id`). Can be a general note or an
    inline comment (line_no + f_path), and a plain note or a TODO.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # display_state value marking a comment as outdated for the current PR version
    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    # immutable_state values: immutable comments cannot be edited/deleted
    OP_IMMUTABLE = u'immutable'
    OP_CHANGEABLE = u'changeable'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)
    immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    # self-referential pair: a TODO comment and the comment(s) resolving it
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')
    history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='joined', order_by='ChangesetCommentHistory.version')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """
        Return the 1-based position of *pr_version* in *versions*, or None
        when not found.
        """
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            # NOTE: list.index raises ValueError only; IndexError is caught
            # defensively and is not expected to occur here
            return num_versions.index(pr_version) + 1
        except (IndexError, ValueError):
            return

    @property
    def outdated(self):
        """True when the comment was flagged outdated by a PR update."""
        return self.display_state == self.COMMENT_OUTDATED

    @property
    def immutable(self):
        """True when the comment can no longer be edited or deleted."""
        return self.immutable_state == self.OP_IMMUTABLE

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            # "latest" version given: any versioned comment is older
            return self.pull_request_version_id is not None

        return self.pull_request_version_id < version

    @property
    def commit_id(self):
        """New style naming to stop using .revision"""
        return self.revision

    @property
    def resolved(self):
        """The comment resolving this TODO, or None when unresolved."""
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        """True when this comment is of TODO type."""
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        """Truthy when the comment is attached to a specific file line."""
        return self.line_no and self.f_path

    @property
    def last_version(self):
        """Highest edit-history version, or 0 when never edited."""
        version = 0
        if self.history:
            version = self.history[-1].version
        return version

    def get_index_version(self, versions):
        """Return this comment's 1-based PR-version index within *versions*."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return a dict representation of the comment for the API layer."""
        comment = self

        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved,
            'comment_commit_id': comment.revision,
            'comment_pull_request_id': comment.pull_request_id,
            'comment_last_version': self.last_version
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3888 3888
3889 3889
class ChangesetCommentHistory(Base, BaseModel):
    """
    Snapshot of a ChangesetComment's text taken each time it is edited;
    one row per edit, ordered by `version`.
    """
    __tablename__ = 'changeset_comments_history'
    __table_args__ = (
        Index('cch_comment_id_idx', 'comment_id'),
        base_table_args,
    )

    comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
    comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
    version = Column("version", Integer(), nullable=False, default=0)
    created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    deleted = Column('deleted', Boolean(), default=False)

    author = relationship('User', lazy='joined')
    comment = relationship('ChangesetComment', cascade="all, delete")

    @classmethod
    def get_version(cls, comment_id):
        """
        Return the next history version number for *comment_id*.

        :returns: 1 when no history exists yet, otherwise one more than the
            larger of (number of history rows, latest stored version).
        """
        q = Session().query(ChangesetCommentHistory).filter(
            ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
        # fetch the newest row and the count once each, instead of the
        # previous version's repeated q.count()/q[0] round-trips
        latest = q.first()
        if latest is None:
            return 1
        num_entries = q.count()
        if num_entries >= latest.version:
            return num_entries + 1
        return latest.version + 1
3918 3918
3919 3919
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) set on a commit or pull request.
    Statuses are versioned per (repo, revision): a new row with a higher
    `version` supersedes older ones.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs used by the UI
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Return the human-readable label for a status *value*, or None."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        """Human-readable label of this row's status."""
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return a dict representation of the status for the API layer."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3982 3982
3983 3983
class _SetState(object):
    """
    Context processor allowing changing state for sensitive operation such as
    pull request update or merge
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        """
        :param pull_request: the PR whose state is temporarily changed
        :param pr_state: state to hold while inside the context
        :param back_state: state to restore on clean exit; defaults to the
            PR's state at entry
        """
        self._pr = pull_request
        self._org_state = back_state or pull_request.pull_request_state
        self._pr_state = pr_state
        self._current_state = None

    def __enter__(self):
        log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
                  self._pr, self._pr_state)
        self.set_pr_state(self._pr_state)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_val is not None:
            # On failure, log and deliberately do NOT restore the original
            # state, so the PR is not wrongly reported as back to normal.
            # Fix: traceback.format_exc() takes an optional *limit*, not a
            # traceback object -- the previous call passed exc_tb as limit
            # by mistake. format_exc() formats the in-flight exception,
            # which is still active while __exit__ runs.
            log.error(traceback.format_exc())
            return None

        self.set_pr_state(self._org_state)
        log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
                  self._pr, self._org_state)

    @property
    def state(self):
        # last state successfully written by set_pr_state
        return self._current_state

    def set_pr_state(self, pr_state):
        """Persist *pr_state* on the PR; re-raises on commit failure."""
        try:
            self._pr.pull_request_state = pr_state
            Session().add(self._pr)
            Session().commit()
            self._current_state = pr_state
        except Exception:
            log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
            raise
4024 4024
4025 4025
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    # available states
    STATE_CREATING = u'creating'
    STATE_UPDATING = u'updating'
    STATE_MERGING = u'merging'
    STATE_CREATED = u'created'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    # one of the STATE_* values above; guards concurrent update/merge
    pull_request_state = Column("pull_request_state", String(255), nullable=True)

    @declared_attr
    def user_id(cls):
        # declared_attr so each concrete subclass gets its own FK column
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    # stored as "type:name:commit_id" — see the setter validation below
    _source_ref = Column('org_ref', Unicode(255), nullable=False)

    @hybrid_property
    def source_ref(self):
        """Raw `type:name:commit_id` string of the source reference."""
        return self._source_ref

    @source_ref.setter
    def source_ref(self, val):
        # enforce the X:Y:Z (type:name:commit_id) serialized format
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._source_ref = safe_unicode(val)

    _target_ref = Column('other_ref', Unicode(255), nullable=False)

    @hybrid_property
    def target_ref(self):
        """Raw `type:name:commit_id` string of the target reference."""
        return self._target_ref

    @target_ref.setter
    def target_ref(self, val):
        # enforce the X:Y:Z (type:name:commit_id) serialized format
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._target_ref = safe_unicode(val)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    # JSON blob with details of the last merge attempt (incl. failures)
    last_merge_metadata = Column(
        'last_merge_metadata', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        """Reviewer rules serialized as a JSON string."""
        return json.dumps(self.reviewer_data)

    @property
    def last_merge_metadata_parsed(self):
        """
        De-coerce the stored merge metadata back into plain python objects,
        re-hydrating serialized `target_ref`/`source_ref` dicts into
        Reference tuples. Returns {} when no metadata was saved.
        """
        metadata = {}
        if not self.last_merge_metadata:
            return metadata

        # MutationObj wraps stored JSON; de_coerce unwraps to plain types
        if hasattr(self.last_merge_metadata, 'de_coerce'):
            for k, v in self.last_merge_metadata.de_coerce().items():
                if k in ['target_ref', 'source_ref']:
                    metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
                else:
                    if hasattr(v, 'de_coerce'):
                        metadata[k] = v.de_coerce()
                    else:
                        metadata[k] = v
        return metadata

    @property
    def work_in_progress(self):
        """checks if pull request is work in progress by checking the title"""
        title = self.title.upper()
        if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
            return True
        return False

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe for template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        """List of commit ids, decoded from the colon-joined storage format."""
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = u':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        # stored as string/int in older rows; normalize to int (or None)
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        # explicit primaryjoin because the FK column keeps its legacy name
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        """Source reference as a Reference(type, name, commit_id) tuple."""
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        """Target reference as a Reference(type, name, commit_id) tuple."""
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    @staticmethod
    def unicode_to_reference(raw):
        """
        Convert a unicode (or string) to a reference object.
        If unicode evaluates to False it returns None.
        """
        if raw:
            refs = raw.split(':')
            return Reference(*refs)
        else:
            return None

    @staticmethod
    def reference_to_unicode(ref):
        """
        Convert a reference object to unicode.
        If reference is None it returns None.
        """
        if ref:
            return u':'.join(ref)
        else:
            return None

    def get_api_data(self, with_merge_state=True):
        """
        Return a dict describing this pull request for the API.

        :param with_merge_state: when True, compute the (potentially
            expensive) merge status via PullRequestModel; otherwise a
            'not_available' placeholder is returned.
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_response, merge_status, msg = \
                PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status,
                'message': safe_unicode(msg),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data

    def set_state(self, pull_request_state, final_state=None):
        """
        # goes from initial state to updating to initial state.
        # initial state can be changed by specifying back_state=
        with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
            pull_request.merge()

        :param pull_request_state:
        :param final_state:

        """

        return _SetState(self, pull_request_state, back_state=final_state)
4302 4319
4303 4320
class PullRequest(Base, _PullRequestBase):
    """The live (current) pull request record."""

    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
    statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
    comments = relationship('ChangesetComment', cascade="all, delete-orphan")
    # lazy='dynamic' so versions can be further filtered/counted as a query
    versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Build a read-only display wrapper around *pull_request_obj*
        (possibly a version), borrowing shadow/reviewer data from
        *org_pull_request_obj* (the original PR).
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                # closes over the wrapped PR object, not self.attrs
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            def is_state_changing(self):
                return pull_request_obj.is_state_changing()

            @property
            def pull_request_version_id(self):
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions
        attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
        # shadow/reviewer data always come from the original PR object
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def is_state_changing(self):
        # anything other than STATE_CREATED means an operation is in flight
        return self.pull_request_state != PullRequest.STATE_CREATED

    def __json__(self):
        return {
            'revisions': self.revisions,
            'versions': self.versions_count
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """Return the shadow repo vcs instance, or None if it doesn't exist on disk."""
        workspace_id = self.workspace_id
        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
        if os.path.isdir(shadow_repository_path):
            vcs_obj = self.target_repo.scm_instance()
            return vcs_obj.get_shadow_instance(shadow_repository_path)

    @property
    def versions_count(self):
        """
        return number of versions this PR have, e.g a PR that once been
        updated will have 2 versions
        """
        return self.versions.count() + 1
4430 4447
4431 4448
class PullRequestVersion(Base, _PullRequestBase):
    """Immutable snapshot of a pull request taken before each update."""

    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # reviewers always belong to the live PR, not the snapshot
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def is_state_changing(self):
        # state is tracked on the live PR only
        return self.pull_request.pull_request_state != PullRequest.STATE_CREATED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
4471 4488
4472 4489
class PullRequestReviewers(Base, BaseModel):
    """Association of a User with a PullRequest, plus review rule metadata."""

    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )

    @hybrid_property
    def reasons(self):
        """List of textual reasons why this reviewer was added; never None."""
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        # stored as a JSON list; only plain strings are allowed
        if any(not isinstance(x, compat.string_types) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    # JSON blob describing the review rule that added this reviewer
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
4530 4547
4531 4548
class Notification(Base, BaseModel):
    """A notification message with per-user delivery state in UserNotification."""

    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
    TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete-orphan")

    @property
    def recipients(self):
        """Users this notification was delivered to, ordered by user_id."""
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and one UserNotification link per recipient.
        Does not commit; the caller owns the transaction.
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
4591 4608
4592 4609
class UserNotification(Base, BaseModel):
    """Per-user delivery/read state for a Notification."""

    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        # flags the row dirty; caller is responsible for committing
        self.read = True
        Session().add(self)
4612 4629
4613 4630
class UserNotice(Base, BaseModel):
    """A one-off notice/message shown to a single user until read."""

    __tablename__ = 'user_notices'
    __table_args__ = (
        base_table_args
    )

    NOTIFICATION_TYPE_MESSAGE = 'message'
    NOTIFICATION_TYPE_NOTICE = 'notice'

    NOTIFICATION_LEVEL_INFO = 'info'
    NOTIFICATION_LEVEL_WARNING = 'warning'
    NOTIFICATION_LEVEL_ERROR = 'error'

    # NOTE(review): DB column is named 'gist_id' — looks like a legacy
    # copy/paste name; renaming would require a schema migration.
    user_notice_id = Column('gist_id', Integer(), primary_key=True)

    notice_subject = Column('notice_subject', Unicode(512), nullable=True)
    notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)

    notice_read = Column('notice_read', Boolean, default=False)

    notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
    notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)

    notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
    notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
    user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')

    @classmethod
    def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
        """
        Create and commit a notice for *user*. Silently returns (creates
        nothing) on an unknown notice_level, or when allow_duplicate is
        False and an identical unread notice already exists.
        """
        if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
                                cls.NOTIFICATION_LEVEL_WARNING,
                                cls.NOTIFICATION_LEVEL_INFO]:
            return

        from rhodecode.model.user import UserModel
        user = UserModel().get_user(user)

        new_notice = UserNotice()
        if not allow_duplicate:
            # duplicate == same body, same user, still unread
            existing_msg = UserNotice().query() \
                .filter(UserNotice.user == user) \
                .filter(UserNotice.notice_body == body) \
                .filter(UserNotice.notice_read == false()) \
                .scalar()
            if existing_msg:
                log.warning('Ignoring duplicate notice for user %s', user)
                return

        new_notice.user = user
        new_notice.notice_subject = subject
        new_notice.notice_body = body
        new_notice.notification_level = notice_level
        Session().add(new_notice)
        Session().commit()
4671 4688
4672 4689
class Gist(Base, BaseModel):
    """A code snippet (gist) backed by its own small VCS repository on disk."""

    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        """HTML-escaped gist description, safe for template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        # NOTE: unlike BaseModel.get_or_404, lookup is by gist_access_id
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Get an instance of VCS Repository

        :param kwargs:
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
4775 4792
4776 4793
class ExternalIdentity(Base, BaseModel):
    """Link between a local user account and an external auth provider identity."""

    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :param local_user_id: optional extra filter on the linked local user
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        """Load the auth plugin registered under *plugin_id* (EE entry point)."""
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
4842 4859
4843 4860
class Integration(Base, BaseModel):
    """
    An integration (webhook/notification style hook) bound to a repository,
    a repository group, or configured globally.
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    # registered type key of the integration implementation
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # when scoped to a repo group: apply only to direct child repos,
    # not recursively; with no repo/group set it limits scope to root repos
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    # arbitrary per-integration settings stored as mutable JSON
    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        # precedence: single repo > repo group > global/root-repos
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4885 4902
4886 4903
class RepoReviewRuleUser(Base, BaseModel):
    """Single user reviewer attached to a repo review rule."""
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    # whether this reviewer is mandatory for the rule
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')

    def rule_data(self):
        # per-reviewer payload consumed by RepoReviewRule.review_users
        return {
            'mandatory': self.mandatory
        }
4903 4920
4904 4921
class RepoReviewRuleUserGroup(Base, BaseModel):
    """User-group reviewer attached to a repo review rule."""
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel vote_rule value meaning every group member must vote
    VOTE_RULE_ALL = -1

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    # minimum number of votes required from the group, or VOTE_RULE_ALL
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        # per-group payload consumed by RepoReviewRule.review_users
        return {
            'mandatory': self.mandatory,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        # human readable label; unset/None collapses to "all must vote"
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
4932 4949
4933 4950
class RepoReviewRule(Base, BaseModel):
    """
    Per-repository review rule: matches pull requests by source/target branch
    and changed-file patterns, and yields the reviewers (individual users and
    user-group members) the rule mandates.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', backref='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    # patterns are globs by default; a 're:' prefix switches them to raw regex
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
    forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # raises re.error when the glob translates to an invalid regex
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        :return: True when both the branch patterns and the file pattern match
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                if self.source_branch_pattern.startswith('re:'):
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for file_data in files_changed:
                filename = file_data.get('filename')

                # a single matching file is enough
                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        # keyed by username; first match wins over later duplicates
        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                # BUG FIX: previously compared the User *object* against the
                # username-keyed dict (`rule_user.user not in users`), so the
                # dedup guard never matched; compare by username instead,
                # mirroring the user-group handling below.
                if rule_user.user.username not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self, user_id):
        """Return the group rules whose user group contains ``user_id``."""
        rules = []
        if not self.rule_user_groups:
            return rules

        for user_group in self.rule_user_groups:
            user_group_members = [x.user_id for x in user_group.users_group.members]
            if user_id in user_group_members:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)
5097 5114
5098 5115
class ScheduleEntry(Base, BaseModel):
    """
    Persistent definition of a scheduled task: the schedule itself
    (crontab/timedelta/integer) plus the dotted task name and its
    JSON-serialized args/kwargs.
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    # allowed values for schedule_type
    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        if val not in self.schedule_types:
            # BUG FIX: the message previously interpolated the rejected value
            # into the "must be one of" slot and the stale old value into the
            # "got" slot (and read "on of"); report the allowed types and the
            # offending value instead.
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """Compute a stable sha1 uid from the task name, args and kwargs."""
        args = obj.task_args
        kwargs = obj.task_kwargs
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE(review): `sorted(dot_notation)` sorts the *characters* of the
        # dotted string; kept as-is since task_uid values are persisted and
        # unique-indexed — changing the formula would break existing rows.
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return hashlib.sha1(val).hexdigest()

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        return self.task_dot_notation

    @property
    def schedule(self):
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val):
        # de-coerce mutable JSON wrappers back to a plain JSON string
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            val = json.dumps(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    @property
    def args_raw(self):
        return self._as_raw(self.task_args)

    @property
    def kwargs_raw(self):
        return self._as_raw(self.task_kwargs)

    def __repr__(self):
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)
5217 5234
5218 5235
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep task_uid in sync with task name/args/kwargs on every update
    target.task_uid = ScheduleEntry.get_uid(target)
5222 5239
5223 5240
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute the initial task_uid before the row is first inserted
    target.task_uid = ScheduleEntry.get_uid(target)
5227 5244
5228 5245
class _BaseBranchPerms(BaseModel):
    """Shared glob-pattern plumbing for branch permission rule models."""

    @classmethod
    def compute_hash(cls, value):
        # stable hash of the pattern, stored alongside it for indexed lookups
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # raises re.error when the glob translates to an invalid regex
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # set the Hash when setting the branch pattern
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check if this the branch matches entry

        :param branch: branch name for the commit
        :return: True when the branch matches (empty branch always matches)
        """

        branch = branch or ''

        branch_matches = True
        if branch:
            branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
            branch_matches = bool(branch_regex.search(branch))

        return branch_matches
5267 5284
5268 5285
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    """Per-user branch permission rule, attached to a repo permission entry."""
    __tablename__ = 'user_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm')

    # evaluation order of rules within a repo
    rule_order = Column('rule_order', Integer(), nullable=False)
    # glob pattern; matching logic lives in _BaseBranchPerms
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        return u'<UserBranchPermission(%s => %r)>' % (
            self.user_repo_to_perm, self.branch_pattern)
5293 5310
5294 5311
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    """Per-user-group branch permission rule, attached to a group repo perm."""
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm')

    # evaluation order of rules within a repo
    rule_order = Column('rule_order', Integer(), nullable=False)
    # glob pattern; matching logic lives in _BaseBranchPerms
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        # BUG FIX: the label previously read 'UserBranchPermission', copy-pasted
        # from the per-user sibling class, making the two rule types
        # indistinguishable in logs/debug output.
        return u'<UserGroupBranchPermission(%s => %r)>' % (
            self.user_group_repo_to_perm, self.branch_pattern)
5319 5336
5320 5337
class UserBookmark(Base, BaseModel):
    """
    A user's navigation bookmark: points at a repo, a repo group, or a
    plain redirect URL, ordered by a per-user unique position.
    """
    __tablename__ = 'user_bookmarks'
    __table_args__ = (
        UniqueConstraint('user_id', 'bookmark_repo_id'),
        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
        UniqueConstraint('user_id', 'bookmark_position'),
        base_table_args
    )

    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    # display/order position, unique per user
    position = Column("bookmark_position", Integer(), nullable=False)
    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)

    user = relationship("User")

    repository = relationship("Repository")
    repository_group = relationship("RepoGroup")

    @classmethod
    def get_by_position_for_user(cls, position, user_id):
        # at most one bookmark can exist per (user, position) — see constraints
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .filter(UserBookmark.position == position).scalar()

    @classmethod
    def get_bookmarks_for_user(cls, user_id, cache=True):
        """Return the user's bookmarks ordered by position, optionally cached."""
        bookmarks = cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .options(joinedload(UserBookmark.repository)) \
            .options(joinedload(UserBookmark.repository_group)) \
            .order_by(UserBookmark.position.asc())

        if cache:
            bookmarks = bookmarks.options(
                FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
            )

        return bookmarks.all()

    def __unicode__(self):
        return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5368 5385
5369 5386
class FileStore(Base, BaseModel):
    """
    A stored file/artifact entry, optionally scoped to a user, user group,
    repo, or repo group for ACL checks, with arbitrary attached metadata.
    """
    __tablename__ = 'file_store'
    __table_args__ = (
        base_table_args
    )

    file_store_id = Column('file_store_id', Integer(), primary_key=True)
    # opaque storage uid used to locate the file on the backend
    file_uid = Column('file_uid', String(1024), nullable=False)
    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    # original filename at upload time
    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)

    # sha256 hash
    file_hash = Column('file_hash', String(512), nullable=False)
    file_size = Column('file_size', BigInteger(), nullable=False)

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
    accessed_count = Column('accessed_count', Integer(), default=0)

    enabled = Column('enabled', Boolean(), nullable=False, default=True)

    # if repo/repo_group reference is set, check for permissions
    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)

    # hidden defines an attachment that should be hidden from showing in artifact listing
    hidden = Column('hidden', Boolean(), nullable=False, default=False)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')

    file_metadata = relationship('FileStoreMetadata', lazy='joined')

    # scope limited to user, which requester have access to
    scope_user_id = Column(
        'scope_user_id', Integer(), ForeignKey('users.user_id'),
        nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')

    # scope limited to user group, which requester have access to
    scope_user_group_id = Column(
        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
        nullable=True, unique=None, default=None)
    user_group = relationship('UserGroup', lazy='joined')

    # scope limited to repo, which requester have access to
    scope_repo_id = Column(
        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    # scope limited to repo group, which requester have access to
    scope_repo_group_id = Column(
        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @classmethod
    def get_by_store_uid(cls, file_store_uid):
        return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()

    @classmethod
    def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
               file_description='', enabled=True, hidden=False, check_acl=True,
               user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
        """Build a new (unpersisted) FileStore entry; caller adds/commits it."""
        store_entry = FileStore()
        store_entry.file_uid = file_uid
        store_entry.file_display_name = file_display_name
        store_entry.file_org_name = filename
        store_entry.file_size = file_size
        store_entry.file_hash = file_hash
        store_entry.file_description = file_description

        store_entry.check_acl = check_acl
        store_entry.enabled = enabled
        store_entry.hidden = hidden

        store_entry.user_id = user_id
        store_entry.scope_user_id = scope_user_id
        store_entry.scope_repo_id = scope_repo_id
        store_entry.scope_repo_group_id = scope_repo_group_id

        return store_entry

    @classmethod
    def store_metadata(cls, file_store_id, args, commit=True):
        """
        Attach metadata entries to a stored file.

        :param file_store_id: id of the FileStore row; silently no-op if missing
        :param args: iterable of (section, key, value, value_type) tuples
        :param commit: commit the session at the end
        :raises ArtifactMetadataDuplicate: when section/key already exists
        """
        file_store = FileStore.get(file_store_id)
        if file_store is None:
            return

        for section, key, value, value_type in args:
            has_key = FileStoreMetadata().query() \
                .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
                .filter(FileStoreMetadata.file_store_meta_section == section) \
                .filter(FileStoreMetadata.file_store_meta_key == key) \
                .scalar()
            if has_key:
                msg = 'key `{}` already defined under section `{}` for this file.'\
                    .format(key, section)
                raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)

            # NOTE(marcink): raises ArtifactMetadataBadValueType
            FileStoreMetadata.valid_value_type(value_type)

            meta_entry = FileStoreMetadata()
            meta_entry.file_store = file_store
            meta_entry.file_store_meta_section = section
            meta_entry.file_store_meta_key = key
            meta_entry.file_store_meta_value_type = value_type
            meta_entry.file_store_meta_value = value

            Session().add(meta_entry)

        try:
            if commit:
                Session().commit()
        except IntegrityError:
            # DB-level unique constraint is the final guard against races
            Session().rollback()
            raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')

    @classmethod
    def bump_access_counter(cls, file_uid, commit=True):
        # atomic in-database increment, avoids read-modify-write races
        FileStore().query()\
            .filter(FileStore.file_uid == file_uid)\
            .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
                     FileStore.accessed_on: datetime.datetime.now()})
        if commit:
            Session().commit()

    def __json__(self):
        data = {
            'filename': self.file_display_name,
            'filename_org': self.file_org_name,
            'file_uid': self.file_uid,
            'description': self.file_description,
            'hidden': self.hidden,
            'size': self.file_size,
            'created_on': self.created_on,
            'uploaded_by': self.upload_user.get_api_data(details='basic'),
            'downloaded_times': self.accessed_count,
            'sha256': self.file_hash,
            'metadata': self.file_metadata,
        }

        return data

    def __repr__(self):
        return '<FileStore({})>'.format(self.file_store_id)
5519 5536
5520 5537
class FileStoreMetadata(Base, BaseModel):
    """
    Typed key/value metadata attached to a FileStore entry, grouped by
    section. Values are stored as text and coerced back via SETTINGS_TYPES;
    a '.encrypted' suffix on the value type enables transparent encryption.
    """
    __tablename__ = 'file_store_metadata'
    __table_args__ = (
        UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
        Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
        Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
        base_table_args
    )
    # converters applied when reading values back; keyed by the base type name
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }

    file_store_meta_id = Column(
        "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _file_store_meta_section = Column(
        "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    # hashes mirror section/key for the unique constraint (text cols can't be
    # uniquely indexed at full length on mysql)
    _file_store_meta_section_hash = Column(
        "file_store_meta_section_hash", String(255),
        nullable=True, unique=None, default=None)
    _file_store_meta_key = Column(
        "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_key_hash = Column(
        "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
    _file_store_meta_value = Column(
        "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_value_type = Column(
        "file_store_meta_value_type", String(255), nullable=True, unique=None,
        default='unicode')

    file_store_id = Column(
        'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
        nullable=True, unique=None, default=None)

    file_store = relationship('FileStore', lazy='joined')

    @classmethod
    def valid_value_type(cls, value):
        # base type (before any '.encrypted' suffix) must be a known converter
        if value.split('.')[0] not in cls.SETTINGS_TYPES:
            raise ArtifactMetadataBadValueType(
                'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))

    @hybrid_property
    def file_store_meta_section(self):
        return self._file_store_meta_section

    @file_store_meta_section.setter
    def file_store_meta_section(self, value):
        self._file_store_meta_section = value
        self._file_store_meta_section_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_key(self):
        return self._file_store_meta_key

    @file_store_meta_key.setter
    def file_store_meta_key(self, value):
        self._file_store_meta_key = value
        self._file_store_meta_key_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_value(self):
        val = self._file_store_meta_value

        if self._file_store_meta_value_type:
            # e.g unicode.encrypted == unicode
            _type = self._file_store_meta_value_type.split('.')[0]
            # decode the encrypted value if it's encrypted field type
            if '.encrypted' in self._file_store_meta_value_type:
                cipher = EncryptedTextValue()
                val = safe_unicode(cipher.process_result_value(val, None))
            # do final type conversion
            converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
            val = converter(val)

        return val

    @file_store_meta_value.setter
    def file_store_meta_value(self, val):
        # NOTE: file_store_meta_value_type must be set before the value,
        # since encryption is decided by the already-stored type
        val = safe_unicode(val)
        # encode the encrypted value
        if '.encrypted' in self.file_store_meta_value_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._file_store_meta_value = val

    @hybrid_property
    def file_store_meta_value_type(self):
        return self._file_store_meta_value_type

    @file_store_meta_value_type.setter
    def file_store_meta_value_type(self, val):
        # e.g unicode.encrypted
        self.valid_value_type(val)
        self._file_store_meta_value_type = val

    def __json__(self):
        data = {
            'artifact': self.file_store.file_uid,
            'section': self.file_store_meta_section,
            'key': self.file_store_meta_key,
            'value': self.file_store_meta_value,
        }

        return data

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section,
                                    self.file_store_meta_key, self.file_store_meta_value)
5637 5654
5638 5655
class DbMigrateVersion(Base, BaseModel):
    """Tracks the current database schema migration version."""
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        # NOTE(review): assumes a version row exists; `.first()` returning
        # None would raise AttributeError here — acceptable for a debug helper
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()
5657 5674
5658 5675
class DbSession(Base, BaseModel):
    """Server-side HTTP session persisted in the database."""
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return '<DB:DbSession({})>'.format(self.id)

    id = Column('id', Integer())
    # session key/namespace acts as the primary key
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    # pickled session payload
    data = Column('data', PickleType, nullable=False)
@@ -1,2072 +1,2072 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
# Data structure to hold the response data when updating commits during a pull
# request update.
class UpdateResponse(object):
    """Plain value object describing the outcome of a pull-request update."""

    def __init__(self, executed, reason, new, old, common_ancestor_id,
                 commit_changes, source_changed, target_changed):
        # whether the update was actually performed
        self.executed = executed
        # reason code/message explaining the outcome
        self.reason = reason
        # new and old head references
        self.new = new
        self.old = old
        self.common_ancestor_id = common_ancestor_id
        # per-commit change summary (note the attribute rename)
        self.changes = commit_changes
        # flags telling which side of the PR moved
        self.source_changed = source_changed
        self.target_changed = target_changed
86 86
87 87
def get_diff_info(
        source_repo, source_ref, target_repo, target_ref, get_authors=False,
        get_commit_authors=True):
    """
    Calculates detailed diff information for usage in preview of creation of a pull-request.
    This is also used for default reviewers logic

    :param source_repo: Repository model object for the source side
    :param source_ref: commit id/ref of the source head
    :param target_repo: Repository model object for the target side
    :param target_ref: commit id/ref of the target head
    :param get_authors: also annotate changed lines to count original authors
        (expensive; runs blame over every touched file)
    :param get_commit_authors: collect User objects for the incoming commits
    :raises ValueError: when the two refs share no common ancestor
    :returns: dict with commits, per-file changes, add/del stats, ancestor id,
        original-author counters and commit authors
    """

    source_scm = source_repo.scm_instance()
    target_scm = target_repo.scm_instance()

    ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
    if not ancestor_id:
        raise ValueError(
            'cannot calculate diff info without a common ancestor. '
            'Make sure both repositories are related, and have a common forking commit.')

    # case here is that want a simple diff without incoming commits,
    # previewing what will be merged based only on commits in the source.
    log.debug('Using ancestor %s as source_ref instead of %s',
              ancestor_id, source_ref)

    # source of changes now is the common ancestor
    source_commit = source_scm.get_commit(commit_id=ancestor_id)
    # target commit becomes the source ref as it is the last commit
    # for diff generation this logic gives proper diff
    target_commit = source_scm.get_commit(commit_id=source_ref)

    vcs_diff = \
        source_scm.get_diff(commit1=source_commit, commit2=target_commit,
                            ignore_whitespace=False, context=3)

    diff_processor = diffs.DiffProcessor(
        vcs_diff, format='newdiff', diff_limit=None,
        file_limit=None, show_full_diff=True)

    _parsed = diff_processor.prepare()

    all_files = []
    all_files_changes = []
    changed_lines = {}
    stats = [0, 0]  # [added, deleted] accumulated over all files
    for f in _parsed:
        all_files.append(f['filename'])
        all_files_changes.append({
            'filename': f['filename'],
            'stats': f['stats']
        })
        stats[0] += f['stats']['added']
        stats[1] += f['stats']['deleted']

        changed_lines[f['filename']] = []
        # a file with fewer than 2 chunks has no real line changes to record
        if len(f['chunks']) < 2:
            continue
        # first line is "context" information
        for chunks in f['chunks'][1:]:
            for chunk in chunks['lines']:
                # only removed/modified lines matter for blame-based authorship
                if chunk['action'] not in ('del', 'mod'):
                    continue
                changed_lines[f['filename']].append(chunk['old_lineno'])

    commit_authors = []
    user_counts = {}
    email_counts = {}
    author_counts = {}
    _commit_cache = {}

    commits = []
    if get_commit_authors:
        commits = target_scm.compare(
            target_ref, source_ref, source_scm, merge=True,
            pre_load=["author"])

        for commit in commits:
            user = User.get_from_cs_author(commit.author)
            if user and user not in commit_authors:
                commit_authors.append(user)

    # lines
    if get_authors:
        # blame each changed file at the ancestor to find who last touched
        # the lines this PR modifies
        target_commit = source_repo.get_commit(ancestor_id)

        for fname, lines in changed_lines.items():
            try:
                node = target_commit.get_node(fname)
            except Exception:
                # file may not exist at the ancestor (new file) — skip it
                continue

            if not isinstance(node, FileNode):
                continue

            for annotation in node.annotate:
                line_no, commit_id, get_commit_func, line_text = annotation
                if line_no in lines:
                    # cache commits so blame doesn't re-fetch the same one
                    if commit_id not in _commit_cache:
                        _commit_cache[commit_id] = get_commit_func()
                    commit = _commit_cache[commit_id]
                    author = commit.author
                    email = commit.author_email
                    user = User.get_from_cs_author(author)
                    if user:
                        user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                    author_counts[author] = author_counts.get(author, 0) + 1
                    email_counts[email] = email_counts.get(email, 0) + 1

    return {
        'commits': commits,
        'files': all_files_changes,
        'stats': stats,
        'ancestor': ancestor_id,
        # original authors of modified files
        'original_authors': {
            'users': user_counts,
            'authors': author_counts,
            'emails': email_counts,
        },
        'commit_authors': commit_authors
    }
206 206
207 207
class PullRequestModel(BaseModel):
    """Business logic for pull requests: querying, creation, update, merge."""

    cls = PullRequest

    # default number of context lines used when rendering diffs
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # user-facing messages keyed by UpdateFailureReason codes
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # ref types a PR can point at; UPDATABLE excludes tags (immutable)
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
233 233
234 234 def __get_pull_request(self, pull_request):
235 235 return self._get_instance((
236 236 PullRequest, PullRequestVersion), pull_request)
237 237
238 238 def _check_perms(self, perms, pull_request, user, api=False):
239 239 if not api:
240 240 return h.HasRepoPermissionAny(*perms)(
241 241 user=user, repo_name=pull_request.target_repo.repo_name)
242 242 else:
243 243 return h.HasRepoPermissionAnyApi(*perms)(
244 244 user=user, repo_name=pull_request.target_repo.repo_name)
245 245
246 246 def check_user_read(self, pull_request, user, api=False):
247 247 _perms = ('repository.admin', 'repository.write', 'repository.read',)
248 248 return self._check_perms(_perms, pull_request, user, api)
249 249
250 250 def check_user_merge(self, pull_request, user, api=False):
251 251 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
252 252 return self._check_perms(_perms, pull_request, user, api)
253 253
254 254 def check_user_update(self, pull_request, user, api=False):
255 255 owner = user.user_id == pull_request.user_id
256 256 return self.check_user_merge(pull_request, user, api) or owner
257 257
258 258 def check_user_delete(self, pull_request, user):
259 259 owner = user.user_id == pull_request.user_id
260 260 _perms = ('repository.admin',)
261 261 return self._check_perms(_perms, pull_request, user) or owner
262 262
263 263 def check_user_change_status(self, pull_request, user, api=False):
264 264 reviewer = user.user_id in [x.user_id for x in
265 265 pull_request.reviewers]
266 266 return self.check_user_update(pull_request, user, api) or reviewer
267 267
268 268 def check_user_comment(self, pull_request, user):
269 269 owner = user.user_id == pull_request.user_id
270 270 return self.check_user_read(pull_request, user) or owner
271 271
    def get(self, pull_request):
        """Public accessor: resolve an id or instance to a PullRequest object."""
        return self.__get_pull_request(pull_request)
274 274
275 275 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
276 276 statuses=None, opened_by=None, order_by=None,
277 277 order_dir='desc', only_created=False):
278 278 repo = None
279 279 if repo_name:
280 280 repo = self._get_repo(repo_name)
281 281
282 282 q = PullRequest.query()
283 283
284 284 if search_q:
285 285 like_expression = u'%{}%'.format(safe_unicode(search_q))
286 286 q = q.join(User)
287 287 q = q.filter(or_(
288 288 cast(PullRequest.pull_request_id, String).ilike(like_expression),
289 289 User.username.ilike(like_expression),
290 290 PullRequest.title.ilike(like_expression),
291 291 PullRequest.description.ilike(like_expression),
292 292 ))
293 293
294 294 # source or target
295 295 if repo and source:
296 296 q = q.filter(PullRequest.source_repo == repo)
297 297 elif repo:
298 298 q = q.filter(PullRequest.target_repo == repo)
299 299
300 300 # closed,opened
301 301 if statuses:
302 302 q = q.filter(PullRequest.status.in_(statuses))
303 303
304 304 # opened by filter
305 305 if opened_by:
306 306 q = q.filter(PullRequest.user_id.in_(opened_by))
307 307
308 308 # only get those that are in "created" state
309 309 if only_created:
310 310 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
311 311
312 312 if order_by:
313 313 order_map = {
314 314 'name_raw': PullRequest.pull_request_id,
315 315 'id': PullRequest.pull_request_id,
316 316 'title': PullRequest.title,
317 317 'updated_on_raw': PullRequest.updated_on,
318 318 'target_repo': PullRequest.target_repo_id
319 319 }
320 320 if order_dir == 'asc':
321 321 q = q.order_by(order_map[order_by].asc())
322 322 else:
323 323 q = q.order_by(order_map[order_by].desc())
324 324
325 325 return q
326 326
327 327 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
328 328 opened_by=None):
329 329 """
330 330 Count the number of pull requests for a specific repository.
331 331
332 332 :param repo_name: target or source repo
333 333 :param search_q: filter by text
334 334 :param source: boolean flag to specify if repo_name refers to source
335 335 :param statuses: list of pull request statuses
336 336 :param opened_by: author user of the pull request
337 337 :returns: int number of pull requests
338 338 """
339 339 q = self._prepare_get_all_query(
340 340 repo_name, search_q=search_q, source=source, statuses=statuses,
341 341 opened_by=opened_by)
342 342
343 343 return q.count()
344 344
345 345 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
346 346 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
347 347 """
348 348 Get all pull requests for a specific repository.
349 349
350 350 :param repo_name: target or source repo
351 351 :param search_q: filter by text
352 352 :param source: boolean flag to specify if repo_name refers to source
353 353 :param statuses: list of pull request statuses
354 354 :param opened_by: author user of the pull request
355 355 :param offset: pagination offset
356 356 :param length: length of returned list
357 357 :param order_by: order of the returned list
358 358 :param order_dir: 'asc' or 'desc' ordering direction
359 359 :returns: list of pull requests
360 360 """
361 361 q = self._prepare_get_all_query(
362 362 repo_name, search_q=search_q, source=source, statuses=statuses,
363 363 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
364 364
365 365 if length:
366 366 pull_requests = q.limit(length).offset(offset).all()
367 367 else:
368 368 pull_requests = q.all()
369 369
370 370 return pull_requests
371 371
372 372 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
373 373 opened_by=None):
374 374 """
375 375 Count the number of pull requests for a specific repository that are
376 376 awaiting review.
377 377
378 378 :param repo_name: target or source repo
379 379 :param search_q: filter by text
380 380 :param source: boolean flag to specify if repo_name refers to source
381 381 :param statuses: list of pull request statuses
382 382 :param opened_by: author user of the pull request
383 383 :returns: int number of pull requests
384 384 """
385 385 pull_requests = self.get_awaiting_review(
386 386 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
387 387
388 388 return len(pull_requests)
389 389
390 390 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
391 391 opened_by=None, offset=0, length=None,
392 392 order_by=None, order_dir='desc'):
393 393 """
394 394 Get all pull requests for a specific repository that are awaiting
395 395 review.
396 396
397 397 :param repo_name: target or source repo
398 398 :param search_q: filter by text
399 399 :param source: boolean flag to specify if repo_name refers to source
400 400 :param statuses: list of pull request statuses
401 401 :param opened_by: author user of the pull request
402 402 :param offset: pagination offset
403 403 :param length: length of returned list
404 404 :param order_by: order of the returned list
405 405 :param order_dir: 'asc' or 'desc' ordering direction
406 406 :returns: list of pull requests
407 407 """
408 408 pull_requests = self.get_all(
409 409 repo_name, search_q=search_q, source=source, statuses=statuses,
410 410 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
411 411
412 412 _filtered_pull_requests = []
413 413 for pr in pull_requests:
414 414 status = pr.calculated_review_status()
415 415 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
416 416 ChangesetStatus.STATUS_UNDER_REVIEW]:
417 417 _filtered_pull_requests.append(pr)
418 418 if length:
419 419 return _filtered_pull_requests[offset:offset+length]
420 420 else:
421 421 return _filtered_pull_requests
422 422
423 423 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
424 424 opened_by=None, user_id=None):
425 425 """
426 426 Count the number of pull requests for a specific repository that are
427 427 awaiting review from a specific user.
428 428
429 429 :param repo_name: target or source repo
430 430 :param search_q: filter by text
431 431 :param source: boolean flag to specify if repo_name refers to source
432 432 :param statuses: list of pull request statuses
433 433 :param opened_by: author user of the pull request
434 434 :param user_id: reviewer user of the pull request
435 435 :returns: int number of pull requests
436 436 """
437 437 pull_requests = self.get_awaiting_my_review(
438 438 repo_name, search_q=search_q, source=source, statuses=statuses,
439 439 opened_by=opened_by, user_id=user_id)
440 440
441 441 return len(pull_requests)
442 442
443 443 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
444 444 opened_by=None, user_id=None, offset=0,
445 445 length=None, order_by=None, order_dir='desc'):
446 446 """
447 447 Get all pull requests for a specific repository that are awaiting
448 448 review from a specific user.
449 449
450 450 :param repo_name: target or source repo
451 451 :param search_q: filter by text
452 452 :param source: boolean flag to specify if repo_name refers to source
453 453 :param statuses: list of pull request statuses
454 454 :param opened_by: author user of the pull request
455 455 :param user_id: reviewer user of the pull request
456 456 :param offset: pagination offset
457 457 :param length: length of returned list
458 458 :param order_by: order of the returned list
459 459 :param order_dir: 'asc' or 'desc' ordering direction
460 460 :returns: list of pull requests
461 461 """
462 462 pull_requests = self.get_all(
463 463 repo_name, search_q=search_q, source=source, statuses=statuses,
464 464 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
465 465
466 466 _my = PullRequestModel().get_not_reviewed(user_id)
467 467 my_participation = []
468 468 for pr in pull_requests:
469 469 if pr in _my:
470 470 my_participation.append(pr)
471 471 _filtered_pull_requests = my_participation
472 472 if length:
473 473 return _filtered_pull_requests[offset:offset+length]
474 474 else:
475 475 return _filtered_pull_requests
476 476
477 477 def get_not_reviewed(self, user_id):
478 478 return [
479 479 x.pull_request for x in PullRequestReviewers.query().filter(
480 480 PullRequestReviewers.user_id == user_id).all()
481 481 ]
482 482
483 483 def _prepare_participating_query(self, user_id=None, statuses=None, query='',
484 484 order_by=None, order_dir='desc'):
485 485 q = PullRequest.query()
486 486 if user_id:
487 487 reviewers_subquery = Session().query(
488 488 PullRequestReviewers.pull_request_id).filter(
489 489 PullRequestReviewers.user_id == user_id).subquery()
490 490 user_filter = or_(
491 491 PullRequest.user_id == user_id,
492 492 PullRequest.pull_request_id.in_(reviewers_subquery)
493 493 )
494 494 q = PullRequest.query().filter(user_filter)
495 495
496 496 # closed,opened
497 497 if statuses:
498 498 q = q.filter(PullRequest.status.in_(statuses))
499 499
500 500 if query:
501 501 like_expression = u'%{}%'.format(safe_unicode(query))
502 502 q = q.join(User)
503 503 q = q.filter(or_(
504 504 cast(PullRequest.pull_request_id, String).ilike(like_expression),
505 505 User.username.ilike(like_expression),
506 506 PullRequest.title.ilike(like_expression),
507 507 PullRequest.description.ilike(like_expression),
508 508 ))
509 509 if order_by:
510 510 order_map = {
511 511 'name_raw': PullRequest.pull_request_id,
512 512 'title': PullRequest.title,
513 513 'updated_on_raw': PullRequest.updated_on,
514 514 'target_repo': PullRequest.target_repo_id
515 515 }
516 516 if order_dir == 'asc':
517 517 q = q.order_by(order_map[order_by].asc())
518 518 else:
519 519 q = q.order_by(order_map[order_by].desc())
520 520
521 521 return q
522 522
523 523 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
524 524 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
525 525 return q.count()
526 526
527 527 def get_im_participating_in(
528 528 self, user_id=None, statuses=None, query='', offset=0,
529 529 length=None, order_by=None, order_dir='desc'):
530 530 """
531 531 Get all Pull requests that i'm participating in, or i have opened
532 532 """
533 533
534 534 q = self._prepare_participating_query(
535 535 user_id, statuses=statuses, query=query, order_by=order_by,
536 536 order_dir=order_dir)
537 537
538 538 if length:
539 539 pull_requests = q.limit(length).offset(offset).all()
540 540 else:
541 541 pull_requests = q.all()
542 542
543 543 return pull_requests
544 544
    def get_versions(self, pull_request):
        """
        returns versions of pull request sorted by ID ascending
        (oldest version first; the query orders by ``asc``, the previous
        docstring incorrectly said descending)
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
553 553
554 554 def get_pr_version(self, pull_request_id, version=None):
555 555 at_version = None
556 556
557 557 if version and version == 'latest':
558 558 pull_request_ver = PullRequest.get(pull_request_id)
559 559 pull_request_obj = pull_request_ver
560 560 _org_pull_request_obj = pull_request_obj
561 561 at_version = 'latest'
562 562 elif version:
563 563 pull_request_ver = PullRequestVersion.get_or_404(version)
564 564 pull_request_obj = pull_request_ver
565 565 _org_pull_request_obj = pull_request_ver.pull_request
566 566 at_version = pull_request_ver.pull_request_version_id
567 567 else:
568 568 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
569 569 pull_request_id)
570 570
571 571 pull_request_display_obj = PullRequest.get_pr_display_object(
572 572 pull_request_obj, _org_pull_request_obj)
573 573
574 574 return _org_pull_request_obj, pull_request_obj, \
575 575 pull_request_display_obj, at_version
576 576
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request, assign reviewers, set initial review
        status, and run an initial merge simulation.

        :param created_by: user id or object of the PR author
        :param reviewers: iterable of (user_id, reasons, mandatory, rules)
        :param revisions: commit ids included in the PR
        :returns: the persisted PullRequest instance
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        Session().add(pull_request)
        # flush so pull_request gets an id reviewers can reference
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
690 690
691 691 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
692 692 pull_request = self.__get_pull_request(pull_request)
693 693 target_scm = pull_request.target_repo.scm_instance()
694 694 if action == 'create':
695 695 trigger_hook = hooks_utils.trigger_create_pull_request_hook
696 696 elif action == 'merge':
697 697 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
698 698 elif action == 'close':
699 699 trigger_hook = hooks_utils.trigger_close_pull_request_hook
700 700 elif action == 'review_status_change':
701 701 trigger_hook = hooks_utils.trigger_review_pull_request_hook
702 702 elif action == 'update':
703 703 trigger_hook = hooks_utils.trigger_update_pull_request_hook
704 704 elif action == 'comment':
705 705 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
706 706 elif action == 'comment_edit':
707 707 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
708 708 else:
709 709 return
710 710
711 711 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
712 712 pull_request, action, trigger_hook)
713 713 trigger_hook(
714 714 username=user.username,
715 715 repo_name=pull_request.target_repo.repo_name,
716 716 repo_type=target_scm.alias,
717 717 pull_request=pull_request,
718 718 data=data)
719 719
720 720 def _get_commit_ids(self, pull_request):
721 721 """
722 722 Return the commit ids of the merged pull request.
723 723
724 724 This method is not dealing correctly yet with the lack of autoupdates
725 725 nor with the implicit target updates.
726 726 For example: if a commit in the source repo is already in the target it
727 727 will be reported anyways.
728 728 """
729 729 merge_rev = pull_request.merge_rev
730 730 if merge_rev is None:
731 731 raise ValueError('This pull request was not merged yet')
732 732
733 733 commit_ids = list(pull_request.revisions)
734 734 if merge_rev not in commit_ids:
735 735 commit_ids.append(merge_rev)
736 736
737 737 return commit_ids
738 738
    def merge_repo(self, pull_request, user, extras):
        """
        Merge the pull request; on success comment, close and audit-log it.

        :param extras: hook-callback extras dict (mutated: user_agent is set)
        :returns: MergeResponse-like state object from the backend merge
        """
        log.debug("Merging pull request %s", pull_request.pull_request_id)
        extras['user_agent'] = 'internal-merge'
        merge_state = self._merge_pull_request(pull_request, user, extras)
        if merge_state.executed:
            log.debug("Merge was successful, updating the pull request comments.")
            self._comment_and_close_pr(pull_request, user, merge_state)

            self._log_audit_action(
                'repo.pull_request.merge',
                {'merge_state': merge_state.__dict__},
                user, pull_request)

        else:
            # NOTE(review): on failure only a warning is logged — the merge
            # failure metadata carried by ``merge_state`` is not persisted
            # on the pull request; consider saving it for later inspection.
            log.warn("Merge failed, not updating the pull request.")
        return merge_state
755 755
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the actual VCS merge of source into target.

        Refreshes the target ref, spins up the hooks callback daemon for the
        duration of the merge, and delegates to the backend's ``merge``.

        :param merge_msg: optional template overriding MERGE_MESSAGE_TMPL
        :returns: backend merge state object
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # render the merge commit message from the template
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        # make sure the target ref points at the current target head
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
796 796
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge rev, leave a closing
        comment, invalidate repo caches and fire the 'merge' hook.

        :param merge_state: successful merge state with ``merge_ref`` set
        :param close_msg: optional custom closing comment text
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing comment also flips the PR status (closing_pr=True)
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
818 818
819 819 def has_valid_update_type(self, pull_request):
820 820 source_ref_type = pull_request.source_ref_parts.type
821 821 return source_ref_type in self.REF_TYPES
822 822
823 823 def get_flow_commits(self, pull_request):
824 824
825 825 # source repo
826 826 source_ref_name = pull_request.source_ref_parts.name
827 827 source_ref_type = pull_request.source_ref_parts.type
828 828 source_ref_id = pull_request.source_ref_parts.commit_id
829 829 source_repo = pull_request.source_repo.scm_instance()
830 830
831 831 try:
832 832 if source_ref_type in self.REF_TYPES:
833 833 source_commit = source_repo.get_commit(source_ref_name)
834 834 else:
835 835 source_commit = source_repo.get_commit(source_ref_id)
836 836 except CommitDoesNotExistError:
837 837 raise SourceRefMissing()
838 838
839 839 # target repo
840 840 target_ref_name = pull_request.target_ref_parts.name
841 841 target_ref_type = pull_request.target_ref_parts.type
842 842 target_ref_id = pull_request.target_ref_parts.commit_id
843 843 target_repo = pull_request.target_repo.scm_instance()
844 844
845 845 try:
846 846 if target_ref_type in self.REF_TYPES:
847 847 target_commit = target_repo.get_commit(target_ref_name)
848 848 else:
849 849 target_commit = target_repo.get_commit(target_ref_id)
850 850 except CommitDoesNotExistError:
851 851 raise TargetRefMissing()
852 852
853 853 return source_commit, target_commit
854 854
    def update_commits(self, pull_request, updating_user):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        updating_user is the user_object who triggered the update
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        # only refs of a supported type (see REF_TYPES) can be updated
        if not self.has_valid_update_type(pull_request):
            log.debug("Skipping update of pull request %s due to ref type: %s",
                      pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        try:
            source_commit, target_commit = self.get_flow_commits(pull_request)
        except SourceRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)
        except TargetRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        # a change is detected when the stored ref id no longer matches the
        # freshly resolved head of the named ref
        source_changed = source_ref_id != source_commit.raw_id
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            # NOTE(review): source_changed/target_changed look swapped in the
            # kwargs below; harmless here since both are False in this branch.
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=target_changed, target_changed=source_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update, in case of source change
        # we create a new version, else just an update
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        source_repo = pull_request.source_repo.scm_instance()
        target_repo = pull_request.target_repo.scm_instance()

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "date", "message", "branch"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        target_ref = target_commit.raw_id
        source_ref = source_commit.raw_id
        ancestor_commit_id = target_repo.get_common_ancestor(
            target_ref, source_ref, source_repo)

        if not ancestor_commit_id:
            raise ValueError(
                'cannot calculate diff info without a common ancestor. '
                'Make sure both repositories are related, and have a common forking commit.')

        pull_request.common_ancestor_id = ancestor_commit_id

        # store refreshed refs; target ref is pinned to the common ancestor
        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor_commit_id)

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        commit_changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        valid_commit_changes = (commit_changes.added or commit_changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = valid_commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in commit_changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        # send update email to users; failures here must not abort the update
        try:
            self.notify_users(pull_request=pull_request, updating_user=updating_user,
                              ancestor_commit_id=ancestor_commit_id,
                              commit_changes=commit_changes,
                              file_changes=file_changes)
        except Exception:
            log.exception('Failed to send email notification to users')

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            commit_changes.added, commit_changes.common, commit_changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version,
            common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
            source_changed=source_changed, target_changed=target_changed)
1029 1029
1030 1030 def _create_version_from_snapshot(self, pull_request):
1031 1031 version = PullRequestVersion()
1032 1032 version.title = pull_request.title
1033 1033 version.description = pull_request.description
1034 1034 version.status = pull_request.status
1035 1035 version.pull_request_state = pull_request.pull_request_state
1036 1036 version.created_on = datetime.datetime.now()
1037 1037 version.updated_on = pull_request.updated_on
1038 1038 version.user_id = pull_request.user_id
1039 1039 version.source_repo = pull_request.source_repo
1040 1040 version.source_ref = pull_request.source_ref
1041 1041 version.target_repo = pull_request.target_repo
1042 1042 version.target_ref = pull_request.target_ref
1043 1043
1044 1044 version._last_merge_source_rev = pull_request._last_merge_source_rev
1045 1045 version._last_merge_target_rev = pull_request._last_merge_target_rev
1046 1046 version.last_merge_status = pull_request.last_merge_status
1047 1047 version.last_merge_metadata = pull_request.last_merge_metadata
1048 1048 version.shadow_merge_ref = pull_request.shadow_merge_ref
1049 1049 version.merge_rev = pull_request.merge_rev
1050 1050 version.reviewer_data = pull_request.reviewer_data
1051 1051
1052 1052 version.revisions = pull_request.revisions
1053 1053 version.common_ancestor_id = pull_request.common_ancestor_id
1054 1054 version.pull_request = pull_request
1055 1055 Session().add(version)
1056 1056 Session().flush()
1057 1057
1058 1058 return version
1059 1059
1060 1060 def _generate_update_diffs(self, pull_request, pull_request_version):
1061 1061
1062 1062 diff_context = (
1063 1063 self.DIFF_CONTEXT +
1064 1064 CommentsModel.needed_extra_diff_context())
1065 1065 hide_whitespace_changes = False
1066 1066 source_repo = pull_request_version.source_repo
1067 1067 source_ref_id = pull_request_version.source_ref_parts.commit_id
1068 1068 target_ref_id = pull_request_version.target_ref_parts.commit_id
1069 1069 old_diff = self._get_diff_from_pr_or_version(
1070 1070 source_repo, source_ref_id, target_ref_id,
1071 1071 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1072 1072
1073 1073 source_repo = pull_request.source_repo
1074 1074 source_ref_id = pull_request.source_ref_parts.commit_id
1075 1075 target_ref_id = pull_request.target_ref_parts.commit_id
1076 1076
1077 1077 new_diff = self._get_diff_from_pr_or_version(
1078 1078 source_repo, source_ref_id, target_ref_id,
1079 1079 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1080 1080
1081 1081 old_diff_data = diffs.DiffProcessor(old_diff)
1082 1082 old_diff_data.prepare()
1083 1083 new_diff_data = diffs.DiffProcessor(new_diff)
1084 1084 new_diff_data.prepare()
1085 1085
1086 1086 return old_diff_data, new_diff_data
1087 1087
1088 1088 def _link_comments_to_version(self, pull_request_version):
1089 1089 """
1090 1090 Link all unlinked comments of this pull request to the given version.
1091 1091
1092 1092 :param pull_request_version: The `PullRequestVersion` to which
1093 1093 the comments shall be linked.
1094 1094
1095 1095 """
1096 1096 pull_request = pull_request_version.pull_request
1097 1097 comments = ChangesetComment.query()\
1098 1098 .filter(
1099 1099 # TODO: johbo: Should we query for the repo at all here?
1100 1100 # Pending decision on how comments of PRs are to be related
1101 1101 # to either the source repo, the target repo or no repo at all.
1102 1102 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1103 1103 ChangesetComment.pull_request == pull_request,
1104 1104 ChangesetComment.pull_request_version == None)\
1105 1105 .order_by(ChangesetComment.comment_id.asc())
1106 1106
1107 1107 # TODO: johbo: Find out why this breaks if it is done in a bulk
1108 1108 # operation.
1109 1109 for comment in comments:
1110 1110 comment.pull_request_version_id = (
1111 1111 pull_request_version.pull_request_version_id)
1112 1112 Session().add(comment)
1113 1113
1114 1114 def _calculate_commit_id_changes(self, old_ids, new_ids):
1115 1115 added = [x for x in new_ids if x not in old_ids]
1116 1116 common = [x for x in new_ids if x in old_ids]
1117 1117 removed = [x for x in old_ids if x not in new_ids]
1118 1118 total = new_ids
1119 1119 return ChangeTuple(added, common, removed, total)
1120 1120
    def _calculate_file_changes(self, old_diff_data, new_diff_data):
        """
        Compare two prepared diff results and return a `FileChangeTuple`
        of (added, modified, removed) file names, describing how the new
        diff differs from the old one.
        """
        # filename -> md5 of the file's raw diff in the OLD diff
        old_files = OrderedDict()
        for diff_data in old_diff_data.parsed_diff:
            old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

        added_files = []
        modified_files = []
        removed_files = []
        for diff_data in new_diff_data.parsed_diff:
            new_filename = diff_data['filename']
            new_hash = md5_safe(diff_data['raw_diff'])

            old_hash = old_files.get(new_filename)
            if not old_hash:
                # file is not present in old diff, we have to figure out from parsed diff
                # operation ADD/REMOVE
                operations_dict = diff_data['stats']['ops']
                if diffs.DEL_FILENODE in operations_dict:
                    removed_files.append(new_filename)
                else:
                    added_files.append(new_filename)
            else:
                # present in both diffs; a hash mismatch means modification
                if new_hash != old_hash:
                    modified_files.append(new_filename)
                # now remove a file from old, since we have seen it already
                del old_files[new_filename]

        # removed files is when there are present in old, but not in NEW,
        # since we remove old files that are present in new diff, left-overs
        # if any should be the removed files
        removed_files.extend(old_files.keys())

        return FileChangeTuple(added_files, modified_files, removed_files)
1155 1155
1156 1156 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1157 1157 """
1158 1158 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1159 1159 so it's always looking the same disregarding on which default
1160 1160 renderer system is using.
1161 1161
1162 1162 :param ancestor_commit_id: ancestor raw_id
1163 1163 :param changes: changes named tuple
1164 1164 :param file_changes: file changes named tuple
1165 1165
1166 1166 """
1167 1167 new_status = ChangesetStatus.get_status_lbl(
1168 1168 ChangesetStatus.STATUS_UNDER_REVIEW)
1169 1169
1170 1170 changed_files = (
1171 1171 file_changes.added + file_changes.modified + file_changes.removed)
1172 1172
1173 1173 params = {
1174 1174 'under_review_label': new_status,
1175 1175 'added_commits': changes.added,
1176 1176 'removed_commits': changes.removed,
1177 1177 'changed_files': changed_files,
1178 1178 'added_files': file_changes.added,
1179 1179 'modified_files': file_changes.modified,
1180 1180 'removed_files': file_changes.removed,
1181 1181 'ancestor_commit_id': ancestor_commit_id
1182 1182 }
1183 1183 renderer = RstTemplateRenderer()
1184 1184 return renderer.render('pull_request_update.mako', **params)
1185 1185
1186 1186 def edit(self, pull_request, title, description, description_renderer, user):
1187 1187 pull_request = self.__get_pull_request(pull_request)
1188 1188 old_data = pull_request.get_api_data(with_merge_state=False)
1189 1189 if pull_request.is_closed():
1190 1190 raise ValueError('This pull request is closed')
1191 1191 if title:
1192 1192 pull_request.title = title
1193 1193 pull_request.description = description
1194 1194 pull_request.updated_on = datetime.datetime.now()
1195 1195 pull_request.description_renderer = description_renderer
1196 1196 Session().add(pull_request)
1197 1197 self._log_audit_action(
1198 1198 'repo.pull_request.edit', {'old_data': old_data},
1199 1199 user, pull_request)
1200 1200
1201 1201 def update_reviewers(self, pull_request, reviewer_data, user):
1202 1202 """
1203 1203 Update the reviewers in the pull request
1204 1204
1205 1205 :param pull_request: the pr to update
1206 1206 :param reviewer_data: list of tuples
1207 1207 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1208 1208 """
1209 1209 pull_request = self.__get_pull_request(pull_request)
1210 1210 if pull_request.is_closed():
1211 1211 raise ValueError('This pull request is closed')
1212 1212
1213 1213 reviewers = {}
1214 1214 for user_id, reasons, mandatory, rules in reviewer_data:
1215 1215 if isinstance(user_id, (int, compat.string_types)):
1216 1216 user_id = self._get_user(user_id).user_id
1217 1217 reviewers[user_id] = {
1218 1218 'reasons': reasons, 'mandatory': mandatory}
1219 1219
1220 1220 reviewers_ids = set(reviewers.keys())
1221 1221 current_reviewers = PullRequestReviewers.query()\
1222 1222 .filter(PullRequestReviewers.pull_request ==
1223 1223 pull_request).all()
1224 1224 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1225 1225
1226 1226 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1227 1227 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1228 1228
1229 1229 log.debug("Adding %s reviewers", ids_to_add)
1230 1230 log.debug("Removing %s reviewers", ids_to_remove)
1231 1231 changed = False
1232 1232 added_audit_reviewers = []
1233 1233 removed_audit_reviewers = []
1234 1234
1235 1235 for uid in ids_to_add:
1236 1236 changed = True
1237 1237 _usr = self._get_user(uid)
1238 1238 reviewer = PullRequestReviewers()
1239 1239 reviewer.user = _usr
1240 1240 reviewer.pull_request = pull_request
1241 1241 reviewer.reasons = reviewers[uid]['reasons']
1242 1242 # NOTE(marcink): mandatory shouldn't be changed now
1243 1243 # reviewer.mandatory = reviewers[uid]['reasons']
1244 1244 Session().add(reviewer)
1245 1245 added_audit_reviewers.append(reviewer.get_dict())
1246 1246
1247 1247 for uid in ids_to_remove:
1248 1248 changed = True
1249 1249 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1250 1250 # that prevents and fixes cases that we added the same reviewer twice.
1251 1251 # this CAN happen due to the lack of DB checks
1252 1252 reviewers = PullRequestReviewers.query()\
1253 1253 .filter(PullRequestReviewers.user_id == uid,
1254 1254 PullRequestReviewers.pull_request == pull_request)\
1255 1255 .all()
1256 1256
1257 1257 for obj in reviewers:
1258 1258 added_audit_reviewers.append(obj.get_dict())
1259 1259 Session().delete(obj)
1260 1260
1261 1261 if changed:
1262 1262 Session().expire_all()
1263 1263 pull_request.updated_on = datetime.datetime.now()
1264 1264 Session().add(pull_request)
1265 1265
1266 1266 # finally store audit logs
1267 1267 for user_data in added_audit_reviewers:
1268 1268 self._log_audit_action(
1269 1269 'repo.pull_request.reviewer.add', {'data': user_data},
1270 1270 user, pull_request)
1271 1271 for user_data in removed_audit_reviewers:
1272 1272 self._log_audit_action(
1273 1273 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1274 1274 user, pull_request)
1275 1275
1276 1276 self.notify_reviewers(pull_request, ids_to_add)
1277 1277 return ids_to_add, ids_to_remove
1278 1278
1279 1279 def get_url(self, pull_request, request=None, permalink=False):
1280 1280 if not request:
1281 1281 request = get_current_request()
1282 1282
1283 1283 if permalink:
1284 1284 return request.route_url(
1285 1285 'pull_requests_global',
1286 1286 pull_request_id=pull_request.pull_request_id,)
1287 1287 else:
1288 1288 return request.route_url('pullrequest_show',
1289 1289 repo_name=safe_str(pull_request.target_repo.repo_name),
1290 1290 pull_request_id=pull_request.pull_request_id,)
1291 1291
1292 1292 def get_shadow_clone_url(self, pull_request, request=None):
1293 1293 """
1294 1294 Returns qualified url pointing to the shadow repository. If this pull
1295 1295 request is closed there is no shadow repository and ``None`` will be
1296 1296 returned.
1297 1297 """
1298 1298 if pull_request.is_closed():
1299 1299 return None
1300 1300 else:
1301 1301 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1302 1302 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1303 1303
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create 'new pull request' notifications/emails for the given
        reviewer ids; a no-op when the list is empty.

        :param reviewers_ids: iterable of reviewer user ids to notify
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        log.debug('Notify following reviewers about pull-request %s', reviewers_ids)

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        # template context shared by the email renderer and notification
        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
            'thread_ids': [pr_url],
        }

        # pre-generate the subject for notification itself
        (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1363 1363
    def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                     commit_changes, file_changes):
        """
        Send a 'pull request updated' notification/email to all reviewers
        except the user who performed the update.

        :param updating_user: user who triggered the update; excluded from
            the recipients
        :param ancestor_commit_id: common ancestor commit used for the update
        :param commit_changes: named tuple of commit-id changes
        :param file_changes: named tuple of file-name changes
        """
        updating_user_id = updating_user.user_id
        reviewers = set([x.user.user_id for x in pull_request.reviewers])
        # NOTE(marcink): send notification to all other users except to
        # person who updated the PR
        recipients = reviewers.difference(set([updating_user_id]))

        log.debug('Notify following recipients about pull-request update %s', recipients)

        pull_request_obj = pull_request

        # send email about the update
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # template context shared by the email renderer and notification
        email_kwargs = {
            'date': datetime.datetime.now(),
            'updating_user': updating_user,

            'pull_request': pull_request_obj,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,

            'ancestor_commit_id': ancestor_commit_id,
            'added_commits': commit_changes.added,
            'removed_commits': commit_changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            'thread_ids': [pr_url],
        }

        (subject, _e, body_plaintext) = EmailNotificationModel().render_email(
            EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=updating_user,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
            recipients=recipients,
            email_kwargs=email_kwargs,
        )
1431 1431
1432 1432 def delete(self, pull_request, user=None):
1433 1433 if not user:
1434 1434 user = getattr(get_current_rhodecode_user(), 'username', None)
1435 1435
1436 1436 pull_request = self.__get_pull_request(pull_request)
1437 1437 old_data = pull_request.get_api_data(with_merge_state=False)
1438 1438 self._cleanup_merge_workspace(pull_request)
1439 1439 self._log_audit_action(
1440 1440 'repo.pull_request.delete', {'old_data': old_data},
1441 1441 user, pull_request)
1442 1442 Session().delete(pull_request)
1443 1443
1444 1444 def close_pull_request(self, pull_request, user):
1445 1445 pull_request = self.__get_pull_request(pull_request)
1446 1446 self._cleanup_merge_workspace(pull_request)
1447 1447 pull_request.status = PullRequest.STATUS_CLOSED
1448 1448 pull_request.updated_on = datetime.datetime.now()
1449 1449 Session().add(pull_request)
1450 1450 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1451 1451
1452 1452 pr_data = pull_request.get_api_data(with_merge_state=False)
1453 1453 self._log_audit_action(
1454 1454 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1455 1455
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close *pull_request* with a status-changing comment.

        The final status is APPROVED when the calculated review status is
        approved, otherwise REJECTED. Fires 'comment' and, when the overall
        review status changed, 'review_status_change' hooks before closing.

        :param message: optional closing comment text; a default status
            transition message is used when omitted
        :return: tuple of (comment, status)
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status
1513 1513
1514 1514 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1515 1515 _ = translator or get_current_request().translate
1516 1516
1517 1517 if not self._is_merge_enabled(pull_request):
1518 1518 return None, False, _('Server-side pull request merging is disabled.')
1519 1519
1520 1520 if pull_request.is_closed():
1521 1521 return None, False, _('This pull request is closed.')
1522 1522
1523 1523 merge_possible, msg = self._check_repo_requirements(
1524 1524 target=pull_request.target_repo, source=pull_request.source_repo,
1525 1525 translator=_)
1526 1526 if not merge_possible:
1527 1527 return None, merge_possible, msg
1528 1528
1529 1529 try:
1530 1530 merge_response = self._try_merge(
1531 1531 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1532 1532 log.debug("Merge response: %s", merge_response)
1533 1533 return merge_response, merge_response.possible, merge_response.merge_status_message
1534 1534 except NotImplementedError:
1535 1535 return None, False, _('Pull request merging is not supported.')
1536 1536
1537 1537 def _check_repo_requirements(self, target, source, translator):
1538 1538 """
1539 1539 Check if `target` and `source` have compatible requirements.
1540 1540
1541 1541 Currently this is just checking for largefiles.
1542 1542 """
1543 1543 _ = translator
1544 1544 target_has_largefiles = self._has_largefiles(target)
1545 1545 source_has_largefiles = self._has_largefiles(source)
1546 1546 merge_possible = True
1547 1547 message = u''
1548 1548
1549 1549 if target_has_largefiles != source_has_largefiles:
1550 1550 merge_possible = False
1551 1551 if source_has_largefiles:
1552 1552 message = _(
1553 1553 'Target repository large files support is disabled.')
1554 1554 else:
1555 1555 message = _(
1556 1556 'Source repository large files support is disabled.')
1557 1557
1558 1558 return merge_possible, message
1559 1559
1560 1560 def _has_largefiles(self, repo):
1561 1561 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1562 1562 'extensions', 'largefiles')
1563 1563 return largefiles_ui and largefiles_ui[0].active
1564 1564
1565 1565 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1566 1566 """
1567 1567 Try to merge the pull request and return the merge status.
1568 1568 """
1569 1569 log.debug(
1570 1570 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1571 1571 pull_request.pull_request_id, force_shadow_repo_refresh)
1572 1572 target_vcs = pull_request.target_repo.scm_instance()
1573 1573 # Refresh the target reference.
1574 1574 try:
1575 1575 target_ref = self._refresh_reference(
1576 1576 pull_request.target_ref_parts, target_vcs)
1577 1577 except CommitDoesNotExistError:
1578 1578 merge_state = MergeResponse(
1579 1579 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1580 1580 metadata={'target_ref': pull_request.target_ref_parts})
1581 1581 return merge_state
1582 1582
1583 1583 target_locked = pull_request.target_repo.locked
1584 1584 if target_locked and target_locked[0]:
1585 1585 locked_by = 'user:{}'.format(target_locked[0])
1586 1586 log.debug("The target repository is locked by %s.", locked_by)
1587 1587 merge_state = MergeResponse(
1588 1588 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1589 1589 metadata={'locked_by': locked_by})
1590 1590 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1591 1591 pull_request, target_ref):
1592 1592 log.debug("Refreshing the merge status of the repository.")
1593 1593 merge_state = self._refresh_merge_state(
1594 1594 pull_request, target_vcs, target_ref)
1595 1595 else:
1596 1596 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1597 1597 metadata = {
1598 1598 'unresolved_files': '',
1599 1599 'target_ref': pull_request.target_ref_parts,
1600 1600 'source_ref': pull_request.source_ref_parts,
1601 1601 }
1602 1602 if pull_request.last_merge_metadata:
1603 metadata.update(pull_request.last_merge_metadata)
1603 metadata.update(pull_request.last_merge_metadata_parsed)
1604 1604
1605 1605 if not possible and target_ref.type == 'branch':
1606 1606 # NOTE(marcink): case for mercurial multiple heads on branch
1607 1607 heads = target_vcs._heads(target_ref.name)
1608 1608 if len(heads) != 1:
1609 1609 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1610 1610 metadata.update({
1611 1611 'heads': heads
1612 1612 })
1613 1613
1614 1614 merge_state = MergeResponse(
1615 1615 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1616 1616
1617 1617 return merge_state
1618 1618
1619 1619 def _refresh_reference(self, reference, vcs_repository):
1620 1620 if reference.type in self.UPDATABLE_REF_TYPES:
1621 1621 name_or_id = reference.name
1622 1622 else:
1623 1623 name_or_id = reference.commit_id
1624 1624
1625 1625 refreshed_commit = vcs_repository.get_commit(name_or_id)
1626 1626 refreshed_reference = Reference(
1627 1627 reference.type, reference.name, refreshed_commit.raw_id)
1628 1628 return refreshed_reference
1629 1629
1630 1630 def _needs_merge_state_refresh(self, pull_request, target_reference):
1631 1631 return not(
1632 1632 pull_request.revisions and
1633 1633 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1634 1634 target_reference.commit_id == pull_request._last_merge_target_rev)
1635 1635
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge in the shadow repository and persist its outcome
        on the pull request record.

        :return: the `MergeResponse` produced by the vcs merge simulation.
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            # Cache the simulated revisions + status/metadata so that
            # _needs_merge_state_refresh can skip re-simulation until either
            # side of the pull request moves.
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.last_merge_metadata = merge_state.metadata

        # shadow_merge_ref is always updated, even on unknown errors
        pull_request.shadow_merge_ref = merge_state.merge_ref
        Session().add(pull_request)
        Session().commit()

        return merge_state
1661 1661
1662 1662 def _workspace_id(self, pull_request):
1663 1663 workspace_id = 'pr-%s' % pull_request.pull_request_id
1664 1664 return workspace_id
1665 1665
1666 1666 def generate_repo_data(self, repo, commit_id=None, branch=None,
1667 1667 bookmark=None, translator=None):
1668 1668 from rhodecode.model.repo import RepoModel
1669 1669
1670 1670 all_refs, selected_ref = \
1671 1671 self._get_repo_pullrequest_sources(
1672 1672 repo.scm_instance(), commit_id=commit_id,
1673 1673 branch=branch, bookmark=bookmark, translator=translator)
1674 1674
1675 1675 refs_select2 = []
1676 1676 for element in all_refs:
1677 1677 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1678 1678 refs_select2.append({'text': element[1], 'children': children})
1679 1679
1680 1680 return {
1681 1681 'user': {
1682 1682 'user_id': repo.user.user_id,
1683 1683 'username': repo.user.username,
1684 1684 'firstname': repo.user.first_name,
1685 1685 'lastname': repo.user.last_name,
1686 1686 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1687 1687 },
1688 1688 'name': repo.repo_name,
1689 1689 'link': RepoModel().get_url(repo),
1690 1690 'description': h.chop_at_smart(repo.description_safe, '\n'),
1691 1691 'refs': {
1692 1692 'all_refs': all_refs,
1693 1693 'selected_ref': selected_ref,
1694 1694 'select2_refs': refs_select2
1695 1695 }
1696 1696 }
1697 1697
1698 1698 def generate_pullrequest_title(self, source, source_ref, target):
1699 1699 return u'{source}#{at_ref} to {target}'.format(
1700 1700 source=source,
1701 1701 at_ref=source_ref,
1702 1702 target=target,
1703 1703 )
1704 1704
1705 1705 def _cleanup_merge_workspace(self, pull_request):
1706 1706 # Merging related cleanup
1707 1707 repo_id = pull_request.target_repo.repo_id
1708 1708 target_scm = pull_request.target_repo.scm_instance()
1709 1709 workspace_id = self._workspace_id(pull_request)
1710 1710
1711 1711 try:
1712 1712 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1713 1713 except NotImplementedError:
1714 1714 pass
1715 1715
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :return: tuple of (groups, selected) where groups is a list of
            ((ref_key, ref_name) list, group_label) tuples and selected is
            the chosen ref_key or None
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                # ref_key format: <type>:<name>:<commit_id>
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # first ref matching the requested commit_id or the
                    # group-specific match (branch/bookmark name) wins
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                # an explicit ref was requested but never matched -> hard error
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # nothing requested: fall back to the repo's default branch
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
1781 1781
1782 1782 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1783 1783 hide_whitespace_changes, diff_context):
1784 1784
1785 1785 return self._get_diff_from_pr_or_version(
1786 1786 source_repo, source_ref_id, target_ref_id,
1787 1787 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1788 1788
1789 1789 def _get_diff_from_pr_or_version(
1790 1790 self, source_repo, source_ref_id, target_ref_id,
1791 1791 hide_whitespace_changes, diff_context):
1792 1792
1793 1793 target_commit = source_repo.get_commit(
1794 1794 commit_id=safe_str(target_ref_id))
1795 1795 source_commit = source_repo.get_commit(
1796 1796 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1797 1797 if isinstance(source_repo, Repository):
1798 1798 vcs_repo = source_repo.scm_instance()
1799 1799 else:
1800 1800 vcs_repo = source_repo
1801 1801
1802 1802 # TODO: johbo: In the context of an update, we cannot reach
1803 1803 # the old commit anymore with our normal mechanisms. It needs
1804 1804 # some sort of special support in the vcs layer to avoid this
1805 1805 # workaround.
1806 1806 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1807 1807 vcs_repo.alias == 'git'):
1808 1808 source_commit.raw_id = safe_str(source_ref_id)
1809 1809
1810 1810 log.debug('calculating diff between '
1811 1811 'source_ref:%s and target_ref:%s for repo `%s`',
1812 1812 target_ref_id, source_ref_id,
1813 1813 safe_unicode(vcs_repo.path))
1814 1814
1815 1815 vcs_diff = vcs_repo.get_diff(
1816 1816 commit1=target_commit, commit2=source_commit,
1817 1817 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1818 1818 return vcs_diff
1819 1819
1820 1820 def _is_merge_enabled(self, pull_request):
1821 1821 return self._get_general_setting(
1822 1822 pull_request, 'rhodecode_pr_merge_enabled')
1823 1823
1824 1824 def _use_rebase_for_merging(self, pull_request):
1825 1825 repo_type = pull_request.target_repo.repo_type
1826 1826 if repo_type == 'hg':
1827 1827 return self._get_general_setting(
1828 1828 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1829 1829 elif repo_type == 'git':
1830 1830 return self._get_general_setting(
1831 1831 pull_request, 'rhodecode_git_use_rebase_for_merging')
1832 1832
1833 1833 return False
1834 1834
1835 1835 def _user_name_for_merging(self, pull_request, user):
1836 1836 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1837 1837 if env_user_name_attr and hasattr(user, env_user_name_attr):
1838 1838 user_name_attr = env_user_name_attr
1839 1839 else:
1840 1840 user_name_attr = 'short_contact'
1841 1841
1842 1842 user_name = getattr(user, user_name_attr)
1843 1843 return user_name
1844 1844
1845 1845 def _close_branch_before_merging(self, pull_request):
1846 1846 repo_type = pull_request.target_repo.repo_type
1847 1847 if repo_type == 'hg':
1848 1848 return self._get_general_setting(
1849 1849 pull_request, 'rhodecode_hg_close_branch_before_merging')
1850 1850 elif repo_type == 'git':
1851 1851 return self._get_general_setting(
1852 1852 pull_request, 'rhodecode_git_close_branch_before_merging')
1853 1853
1854 1854 return False
1855 1855
1856 1856 def _get_general_setting(self, pull_request, settings_key, default=False):
1857 1857 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1858 1858 settings = settings_model.get_general_settings()
1859 1859 return settings.get(settings_key, default)
1860 1860
1861 1861 def _log_audit_action(self, action, action_data, user, pull_request):
1862 1862 audit_logger.store(
1863 1863 action=action,
1864 1864 action_data=action_data,
1865 1865 user=user,
1866 1866 repo=pull_request.target_repo)
1867 1867
    def get_reviewer_functions(self):
        """
        Fetches functions for validation and fetching default reviewers.
        If available we use the EE package, else we fallback to CE
        package functions
        """
        try:
            # EE (Enterprise Edition) implementations take precedence when installed
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
        except ImportError:
            # CE (Community Edition) fallbacks
            from rhodecode.apps.repository.utils import get_default_reviewers_data
            from rhodecode.apps.repository.utils import validate_default_reviewers

        return get_default_reviewers_data, validate_default_reviewers
1882 1882
1883 1883
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys used in `error_details` to classify which check failed
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        # review status computed by `validate`; None until then
        self.review_status = None
        # result of the merge simulation; None until `validate` ran
        self.merge_possible = None
        self.merge_msg = ''
        self.merge_response = None
        # becomes True as soon as any check pushes an error
        self.failed = None
        # list of [error_type, message] pairs, in check order
        self.errors = []
        # error_key -> {details, error_type, message}
        self.error_details = OrderedDict()
        # source/target commit drift info, filled in `validate`
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()

    def __repr__(self):
        return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
            self.merge_possible, self.failed, self.errors)

    def push_error(self, error_type, message, error_key, details):
        """Record a failed check and mark the whole MergeCheck as failed."""
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run the full chain of merge checks for `pull_request` and return a
        populated MergeCheck. Check order matters: WIP marker, merge
        permission, target-branch permission, review status, unresolved
        TODOs, then the actual merge simulation.

        :param fail_early: return right after the first failed check
        :param force_shadow_repo_refresh: force a fresh shadow-repo merge
            simulation instead of using the cached state
        """
        _ = translator
        merge_check = cls()

        # title has WIP:
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents from accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
        if not user_allowed_to_merge:
            # NOTE(review): log text looks copy-pasted from the review-status
            # check below; this branch is about merge permission, not approval
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        merge_check.merge_response = merge_response

        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        try:
            # record whether source/target refs moved since the PR was created
            source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
            merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
            merge_check.source_commit.ref_spec = pull_request.source_ref_parts
            merge_check.source_commit.current_raw_id = source_commit.raw_id
            merge_check.source_commit.previous_raw_id = source_ref_id

            merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
            merge_check.target_commit.ref_spec = pull_request.target_ref_parts
            merge_check.target_commit.current_raw_id = target_commit.raw_id
            merge_check.target_commit.previous_raw_id = target_ref_id
        except (SourceRefMissing, TargetRefMissing):
            # refs may have vanished; drift info simply stays unset
            pass

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Describe how the merge would be performed (strategy, branch closing)
        as a dict of translated messages for display.
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed before the merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after the merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
2066 2066
2067 2067
# Summary of ref/commit changes between two pull-request states;
# fields presumably hold commit ids plus a total count - confirm against
# the diff-calculation callers outside this chunk.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

# File-level change summary (added/modified/removed file lists) between
# two pull-request states.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])
General Comments 0
You need to be logged in to leave comments. Login now