Commit message: default-reviewers: fixed problems with new diff format for more advanced default reviewer rules.
Author: marcink
Revision: r4385:cf2c34da (branch: stable)
@@ -1,5602 +1,5604 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import uuid
33 33 import warnings
34 34 import ipaddress
35 35 import functools
36 36 import traceback
37 37 import collections
38 38
39 39 from sqlalchemy import (
40 40 or_, and_, not_, func, cast, TypeDecorator, event,
41 41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 43 Text, Float, PickleType, BigInteger)
44 44 from sqlalchemy.sql.expression import true, false, case
45 45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 46 from sqlalchemy.orm import (
47 47 relationship, joinedload, class_mapper, validates, aliased)
48 48 from sqlalchemy.ext.declarative import declared_attr
49 49 from sqlalchemy.ext.hybrid import hybrid_property
50 50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 51 from sqlalchemy.dialects.mysql import LONGTEXT
52 52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 53 from pyramid import compat
54 54 from pyramid.threadlocal import get_current_request
55 55 from webhelpers2.text import remove_formatting
56 56
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
59 59 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
60 60 from rhodecode.lib.utils2 import (
61 61 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
62 62 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
63 63 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
64 64 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
65 65 JsonRaw
66 66 from rhodecode.lib.ext_json import json
67 67 from rhodecode.lib.caching_query import FromCache
68 68 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
69 69 from rhodecode.lib.encrypt2 import Encryptor
70 70 from rhodecode.lib.exceptions import (
71 71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 72 from rhodecode.model.meta import Base, Session
73 73
74 74 URL_SEP = '/'
75 75 log = logging.getLogger(__name__)
76 76
77 77 # =============================================================================
78 78 # BASE CLASSES
79 79 # =============================================================================
80 80
81 81 # this is propagated from the .ini file rhodecode.encrypted_values.secret or
82 82 # beaker.session.secret if the first is not set,
83 83 # and is initialized at environment.py
84 84 ENCRYPTION_KEY = None
85 85
86 86 # used to sort permissions by types, '#' used here is not allowed to be in
87 87 # usernames, and it's very early in sorted string.printable table.
88 88 PERMISSION_TYPE_SORT = {
89 89 'admin': '####',
90 90 'write': '###',
91 91 'read': '##',
92 92 'none': '#',
93 93 }
94 94
95 95
96 96 def display_user_sort(obj):
97 97 """
98 98 Sort function used to sort permissions in .permissions() function of
99 99 Repository, RepoGroup, UserGroup. Also it puts the default user in front
100 100 of all other resources
101 101 """
102 102
103 103 if obj.username == User.DEFAULT_USER:
104 104 return '#####'
105 105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
106 106 return prefix + obj.username
107 107
108 108
109 109 def display_user_group_sort(obj):
110 110 """
111 111 Sort function used to sort permissions in .permissions() function of
112 112 Repository, RepoGroup, UserGroup. Also it puts the default user in front
113 113 of all other resources
114 114 """
115 115
116 116 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
117 117 return prefix + obj.users_group_name
118 118
119 119
120 120 def _hash_key(k):
121 121 return sha1_safe(k)
122 122
123 123
124 124 def in_filter_generator(qry, items, limit=500):
125 125 """
126 126 Splits IN() into multiple with OR
127 127 e.g.::
128 128 cnt = Repository.query().filter(
129 129 or_(
130 130 *in_filter_generator(Repository.repo_id, range(100000))
131 131 )).count()
132 132 """
133 133 if not items:
134 134 # empty list will cause empty query which might cause security issues
135 135 # this can lead to hidden unpleasant results
136 136 items = [-1]
137 137
138 138 parts = []
139 139 for chunk in xrange(0, len(items), limit):
140 140 parts.append(
141 141 qry.in_(items[chunk: chunk + limit])
142 142 )
143 143
144 144 return parts
145 145
146 146
147 147 base_table_args = {
148 148 'extend_existing': True,
149 149 'mysql_engine': 'InnoDB',
150 150 'mysql_charset': 'utf8',
151 151 'sqlite_autoincrement': True
152 152 }
153 153
154 154
155 155 class EncryptedTextValue(TypeDecorator):
156 156 """
157 157 Special column for encrypted long text data, use like::
158 158
159 159 value = Column("encrypted_value", EncryptedValue(), nullable=False)
160 160
161 161 This column is intelligent, so if the value is in unencrypted form it returns the
162 162 unencrypted form, but on save it always encrypts it
163 163 """
164 164 impl = Text
165 165
166 166 def process_bind_param(self, value, dialect):
167 167 """
168 168 Setter for storing value
169 169 """
170 170 import rhodecode
171 171 if not value:
172 172 return value
173 173
174 174 # protect against double encrypting if values is already encrypted
175 175 if value.startswith('enc$aes$') \
176 176 or value.startswith('enc$aes_hmac$') \
177 177 or value.startswith('enc2$'):
178 178 raise ValueError('value needs to be in unencrypted format, '
179 179 'ie. not starting with enc$ or enc2$')
180 180
181 181 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
182 182 if algo == 'aes':
183 183 return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
184 184 elif algo == 'fernet':
185 185 return Encryptor(ENCRYPTION_KEY).encrypt(value)
186 186 else:
187 187 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
188 188
189 189 def process_result_value(self, value, dialect):
190 190 """
191 191 Getter for retrieving value
192 192 """
193 193
194 194 import rhodecode
195 195 if not value:
196 196 return value
197 197
198 198 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
199 199 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
200 200 if algo == 'aes':
201 201 decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
202 202 elif algo == 'fernet':
203 203 return Encryptor(ENCRYPTION_KEY).decrypt(value)
204 204 else:
205 205 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
206 206 return decrypted_data
207 207
208 208
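As a quick illustration of the column type above, here is a minimal round-trip sketch, assuming ENCRYPTION_KEY and rhodecode.CONFIG have already been initialized by environment.py (the secret value is a placeholder):

    cipher = EncryptedTextValue()
    stored = cipher.process_bind_param(u'my-secret', None)   # -> 'enc$aes_hmac$...' with the default 'aes' algorithm
    plain = cipher.process_result_value(stored, None)        # -> 'my-secret' again

The same pair of calls is what RhodeCodeSetting uses below for values whose type is marked as 'encrypted'.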
209 209 class BaseModel(object):
210 210 """
211 211 Base Model for all classes
212 212 """
213 213
214 214 @classmethod
215 215 def _get_keys(cls):
216 216 """return column names for this model """
217 217 return class_mapper(cls).c.keys()
218 218
219 219 def get_dict(self):
220 220 """
221 221 return dict with keys and values corresponding
222 222 to this model data """
223 223
224 224 d = {}
225 225 for k in self._get_keys():
226 226 d[k] = getattr(self, k)
227 227
228 228 # also use __json__() if present to get additional fields
229 229 _json_attr = getattr(self, '__json__', None)
230 230 if _json_attr:
231 231 # update with attributes from __json__
232 232 if callable(_json_attr):
233 233 _json_attr = _json_attr()
234 234 for k, val in _json_attr.iteritems():
235 235 d[k] = val
236 236 return d
237 237
238 238 def get_appstruct(self):
239 239 """return list with keys and values tuples corresponding
240 240 to this model data """
241 241
242 242 lst = []
243 243 for k in self._get_keys():
244 244 lst.append((k, getattr(self, k),))
245 245 return lst
246 246
247 247 def populate_obj(self, populate_dict):
248 248 """populate model with data from given populate_dict"""
249 249
250 250 for k in self._get_keys():
251 251 if k in populate_dict:
252 252 setattr(self, k, populate_dict[k])
253 253
254 254 @classmethod
255 255 def query(cls):
256 256 return Session().query(cls)
257 257
258 258 @classmethod
259 259 def get(cls, id_):
260 260 if id_:
261 261 return cls.query().get(id_)
262 262
263 263 @classmethod
264 264 def get_or_404(cls, id_):
265 265 from pyramid.httpexceptions import HTTPNotFound
266 266
267 267 try:
268 268 id_ = int(id_)
269 269 except (TypeError, ValueError):
270 270 raise HTTPNotFound()
271 271
272 272 res = cls.query().get(id_)
273 273 if not res:
274 274 raise HTTPNotFound()
275 275 return res
276 276
277 277 @classmethod
278 278 def getAll(cls):
279 279 # deprecated and left for backward compatibility
280 280 return cls.get_all()
281 281
282 282 @classmethod
283 283 def get_all(cls):
284 284 return cls.query().all()
285 285
286 286 @classmethod
287 287 def delete(cls, id_):
288 288 obj = cls.query().get(id_)
289 289 Session().delete(obj)
290 290
291 291 @classmethod
292 292 def identity_cache(cls, session, attr_name, value):
293 293 exist_in_session = []
294 294 for (item_cls, pkey), instance in session.identity_map.items():
295 295 if cls == item_cls and getattr(instance, attr_name) == value:
296 296 exist_in_session.append(instance)
297 297 if exist_in_session:
298 298 if len(exist_in_session) == 1:
299 299 return exist_in_session[0]
300 300 log.exception(
301 301 'multiple objects with attr %s and '
302 302 'value %s found with same name: %r',
303 303 attr_name, value, exist_in_session)
304 304
305 305 def __repr__(self):
306 306 if hasattr(self, '__unicode__'):
307 307 # python repr needs to return str
308 308 try:
309 309 return safe_str(self.__unicode__())
310 310 except UnicodeDecodeError:
311 311 pass
312 312 return '<DB:%s>' % (self.__class__.__name__)
313 313
314 314
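A short usage sketch for the BaseModel helpers above, assuming a configured Session and an existing record (the id used here is illustrative):

    user = User.get(1)            # primary-key lookup, returns None for a falsy id
    user = User.get_or_404(1)     # raises HTTPNotFound for a missing or non-integer id
    data = user.get_dict()        # column values, merged with __json__() extras if defined

Every mapped class below (RhodeCodeSetting, UserGroup, ...) inherits the same query(), get_all() and delete() shortcuts.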
315 315 class RhodeCodeSetting(Base, BaseModel):
316 316 __tablename__ = 'rhodecode_settings'
317 317 __table_args__ = (
318 318 UniqueConstraint('app_settings_name'),
319 319 base_table_args
320 320 )
321 321
322 322 SETTINGS_TYPES = {
323 323 'str': safe_str,
324 324 'int': safe_int,
325 325 'unicode': safe_unicode,
326 326 'bool': str2bool,
327 327 'list': functools.partial(aslist, sep=',')
328 328 }
329 329 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
330 330 GLOBAL_CONF_KEY = 'app_settings'
331 331
332 332 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
333 333 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
334 334 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
335 335 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
336 336
337 337 def __init__(self, key='', val='', type='unicode'):
338 338 self.app_settings_name = key
339 339 self.app_settings_type = type
340 340 self.app_settings_value = val
341 341
342 342 @validates('_app_settings_value')
343 343 def validate_settings_value(self, key, val):
344 344 assert type(val) == unicode
345 345 return val
346 346
347 347 @hybrid_property
348 348 def app_settings_value(self):
349 349 v = self._app_settings_value
350 350 _type = self.app_settings_type
351 351 if _type:
352 352 _type = self.app_settings_type.split('.')[0]
353 353 # decode the encrypted value
354 354 if 'encrypted' in self.app_settings_type:
355 355 cipher = EncryptedTextValue()
356 356 v = safe_unicode(cipher.process_result_value(v, None))
357 357
358 358 converter = self.SETTINGS_TYPES.get(_type) or \
359 359 self.SETTINGS_TYPES['unicode']
360 360 return converter(v)
361 361
362 362 @app_settings_value.setter
363 363 def app_settings_value(self, val):
364 364 """
365 365 Setter that will always make sure we use unicode in app_settings_value
366 366
367 367 :param val:
368 368 """
369 369 val = safe_unicode(val)
370 370 # encode the encrypted value
371 371 if 'encrypted' in self.app_settings_type:
372 372 cipher = EncryptedTextValue()
373 373 val = safe_unicode(cipher.process_bind_param(val, None))
374 374 self._app_settings_value = val
375 375
376 376 @hybrid_property
377 377 def app_settings_type(self):
378 378 return self._app_settings_type
379 379
380 380 @app_settings_type.setter
381 381 def app_settings_type(self, val):
382 382 if val.split('.')[0] not in self.SETTINGS_TYPES:
383 383 raise Exception('type must be one of %s got %s'
384 384 % (self.SETTINGS_TYPES.keys(), val))
385 385 self._app_settings_type = val
386 386
387 387 @classmethod
388 388 def get_by_prefix(cls, prefix):
389 389 return RhodeCodeSetting.query()\
390 390 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
391 391 .all()
392 392
393 393 def __unicode__(self):
394 394 return u"<%s('%s:%s[%s]')>" % (
395 395 self.__class__.__name__,
396 396 self.app_settings_name, self.app_settings_value,
397 397 self.app_settings_type
398 398 )
399 399
400 400
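A brief sketch of how the typed settings above behave, purely in memory (the key and values are made up); the type string picks a converter from SETTINGS_TYPES, and a type containing 'encrypted' additionally routes the value through EncryptedTextValue:

    setting = RhodeCodeSetting('example_limit', '25', 'int')
    setting.app_settings_value          # -> 25, converted by SETTINGS_TYPES['int']
    setting.app_settings_type = 'bool'
    setting.app_settings_value = 'true'
    setting.app_settings_value          # -> True

Assigning an unknown type such as 'float' would raise the Exception from the app_settings_type setter.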
401 401 class RhodeCodeUi(Base, BaseModel):
402 402 __tablename__ = 'rhodecode_ui'
403 403 __table_args__ = (
404 404 UniqueConstraint('ui_key'),
405 405 base_table_args
406 406 )
407 407
408 408 HOOK_REPO_SIZE = 'changegroup.repo_size'
409 409 # HG
410 410 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
411 411 HOOK_PULL = 'outgoing.pull_logger'
412 412 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
413 413 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
414 414 HOOK_PUSH = 'changegroup.push_logger'
415 415 HOOK_PUSH_KEY = 'pushkey.key_push'
416 416
417 417 HOOKS_BUILTIN = [
418 418 HOOK_PRE_PULL,
419 419 HOOK_PULL,
420 420 HOOK_PRE_PUSH,
421 421 HOOK_PRETX_PUSH,
422 422 HOOK_PUSH,
423 423 HOOK_PUSH_KEY,
424 424 ]
425 425
426 426 # TODO: johbo: Unify way how hooks are configured for git and hg,
427 427 # git part is currently hardcoded.
428 428
429 429 # SVN PATTERNS
430 430 SVN_BRANCH_ID = 'vcs_svn_branch'
431 431 SVN_TAG_ID = 'vcs_svn_tag'
432 432
433 433 ui_id = Column(
434 434 "ui_id", Integer(), nullable=False, unique=True, default=None,
435 435 primary_key=True)
436 436 ui_section = Column(
437 437 "ui_section", String(255), nullable=True, unique=None, default=None)
438 438 ui_key = Column(
439 439 "ui_key", String(255), nullable=True, unique=None, default=None)
440 440 ui_value = Column(
441 441 "ui_value", String(255), nullable=True, unique=None, default=None)
442 442 ui_active = Column(
443 443 "ui_active", Boolean(), nullable=True, unique=None, default=True)
444 444
445 445 def __repr__(self):
446 446 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
447 447 self.ui_key, self.ui_value)
448 448
449 449
450 450 class RepoRhodeCodeSetting(Base, BaseModel):
451 451 __tablename__ = 'repo_rhodecode_settings'
452 452 __table_args__ = (
453 453 UniqueConstraint(
454 454 'app_settings_name', 'repository_id',
455 455 name='uq_repo_rhodecode_setting_name_repo_id'),
456 456 base_table_args
457 457 )
458 458
459 459 repository_id = Column(
460 460 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
461 461 nullable=False)
462 462 app_settings_id = Column(
463 463 "app_settings_id", Integer(), nullable=False, unique=True,
464 464 default=None, primary_key=True)
465 465 app_settings_name = Column(
466 466 "app_settings_name", String(255), nullable=True, unique=None,
467 467 default=None)
468 468 _app_settings_value = Column(
469 469 "app_settings_value", String(4096), nullable=True, unique=None,
470 470 default=None)
471 471 _app_settings_type = Column(
472 472 "app_settings_type", String(255), nullable=True, unique=None,
473 473 default=None)
474 474
475 475 repository = relationship('Repository')
476 476
477 477 def __init__(self, repository_id, key='', val='', type='unicode'):
478 478 self.repository_id = repository_id
479 479 self.app_settings_name = key
480 480 self.app_settings_type = type
481 481 self.app_settings_value = val
482 482
483 483 @validates('_app_settings_value')
484 484 def validate_settings_value(self, key, val):
485 485 assert type(val) == unicode
486 486 return val
487 487
488 488 @hybrid_property
489 489 def app_settings_value(self):
490 490 v = self._app_settings_value
491 491 type_ = self.app_settings_type
492 492 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
493 493 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
494 494 return converter(v)
495 495
496 496 @app_settings_value.setter
497 497 def app_settings_value(self, val):
498 498 """
499 499 Setter that will always make sure we use unicode in app_settings_value
500 500
501 501 :param val:
502 502 """
503 503 self._app_settings_value = safe_unicode(val)
504 504
505 505 @hybrid_property
506 506 def app_settings_type(self):
507 507 return self._app_settings_type
508 508
509 509 @app_settings_type.setter
510 510 def app_settings_type(self, val):
511 511 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
512 512 if val not in SETTINGS_TYPES:
513 513 raise Exception('type must be one of %s got %s'
514 514 % (SETTINGS_TYPES.keys(), val))
515 515 self._app_settings_type = val
516 516
517 517 def __unicode__(self):
518 518 return u"<%s('%s:%s:%s[%s]')>" % (
519 519 self.__class__.__name__, self.repository.repo_name,
520 520 self.app_settings_name, self.app_settings_value,
521 521 self.app_settings_type
522 522 )
523 523
524 524
525 525 class RepoRhodeCodeUi(Base, BaseModel):
526 526 __tablename__ = 'repo_rhodecode_ui'
527 527 __table_args__ = (
528 528 UniqueConstraint(
529 529 'repository_id', 'ui_section', 'ui_key',
530 530 name='uq_repo_rhodecode_ui_repository_id_section_key'),
531 531 base_table_args
532 532 )
533 533
534 534 repository_id = Column(
535 535 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
536 536 nullable=False)
537 537 ui_id = Column(
538 538 "ui_id", Integer(), nullable=False, unique=True, default=None,
539 539 primary_key=True)
540 540 ui_section = Column(
541 541 "ui_section", String(255), nullable=True, unique=None, default=None)
542 542 ui_key = Column(
543 543 "ui_key", String(255), nullable=True, unique=None, default=None)
544 544 ui_value = Column(
545 545 "ui_value", String(255), nullable=True, unique=None, default=None)
546 546 ui_active = Column(
547 547 "ui_active", Boolean(), nullable=True, unique=None, default=True)
548 548
549 549 repository = relationship('Repository')
550 550
551 551 def __repr__(self):
552 552 return '<%s[%s:%s]%s=>%s]>' % (
553 553 self.__class__.__name__, self.repository.repo_name,
554 554 self.ui_section, self.ui_key, self.ui_value)
555 555
556 556
557 557 class User(Base, BaseModel):
558 558 __tablename__ = 'users'
559 559 __table_args__ = (
560 560 UniqueConstraint('username'), UniqueConstraint('email'),
561 561 Index('u_username_idx', 'username'),
562 562 Index('u_email_idx', 'email'),
563 563 base_table_args
564 564 )
565 565
566 566 DEFAULT_USER = 'default'
567 567 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
568 568 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
569 569
570 570 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
571 571 username = Column("username", String(255), nullable=True, unique=None, default=None)
572 572 password = Column("password", String(255), nullable=True, unique=None, default=None)
573 573 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
574 574 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
575 575 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
576 576 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
577 577 _email = Column("email", String(255), nullable=True, unique=None, default=None)
578 578 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
579 579 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
580 580 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
581 581
582 582 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
583 583 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
584 584 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
585 585 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
586 586 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
587 587 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
588 588
589 589 user_log = relationship('UserLog')
590 590 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
591 591
592 592 repositories = relationship('Repository')
593 593 repository_groups = relationship('RepoGroup')
594 594 user_groups = relationship('UserGroup')
595 595
596 596 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
597 597 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
598 598
599 599 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
600 600 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
601 601 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
602 602
603 603 group_member = relationship('UserGroupMember', cascade='all')
604 604
605 605 notifications = relationship('UserNotification', cascade='all')
606 606 # notifications assigned to this user
607 607 user_created_notifications = relationship('Notification', cascade='all')
608 608 # comments created by this user
609 609 user_comments = relationship('ChangesetComment', cascade='all')
610 610 # user profile extra info
611 611 user_emails = relationship('UserEmailMap', cascade='all')
612 612 user_ip_map = relationship('UserIpMap', cascade='all')
613 613 user_auth_tokens = relationship('UserApiKeys', cascade='all')
614 614 user_ssh_keys = relationship('UserSshKeys', cascade='all')
615 615
616 616 # gists
617 617 user_gists = relationship('Gist', cascade='all')
618 618 # user pull requests
619 619 user_pull_requests = relationship('PullRequest', cascade='all')
620 620
621 621 # external identities
622 622 external_identities = relationship(
623 623 'ExternalIdentity',
624 624 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
625 625 cascade='all')
626 626 # review rules
627 627 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
628 628
629 629 # artifacts owned
630 630 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')
631 631
632 632 # no cascade, set NULL
633 633 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')
634 634
635 635 def __unicode__(self):
636 636 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
637 637 self.user_id, self.username)
638 638
639 639 @hybrid_property
640 640 def email(self):
641 641 return self._email
642 642
643 643 @email.setter
644 644 def email(self, val):
645 645 self._email = val.lower() if val else None
646 646
647 647 @hybrid_property
648 648 def first_name(self):
649 649 from rhodecode.lib import helpers as h
650 650 if self.name:
651 651 return h.escape(self.name)
652 652 return self.name
653 653
654 654 @hybrid_property
655 655 def last_name(self):
656 656 from rhodecode.lib import helpers as h
657 657 if self.lastname:
658 658 return h.escape(self.lastname)
659 659 return self.lastname
660 660
661 661 @hybrid_property
662 662 def api_key(self):
663 663 """
664 664 Fetch an auth-token with role ALL connected to this user, if one exists
665 665 """
666 666 user_auth_token = UserApiKeys.query()\
667 667 .filter(UserApiKeys.user_id == self.user_id)\
668 668 .filter(or_(UserApiKeys.expires == -1,
669 669 UserApiKeys.expires >= time.time()))\
670 670 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
671 671 if user_auth_token:
672 672 user_auth_token = user_auth_token.api_key
673 673
674 674 return user_auth_token
675 675
676 676 @api_key.setter
677 677 def api_key(self, val):
678 678 # don't allow setting the API key; this is deprecated for now
679 679 self._api_key = None
680 680
681 681 @property
682 682 def reviewer_pull_requests(self):
683 683 return PullRequestReviewers.query() \
684 684 .options(joinedload(PullRequestReviewers.pull_request)) \
685 685 .filter(PullRequestReviewers.user_id == self.user_id) \
686 686 .all()
687 687
688 688 @property
689 689 def firstname(self):
690 690 # alias for future
691 691 return self.name
692 692
693 693 @property
694 694 def emails(self):
695 695 other = UserEmailMap.query()\
696 696 .filter(UserEmailMap.user == self) \
697 697 .order_by(UserEmailMap.email_id.asc()) \
698 698 .all()
699 699 return [self.email] + [x.email for x in other]
700 700
701 701 def emails_cached(self):
702 702 emails = UserEmailMap.query()\
703 703 .filter(UserEmailMap.user == self) \
704 704 .order_by(UserEmailMap.email_id.asc())
705 705
706 706 emails = emails.options(
707 707 FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id))
708 708 )
709 709
710 710 return [self.email] + [x.email for x in emails]
711 711
712 712 @property
713 713 def auth_tokens(self):
714 714 auth_tokens = self.get_auth_tokens()
715 715 return [x.api_key for x in auth_tokens]
716 716
717 717 def get_auth_tokens(self):
718 718 return UserApiKeys.query()\
719 719 .filter(UserApiKeys.user == self)\
720 720 .order_by(UserApiKeys.user_api_key_id.asc())\
721 721 .all()
722 722
723 723 @LazyProperty
724 724 def feed_token(self):
725 725 return self.get_feed_token()
726 726
727 727 def get_feed_token(self, cache=True):
728 728 feed_tokens = UserApiKeys.query()\
729 729 .filter(UserApiKeys.user == self)\
730 730 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
731 731 if cache:
732 732 feed_tokens = feed_tokens.options(
733 733 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
734 734
735 735 feed_tokens = feed_tokens.all()
736 736 if feed_tokens:
737 737 return feed_tokens[0].api_key
738 738 return 'NO_FEED_TOKEN_AVAILABLE'
739 739
740 740 @LazyProperty
741 741 def artifact_token(self):
742 742 return self.get_artifact_token()
743 743
744 744 def get_artifact_token(self, cache=True):
745 745 artifacts_tokens = UserApiKeys.query()\
746 746 .filter(UserApiKeys.user == self)\
747 747 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
748 748 if cache:
749 749 artifacts_tokens = artifacts_tokens.options(
750 750 FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))
751 751
752 752 artifacts_tokens = artifacts_tokens.all()
753 753 if artifacts_tokens:
754 754 return artifacts_tokens[0].api_key
755 755 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
756 756
757 757 @classmethod
758 758 def get(cls, user_id, cache=False):
759 759 if not user_id:
760 760 return
761 761
762 762 user = cls.query()
763 763 if cache:
764 764 user = user.options(
765 765 FromCache("sql_cache_short", "get_users_%s" % user_id))
766 766 return user.get(user_id)
767 767
768 768 @classmethod
769 769 def extra_valid_auth_tokens(cls, user, role=None):
770 770 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
771 771 .filter(or_(UserApiKeys.expires == -1,
772 772 UserApiKeys.expires >= time.time()))
773 773 if role:
774 774 tokens = tokens.filter(or_(UserApiKeys.role == role,
775 775 UserApiKeys.role == UserApiKeys.ROLE_ALL))
776 776 return tokens.all()
777 777
778 778 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
779 779 from rhodecode.lib import auth
780 780
781 781 log.debug('Trying to authenticate user: %s via auth-token, '
782 782 'and roles: %s', self, roles)
783 783
784 784 if not auth_token:
785 785 return False
786 786
787 787 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
788 788 tokens_q = UserApiKeys.query()\
789 789 .filter(UserApiKeys.user_id == self.user_id)\
790 790 .filter(or_(UserApiKeys.expires == -1,
791 791 UserApiKeys.expires >= time.time()))
792 792
793 793 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
794 794
795 795 crypto_backend = auth.crypto_backend()
796 796 enc_token_map = {}
797 797 plain_token_map = {}
798 798 for token in tokens_q:
799 799 if token.api_key.startswith(crypto_backend.ENC_PREF):
800 800 enc_token_map[token.api_key] = token
801 801 else:
802 802 plain_token_map[token.api_key] = token
803 803 log.debug(
804 804 'Found %s plain and %s encrypted tokens to check for authentication for this user',
805 805 len(plain_token_map), len(enc_token_map))
806 806
807 807 # plain token match comes first
808 808 match = plain_token_map.get(auth_token)
809 809
810 810 # check encrypted tokens now
811 811 if not match:
812 812 for token_hash, token in enc_token_map.items():
813 813 # NOTE(marcink): this is expensive to calculate, but most secure
814 814 if crypto_backend.hash_check(auth_token, token_hash):
815 815 match = token
816 816 break
817 817
818 818 if match:
819 819 log.debug('Found matching token %s', match)
820 820 if match.repo_id:
821 821 log.debug('Found scope, checking for scope match of token %s', match)
822 822 if match.repo_id == scope_repo_id:
823 823 return True
824 824 else:
825 825 log.debug(
826 826 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
827 827 'and calling scope is:%s, skipping further checks',
828 828 match.repo, scope_repo_id)
829 829 return False
830 830 else:
831 831 return True
832 832
833 833 return False
834 834
835 835 @property
836 836 def ip_addresses(self):
837 837 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
838 838 return [x.ip_addr for x in ret]
839 839
840 840 @property
841 841 def username_and_name(self):
842 842 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
843 843
844 844 @property
845 845 def username_or_name_or_email(self):
846 846 full_name = self.full_name if self.full_name != ' ' else None
847 847 return self.username or full_name or self.email
848 848
849 849 @property
850 850 def full_name(self):
851 851 return '%s %s' % (self.first_name, self.last_name)
852 852
853 853 @property
854 854 def full_name_or_username(self):
855 855 return ('%s %s' % (self.first_name, self.last_name)
856 856 if (self.first_name and self.last_name) else self.username)
857 857
858 858 @property
859 859 def full_contact(self):
860 860 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
861 861
862 862 @property
863 863 def short_contact(self):
864 864 return '%s %s' % (self.first_name, self.last_name)
865 865
866 866 @property
867 867 def is_admin(self):
868 868 return self.admin
869 869
870 870 @property
871 871 def language(self):
872 872 return self.user_data.get('language')
873 873
874 874 def AuthUser(self, **kwargs):
875 875 """
876 876 Returns instance of AuthUser for this user
877 877 """
878 878 from rhodecode.lib.auth import AuthUser
879 879 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
880 880
881 881 @hybrid_property
882 882 def user_data(self):
883 883 if not self._user_data:
884 884 return {}
885 885
886 886 try:
887 887 return json.loads(self._user_data)
888 888 except TypeError:
889 889 return {}
890 890
891 891 @user_data.setter
892 892 def user_data(self, val):
893 893 if not isinstance(val, dict):
894 894 raise Exception('user_data must be dict, got %s' % type(val))
895 895 try:
896 896 self._user_data = json.dumps(val)
897 897 except Exception:
898 898 log.error(traceback.format_exc())
899 899
900 900 @classmethod
901 901 def get_by_username(cls, username, case_insensitive=False,
902 902 cache=False, identity_cache=False):
903 903 session = Session()
904 904
905 905 if case_insensitive:
906 906 q = cls.query().filter(
907 907 func.lower(cls.username) == func.lower(username))
908 908 else:
909 909 q = cls.query().filter(cls.username == username)
910 910
911 911 if cache:
912 912 if identity_cache:
913 913 val = cls.identity_cache(session, 'username', username)
914 914 if val:
915 915 return val
916 916 else:
917 917 cache_key = "get_user_by_name_%s" % _hash_key(username)
918 918 q = q.options(
919 919 FromCache("sql_cache_short", cache_key))
920 920
921 921 return q.scalar()
922 922
923 923 @classmethod
924 924 def get_by_auth_token(cls, auth_token, cache=False):
925 925 q = UserApiKeys.query()\
926 926 .filter(UserApiKeys.api_key == auth_token)\
927 927 .filter(or_(UserApiKeys.expires == -1,
928 928 UserApiKeys.expires >= time.time()))
929 929 if cache:
930 930 q = q.options(
931 931 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
932 932
933 933 match = q.first()
934 934 if match:
935 935 return match.user
936 936
937 937 @classmethod
938 938 def get_by_email(cls, email, case_insensitive=False, cache=False):
939 939
940 940 if case_insensitive:
941 941 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
942 942
943 943 else:
944 944 q = cls.query().filter(cls.email == email)
945 945
946 946 email_key = _hash_key(email)
947 947 if cache:
948 948 q = q.options(
949 949 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
950 950
951 951 ret = q.scalar()
952 952 if ret is None:
953 953 q = UserEmailMap.query()
954 954 # try fetching in alternate email map
955 955 if case_insensitive:
956 956 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
957 957 else:
958 958 q = q.filter(UserEmailMap.email == email)
959 959 q = q.options(joinedload(UserEmailMap.user))
960 960 if cache:
961 961 q = q.options(
962 962 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
963 963 ret = getattr(q.scalar(), 'user', None)
964 964
965 965 return ret
966 966
967 967 @classmethod
968 968 def get_from_cs_author(cls, author):
969 969 """
970 970 Tries to get User objects out of commit author string
971 971
972 972 :param author:
973 973 """
974 974 from rhodecode.lib.helpers import email, author_name
975 975 # Valid email in the attribute passed, see if they're in the system
976 976 _email = email(author)
977 977 if _email:
978 978 user = cls.get_by_email(_email, case_insensitive=True)
979 979 if user:
980 980 return user
981 981 # Maybe we can match by username?
982 982 _author = author_name(author)
983 983 user = cls.get_by_username(_author, case_insensitive=True)
984 984 if user:
985 985 return user
986 986
987 987 def update_userdata(self, **kwargs):
988 988 usr = self
989 989 old = usr.user_data
990 990 old.update(**kwargs)
991 991 usr.user_data = old
992 992 Session().add(usr)
993 993 log.debug('updated userdata with %s', kwargs)
994 994
995 995 def update_lastlogin(self):
996 996 """Update user lastlogin"""
997 997 self.last_login = datetime.datetime.now()
998 998 Session().add(self)
999 999 log.debug('updated user %s lastlogin', self.username)
1000 1000
1001 1001 def update_password(self, new_password):
1002 1002 from rhodecode.lib.auth import get_crypt_password
1003 1003
1004 1004 self.password = get_crypt_password(new_password)
1005 1005 Session().add(self)
1006 1006
1007 1007 @classmethod
1008 1008 def get_first_super_admin(cls):
1009 1009 user = User.query()\
1010 1010 .filter(User.admin == true()) \
1011 1011 .order_by(User.user_id.asc()) \
1012 1012 .first()
1013 1013
1014 1014 if user is None:
1015 1015 raise Exception('FATAL: Missing administrative account!')
1016 1016 return user
1017 1017
1018 1018 @classmethod
1019 1019 def get_all_super_admins(cls, only_active=False):
1020 1020 """
1021 1021 Returns all admin accounts sorted by username
1022 1022 """
1023 1023 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1024 1024 if only_active:
1025 1025 qry = qry.filter(User.active == true())
1026 1026 return qry.all()
1027 1027
1028 1028 @classmethod
1029 1029 def get_all_user_ids(cls, only_active=True):
1030 1030 """
1031 1031 Returns all user IDs
1032 1032 """
1033 1033 qry = Session().query(User.user_id)
1034 1034
1035 1035 if only_active:
1036 1036 qry = qry.filter(User.active == true())
1037 1037 return [x.user_id for x in qry]
1038 1038
1039 1039 @classmethod
1040 1040 def get_default_user(cls, cache=False, refresh=False):
1041 1041 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1042 1042 if user is None:
1043 1043 raise Exception('FATAL: Missing default account!')
1044 1044 if refresh:
1045 1045 # The default user might be based on outdated state which
1046 1046 # has been loaded from the cache.
1047 1047 # A call to refresh() ensures that the
1048 1048 # latest state from the database is used.
1049 1049 Session().refresh(user)
1050 1050 return user
1051 1051
1052 1052 @classmethod
1053 1053 def get_default_user_id(cls):
1054 1054 import rhodecode
1055 1055 return rhodecode.CONFIG['default_user_id']
1056 1056
1057 1057 def _get_default_perms(self, user, suffix=''):
1058 1058 from rhodecode.model.permission import PermissionModel
1059 1059 return PermissionModel().get_default_perms(user.user_perms, suffix)
1060 1060
1061 1061 def get_default_perms(self, suffix=''):
1062 1062 return self._get_default_perms(self, suffix)
1063 1063
1064 1064 def get_api_data(self, include_secrets=False, details='full'):
1065 1065 """
1066 1066 Common function for generating user related data for API
1067 1067
1068 1068 :param include_secrets: By default secrets in the API data will be replaced
1069 1069 by a placeholder value to prevent exposing this data by accident. In case
1070 1070 this data shall be exposed, set this flag to ``True``.
1071 1071
1072 1072 :param details: details can be 'basic' or 'full'; 'basic' gives only a subset of
1073 1073 the available user information that includes user_id, name and emails.
1074 1074 """
1075 1075 user = self
1076 1076 user_data = self.user_data
1077 1077 data = {
1078 1078 'user_id': user.user_id,
1079 1079 'username': user.username,
1080 1080 'firstname': user.name,
1081 1081 'lastname': user.lastname,
1082 1082 'description': user.description,
1083 1083 'email': user.email,
1084 1084 'emails': user.emails,
1085 1085 }
1086 1086 if details == 'basic':
1087 1087 return data
1088 1088
1089 1089 auth_token_length = 40
1090 1090 auth_token_replacement = '*' * auth_token_length
1091 1091
1092 1092 extras = {
1093 1093 'auth_tokens': [auth_token_replacement],
1094 1094 'active': user.active,
1095 1095 'admin': user.admin,
1096 1096 'extern_type': user.extern_type,
1097 1097 'extern_name': user.extern_name,
1098 1098 'last_login': user.last_login,
1099 1099 'last_activity': user.last_activity,
1100 1100 'ip_addresses': user.ip_addresses,
1101 1101 'language': user_data.get('language')
1102 1102 }
1103 1103 data.update(extras)
1104 1104
1105 1105 if include_secrets:
1106 1106 data['auth_tokens'] = user.auth_tokens
1107 1107 return data
1108 1108
1109 1109 def __json__(self):
1110 1110 data = {
1111 1111 'full_name': self.full_name,
1112 1112 'full_name_or_username': self.full_name_or_username,
1113 1113 'short_contact': self.short_contact,
1114 1114 'full_contact': self.full_contact,
1115 1115 }
1116 1116 data.update(self.get_api_data())
1117 1117 return data
1118 1118
1119 1119
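A hedged sketch of how the User token handling above is typically driven (the username, token string and role are placeholders); authenticate_by_token() accepts both plain and hashed tokens, checks expiry, and honours an optional per-repository scope:

    user = User.get_by_username('someuser', case_insensitive=True)
    if user and user.authenticate_by_token('<auth-token>', roles=[UserApiKeys.ROLE_API]):
        auth_user = user.AuthUser()   # permission-aware wrapper from rhodecode.lib.auth

Tokens that carry a repository scope only validate when scope_repo_id matches their repo_id; unscoped tokens are accepted either way, as the scope checks above show.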
1120 1120 class UserApiKeys(Base, BaseModel):
1121 1121 __tablename__ = 'user_api_keys'
1122 1122 __table_args__ = (
1123 1123 Index('uak_api_key_idx', 'api_key'),
1124 1124 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1125 1125 base_table_args
1126 1126 )
1127 1127 __mapper_args__ = {}
1128 1128
1129 1129 # ApiKey role
1130 1130 ROLE_ALL = 'token_role_all'
1131 1131 ROLE_HTTP = 'token_role_http'
1132 1132 ROLE_VCS = 'token_role_vcs'
1133 1133 ROLE_API = 'token_role_api'
1134 1134 ROLE_FEED = 'token_role_feed'
1135 1135 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1136 1136 ROLE_PASSWORD_RESET = 'token_password_reset'
1137 1137
1138 1138 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1139 1139
1140 1140 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1141 1141 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1142 1142 api_key = Column("api_key", String(255), nullable=False, unique=True)
1143 1143 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1144 1144 expires = Column('expires', Float(53), nullable=False)
1145 1145 role = Column('role', String(255), nullable=True)
1146 1146 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1147 1147
1148 1148 # scope columns
1149 1149 repo_id = Column(
1150 1150 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1151 1151 nullable=True, unique=None, default=None)
1152 1152 repo = relationship('Repository', lazy='joined')
1153 1153
1154 1154 repo_group_id = Column(
1155 1155 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1156 1156 nullable=True, unique=None, default=None)
1157 1157 repo_group = relationship('RepoGroup', lazy='joined')
1158 1158
1159 1159 user = relationship('User', lazy='joined')
1160 1160
1161 1161 def __unicode__(self):
1162 1162 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1163 1163
1164 1164 def __json__(self):
1165 1165 data = {
1166 1166 'auth_token': self.api_key,
1167 1167 'role': self.role,
1168 1168 'scope': self.scope_humanized,
1169 1169 'expired': self.expired
1170 1170 }
1171 1171 return data
1172 1172
1173 1173 def get_api_data(self, include_secrets=False):
1174 1174 data = self.__json__()
1175 1175 if include_secrets:
1176 1176 return data
1177 1177 else:
1178 1178 data['auth_token'] = self.token_obfuscated
1179 1179 return data
1180 1180
1181 1181 @hybrid_property
1182 1182 def description_safe(self):
1183 1183 from rhodecode.lib import helpers as h
1184 1184 return h.escape(self.description)
1185 1185
1186 1186 @property
1187 1187 def expired(self):
1188 1188 if self.expires == -1:
1189 1189 return False
1190 1190 return time.time() > self.expires
1191 1191
1192 1192 @classmethod
1193 1193 def _get_role_name(cls, role):
1194 1194 return {
1195 1195 cls.ROLE_ALL: _('all'),
1196 1196 cls.ROLE_HTTP: _('http/web interface'),
1197 1197 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1198 1198 cls.ROLE_API: _('api calls'),
1199 1199 cls.ROLE_FEED: _('feed access'),
1200 1200 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1201 1201 }.get(role, role)
1202 1202
1203 1203 @property
1204 1204 def role_humanized(self):
1205 1205 return self._get_role_name(self.role)
1206 1206
1207 1207 def _get_scope(self):
1208 1208 if self.repo:
1209 1209 return 'Repository: {}'.format(self.repo.repo_name)
1210 1210 if self.repo_group:
1211 1211 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1212 1212 return 'Global'
1213 1213
1214 1214 @property
1215 1215 def scope_humanized(self):
1216 1216 return self._get_scope()
1217 1217
1218 1218 @property
1219 1219 def token_obfuscated(self):
1220 1220 if self.api_key:
1221 1221 return self.api_key[:4] + "****"
1222 1222
1223 1223
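For illustration only (the query and role are arbitrary), the expiry and scope helpers above read like this:

    token = UserApiKeys.query().filter(UserApiKeys.role == UserApiKeys.ROLE_FEED).first()
    if token and not token.expired:      # expires == -1 means the token never expires
        scope = token.scope_humanized    # 'Global', 'Repository: <name>' or 'RepositoryGroup: <name> (recursive)'
        shown = token.token_obfuscated   # first four characters followed by '****'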
1224 1224 class UserEmailMap(Base, BaseModel):
1225 1225 __tablename__ = 'user_email_map'
1226 1226 __table_args__ = (
1227 1227 Index('uem_email_idx', 'email'),
1228 1228 UniqueConstraint('email'),
1229 1229 base_table_args
1230 1230 )
1231 1231 __mapper_args__ = {}
1232 1232
1233 1233 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1234 1234 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1235 1235 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1236 1236 user = relationship('User', lazy='joined')
1237 1237
1238 1238 @validates('_email')
1239 1239 def validate_email(self, key, email):
1240 1240 # check if this email is not main one
1241 1241 main_email = Session().query(User).filter(User.email == email).scalar()
1242 1242 if main_email is not None:
1243 1243 raise AttributeError('email %s is already present in the user table' % email)
1244 1244 return email
1245 1245
1246 1246 @hybrid_property
1247 1247 def email(self):
1248 1248 return self._email
1249 1249
1250 1250 @email.setter
1251 1251 def email(self, val):
1252 1252 self._email = val.lower() if val else None
1253 1253
1254 1254
1255 1255 class UserIpMap(Base, BaseModel):
1256 1256 __tablename__ = 'user_ip_map'
1257 1257 __table_args__ = (
1258 1258 UniqueConstraint('user_id', 'ip_addr'),
1259 1259 base_table_args
1260 1260 )
1261 1261 __mapper_args__ = {}
1262 1262
1263 1263 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1264 1264 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1265 1265 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1266 1266 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1267 1267 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1268 1268 user = relationship('User', lazy='joined')
1269 1269
1270 1270 @hybrid_property
1271 1271 def description_safe(self):
1272 1272 from rhodecode.lib import helpers as h
1273 1273 return h.escape(self.description)
1274 1274
1275 1275 @classmethod
1276 1276 def _get_ip_range(cls, ip_addr):
1277 1277 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1278 1278 return [str(net.network_address), str(net.broadcast_address)]
1279 1279
1280 1280 def __json__(self):
1281 1281 return {
1282 1282 'ip_addr': self.ip_addr,
1283 1283 'ip_range': self._get_ip_range(self.ip_addr),
1284 1284 }
1285 1285
1286 1286 def __unicode__(self):
1287 1287 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1288 1288 self.user_id, self.ip_addr)
1289 1289
1290 1290
1291 1291 class UserSshKeys(Base, BaseModel):
1292 1292 __tablename__ = 'user_ssh_keys'
1293 1293 __table_args__ = (
1294 1294 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1295 1295
1296 1296 UniqueConstraint('ssh_key_fingerprint'),
1297 1297
1298 1298 base_table_args
1299 1299 )
1300 1300 __mapper_args__ = {}
1301 1301
1302 1302 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1303 1303 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1304 1304 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1305 1305
1306 1306 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1307 1307
1308 1308 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1309 1309 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1310 1310 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1311 1311
1312 1312 user = relationship('User', lazy='joined')
1313 1313
1314 1314 def __json__(self):
1315 1315 data = {
1316 1316 'ssh_fingerprint': self.ssh_key_fingerprint,
1317 1317 'description': self.description,
1318 1318 'created_on': self.created_on
1319 1319 }
1320 1320 return data
1321 1321
1322 1322 def get_api_data(self):
1323 1323 data = self.__json__()
1324 1324 return data
1325 1325
1326 1326
1327 1327 class UserLog(Base, BaseModel):
1328 1328 __tablename__ = 'user_logs'
1329 1329 __table_args__ = (
1330 1330 base_table_args,
1331 1331 )
1332 1332
1333 1333 VERSION_1 = 'v1'
1334 1334 VERSION_2 = 'v2'
1335 1335 VERSIONS = [VERSION_1, VERSION_2]
1336 1336
1337 1337 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1338 1338 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1339 1339 username = Column("username", String(255), nullable=True, unique=None, default=None)
1340 1340 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1341 1341 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1342 1342 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1343 1343 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1344 1344 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1345 1345
1346 1346 version = Column("version", String(255), nullable=True, default=VERSION_1)
1347 1347 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1348 1348 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1349 1349
1350 1350 def __unicode__(self):
1351 1351 return u"<%s('id:%s:%s')>" % (
1352 1352 self.__class__.__name__, self.repository_name, self.action)
1353 1353
1354 1354 def __json__(self):
1355 1355 return {
1356 1356 'user_id': self.user_id,
1357 1357 'username': self.username,
1358 1358 'repository_id': self.repository_id,
1359 1359 'repository_name': self.repository_name,
1360 1360 'user_ip': self.user_ip,
1361 1361 'action_date': self.action_date,
1362 1362 'action': self.action,
1363 1363 }
1364 1364
1365 1365 @hybrid_property
1366 1366 def entry_id(self):
1367 1367 return self.user_log_id
1368 1368
1369 1369 @property
1370 1370 def action_as_day(self):
1371 1371 return datetime.date(*self.action_date.timetuple()[:3])
1372 1372
1373 1373 user = relationship('User')
1374 1374 repository = relationship('Repository', cascade='')
1375 1375
1376 1376
1377 1377 class UserGroup(Base, BaseModel):
1378 1378 __tablename__ = 'users_groups'
1379 1379 __table_args__ = (
1380 1380 base_table_args,
1381 1381 )
1382 1382
1383 1383 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1384 1384 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1385 1385 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1386 1386 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1387 1387 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1388 1388 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1389 1389 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1390 1390 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1391 1391
1392 1392 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
1393 1393 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1394 1394 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1395 1395 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1396 1396 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1397 1397 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1398 1398
1399 1399 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1400 1400 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1401 1401
1402 1402 @classmethod
1403 1403 def _load_group_data(cls, column):
1404 1404 if not column:
1405 1405 return {}
1406 1406
1407 1407 try:
1408 1408 return json.loads(column) or {}
1409 1409 except TypeError:
1410 1410 return {}
1411 1411
1412 1412 @hybrid_property
1413 1413 def description_safe(self):
1414 1414 from rhodecode.lib import helpers as h
1415 1415 return h.escape(self.user_group_description)
1416 1416
1417 1417 @hybrid_property
1418 1418 def group_data(self):
1419 1419 return self._load_group_data(self._group_data)
1420 1420
1421 1421 @group_data.expression
1422 1422 def group_data(self, **kwargs):
1423 1423 return self._group_data
1424 1424
1425 1425 @group_data.setter
1426 1426 def group_data(self, val):
1427 1427 try:
1428 1428 self._group_data = json.dumps(val)
1429 1429 except Exception:
1430 1430 log.error(traceback.format_exc())
1431 1431
1432 1432 @classmethod
1433 1433 def _load_sync(cls, group_data):
1434 1434 if group_data:
1435 1435 return group_data.get('extern_type')
1436 1436
1437 1437 @property
1438 1438 def sync(self):
1439 1439 return self._load_sync(self.group_data)
1440 1440
1441 1441 def __unicode__(self):
1442 1442 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1443 1443 self.users_group_id,
1444 1444 self.users_group_name)
1445 1445
1446 1446 @classmethod
1447 1447 def get_by_group_name(cls, group_name, cache=False,
1448 1448 case_insensitive=False):
1449 1449 if case_insensitive:
1450 1450 q = cls.query().filter(func.lower(cls.users_group_name) ==
1451 1451 func.lower(group_name))
1452 1452
1453 1453 else:
1454 1454 q = cls.query().filter(cls.users_group_name == group_name)
1455 1455 if cache:
1456 1456 q = q.options(
1457 1457 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1458 1458 return q.scalar()
1459 1459
1460 1460 @classmethod
1461 1461 def get(cls, user_group_id, cache=False):
1462 1462 if not user_group_id:
1463 1463 return
1464 1464
1465 1465 user_group = cls.query()
1466 1466 if cache:
1467 1467 user_group = user_group.options(
1468 1468 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1469 1469 return user_group.get(user_group_id)
1470 1470
1471 1471 def permissions(self, with_admins=True, with_owner=True,
1472 1472 expand_from_user_groups=False):
1473 1473 """
1474 1474 Permissions for user groups
1475 1475 """
1476 1476 _admin_perm = 'usergroup.admin'
1477 1477
1478 1478 owner_row = []
1479 1479 if with_owner:
1480 1480 usr = AttributeDict(self.user.get_dict())
1481 1481 usr.owner_row = True
1482 1482 usr.permission = _admin_perm
1483 1483 owner_row.append(usr)
1484 1484
1485 1485 super_admin_ids = []
1486 1486 super_admin_rows = []
1487 1487 if with_admins:
1488 1488 for usr in User.get_all_super_admins():
1489 1489 super_admin_ids.append(usr.user_id)
1490 1490 # if this admin is also owner, don't double the record
1491 1491 if usr.user_id == owner_row[0].user_id:
1492 1492 owner_row[0].admin_row = True
1493 1493 else:
1494 1494 usr = AttributeDict(usr.get_dict())
1495 1495 usr.admin_row = True
1496 1496 usr.permission = _admin_perm
1497 1497 super_admin_rows.append(usr)
1498 1498
1499 1499 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1500 1500 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1501 1501 joinedload(UserUserGroupToPerm.user),
1502 1502 joinedload(UserUserGroupToPerm.permission),)
1503 1503
1504 1504 # get owners and admins and permissions. We do a trick of re-writing
1505 1505 # objects from sqlalchemy to named-tuples because the sqlalchemy session
1506 1506 # has a global reference and changing one object propagates to all
1507 1507 # others. This means if an admin is also an owner, an admin_row change
1508 1508 # would propagate to both objects
1509 1509 perm_rows = []
1510 1510 for _usr in q.all():
1511 1511 usr = AttributeDict(_usr.user.get_dict())
1512 1512 # if this user is also owner/admin, mark as duplicate record
1513 1513 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1514 1514 usr.duplicate_perm = True
1515 1515 usr.permission = _usr.permission.permission_name
1516 1516 perm_rows.append(usr)
1517 1517
1518 1518 # sort the perm rows so that the 'default' user comes first, then by
1519 1519 # admin, write, read, none permission; entries are sorted alphabetically
1520 1520 # within each group
1521 1521 perm_rows = sorted(perm_rows, key=display_user_sort)
1522 1522
1523 1523 user_groups_rows = []
1524 1524 if expand_from_user_groups:
1525 1525 for ug in self.permission_user_groups(with_members=True):
1526 1526 for user_data in ug.members:
1527 1527 user_groups_rows.append(user_data)
1528 1528
1529 1529 return super_admin_rows + owner_row + perm_rows + user_groups_rows
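
# A minimal sketch of consuming the rows returned above (the ``user_group``
# variable and the printed values are hypothetical):
#
#   for row in user_group.permissions(with_admins=True, with_owner=True):
#       print(row.username, row.permission)
#       # owner / super-admin rows carry 'usergroup.admin', regular rows
#       # carry whatever permission was granted, e.g. 'usergroup.read'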
1530 1530
1531 1531 def permission_user_groups(self, with_members=False):
1532 1532 q = UserGroupUserGroupToPerm.query()\
1533 1533 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1534 1534 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1535 1535 joinedload(UserGroupUserGroupToPerm.target_user_group),
1536 1536 joinedload(UserGroupUserGroupToPerm.permission),)
1537 1537
1538 1538 perm_rows = []
1539 1539 for _user_group in q.all():
1540 1540 entry = AttributeDict(_user_group.user_group.get_dict())
1541 1541 entry.permission = _user_group.permission.permission_name
1542 1542 if with_members:
1543 1543 entry.members = [x.user.get_dict()
1544 1544 for x in _user_group.user_group.members]
1545 1545 perm_rows.append(entry)
1546 1546
1547 1547 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1548 1548 return perm_rows
1549 1549
1550 1550 def _get_default_perms(self, user_group, suffix=''):
1551 1551 from rhodecode.model.permission import PermissionModel
1552 1552 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1553 1553
1554 1554 def get_default_perms(self, suffix=''):
1555 1555 return self._get_default_perms(self, suffix)
1556 1556
1557 1557 def get_api_data(self, with_group_members=True, include_secrets=False):
1558 1558 """
1559 1559 :param include_secrets: See :meth:`User.get_api_data`; this parameter
1560 1560     is forwarded to that call.
1561 1561
1562 1562 """
1563 1563 user_group = self
1564 1564 data = {
1565 1565 'users_group_id': user_group.users_group_id,
1566 1566 'group_name': user_group.users_group_name,
1567 1567 'group_description': user_group.user_group_description,
1568 1568 'active': user_group.users_group_active,
1569 1569 'owner': user_group.user.username,
1570 1570 'sync': user_group.sync,
1571 1571 'owner_email': user_group.user.email,
1572 1572 }
1573 1573
1574 1574 if with_group_members:
1575 1575 users = []
1576 1576 for user in user_group.members:
1577 1577 user = user.user
1578 1578 users.append(user.get_api_data(include_secrets=include_secrets))
1579 1579 data['users'] = users
1580 1580
1581 1581 return data
1582 1582
1583 1583
1584 1584 class UserGroupMember(Base, BaseModel):
1585 1585 __tablename__ = 'users_groups_members'
1586 1586 __table_args__ = (
1587 1587 base_table_args,
1588 1588 )
1589 1589
1590 1590 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1591 1591 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1592 1592 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1593 1593
1594 1594 user = relationship('User', lazy='joined')
1595 1595 users_group = relationship('UserGroup')
1596 1596
1597 1597 def __init__(self, gr_id='', u_id=''):
1598 1598 self.users_group_id = gr_id
1599 1599 self.user_id = u_id
1600 1600
1601 1601
1602 1602 class RepositoryField(Base, BaseModel):
1603 1603 __tablename__ = 'repositories_fields'
1604 1604 __table_args__ = (
1605 1605 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1606 1606 base_table_args,
1607 1607 )
1608 1608
1609 1609 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1610 1610
1611 1611 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1612 1612 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1613 1613 field_key = Column("field_key", String(250))
1614 1614 field_label = Column("field_label", String(1024), nullable=False)
1615 1615 field_value = Column("field_value", String(10000), nullable=False)
1616 1616 field_desc = Column("field_desc", String(1024), nullable=False)
1617 1617 field_type = Column("field_type", String(255), nullable=False, unique=None)
1618 1618 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1619 1619
1620 1620 repository = relationship('Repository')
1621 1621
1622 1622 @property
1623 1623 def field_key_prefixed(self):
1624 1624 return 'ex_%s' % self.field_key
1625 1625
1626 1626 @classmethod
1627 1627 def un_prefix_key(cls, key):
1628 1628 if key.startswith(cls.PREFIX):
1629 1629 return key[len(cls.PREFIX):]
1630 1630 return key
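
# Illustrative round-trip of the prefix handling above (field name hypothetical):
#
#   field.field_key = 'release_notes'
#   field.field_key_prefixed                            # -> 'ex_release_notes'
#   RepositoryField.un_prefix_key('ex_release_notes')   # -> 'release_notes'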
1631 1631
1632 1632 @classmethod
1633 1633 def get_by_key_name(cls, key, repo):
1634 1634 row = cls.query()\
1635 1635 .filter(cls.repository == repo)\
1636 1636 .filter(cls.field_key == key).scalar()
1637 1637 return row
1638 1638
1639 1639
1640 1640 class Repository(Base, BaseModel):
1641 1641 __tablename__ = 'repositories'
1642 1642 __table_args__ = (
1643 1643 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1644 1644 base_table_args,
1645 1645 )
1646 1646 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1647 1647 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1648 1648 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1649 1649
1650 1650 STATE_CREATED = 'repo_state_created'
1651 1651 STATE_PENDING = 'repo_state_pending'
1652 1652 STATE_ERROR = 'repo_state_error'
1653 1653
1654 1654 LOCK_AUTOMATIC = 'lock_auto'
1655 1655 LOCK_API = 'lock_api'
1656 1656 LOCK_WEB = 'lock_web'
1657 1657 LOCK_PULL = 'lock_pull'
1658 1658
1659 1659 NAME_SEP = URL_SEP
1660 1660
1661 1661 repo_id = Column(
1662 1662 "repo_id", Integer(), nullable=False, unique=True, default=None,
1663 1663 primary_key=True)
1664 1664 _repo_name = Column(
1665 1665 "repo_name", Text(), nullable=False, default=None)
1666 1666 repo_name_hash = Column(
1667 1667 "repo_name_hash", String(255), nullable=False, unique=True)
1668 1668 repo_state = Column("repo_state", String(255), nullable=True)
1669 1669
1670 1670 clone_uri = Column(
1671 1671 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1672 1672 default=None)
1673 1673 push_uri = Column(
1674 1674 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1675 1675 default=None)
1676 1676 repo_type = Column(
1677 1677 "repo_type", String(255), nullable=False, unique=False, default=None)
1678 1678 user_id = Column(
1679 1679 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1680 1680 unique=False, default=None)
1681 1681 private = Column(
1682 1682 "private", Boolean(), nullable=True, unique=None, default=None)
1683 1683 archived = Column(
1684 1684 "archived", Boolean(), nullable=True, unique=None, default=None)
1685 1685 enable_statistics = Column(
1686 1686 "statistics", Boolean(), nullable=True, unique=None, default=True)
1687 1687 enable_downloads = Column(
1688 1688 "downloads", Boolean(), nullable=True, unique=None, default=True)
1689 1689 description = Column(
1690 1690 "description", String(10000), nullable=True, unique=None, default=None)
1691 1691 created_on = Column(
1692 1692 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1693 1693 default=datetime.datetime.now)
1694 1694 updated_on = Column(
1695 1695 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1696 1696 default=datetime.datetime.now)
1697 1697 _landing_revision = Column(
1698 1698 "landing_revision", String(255), nullable=False, unique=False,
1699 1699 default=None)
1700 1700 enable_locking = Column(
1701 1701 "enable_locking", Boolean(), nullable=False, unique=None,
1702 1702 default=False)
1703 1703 _locked = Column(
1704 1704 "locked", String(255), nullable=True, unique=False, default=None)
1705 1705 _changeset_cache = Column(
1706 1706 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1707 1707
1708 1708 fork_id = Column(
1709 1709 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1710 1710 nullable=True, unique=False, default=None)
1711 1711 group_id = Column(
1712 1712 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1713 1713 unique=False, default=None)
1714 1714
1715 1715 user = relationship('User', lazy='joined')
1716 1716 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1717 1717 group = relationship('RepoGroup', lazy='joined')
1718 1718 repo_to_perm = relationship(
1719 1719 'UserRepoToPerm', cascade='all',
1720 1720 order_by='UserRepoToPerm.repo_to_perm_id')
1721 1721 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1722 1722 stats = relationship('Statistics', cascade='all', uselist=False)
1723 1723
1724 1724 followers = relationship(
1725 1725 'UserFollowing',
1726 1726 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1727 1727 cascade='all')
1728 1728 extra_fields = relationship(
1729 1729 'RepositoryField', cascade="all, delete-orphan")
1730 1730 logs = relationship('UserLog')
1731 1731 comments = relationship(
1732 1732 'ChangesetComment', cascade="all, delete-orphan")
1733 1733 pull_requests_source = relationship(
1734 1734 'PullRequest',
1735 1735 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1736 1736 cascade="all, delete-orphan")
1737 1737 pull_requests_target = relationship(
1738 1738 'PullRequest',
1739 1739 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1740 1740 cascade="all, delete-orphan")
1741 1741 ui = relationship('RepoRhodeCodeUi', cascade="all")
1742 1742 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1743 1743 integrations = relationship('Integration', cascade="all, delete-orphan")
1744 1744
1745 1745 scoped_tokens = relationship('UserApiKeys', cascade="all")
1746 1746
1747 1747 # no cascade, set NULL
1748 1748 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
1749 1749
1750 1750 def __unicode__(self):
1751 1751 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1752 1752 safe_unicode(self.repo_name))
1753 1753
1754 1754 @hybrid_property
1755 1755 def description_safe(self):
1756 1756 from rhodecode.lib import helpers as h
1757 1757 return h.escape(self.description)
1758 1758
1759 1759 @hybrid_property
1760 1760 def landing_rev(self):
1761 1761 # should always return [rev_type, rev], e.g. ['branch', 'master']
1762 1762 if self._landing_revision:
1763 1763 _rev_info = self._landing_revision.split(':')
1764 1764 if len(_rev_info) < 2:
1765 1765 _rev_info.insert(0, 'rev')
1766 1766 return [_rev_info[0], _rev_info[1]]
1767 1767 return [None, None]
1768 1768
1769 1769 @property
1770 1770 def landing_ref_type(self):
1771 1771 return self.landing_rev[0]
1772 1772
1773 1773 @property
1774 1774 def landing_ref_name(self):
1775 1775 return self.landing_rev[1]
1776 1776
1777 1777 @landing_rev.setter
1778 1778 def landing_rev(self, val):
1779 1779 if ':' not in val:
1780 1780 raise ValueError('value must be delimited with `:` and consist '
1781 1781 'of <rev_type>:<rev>, got %s instead' % val)
1782 1782 self._landing_revision = val
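
# Expected format sketch for the hybrid property above (values illustrative):
#
#   repo.landing_rev = 'branch:master'
#   repo.landing_rev        # -> ['branch', 'master']
#   repo.landing_ref_type   # -> 'branch'
#   repo.landing_ref_name   # -> 'master'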
1783 1783
1784 1784 @hybrid_property
1785 1785 def locked(self):
1786 1786 if self._locked:
1787 1787 user_id, timelocked, reason = self._locked.split(':')
1788 1788 lock_values = int(user_id), timelocked, reason
1789 1789 else:
1790 1790 lock_values = [None, None, None]
1791 1791 return lock_values
1792 1792
1793 1793 @locked.setter
1794 1794 def locked(self, val):
1795 1795 if val and isinstance(val, (list, tuple)):
1796 1796 self._locked = ':'.join(map(str, val))
1797 1797 else:
1798 1798 self._locked = None
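
# Illustrative shape of the lock value (user id and timestamp hypothetical):
#
#   repo.locked = [2, 1577836800, Repository.LOCK_API]
#   # stored internally as the string '2:1577836800:lock_api'
#   repo.locked   # -> (2, '1577836800', 'lock_api')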
1799 1799
1800 1800 @classmethod
1801 1801 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1802 1802 from rhodecode.lib.vcs.backends.base import EmptyCommit
1803 1803 dummy = EmptyCommit().__json__()
1804 1804 if not changeset_cache_raw:
1805 1805 dummy['source_repo_id'] = repo_id
1806 1806 return json.loads(json.dumps(dummy))
1807 1807
1808 1808 try:
1809 1809 return json.loads(changeset_cache_raw)
1810 1810 except TypeError:
1811 1811 return dummy
1812 1812 except Exception:
1813 1813 log.error(traceback.format_exc())
1814 1814 return dummy
1815 1815
1816 1816 @hybrid_property
1817 1817 def changeset_cache(self):
1818 1818 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1819 1819
1820 1820 @changeset_cache.setter
1821 1821 def changeset_cache(self, val):
1822 1822 try:
1823 1823 self._changeset_cache = json.dumps(val)
1824 1824 except Exception:
1825 1825 log.error(traceback.format_exc())
1826 1826
1827 1827 @hybrid_property
1828 1828 def repo_name(self):
1829 1829 return self._repo_name
1830 1830
1831 1831 @repo_name.setter
1832 1832 def repo_name(self, value):
1833 1833 self._repo_name = value
1834 1834 self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1835 1835
1836 1836 @classmethod
1837 1837 def normalize_repo_name(cls, repo_name):
1838 1838 """
1839 1839 Normalizes an OS-specific repo_name to the format stored internally
1840 1840 in the database, using URL_SEP as the separator
1841 1841
1842 1842 :param cls:
1843 1843 :param repo_name:
1844 1844 """
1845 1845 return cls.NAME_SEP.join(repo_name.split(os.sep))
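
# Example (illustrative): on Windows ``'group\\repo'`` becomes 'group/repo',
# while on POSIX systems the name is returned unchanged:
#
#   Repository.normalize_repo_name('group\\repo')  # -> 'group/repo' (os.sep == '\\')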
1846 1846
1847 1847 @classmethod
1848 1848 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1849 1849 session = Session()
1850 1850 q = session.query(cls).filter(cls.repo_name == repo_name)
1851 1851
1852 1852 if cache:
1853 1853 if identity_cache:
1854 1854 val = cls.identity_cache(session, 'repo_name', repo_name)
1855 1855 if val:
1856 1856 return val
1857 1857 else:
1858 1858 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1859 1859 q = q.options(
1860 1860 FromCache("sql_cache_short", cache_key))
1861 1861
1862 1862 return q.scalar()
1863 1863
1864 1864 @classmethod
1865 1865 def get_by_id_or_repo_name(cls, repoid):
1866 1866 if isinstance(repoid, (int, long)):
1867 1867 try:
1868 1868 repo = cls.get(repoid)
1869 1869 except ValueError:
1870 1870 repo = None
1871 1871 else:
1872 1872 repo = cls.get_by_repo_name(repoid)
1873 1873 return repo
1874 1874
1875 1875 @classmethod
1876 1876 def get_by_full_path(cls, repo_full_path):
1877 1877 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1878 1878 repo_name = cls.normalize_repo_name(repo_name)
1879 1879 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1880 1880
1881 1881 @classmethod
1882 1882 def get_repo_forks(cls, repo_id):
1883 1883 return cls.query().filter(Repository.fork_id == repo_id)
1884 1884
1885 1885 @classmethod
1886 1886 def base_path(cls):
1887 1887 """
1888 1888 Returns the base path where all repos are stored
1889 1889
1890 1890 :param cls:
1891 1891 """
1892 1892 q = Session().query(RhodeCodeUi)\
1893 1893 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1894 1894 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1895 1895 return q.one().ui_value
1896 1896
1897 1897 @classmethod
1898 1898 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1899 1899 case_insensitive=True, archived=False):
1900 1900 q = Repository.query()
1901 1901
1902 1902 if not archived:
1903 1903 q = q.filter(Repository.archived.isnot(true()))
1904 1904
1905 1905 if not isinstance(user_id, Optional):
1906 1906 q = q.filter(Repository.user_id == user_id)
1907 1907
1908 1908 if not isinstance(group_id, Optional):
1909 1909 q = q.filter(Repository.group_id == group_id)
1910 1910
1911 1911 if case_insensitive:
1912 1912 q = q.order_by(func.lower(Repository.repo_name))
1913 1913 else:
1914 1914 q = q.order_by(Repository.repo_name)
1915 1915
1916 1916 return q.all()
1917 1917
1918 1918 @property
1919 1919 def repo_uid(self):
1920 1920 return '_{}'.format(self.repo_id)
1921 1921
1922 1922 @property
1923 1923 def forks(self):
1924 1924 """
1925 1925 Return forks of this repo
1926 1926 """
1927 1927 return Repository.get_repo_forks(self.repo_id)
1928 1928
1929 1929 @property
1930 1930 def parent(self):
1931 1931 """
1932 1932 Returns fork parent
1933 1933 """
1934 1934 return self.fork
1935 1935
1936 1936 @property
1937 1937 def just_name(self):
1938 1938 return self.repo_name.split(self.NAME_SEP)[-1]
1939 1939
1940 1940 @property
1941 1941 def groups_with_parents(self):
1942 1942 groups = []
1943 1943 if self.group is None:
1944 1944 return groups
1945 1945
1946 1946 cur_gr = self.group
1947 1947 groups.insert(0, cur_gr)
1948 1948 while 1:
1949 1949 gr = getattr(cur_gr, 'parent_group', None)
1950 1950 cur_gr = cur_gr.parent_group
1951 1951 if gr is None:
1952 1952 break
1953 1953 groups.insert(0, gr)
1954 1954
1955 1955 return groups
1956 1956
1957 1957 @property
1958 1958 def groups_and_repo(self):
1959 1959 return self.groups_with_parents, self
1960 1960
1961 1961 @LazyProperty
1962 1962 def repo_path(self):
1963 1963 """
1964 1964 Returns the full base path for this repository, i.e. where it
1965 1965 actually exists on the filesystem
1966 1966 """
1967 1967 q = Session().query(RhodeCodeUi).filter(
1968 1968 RhodeCodeUi.ui_key == self.NAME_SEP)
1969 1969 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1970 1970 return q.one().ui_value
1971 1971
1972 1972 @property
1973 1973 def repo_full_path(self):
1974 1974 p = [self.repo_path]
1975 1975 # we need to split the name by / since this is how we store the
1976 1976 # names in the database, but that eventually needs to be converted
1977 1977 # into a valid system path
1978 1978 p += self.repo_name.split(self.NAME_SEP)
1979 1979 return os.path.join(*map(safe_unicode, p))
1980 1980
1981 1981 @property
1982 1982 def cache_keys(self):
1983 1983 """
1984 1984 Returns associated cache keys for that repo
1985 1985 """
1986 1986 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1987 1987 repo_id=self.repo_id)
1988 1988 return CacheKey.query()\
1989 1989 .filter(CacheKey.cache_args == invalidation_namespace)\
1990 1990 .order_by(CacheKey.cache_key)\
1991 1991 .all()
1992 1992
1993 1993 @property
1994 1994 def cached_diffs_relative_dir(self):
1995 1995 """
1996 1996 Return the cached diffs directory as a path relative to the repository
1997 1997 store, safe to display to users who should not learn the absolute
1998 1998 store path
1999 1999 """
2000 2000 return os.path.join(
2001 2001 os.path.dirname(self.repo_name),
2002 2002 self.cached_diffs_dir.split(os.path.sep)[-1])
2003 2003
2004 2004 @property
2005 2005 def cached_diffs_dir(self):
2006 2006 path = self.repo_full_path
2007 2007 return os.path.join(
2008 2008 os.path.dirname(path),
2009 2009 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
2010 2010
2011 2011 def cached_diffs(self):
2012 2012 diff_cache_dir = self.cached_diffs_dir
2013 2013 if os.path.isdir(diff_cache_dir):
2014 2014 return os.listdir(diff_cache_dir)
2015 2015 return []
2016 2016
2017 2017 def shadow_repos(self):
2018 2018 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
2019 2019 return [
2020 2020 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2021 2021 if x.startswith(shadow_repos_pattern)]
2022 2022
2023 2023 def get_new_name(self, repo_name):
2024 2024 """
2025 2025 returns the new full repository name based on the assigned group and the new name
2026 2026
2027 2027 :param repo_name:
2028 2028 """
2029 2029 path_prefix = self.group.full_path_splitted if self.group else []
2030 2030 return self.NAME_SEP.join(path_prefix + [repo_name])
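
# Illustrative example (group and names hypothetical): for a repository that
# lives in the group 'web/backend', ``repo.get_new_name('api')`` returns
# 'web/backend/api'.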
2031 2031
2032 2032 @property
2033 2033 def _config(self):
2034 2034 """
2035 2035 Returns db based config object.
2036 2036 """
2037 2037 from rhodecode.lib.utils import make_db_config
2038 2038 return make_db_config(clear_session=False, repo=self)
2039 2039
2040 2040 def permissions(self, with_admins=True, with_owner=True,
2041 2041 expand_from_user_groups=False):
2042 2042 """
2043 2043 Permissions for repositories
2044 2044 """
2045 2045 _admin_perm = 'repository.admin'
2046 2046
2047 2047 owner_row = []
2048 2048 if with_owner:
2049 2049 usr = AttributeDict(self.user.get_dict())
2050 2050 usr.owner_row = True
2051 2051 usr.permission = _admin_perm
2052 2052 usr.permission_id = None
2053 2053 owner_row.append(usr)
2054 2054
2055 2055 super_admin_ids = []
2056 2056 super_admin_rows = []
2057 2057 if with_admins:
2058 2058 for usr in User.get_all_super_admins():
2059 2059 super_admin_ids.append(usr.user_id)
2060 2060 # if this admin is also owner, don't double the record
2061 2061 if usr.user_id == owner_row[0].user_id:
2062 2062 owner_row[0].admin_row = True
2063 2063 else:
2064 2064 usr = AttributeDict(usr.get_dict())
2065 2065 usr.admin_row = True
2066 2066 usr.permission = _admin_perm
2067 2067 usr.permission_id = None
2068 2068 super_admin_rows.append(usr)
2069 2069
2070 2070 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2071 2071 q = q.options(joinedload(UserRepoToPerm.repository),
2072 2072 joinedload(UserRepoToPerm.user),
2073 2073 joinedload(UserRepoToPerm.permission),)
2074 2074
2075 2075 # get owners, admins and their permissions. We re-write the
2076 2076 # sqlalchemy objects into AttributeDicts because the sqlalchemy
2077 2077 # session holds a global reference, and changing one object would
2078 2078 # propagate to all others. Otherwise, if an admin is also the
2079 2079 # owner, setting admin_row would affect both rows.
2080 2080 perm_rows = []
2081 2081 for _usr in q.all():
2082 2082 usr = AttributeDict(_usr.user.get_dict())
2083 2083 # if this user is also owner/admin, mark as duplicate record
2084 2084 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2085 2085 usr.duplicate_perm = True
2086 2086 # also check if this permission is maybe used by branch_permissions
2087 2087 if _usr.branch_perm_entry:
2088 2088 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2089 2089
2090 2090 usr.permission = _usr.permission.permission_name
2091 2091 usr.permission_id = _usr.repo_to_perm_id
2092 2092 perm_rows.append(usr)
2093 2093
2094 2094 # sort the perm rows so that the 'default' user comes first, then by
2095 2095 # admin, write, read, none permission; entries are sorted alphabetically
2096 2096 # within each group
2097 2097 perm_rows = sorted(perm_rows, key=display_user_sort)
2098 2098
2099 2099 user_groups_rows = []
2100 2100 if expand_from_user_groups:
2101 2101 for ug in self.permission_user_groups(with_members=True):
2102 2102 for user_data in ug.members:
2103 2103 user_groups_rows.append(user_data)
2104 2104
2105 2105 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2106 2106
2107 2107 def permission_user_groups(self, with_members=True):
2108 2108 q = UserGroupRepoToPerm.query()\
2109 2109 .filter(UserGroupRepoToPerm.repository == self)
2110 2110 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2111 2111 joinedload(UserGroupRepoToPerm.users_group),
2112 2112 joinedload(UserGroupRepoToPerm.permission),)
2113 2113
2114 2114 perm_rows = []
2115 2115 for _user_group in q.all():
2116 2116 entry = AttributeDict(_user_group.users_group.get_dict())
2117 2117 entry.permission = _user_group.permission.permission_name
2118 2118 if with_members:
2119 2119 entry.members = [x.user.get_dict()
2120 2120 for x in _user_group.users_group.members]
2121 2121 perm_rows.append(entry)
2122 2122
2123 2123 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2124 2124 return perm_rows
2125 2125
2126 2126 def get_api_data(self, include_secrets=False):
2127 2127 """
2128 2128 Common function for generating repo api data
2129 2129
2130 2130 :param include_secrets: See :meth:`User.get_api_data`.
2131 2131
2132 2132 """
2133 2133 # TODO: mikhail: there is an anti-pattern here; we probably need to
2134 2134 # move these methods to the models level.
2135 2135 from rhodecode.model.settings import SettingsModel
2136 2136 from rhodecode.model.repo import RepoModel
2137 2137
2138 2138 repo = self
2139 2139 _user_id, _time, _reason = self.locked
2140 2140
2141 2141 data = {
2142 2142 'repo_id': repo.repo_id,
2143 2143 'repo_name': repo.repo_name,
2144 2144 'repo_type': repo.repo_type,
2145 2145 'clone_uri': repo.clone_uri or '',
2146 2146 'push_uri': repo.push_uri or '',
2147 2147 'url': RepoModel().get_url(self),
2148 2148 'private': repo.private,
2149 2149 'created_on': repo.created_on,
2150 2150 'description': repo.description_safe,
2151 2151 'landing_rev': repo.landing_rev,
2152 2152 'owner': repo.user.username,
2153 2153 'fork_of': repo.fork.repo_name if repo.fork else None,
2154 2154 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2155 2155 'enable_statistics': repo.enable_statistics,
2156 2156 'enable_locking': repo.enable_locking,
2157 2157 'enable_downloads': repo.enable_downloads,
2158 2158 'last_changeset': repo.changeset_cache,
2159 2159 'locked_by': User.get(_user_id).get_api_data(
2160 2160 include_secrets=include_secrets) if _user_id else None,
2161 2161 'locked_date': time_to_datetime(_time) if _time else None,
2162 2162 'lock_reason': _reason if _reason else None,
2163 2163 }
2164 2164
2165 2165 # TODO: mikhail: should be per-repo settings here
2166 2166 rc_config = SettingsModel().get_all_settings()
2167 2167 repository_fields = str2bool(
2168 2168 rc_config.get('rhodecode_repository_fields'))
2169 2169 if repository_fields:
2170 2170 for f in self.extra_fields:
2171 2171 data[f.field_key_prefixed] = f.field_value
2172 2172
2173 2173 return data
2174 2174
2175 2175 @classmethod
2176 2176 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2177 2177 if not lock_time:
2178 2178 lock_time = time.time()
2179 2179 if not lock_reason:
2180 2180 lock_reason = cls.LOCK_AUTOMATIC
2181 2181 repo.locked = [user_id, lock_time, lock_reason]
2182 2182 Session().add(repo)
2183 2183 Session().commit()
2184 2184
2185 2185 @classmethod
2186 2186 def unlock(cls, repo):
2187 2187 repo.locked = None
2188 2188 Session().add(repo)
2189 2189 Session().commit()
2190 2190
2191 2191 @classmethod
2192 2192 def getlock(cls, repo):
2193 2193 return repo.locked
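
# Minimal usage sketch of the locking helpers above (the repo object and
# user id are hypothetical):
#
#   Repository.lock(repo, user_id=2, lock_reason=Repository.LOCK_WEB)
#   Repository.getlock(repo)   # -> (2, '<lock timestamp>', 'lock_web')
#   Repository.unlock(repo)    # clears the lock again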
2194 2194
2195 2195 def is_user_lock(self, user_id):
2196 2196 if self.locked[0]:
2197 2197 lock_user_id = safe_int(self.locked[0])
2198 2198 user_id = safe_int(user_id)
2199 2199 # both are ints, and they are equal
2200 2200 return all([lock_user_id, user_id]) and lock_user_id == user_id
2201 2201
2202 2202 return False
2203 2203
2204 2204 def get_locking_state(self, action, user_id, only_when_enabled=True):
2205 2205 """
2206 2206 Checks locking on this repository. If locking is enabled and a lock is
2207 2207 present, returns a tuple of make_lock, locked, locked_by.
2208 2208 make_lock can have 3 states: None (do nothing), True (make a lock) and
2209 2209 False (release a lock). This value is later propagated to hooks, which
2210 2210 do the actual locking; think of it as a signal telling the hooks what to do.
2211 2211
2212 2212 """
2213 2213 # TODO: johbo: This is part of the business logic and should be moved
2214 2214 # into the RepositoryModel.
2215 2215
2216 2216 if action not in ('push', 'pull'):
2217 2217 raise ValueError("Invalid action value: %s" % repr(action))
2218 2218
2219 2219 # defines if locked error should be thrown to user
2220 2220 currently_locked = False
2221 2221 # defines if new lock should be made, tri-state
2222 2222 make_lock = None
2223 2223 repo = self
2224 2224 user = User.get(user_id)
2225 2225
2226 2226 lock_info = repo.locked
2227 2227
2228 2228 if repo and (repo.enable_locking or not only_when_enabled):
2229 2229 if action == 'push':
2230 2230 # check if it's already locked; if it is, compare users
2231 2231 locked_by_user_id = lock_info[0]
2232 2232 if user.user_id == locked_by_user_id:
2233 2233 log.debug(
2234 2234 'Got `push` action from user %s, now unlocking', user)
2235 2235 # unlock if we have push from user who locked
2236 2236 make_lock = False
2237 2237 else:
2238 2238 # we're not the same user who set the lock, deny with the
2239 2239 # code defined in settings (default is 423 HTTP Locked)
2240 2240 log.debug('Repo %s is currently locked by %s', repo, user)
2241 2241 currently_locked = True
2242 2242 elif action == 'pull':
2243 2243 # [0] user [1] date
2244 2244 if lock_info[0] and lock_info[1]:
2245 2245 log.debug('Repo %s is currently locked by %s', repo, user)
2246 2246 currently_locked = True
2247 2247 else:
2248 2248 log.debug('Setting lock on repo %s by %s', repo, user)
2249 2249 make_lock = True
2250 2250
2251 2251 else:
2252 2252 log.debug('Repository %s does not have locking enabled', repo)
2253 2253
2254 2254 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2255 2255 make_lock, currently_locked, lock_info)
2256 2256
2257 2257 from rhodecode.lib.auth import HasRepoPermissionAny
2258 2258 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2259 2259 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2260 2260 # if we don't have at least write permission we cannot make a lock
2261 2261 log.debug('lock state reset back to FALSE due to lack '
2262 2262 'of at least write permission')
2263 2263 make_lock = False
2264 2264
2265 2265 return make_lock, currently_locked, lock_info
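
# Sketch of interpreting the tri-state result (caller code hypothetical):
#
#   make_lock, locked, locked_by = repo.get_locking_state('push', user_id)
#   if make_lock is True:
#       pass   # hooks should set a new lock
#   elif make_lock is False:
#       pass   # hooks should release the existing lock
#   # make_lock is None means: leave the locking state as it is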
2266 2266
2267 2267 @property
2268 2268 def last_commit_cache_update_diff(self):
2269 2269 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2270 2270
2271 2271 @classmethod
2272 2272 def _load_commit_change(cls, last_commit_cache):
2273 2273 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2274 2274 empty_date = datetime.datetime.fromtimestamp(0)
2275 2275 date_latest = last_commit_cache.get('date', empty_date)
2276 2276 try:
2277 2277 return parse_datetime(date_latest)
2278 2278 except Exception:
2279 2279 return empty_date
2280 2280
2281 2281 @property
2282 2282 def last_commit_change(self):
2283 2283 return self._load_commit_change(self.changeset_cache)
2284 2284
2285 2285 @property
2286 2286 def last_db_change(self):
2287 2287 return self.updated_on
2288 2288
2289 2289 @property
2290 2290 def clone_uri_hidden(self):
2291 2291 clone_uri = self.clone_uri
2292 2292 if clone_uri:
2293 2293 import urlobject
2294 2294 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2295 2295 if url_obj.password:
2296 2296 clone_uri = url_obj.with_password('*****')
2297 2297 return clone_uri
2298 2298
2299 2299 @property
2300 2300 def push_uri_hidden(self):
2301 2301 push_uri = self.push_uri
2302 2302 if push_uri:
2303 2303 import urlobject
2304 2304 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2305 2305 if url_obj.password:
2306 2306 push_uri = url_obj.with_password('*****')
2307 2307 return push_uri
2308 2308
2309 2309 def clone_url(self, **override):
2310 2310 from rhodecode.model.settings import SettingsModel
2311 2311
2312 2312 uri_tmpl = None
2313 2313 if 'with_id' in override:
2314 2314 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2315 2315 del override['with_id']
2316 2316
2317 2317 if 'uri_tmpl' in override:
2318 2318 uri_tmpl = override['uri_tmpl']
2319 2319 del override['uri_tmpl']
2320 2320
2321 2321 ssh = False
2322 2322 if 'ssh' in override:
2323 2323 ssh = True
2324 2324 del override['ssh']
2325 2325
2326 2326 # we didn't override our tmpl from **overrides
2327 2327 request = get_current_request()
2328 2328 if not uri_tmpl:
2329 2329 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2330 2330 rc_config = request.call_context.rc_config
2331 2331 else:
2332 2332 rc_config = SettingsModel().get_all_settings(cache=True)
2333 2333
2334 2334 if ssh:
2335 2335 uri_tmpl = rc_config.get(
2336 2336 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2337 2337
2338 2338 else:
2339 2339 uri_tmpl = rc_config.get(
2340 2340 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2341 2341
2342 2342 return get_clone_url(request=request,
2343 2343 uri_tmpl=uri_tmpl,
2344 2344 repo_name=self.repo_name,
2345 2345 repo_id=self.repo_id,
2346 2346 repo_type=self.repo_type,
2347 2347 **override)
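
# Illustrative calls (the resulting URLs depend on the configured templates):
#
#   repo.clone_url()              # e.g. 'http://user@host/group/repo'
#   repo.clone_url(with_id=True)  # uses DEFAULT_CLONE_URI_ID, e.g. '.../_42'
#   repo.clone_url(ssh=True)      # uses the SSH clone URI template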
2348 2348
2349 2349 def set_state(self, state):
2350 2350 self.repo_state = state
2351 2351 Session().add(self)
2352 2352 #==========================================================================
2353 2353 # SCM PROPERTIES
2354 2354 #==========================================================================
2355 2355
2356 2356 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False):
2357 2357 return get_commit_safe(
2358 2358 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2359 2359 maybe_unreachable=maybe_unreachable)
2360 2360
2361 2361 def get_changeset(self, rev=None, pre_load=None):
2362 2362 warnings.warn("Use get_commit", DeprecationWarning)
2363 2363 commit_id = None
2364 2364 commit_idx = None
2365 2365 if isinstance(rev, compat.string_types):
2366 2366 commit_id = rev
2367 2367 else:
2368 2368 commit_idx = rev
2369 2369 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2370 2370 pre_load=pre_load)
2371 2371
2372 2372 def get_landing_commit(self):
2373 2373 """
2374 2374 Returns landing commit, or if that doesn't exist returns the tip
2375 2375 """
2376 2376 _rev_type, _rev = self.landing_rev
2377 2377 commit = self.get_commit(_rev)
2378 2378 if isinstance(commit, EmptyCommit):
2379 2379 return self.get_commit()
2380 2380 return commit
2381 2381
2382 2382 def flush_commit_cache(self):
2383 2383 self.update_commit_cache(cs_cache={'raw_id':'0'})
2384 2384 self.update_commit_cache()
2385 2385
2386 2386 def update_commit_cache(self, cs_cache=None, config=None):
2387 2387 """
2388 2388 Update cache of last commit for repository
2389 2389 cache_keys should be::
2390 2390
2391 2391 source_repo_id
2392 2392 short_id
2393 2393 raw_id
2394 2394 revision
2395 2395 parents
2396 2396 message
2397 2397 date
2398 2398 author
2399 2399 updated_on
2400 2400
2401 2401 """
2402 2402 from rhodecode.lib.vcs.backends.base import BaseChangeset
2403 2403 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2404 2404 empty_date = datetime.datetime.fromtimestamp(0)
2405 2405
2406 2406 if cs_cache is None:
2407 2407 # use no-cache version here
2408 2408 try:
2409 2409 scm_repo = self.scm_instance(cache=False, config=config)
2410 2410 except VCSError:
2411 2411 scm_repo = None
2412 2412 empty = scm_repo is None or scm_repo.is_empty()
2413 2413
2414 2414 if not empty:
2415 2415 cs_cache = scm_repo.get_commit(
2416 2416 pre_load=["author", "date", "message", "parents", "branch"])
2417 2417 else:
2418 2418 cs_cache = EmptyCommit()
2419 2419
2420 2420 if isinstance(cs_cache, BaseChangeset):
2421 2421 cs_cache = cs_cache.__json__()
2422 2422
2423 2423 def is_outdated(new_cs_cache):
2424 2424 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2425 2425 new_cs_cache['revision'] != self.changeset_cache['revision']):
2426 2426 return True
2427 2427 return False
2428 2428
2429 2429 # check if we have maybe already latest cached revision
2430 2430 # check if we maybe already have the latest cached revision
2431 2431 _current_datetime = datetime.datetime.utcnow()
2432 2432 last_change = cs_cache.get('date') or _current_datetime
2433 2433 # we check if the last update is newer than the new value;
2434 2434 # if yes, we use the current timestamp instead. Imagine you get an
2435 2435 # old commit pushed 1y ago; we'd set the last update to 1y ago.
2436 2436 last_change_timestamp = datetime_to_time(last_change)
2437 2437 current_timestamp = datetime_to_time(last_change)
2438 2438 if last_change_timestamp > current_timestamp and not empty:
2439 2439 cs_cache['date'] = _current_datetime
2440 2440
2441 2441 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2442 2442 cs_cache['updated_on'] = time.time()
2443 2443 self.changeset_cache = cs_cache
2444 2444 self.updated_on = last_change
2445 2445 Session().add(self)
2446 2446 Session().commit()
2447 2447
2448 2448 else:
2449 2449 if empty:
2450 2450 cs_cache = EmptyCommit().__json__()
2451 2451 else:
2452 2452 cs_cache = self.changeset_cache
2453 2453
2454 2454 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2455 2455
2456 2456 cs_cache['updated_on'] = time.time()
2457 2457 self.changeset_cache = cs_cache
2458 2458 self.updated_on = _date_latest
2459 2459 Session().add(self)
2460 2460 Session().commit()
2461 2461
2462 2462 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2463 2463 self.repo_name, cs_cache, _date_latest)
2464 2464
2465 2465 @property
2466 2466 def tip(self):
2467 2467 return self.get_commit('tip')
2468 2468
2469 2469 @property
2470 2470 def author(self):
2471 2471 return self.tip.author
2472 2472
2473 2473 @property
2474 2474 def last_change(self):
2475 2475 return self.scm_instance().last_change
2476 2476
2477 2477 def get_comments(self, revisions=None):
2478 2478 """
2479 2479 Returns comments for this repository grouped by revisions
2480 2480
2481 2481 :param revisions: filter query by revisions only
2482 2482 """
2483 2483 cmts = ChangesetComment.query()\
2484 2484 .filter(ChangesetComment.repo == self)
2485 2485 if revisions:
2486 2486 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2487 2487 grouped = collections.defaultdict(list)
2488 2488 for cmt in cmts.all():
2489 2489 grouped[cmt.revision].append(cmt)
2490 2490 return grouped
2491 2491
2492 2492 def statuses(self, revisions=None):
2493 2493 """
2494 2494 Returns statuses for this repository
2495 2495
2496 2496 :param revisions: list of revisions to get statuses for
2497 2497 """
2498 2498 statuses = ChangesetStatus.query()\
2499 2499 .filter(ChangesetStatus.repo == self)\
2500 2500 .filter(ChangesetStatus.version == 0)
2501 2501
2502 2502 if revisions:
2503 2503 # Try doing the filtering in chunks to avoid hitting limits
2504 2504 size = 500
2505 2505 status_results = []
2506 2506 for chunk in xrange(0, len(revisions), size):
2507 2507 status_results += statuses.filter(
2508 2508 ChangesetStatus.revision.in_(
2509 2509 revisions[chunk: chunk+size])
2510 2510 ).all()
2511 2511 else:
2512 2512 status_results = statuses.all()
2513 2513
2514 2514 grouped = {}
2515 2515
2516 2516 # maybe we have a newly opened pull request without a status?
2517 2517 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2518 2518 status_lbl = ChangesetStatus.get_status_lbl(stat)
2519 2519 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2520 2520 for rev in pr.revisions:
2521 2521 pr_id = pr.pull_request_id
2522 2522 pr_repo = pr.target_repo.repo_name
2523 2523 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2524 2524
2525 2525 for stat in status_results:
2526 2526 pr_id = pr_repo = None
2527 2527 if stat.pull_request:
2528 2528 pr_id = stat.pull_request.pull_request_id
2529 2529 pr_repo = stat.pull_request.target_repo.repo_name
2530 2530 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2531 2531 pr_id, pr_repo]
2532 2532 return grouped
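
# Illustrative shape of the mapping returned above (status values and ids
# are hypothetical):
#
#   {
#       '<raw_id 1>': ['approved', 'Approved', None, None],
#       '<raw_id 2>': ['under_review', 'Under Review', <pr_id>, '<target repo name>'],
#   }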
2533 2533
2534 2534 # ==========================================================================
2535 2535 # SCM CACHE INSTANCE
2536 2536 # ==========================================================================
2537 2537
2538 2538 def scm_instance(self, **kwargs):
2539 2539 import rhodecode
2540 2540
2541 2541 # Passing a config will not hit the cache; currently this is only used
2542 2542 # for repo2dbmapper
2543 2543 config = kwargs.pop('config', None)
2544 2544 cache = kwargs.pop('cache', None)
2545 2545 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2546 2546 if vcs_full_cache is not None:
2547 2547 # allows override global config
2548 2548 full_cache = vcs_full_cache
2549 2549 else:
2550 2550 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2551 2551 # if cache is NOT defined, use the global default; otherwise we have
2552 2552 # full control over the cache behaviour
2553 2553 if cache is None and full_cache and not config:
2554 2554 log.debug('Initializing pure cached instance for %s', self.repo_path)
2555 2555 return self._get_instance_cached()
2556 2556
2557 2557 # cache here is sent to the "vcs server"
2558 2558 return self._get_instance(cache=bool(cache), config=config)
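
# Illustrative usage of the cache flags handled above (``custom_config`` is
# hypothetical):
#
#   repo.scm_instance()                      # may return the long-term cached instance
#   repo.scm_instance(cache=False)           # bypass the cached instance path
#   repo.scm_instance(config=custom_config)  # a config always skips the cached path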
2559 2559
2560 2560 def _get_instance_cached(self):
2561 2561 from rhodecode.lib import rc_cache
2562 2562
2563 2563 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2564 2564 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2565 2565 repo_id=self.repo_id)
2566 2566 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2567 2567
2568 2568 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2569 2569 def get_instance_cached(repo_id, context_id, _cache_state_uid):
2570 2570 return self._get_instance(repo_state_uid=_cache_state_uid)
2571 2571
2572 2572 # we must use a thread-scoped cache here, because each gevent thread
2573 2573 # needs its own, non-shared connection and cache. We also alter `args`
2574 2574 # so the cache key is individual for every green thread.
2575 2575 inv_context_manager = rc_cache.InvalidationContext(
2576 2576 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2577 2577 thread_scoped=True)
2578 2578 with inv_context_manager as invalidation_context:
2579 2579 cache_state_uid = invalidation_context.cache_data['cache_state_uid']
2580 2580 args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)
2581 2581
2582 2582 # re-compute and store cache if we get invalidate signal
2583 2583 if invalidation_context.should_invalidate():
2584 2584 instance = get_instance_cached.refresh(*args)
2585 2585 else:
2586 2586 instance = get_instance_cached(*args)
2587 2587
2588 2588 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2589 2589 return instance
2590 2590
2591 2591 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2592 2592 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2593 2593 self.repo_type, self.repo_path, cache)
2594 2594 config = config or self._config
2595 2595 custom_wire = {
2596 2596 'cache': cache, # controls the vcs.remote cache
2597 2597 'repo_state_uid': repo_state_uid
2598 2598 }
2599 2599 repo = get_vcs_instance(
2600 2600 repo_path=safe_str(self.repo_full_path),
2601 2601 config=config,
2602 2602 with_wire=custom_wire,
2603 2603 create=False,
2604 2604 _vcs_alias=self.repo_type)
2605 2605 if repo is not None:
2606 2606 repo.count() # cache rebuild
2607 2607 return repo
2608 2608
2609 2609 def get_shadow_repository_path(self, workspace_id):
2610 2610 from rhodecode.lib.vcs.backends.base import BaseRepository
2611 2611 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2612 2612 self.repo_full_path, self.repo_id, workspace_id)
2613 2613 return shadow_repo_path
2614 2614
2615 2615 def __json__(self):
2616 2616 return {'landing_rev': self.landing_rev}
2617 2617
2618 2618 def get_dict(self):
2619 2619
2620 2620 # Since we transformed `repo_name` to a hybrid property, we need to
2621 2621 # keep compatibility with the code which uses `repo_name` field.
2622 2622
2623 2623 result = super(Repository, self).get_dict()
2624 2624 result['repo_name'] = result.pop('_repo_name', None)
2625 2625 return result
2626 2626
2627 2627
2628 2628 class RepoGroup(Base, BaseModel):
2629 2629 __tablename__ = 'groups'
2630 2630 __table_args__ = (
2631 2631 UniqueConstraint('group_name', 'group_parent_id'),
2632 2632 base_table_args,
2633 2633 )
2634 2634 __mapper_args__ = {'order_by': 'group_name'}
2635 2635
2636 2636 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2637 2637
2638 2638 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2639 2639 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2640 2640 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2641 2641 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2642 2642 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2643 2643 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2644 2644 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2645 2645 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2646 2646 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2647 2647 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2648 2648 _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data
2649 2649
2650 2650 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2651 2651 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2652 2652 parent_group = relationship('RepoGroup', remote_side=group_id)
2653 2653 user = relationship('User')
2654 2654 integrations = relationship('Integration', cascade="all, delete-orphan")
2655 2655
2656 2656 # no cascade, set NULL
2657 2657 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')
2658 2658
2659 2659 def __init__(self, group_name='', parent_group=None):
2660 2660 self.group_name = group_name
2661 2661 self.parent_group = parent_group
2662 2662
2663 2663 def __unicode__(self):
2664 2664 return u"<%s('id:%s:%s')>" % (
2665 2665 self.__class__.__name__, self.group_id, self.group_name)
2666 2666
2667 2667 @hybrid_property
2668 2668 def group_name(self):
2669 2669 return self._group_name
2670 2670
2671 2671 @group_name.setter
2672 2672 def group_name(self, value):
2673 2673 self._group_name = value
2674 2674 self.group_name_hash = self.hash_repo_group_name(value)
2675 2675
2676 2676 @classmethod
2677 2677 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
2678 2678 from rhodecode.lib.vcs.backends.base import EmptyCommit
2679 2679 dummy = EmptyCommit().__json__()
2680 2680 if not changeset_cache_raw:
2681 2681 dummy['source_repo_id'] = repo_id
2682 2682 return json.loads(json.dumps(dummy))
2683 2683
2684 2684 try:
2685 2685 return json.loads(changeset_cache_raw)
2686 2686 except TypeError:
2687 2687 return dummy
2688 2688 except Exception:
2689 2689 log.error(traceback.format_exc())
2690 2690 return dummy
2691 2691
2692 2692 @hybrid_property
2693 2693 def changeset_cache(self):
2694 2694 return self._load_changeset_cache('', self._changeset_cache)
2695 2695
2696 2696 @changeset_cache.setter
2697 2697 def changeset_cache(self, val):
2698 2698 try:
2699 2699 self._changeset_cache = json.dumps(val)
2700 2700 except Exception:
2701 2701 log.error(traceback.format_exc())
2702 2702
2703 2703 @validates('group_parent_id')
2704 2704 def validate_group_parent_id(self, key, val):
2705 2705 """
2706 2706 Check for cyclic references from a parent group to self
2707 2707 """
2708 2708 if self.group_id and val:
2709 2709 assert val != self.group_id
2710 2710
2711 2711 return val
2712 2712
2713 2713 @hybrid_property
2714 2714 def description_safe(self):
2715 2715 from rhodecode.lib import helpers as h
2716 2716 return h.escape(self.group_description)
2717 2717
2718 2718 @classmethod
2719 2719 def hash_repo_group_name(cls, repo_group_name):
2720 2720 val = remove_formatting(repo_group_name)
2721 2721 val = safe_str(val).lower()
2722 2722 chars = []
2723 2723 for c in val:
2724 2724 if c not in string.ascii_letters:
2725 2725 c = str(ord(c))
2726 2726 chars.append(c)
2727 2727
2728 2728 return ''.join(chars)
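
# Example derived from the transformation above (assuming remove_formatting
# leaves a plain ASCII name unchanged):
#
#   RepoGroup.hash_repo_group_name('Web/Repo-1')  # -> 'web47repo4549'
#   # '/', '-' and '1' are replaced by str(ord(c)): '47', '45', '49'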
2729 2729
2730 2730 @classmethod
2731 2731 def _generate_choice(cls, repo_group):
2732 2732 from webhelpers2.html import literal as _literal
2733 2733 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2734 2734 return repo_group.group_id, _name(repo_group.full_path_splitted)
2735 2735
2736 2736 @classmethod
2737 2737 def groups_choices(cls, groups=None, show_empty_group=True):
2738 2738 if not groups:
2739 2739 groups = cls.query().all()
2740 2740
2741 2741 repo_groups = []
2742 2742 if show_empty_group:
2743 2743 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2744 2744
2745 2745 repo_groups.extend([cls._generate_choice(x) for x in groups])
2746 2746
2747 2747 repo_groups = sorted(
2748 2748 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2749 2749 return repo_groups
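
# Illustrative output (group ids and names hypothetical):
#
#   RepoGroup.groups_choices()
#   # -> [(-1, u'-- No parent --'), (3, u'docs'), (7, u'web/backend'), ...]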
2750 2750
2751 2751 @classmethod
2752 2752 def url_sep(cls):
2753 2753 return URL_SEP
2754 2754
2755 2755 @classmethod
2756 2756 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2757 2757 if case_insensitive:
2758 2758 gr = cls.query().filter(func.lower(cls.group_name)
2759 2759 == func.lower(group_name))
2760 2760 else:
2761 2761 gr = cls.query().filter(cls.group_name == group_name)
2762 2762 if cache:
2763 2763 name_key = _hash_key(group_name)
2764 2764 gr = gr.options(
2765 2765 FromCache("sql_cache_short", "get_group_%s" % name_key))
2766 2766 return gr.scalar()
2767 2767
2768 2768 @classmethod
2769 2769 def get_user_personal_repo_group(cls, user_id):
2770 2770 user = User.get(user_id)
2771 2771 if user.username == User.DEFAULT_USER:
2772 2772 return None
2773 2773
2774 2774 return cls.query()\
2775 2775 .filter(cls.personal == true()) \
2776 2776 .filter(cls.user == user) \
2777 2777 .order_by(cls.group_id.asc()) \
2778 2778 .first()
2779 2779
2780 2780 @classmethod
2781 2781 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2782 2782 case_insensitive=True):
2783 2783 q = RepoGroup.query()
2784 2784
2785 2785 if not isinstance(user_id, Optional):
2786 2786 q = q.filter(RepoGroup.user_id == user_id)
2787 2787
2788 2788 if not isinstance(group_id, Optional):
2789 2789 q = q.filter(RepoGroup.group_parent_id == group_id)
2790 2790
2791 2791 if case_insensitive:
2792 2792 q = q.order_by(func.lower(RepoGroup.group_name))
2793 2793 else:
2794 2794 q = q.order_by(RepoGroup.group_name)
2795 2795 return q.all()
2796 2796
2797 2797 @property
2798 2798 def parents(self, parents_recursion_limit=10):
2799 2799 groups = []
2800 2800 if self.parent_group is None:
2801 2801 return groups
2802 2802 cur_gr = self.parent_group
2803 2803 groups.insert(0, cur_gr)
2804 2804 cnt = 0
2805 2805 while 1:
2806 2806 cnt += 1
2807 2807 gr = getattr(cur_gr, 'parent_group', None)
2808 2808 cur_gr = cur_gr.parent_group
2809 2809 if gr is None:
2810 2810 break
2811 2811 if cnt == parents_recursion_limit:
2812 2812 # this will prevent accidental infinite loops
2813 2813 log.error('more than %s parents found for group %s, stopping '
2814 2814 'recursive parent fetching', parents_recursion_limit, self)
2815 2815 break
2816 2816
2817 2817 groups.insert(0, gr)
2818 2818 return groups
2819 2819
2820 2820 @property
2821 2821 def last_commit_cache_update_diff(self):
2822 2822 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2823 2823
2824 2824 @classmethod
2825 2825 def _load_commit_change(cls, last_commit_cache):
2826 2826 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2827 2827 empty_date = datetime.datetime.fromtimestamp(0)
2828 2828 date_latest = last_commit_cache.get('date', empty_date)
2829 2829 try:
2830 2830 return parse_datetime(date_latest)
2831 2831 except Exception:
2832 2832 return empty_date
2833 2833
2834 2834 @property
2835 2835 def last_commit_change(self):
2836 2836 return self._load_commit_change(self.changeset_cache)
2837 2837
2838 2838 @property
2839 2839 def last_db_change(self):
2840 2840 return self.updated_on
2841 2841
2842 2842 @property
2843 2843 def children(self):
2844 2844 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2845 2845
2846 2846 @property
2847 2847 def name(self):
2848 2848 return self.group_name.split(RepoGroup.url_sep())[-1]
2849 2849
2850 2850 @property
2851 2851 def full_path(self):
2852 2852 return self.group_name
2853 2853
2854 2854 @property
2855 2855 def full_path_splitted(self):
2856 2856 return self.group_name.split(RepoGroup.url_sep())
2857 2857
2858 2858 @property
2859 2859 def repositories(self):
2860 2860 return Repository.query()\
2861 2861 .filter(Repository.group == self)\
2862 2862 .order_by(Repository.repo_name)
2863 2863
2864 2864 @property
2865 2865 def repositories_recursive_count(self):
2866 2866 cnt = self.repositories.count()
2867 2867
2868 2868 def children_count(group):
2869 2869 cnt = 0
2870 2870 for child in group.children:
2871 2871 cnt += child.repositories.count()
2872 2872 cnt += children_count(child)
2873 2873 return cnt
2874 2874
2875 2875 return cnt + children_count(self)
2876 2876
2877 2877 def _recursive_objects(self, include_repos=True, include_groups=True):
2878 2878 all_ = []
2879 2879
2880 2880 def _get_members(root_gr):
2881 2881 if include_repos:
2882 2882 for r in root_gr.repositories:
2883 2883 all_.append(r)
2884 2884 childs = root_gr.children.all()
2885 2885 if childs:
2886 2886 for gr in childs:
2887 2887 if include_groups:
2888 2888 all_.append(gr)
2889 2889 _get_members(gr)
2890 2890
2891 2891 root_group = []
2892 2892 if include_groups:
2893 2893 root_group = [self]
2894 2894
2895 2895 _get_members(self)
2896 2896 return root_group + all_
2897 2897
2898 2898 def recursive_groups_and_repos(self):
2899 2899 """
2900 2900 Recursively return all groups, with the repositories in those groups
2901 2901 """
2902 2902 return self._recursive_objects()
2903 2903
2904 2904 def recursive_groups(self):
2905 2905 """
2906 2906 Returns all child groups of this group, including children of children
2907 2907 """
2908 2908 return self._recursive_objects(include_repos=False)
2909 2909
2910 2910 def recursive_repos(self):
2911 2911 """
2912 2912 Returns all child repositories of this group
2913 2913 """
2914 2914 return self._recursive_objects(include_groups=False)
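
# Illustrative sketch (group layout hypothetical): for a group 'web' that
# contains a sub-group 'web/backend' holding the repo 'web/backend/api':
#
#   web_group.recursive_groups_and_repos()  # -> [<web>, <web/backend>, <api repo>]
#   web_group.recursive_groups()            # -> [<web>, <web/backend>]
#   web_group.recursive_repos()             # -> [<api repo>]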
2915 2915
2916 2916 def get_new_name(self, group_name):
2917 2917 """
2918 2918 returns the new full group name based on the parent and the new name
2919 2919
2920 2920 :param group_name:
2921 2921 """
2922 2922 path_prefix = (self.parent_group.full_path_splitted if
2923 2923 self.parent_group else [])
2924 2924 return RepoGroup.url_sep().join(path_prefix + [group_name])
2925 2925
2926 2926 def update_commit_cache(self, config=None):
2927 2927 """
2928 2928 Update cache of last commit for newest repository inside this repository group.
2929 2929 cache_keys should be::
2930 2930
2931 2931 source_repo_id
2932 2932 short_id
2933 2933 raw_id
2934 2934 revision
2935 2935 parents
2936 2936 message
2937 2937 date
2938 2938 author
2939 2939
2940 2940 """
2941 2941 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2942 2942 empty_date = datetime.datetime.fromtimestamp(0)
2943 2943
2944 2944 def repo_groups_and_repos(root_gr):
2945 2945 for _repo in root_gr.repositories:
2946 2946 yield _repo
2947 2947 for child_group in root_gr.children.all():
2948 2948 yield child_group
2949 2949
2950 2950 latest_repo_cs_cache = {}
2951 2951 for obj in repo_groups_and_repos(self):
2952 2952 repo_cs_cache = obj.changeset_cache
2953 2953 date_latest = latest_repo_cs_cache.get('date', empty_date)
2954 2954 date_current = repo_cs_cache.get('date', empty_date)
2955 2955 current_timestamp = datetime_to_time(parse_datetime(date_latest))
2956 2956 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
2957 2957 latest_repo_cs_cache = repo_cs_cache
2958 2958 if hasattr(obj, 'repo_id'):
2959 2959 latest_repo_cs_cache['source_repo_id'] = obj.repo_id
2960 2960 else:
2961 2961 latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')
2962 2962
2963 2963 _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)
2964 2964
2965 2965 latest_repo_cs_cache['updated_on'] = time.time()
2966 2966 self.changeset_cache = latest_repo_cs_cache
2967 2967 self.updated_on = _date_latest
2968 2968 Session().add(self)
2969 2969 Session().commit()
2970 2970
2971 2971 log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
2972 2972 self.group_name, latest_repo_cs_cache, _date_latest)
2973 2973
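# Illustrative note (a sketch, not executed here): update_commit_cache() walks
# the direct repositories and child groups of this group, keeps the
# changeset_cache entry with the newest 'date' and stores it on the group
# itself. A hypothetical caller would simply do:
#
#     group.update_commit_cache()            # refreshes group.changeset_cache
#     group.changeset_cache.get('raw_id')    # commit id of the newest repository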
2974 2974 def permissions(self, with_admins=True, with_owner=True,
2975 2975 expand_from_user_groups=False):
2976 2976 """
2977 2977 Permissions for repository groups
2978 2978 """
2979 2979 _admin_perm = 'group.admin'
2980 2980
2981 2981 owner_row = []
2982 2982 if with_owner:
2983 2983 usr = AttributeDict(self.user.get_dict())
2984 2984 usr.owner_row = True
2985 2985 usr.permission = _admin_perm
2986 2986 owner_row.append(usr)
2987 2987
2988 2988 super_admin_ids = []
2989 2989 super_admin_rows = []
2990 2990 if with_admins:
2991 2991 for usr in User.get_all_super_admins():
2992 2992 super_admin_ids.append(usr.user_id)
2993 2993 # if this admin is also owner, don't double the record
2994 2994 if usr.user_id == owner_row[0].user_id:
2995 2995 owner_row[0].admin_row = True
2996 2996 else:
2997 2997 usr = AttributeDict(usr.get_dict())
2998 2998 usr.admin_row = True
2999 2999 usr.permission = _admin_perm
3000 3000 super_admin_rows.append(usr)
3001 3001
3002 3002 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
3003 3003 q = q.options(joinedload(UserRepoGroupToPerm.group),
3004 3004 joinedload(UserRepoGroupToPerm.user),
3005 3005 joinedload(UserRepoGroupToPerm.permission),)
3006 3006
3007 3007 # get owners, admins and their permissions. We re-write the SQLAlchemy
3008 3008 # objects into plain AttributeDicts because the SQLAlchemy session holds
3009 3009 # a global reference, and changing one object would propagate to all
3010 3010 # others. Otherwise, if an admin is also the owner, setting admin_row on
3011 3011 # one record would propagate to the other as well.
3012 3012 perm_rows = []
3013 3013 for _usr in q.all():
3014 3014 usr = AttributeDict(_usr.user.get_dict())
3015 3015 # if this user is also owner/admin, mark as duplicate record
3016 3016 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
3017 3017 usr.duplicate_perm = True
3018 3018 usr.permission = _usr.permission.permission_name
3019 3019 perm_rows.append(usr)
3020 3020
3021 3021 # sort the perm rows: the 'default' user first, then by
3022 3022 # admin, write, read, none permission, and alphabetically within
3023 3023 # each of those groups
3024 3024 perm_rows = sorted(perm_rows, key=display_user_sort)
3025 3025
3026 3026 user_groups_rows = []
3027 3027 if expand_from_user_groups:
3028 3028 for ug in self.permission_user_groups(with_members=True):
3029 3029 for user_data in ug.members:
3030 3030 user_groups_rows.append(user_data)
3031 3031
3032 3032 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3033 3033
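# Illustrative sketch of consuming the rows produced by permissions() above;
# `group` is a hypothetical RepoGroup instance:
#
#     for row in group.permissions(with_admins=True, expand_from_user_groups=False):
#         # each row is an AttributeDict of user data plus a `permission` key and,
#         # where applicable, `owner_row`, `admin_row` or `duplicate_perm` flags
#         row.username, row.permission   # e.g. (u'default', 'group.read')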
3034 3034 def permission_user_groups(self, with_members=False):
3035 3035 q = UserGroupRepoGroupToPerm.query()\
3036 3036 .filter(UserGroupRepoGroupToPerm.group == self)
3037 3037 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3038 3038 joinedload(UserGroupRepoGroupToPerm.users_group),
3039 3039 joinedload(UserGroupRepoGroupToPerm.permission),)
3040 3040
3041 3041 perm_rows = []
3042 3042 for _user_group in q.all():
3043 3043 entry = AttributeDict(_user_group.users_group.get_dict())
3044 3044 entry.permission = _user_group.permission.permission_name
3045 3045 if with_members:
3046 3046 entry.members = [x.user.get_dict()
3047 3047 for x in _user_group.users_group.members]
3048 3048 perm_rows.append(entry)
3049 3049
3050 3050 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3051 3051 return perm_rows
3052 3052
3053 3053 def get_api_data(self):
3054 3054 """
3055 3055 Common function for generating api data
3056 3056
3057 3057 """
3058 3058 group = self
3059 3059 data = {
3060 3060 'group_id': group.group_id,
3061 3061 'group_name': group.group_name,
3062 3062 'group_description': group.description_safe,
3063 3063 'parent_group': group.parent_group.group_name if group.parent_group else None,
3064 3064 'repositories': [x.repo_name for x in group.repositories],
3065 3065 'owner': group.user.username,
3066 3066 }
3067 3067 return data
3068 3068
3069 3069 def get_dict(self):
3070 3070 # Since we transformed `group_name` to a hybrid property, we need to
3071 3071 # keep compatibility with the code which uses `group_name` field.
3072 3072 result = super(RepoGroup, self).get_dict()
3073 3073 result['group_name'] = result.pop('_group_name', None)
3074 3074 return result
3075 3075
3076 3076
3077 3077 class Permission(Base, BaseModel):
3078 3078 __tablename__ = 'permissions'
3079 3079 __table_args__ = (
3080 3080 Index('p_perm_name_idx', 'permission_name'),
3081 3081 base_table_args,
3082 3082 )
3083 3083
3084 3084 PERMS = [
3085 3085 ('hg.admin', _('RhodeCode Super Administrator')),
3086 3086
3087 3087 ('repository.none', _('Repository no access')),
3088 3088 ('repository.read', _('Repository read access')),
3089 3089 ('repository.write', _('Repository write access')),
3090 3090 ('repository.admin', _('Repository admin access')),
3091 3091
3092 3092 ('group.none', _('Repository group no access')),
3093 3093 ('group.read', _('Repository group read access')),
3094 3094 ('group.write', _('Repository group write access')),
3095 3095 ('group.admin', _('Repository group admin access')),
3096 3096
3097 3097 ('usergroup.none', _('User group no access')),
3098 3098 ('usergroup.read', _('User group read access')),
3099 3099 ('usergroup.write', _('User group write access')),
3100 3100 ('usergroup.admin', _('User group admin access')),
3101 3101
3102 3102 ('branch.none', _('Branch no permissions')),
3103 3103 ('branch.merge', _('Branch access by web merge')),
3104 3104 ('branch.push', _('Branch access by push')),
3105 3105 ('branch.push_force', _('Branch access by push with force')),
3106 3106
3107 3107 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3108 3108 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3109 3109
3110 3110 ('hg.usergroup.create.false', _('User Group creation disabled')),
3111 3111 ('hg.usergroup.create.true', _('User Group creation enabled')),
3112 3112
3113 3113 ('hg.create.none', _('Repository creation disabled')),
3114 3114 ('hg.create.repository', _('Repository creation enabled')),
3115 3115 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3116 3116 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3117 3117
3118 3118 ('hg.fork.none', _('Repository forking disabled')),
3119 3119 ('hg.fork.repository', _('Repository forking enabled')),
3120 3120
3121 3121 ('hg.register.none', _('Registration disabled')),
3122 3122 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3123 3123 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3124 3124
3125 3125 ('hg.password_reset.enabled', _('Password reset enabled')),
3126 3126 ('hg.password_reset.hidden', _('Password reset hidden')),
3127 3127 ('hg.password_reset.disabled', _('Password reset disabled')),
3128 3128
3129 3129 ('hg.extern_activate.manual', _('Manual activation of external account')),
3130 3130 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3131 3131
3132 3132 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3133 3133 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3134 3134 ]
3135 3135
3136 3136 # definition of system default permissions for DEFAULT user, created on
3137 3137 # system setup
3138 3138 DEFAULT_USER_PERMISSIONS = [
3139 3139 # object perms
3140 3140 'repository.read',
3141 3141 'group.read',
3142 3142 'usergroup.read',
3143 3143 # branch, for backward compat we need the same value as before, so forced push
3144 3144 'branch.push_force',
3145 3145 # global
3146 3146 'hg.create.repository',
3147 3147 'hg.repogroup.create.false',
3148 3148 'hg.usergroup.create.false',
3149 3149 'hg.create.write_on_repogroup.true',
3150 3150 'hg.fork.repository',
3151 3151 'hg.register.manual_activate',
3152 3152 'hg.password_reset.enabled',
3153 3153 'hg.extern_activate.auto',
3154 3154 'hg.inherit_default_perms.true',
3155 3155 ]
3156 3156
3157 3157 # Weight defines which permissions are more important:
3158 3158 # the higher the number, the more important the permission.
3160 3160 PERM_WEIGHTS = {
3161 3161 'repository.none': 0,
3162 3162 'repository.read': 1,
3163 3163 'repository.write': 3,
3164 3164 'repository.admin': 4,
3165 3165
3166 3166 'group.none': 0,
3167 3167 'group.read': 1,
3168 3168 'group.write': 3,
3169 3169 'group.admin': 4,
3170 3170
3171 3171 'usergroup.none': 0,
3172 3172 'usergroup.read': 1,
3173 3173 'usergroup.write': 3,
3174 3174 'usergroup.admin': 4,
3175 3175
3176 3176 'branch.none': 0,
3177 3177 'branch.merge': 1,
3178 3178 'branch.push': 3,
3179 3179 'branch.push_force': 4,
3180 3180
3181 3181 'hg.repogroup.create.false': 0,
3182 3182 'hg.repogroup.create.true': 1,
3183 3183
3184 3184 'hg.usergroup.create.false': 0,
3185 3185 'hg.usergroup.create.true': 1,
3186 3186
3187 3187 'hg.fork.none': 0,
3188 3188 'hg.fork.repository': 1,
3189 3189 'hg.create.none': 0,
3190 3190 'hg.create.repository': 1
3191 3191 }
3192 3192
3193 3193 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3194 3194 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3195 3195 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3196 3196
3197 3197 def __unicode__(self):
3198 3198 return u"<%s('%s:%s')>" % (
3199 3199 self.__class__.__name__, self.permission_id, self.permission_name
3200 3200 )
3201 3201
3202 3202 @classmethod
3203 3203 def get_by_key(cls, key):
3204 3204 return cls.query().filter(cls.permission_name == key).scalar()
3205 3205
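# Illustrative sketch combining the lookup above with PERM_WEIGHTS, e.g. to pick
# the stronger of two permission levels (weights taken from PERM_WEIGHTS above):
#
#     read_perm = Permission.get_by_key('repository.read')     # weight 1
#     write_perm = Permission.get_by_key('repository.write')   # weight 3
#     stronger = max(read_perm, write_perm,
#                    key=lambda p: Permission.PERM_WEIGHTS[p.permission_name])
#     # -> the 'repository.write' Permission object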
3206 3206 @classmethod
3207 3207 def get_default_repo_perms(cls, user_id, repo_id=None):
3208 3208 q = Session().query(UserRepoToPerm, Repository, Permission)\
3209 3209 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3210 3210 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3211 3211 .filter(UserRepoToPerm.user_id == user_id)
3212 3212 if repo_id:
3213 3213 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3214 3214 return q.all()
3215 3215
3216 3216 @classmethod
3217 3217 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3218 3218 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3219 3219 .join(
3220 3220 Permission,
3221 3221 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3222 3222 .join(
3223 3223 UserRepoToPerm,
3224 3224 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3225 3225 .filter(UserRepoToPerm.user_id == user_id)
3226 3226
3227 3227 if repo_id:
3228 3228 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3229 3229 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3230 3230
3231 3231 @classmethod
3232 3232 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3233 3233 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3234 3234 .join(
3235 3235 Permission,
3236 3236 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3237 3237 .join(
3238 3238 Repository,
3239 3239 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3240 3240 .join(
3241 3241 UserGroup,
3242 3242 UserGroupRepoToPerm.users_group_id ==
3243 3243 UserGroup.users_group_id)\
3244 3244 .join(
3245 3245 UserGroupMember,
3246 3246 UserGroupRepoToPerm.users_group_id ==
3247 3247 UserGroupMember.users_group_id)\
3248 3248 .filter(
3249 3249 UserGroupMember.user_id == user_id,
3250 3250 UserGroup.users_group_active == true())
3251 3251 if repo_id:
3252 3252 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3253 3253 return q.all()
3254 3254
3255 3255 @classmethod
3256 3256 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3257 3257 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3258 3258 .join(
3259 3259 Permission,
3260 3260 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3261 3261 .join(
3262 3262 UserGroupRepoToPerm,
3263 3263 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3264 3264 .join(
3265 3265 UserGroup,
3266 3266 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3267 3267 .join(
3268 3268 UserGroupMember,
3269 3269 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3270 3270 .filter(
3271 3271 UserGroupMember.user_id == user_id,
3272 3272 UserGroup.users_group_active == true())
3273 3273
3274 3274 if repo_id:
3275 3275 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3276 3276 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3277 3277
3278 3278 @classmethod
3279 3279 def get_default_group_perms(cls, user_id, repo_group_id=None):
3280 3280 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3281 3281 .join(
3282 3282 Permission,
3283 3283 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3284 3284 .join(
3285 3285 RepoGroup,
3286 3286 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3287 3287 .filter(UserRepoGroupToPerm.user_id == user_id)
3288 3288 if repo_group_id:
3289 3289 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3290 3290 return q.all()
3291 3291
3292 3292 @classmethod
3293 3293 def get_default_group_perms_from_user_group(
3294 3294 cls, user_id, repo_group_id=None):
3295 3295 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3296 3296 .join(
3297 3297 Permission,
3298 3298 UserGroupRepoGroupToPerm.permission_id ==
3299 3299 Permission.permission_id)\
3300 3300 .join(
3301 3301 RepoGroup,
3302 3302 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3303 3303 .join(
3304 3304 UserGroup,
3305 3305 UserGroupRepoGroupToPerm.users_group_id ==
3306 3306 UserGroup.users_group_id)\
3307 3307 .join(
3308 3308 UserGroupMember,
3309 3309 UserGroupRepoGroupToPerm.users_group_id ==
3310 3310 UserGroupMember.users_group_id)\
3311 3311 .filter(
3312 3312 UserGroupMember.user_id == user_id,
3313 3313 UserGroup.users_group_active == true())
3314 3314 if repo_group_id:
3315 3315 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3316 3316 return q.all()
3317 3317
3318 3318 @classmethod
3319 3319 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3320 3320 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3321 3321 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3322 3322 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3323 3323 .filter(UserUserGroupToPerm.user_id == user_id)
3324 3324 if user_group_id:
3325 3325 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3326 3326 return q.all()
3327 3327
3328 3328 @classmethod
3329 3329 def get_default_user_group_perms_from_user_group(
3330 3330 cls, user_id, user_group_id=None):
3331 3331 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3332 3332 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3333 3333 .join(
3334 3334 Permission,
3335 3335 UserGroupUserGroupToPerm.permission_id ==
3336 3336 Permission.permission_id)\
3337 3337 .join(
3338 3338 TargetUserGroup,
3339 3339 UserGroupUserGroupToPerm.target_user_group_id ==
3340 3340 TargetUserGroup.users_group_id)\
3341 3341 .join(
3342 3342 UserGroup,
3343 3343 UserGroupUserGroupToPerm.user_group_id ==
3344 3344 UserGroup.users_group_id)\
3345 3345 .join(
3346 3346 UserGroupMember,
3347 3347 UserGroupUserGroupToPerm.user_group_id ==
3348 3348 UserGroupMember.users_group_id)\
3349 3349 .filter(
3350 3350 UserGroupMember.user_id == user_id,
3351 3351 UserGroup.users_group_active == true())
3352 3352 if user_group_id:
3353 3353 q = q.filter(
3354 3354 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3355 3355
3356 3356 return q.all()
3357 3357
3358 3358
3359 3359 class UserRepoToPerm(Base, BaseModel):
3360 3360 __tablename__ = 'repo_to_perm'
3361 3361 __table_args__ = (
3362 3362 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3363 3363 base_table_args
3364 3364 )
3365 3365
3366 3366 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3367 3367 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3368 3368 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3369 3369 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3370 3370
3371 3371 user = relationship('User')
3372 3372 repository = relationship('Repository')
3373 3373 permission = relationship('Permission')
3374 3374
3375 3375 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')
3376 3376
3377 3377 @classmethod
3378 3378 def create(cls, user, repository, permission):
3379 3379 n = cls()
3380 3380 n.user = user
3381 3381 n.repository = repository
3382 3382 n.permission = permission
3383 3383 Session().add(n)
3384 3384 return n
3385 3385
3386 3386 def __unicode__(self):
3387 3387 return u'<%s => %s >' % (self.user, self.repository)
3388 3388
3389 3389
3390 3390 class UserUserGroupToPerm(Base, BaseModel):
3391 3391 __tablename__ = 'user_user_group_to_perm'
3392 3392 __table_args__ = (
3393 3393 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3394 3394 base_table_args
3395 3395 )
3396 3396
3397 3397 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3398 3398 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3399 3399 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3400 3400 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3401 3401
3402 3402 user = relationship('User')
3403 3403 user_group = relationship('UserGroup')
3404 3404 permission = relationship('Permission')
3405 3405
3406 3406 @classmethod
3407 3407 def create(cls, user, user_group, permission):
3408 3408 n = cls()
3409 3409 n.user = user
3410 3410 n.user_group = user_group
3411 3411 n.permission = permission
3412 3412 Session().add(n)
3413 3413 return n
3414 3414
3415 3415 def __unicode__(self):
3416 3416 return u'<%s => %s >' % (self.user, self.user_group)
3417 3417
3418 3418
3419 3419 class UserToPerm(Base, BaseModel):
3420 3420 __tablename__ = 'user_to_perm'
3421 3421 __table_args__ = (
3422 3422 UniqueConstraint('user_id', 'permission_id'),
3423 3423 base_table_args
3424 3424 )
3425 3425
3426 3426 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3427 3427 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3428 3428 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3429 3429
3430 3430 user = relationship('User')
3431 3431 permission = relationship('Permission', lazy='joined')
3432 3432
3433 3433 def __unicode__(self):
3434 3434 return u'<%s => %s >' % (self.user, self.permission)
3435 3435
3436 3436
3437 3437 class UserGroupRepoToPerm(Base, BaseModel):
3438 3438 __tablename__ = 'users_group_repo_to_perm'
3439 3439 __table_args__ = (
3440 3440 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3441 3441 base_table_args
3442 3442 )
3443 3443
3444 3444 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3445 3445 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3446 3446 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3447 3447 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3448 3448
3449 3449 users_group = relationship('UserGroup')
3450 3450 permission = relationship('Permission')
3451 3451 repository = relationship('Repository')
3452 3452 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')
3453 3453
3454 3454 @classmethod
3455 3455 def create(cls, users_group, repository, permission):
3456 3456 n = cls()
3457 3457 n.users_group = users_group
3458 3458 n.repository = repository
3459 3459 n.permission = permission
3460 3460 Session().add(n)
3461 3461 return n
3462 3462
3463 3463 def __unicode__(self):
3464 3464 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3465 3465
3466 3466
3467 3467 class UserGroupUserGroupToPerm(Base, BaseModel):
3468 3468 __tablename__ = 'user_group_user_group_to_perm'
3469 3469 __table_args__ = (
3470 3470 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3471 3471 CheckConstraint('target_user_group_id != user_group_id'),
3472 3472 base_table_args
3473 3473 )
3474 3474
3475 3475 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3476 3476 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3477 3477 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3478 3478 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3479 3479
3480 3480 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3481 3481 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3482 3482 permission = relationship('Permission')
3483 3483
3484 3484 @classmethod
3485 3485 def create(cls, target_user_group, user_group, permission):
3486 3486 n = cls()
3487 3487 n.target_user_group = target_user_group
3488 3488 n.user_group = user_group
3489 3489 n.permission = permission
3490 3490 Session().add(n)
3491 3491 return n
3492 3492
3493 3493 def __unicode__(self):
3494 3494 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3495 3495
3496 3496
3497 3497 class UserGroupToPerm(Base, BaseModel):
3498 3498 __tablename__ = 'users_group_to_perm'
3499 3499 __table_args__ = (
3500 3500 UniqueConstraint('users_group_id', 'permission_id',),
3501 3501 base_table_args
3502 3502 )
3503 3503
3504 3504 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3505 3505 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3506 3506 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3507 3507
3508 3508 users_group = relationship('UserGroup')
3509 3509 permission = relationship('Permission')
3510 3510
3511 3511
3512 3512 class UserRepoGroupToPerm(Base, BaseModel):
3513 3513 __tablename__ = 'user_repo_group_to_perm'
3514 3514 __table_args__ = (
3515 3515 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3516 3516 base_table_args
3517 3517 )
3518 3518
3519 3519 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3520 3520 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3521 3521 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3522 3522 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3523 3523
3524 3524 user = relationship('User')
3525 3525 group = relationship('RepoGroup')
3526 3526 permission = relationship('Permission')
3527 3527
3528 3528 @classmethod
3529 3529 def create(cls, user, repository_group, permission):
3530 3530 n = cls()
3531 3531 n.user = user
3532 3532 n.group = repository_group
3533 3533 n.permission = permission
3534 3534 Session().add(n)
3535 3535 return n
3536 3536
3537 3537
3538 3538 class UserGroupRepoGroupToPerm(Base, BaseModel):
3539 3539 __tablename__ = 'users_group_repo_group_to_perm'
3540 3540 __table_args__ = (
3541 3541 UniqueConstraint('users_group_id', 'group_id'),
3542 3542 base_table_args
3543 3543 )
3544 3544
3545 3545 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3546 3546 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3547 3547 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3548 3548 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3549 3549
3550 3550 users_group = relationship('UserGroup')
3551 3551 permission = relationship('Permission')
3552 3552 group = relationship('RepoGroup')
3553 3553
3554 3554 @classmethod
3555 3555 def create(cls, user_group, repository_group, permission):
3556 3556 n = cls()
3557 3557 n.users_group = user_group
3558 3558 n.group = repository_group
3559 3559 n.permission = permission
3560 3560 Session().add(n)
3561 3561 return n
3562 3562
3563 3563 def __unicode__(self):
3564 3564 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3565 3565
3566 3566
3567 3567 class Statistics(Base, BaseModel):
3568 3568 __tablename__ = 'statistics'
3569 3569 __table_args__ = (
3570 3570 base_table_args
3571 3571 )
3572 3572
3573 3573 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3574 3574 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3575 3575 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3576 3576 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
3577 3577 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
3578 3578 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
3579 3579
3580 3580 repository = relationship('Repository', single_parent=True)
3581 3581
3582 3582
3583 3583 class UserFollowing(Base, BaseModel):
3584 3584 __tablename__ = 'user_followings'
3585 3585 __table_args__ = (
3586 3586 UniqueConstraint('user_id', 'follows_repository_id'),
3587 3587 UniqueConstraint('user_id', 'follows_user_id'),
3588 3588 base_table_args
3589 3589 )
3590 3590
3591 3591 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3592 3592 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3593 3593 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3594 3594 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3595 3595 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3596 3596
3597 3597 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3598 3598
3599 3599 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3600 3600 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3601 3601
3602 3602 @classmethod
3603 3603 def get_repo_followers(cls, repo_id):
3604 3604 return cls.query().filter(cls.follows_repo_id == repo_id)
3605 3605
3606 3606
3607 3607 class CacheKey(Base, BaseModel):
3608 3608 __tablename__ = 'cache_invalidation'
3609 3609 __table_args__ = (
3610 3610 UniqueConstraint('cache_key'),
3611 3611 Index('key_idx', 'cache_key'),
3612 3612 base_table_args,
3613 3613 )
3614 3614
3615 3615 CACHE_TYPE_FEED = 'FEED'
3616 3616
3617 3617 # namespaces used to register process/thread aware caches
3618 3618 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3619 3619 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3620 3620
3621 3621 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3622 3622 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3623 3623 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3624 3624 cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
3625 3625 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3626 3626
3627 3627 def __init__(self, cache_key, cache_args='', cache_state_uid=None):
3628 3628 self.cache_key = cache_key
3629 3629 self.cache_args = cache_args
3630 3630 self.cache_active = False
3631 3631 # first key should be same for all entries, since all workers should share it
3632 3632 self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()
3633 3633
3634 3634 def __unicode__(self):
3635 3635 return u"<%s('%s:%s[%s]')>" % (
3636 3636 self.__class__.__name__,
3637 3637 self.cache_id, self.cache_key, self.cache_active)
3638 3638
3639 3639 def _cache_key_partition(self):
3640 3640 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3641 3641 return prefix, repo_name, suffix
3642 3642
3643 3643 def get_prefix(self):
3644 3644 """
3645 3645 Try to extract prefix from existing cache key. The key could consist
3646 3646 of prefix, repo_name, suffix
3647 3647 """
3648 3648 # this returns prefix, repo_name, suffix
3649 3649 return self._cache_key_partition()[0]
3650 3650
3651 3651 def get_suffix(self):
3652 3652 """
3653 3653 get suffix that might have been used in _get_cache_key to
3654 3654 generate self.cache_key. Only used for informational purposes
3655 3655 in repo_edit.mako.
3656 3656 """
3657 3657 # prefix, repo_name, suffix
3658 3658 return self._cache_key_partition()[2]
3659 3659
3660 3660 @classmethod
3661 3661 def generate_new_state_uid(cls, based_on=None):
3662 3662 if based_on:
3663 3663 return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
3664 3664 else:
3665 3665 return str(uuid.uuid4())
3666 3666
3667 3667 @classmethod
3668 3668 def delete_all_cache(cls):
3669 3669 """
3670 3670 Delete all cache keys from database.
3671 3671 Should only be run when all instances are down and all entries
3672 3672 thus stale.
3673 3673 """
3674 3674 cls.query().delete()
3675 3675 Session().commit()
3676 3676
3677 3677 @classmethod
3678 3678 def set_invalidate(cls, cache_uid, delete=False):
3679 3679 """
3680 3680 Mark all caches of a repo as invalid in the database.
3681 3681 """
3682 3682
3683 3683 try:
3684 3684 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3685 3685 if delete:
3686 3686 qry.delete()
3687 3687 log.debug('cache objects deleted for cache args %s',
3688 3688 safe_str(cache_uid))
3689 3689 else:
3690 3690 qry.update({"cache_active": False,
3691 3691 "cache_state_uid": cls.generate_new_state_uid()})
3692 3692 log.debug('cache objects marked as invalid for cache args %s',
3693 3693 safe_str(cache_uid))
3694 3694
3695 3695 Session().commit()
3696 3696 except Exception:
3697 3697 log.exception(
3698 3698 'Cache key invalidation failed for cache args %s',
3699 3699 safe_str(cache_uid))
3700 3700 Session().rollback()
3701 3701
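# Illustrative sketch: invalidating every cache entry registered under one
# repository namespace (repo_id=42 is a made-up example value):
#
#     namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=42)
#     CacheKey.set_invalidate(namespace)               # mark entries as inactive
#     CacheKey.set_invalidate(namespace, delete=True)  # or drop the rows entirely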
3702 3702 @classmethod
3703 3703 def get_active_cache(cls, cache_key):
3704 3704 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3705 3705 if inv_obj:
3706 3706 return inv_obj
3707 3707 return None
3708 3708
3709 3709 @classmethod
3710 3710 def get_namespace_map(cls, namespace):
3711 3711 return {
3712 3712 x.cache_key: x
3713 3713 for x in cls.query().filter(cls.cache_args == namespace)}
3714 3714
3715 3715
3716 3716 class ChangesetComment(Base, BaseModel):
3717 3717 __tablename__ = 'changeset_comments'
3718 3718 __table_args__ = (
3719 3719 Index('cc_revision_idx', 'revision'),
3720 3720 base_table_args,
3721 3721 )
3722 3722
3723 3723 COMMENT_OUTDATED = u'comment_outdated'
3724 3724 COMMENT_TYPE_NOTE = u'note'
3725 3725 COMMENT_TYPE_TODO = u'todo'
3726 3726 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3727 3727
3728 3728 OP_IMMUTABLE = u'immutable'
3729 3729 OP_CHANGEABLE = u'changeable'
3730 3730
3731 3731 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3732 3732 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3733 3733 revision = Column('revision', String(40), nullable=True)
3734 3734 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3735 3735 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3736 3736 line_no = Column('line_no', Unicode(10), nullable=True)
3737 3737 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3738 3738 f_path = Column('f_path', Unicode(1000), nullable=True)
3739 3739 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3740 3740 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3741 3741 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3742 3742 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3743 3743 renderer = Column('renderer', Unicode(64), nullable=True)
3744 3744 display_state = Column('display_state', Unicode(128), nullable=True)
3745 3745 immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
3746 3746
3747 3747 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3748 3748 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3749 3749
3750 3750 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3751 3751 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3752 3752
3753 3753 author = relationship('User', lazy='joined')
3754 3754 repo = relationship('Repository')
3755 3755 status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined')
3756 3756 pull_request = relationship('PullRequest', lazy='joined')
3757 3757 pull_request_version = relationship('PullRequestVersion')
3758 3758
3759 3759 @classmethod
3760 3760 def get_users(cls, revision=None, pull_request_id=None):
3761 3761 """
3762 3762 Returns users associated with this ChangesetComment, i.e. those
3763 3763 who actually commented
3764 3764
3765 3765 :param cls:
3766 3766 :param revision:
3767 3767 """
3768 3768 q = Session().query(User)\
3769 3769 .join(ChangesetComment.author)
3770 3770 if revision:
3771 3771 q = q.filter(cls.revision == revision)
3772 3772 elif pull_request_id:
3773 3773 q = q.filter(cls.pull_request_id == pull_request_id)
3774 3774 return q.all()
3775 3775
3776 3776 @classmethod
3777 3777 def get_index_from_version(cls, pr_version, versions):
3778 3778 num_versions = [x.pull_request_version_id for x in versions]
3779 3779 try:
3780 3780 return num_versions.index(pr_version) + 1
3781 3781 except (IndexError, ValueError):
3782 3782 return
3783 3783
3784 3784 @property
3785 3785 def outdated(self):
3786 3786 return self.display_state == self.COMMENT_OUTDATED
3787 3787
3788 3788 @property
3789 3789 def immutable(self):
3790 3790 return self.immutable_state == self.OP_IMMUTABLE
3791 3791
3792 3792 def outdated_at_version(self, version):
3793 3793 """
3794 3794 Checks if the comment is outdated for the given pull request version
3795 3795 """
3796 3796 return self.outdated and self.pull_request_version_id != version
3797 3797
3798 3798 def older_than_version(self, version):
3799 3799 """
3800 3800 Checks if the comment was made in an earlier version than the given one
3801 3801 """
3802 3802 if version is None:
3803 3803 return self.pull_request_version_id is not None
3804 3804
3805 3805 return self.pull_request_version_id < version
3806 3806
3807 3807 @property
3808 3808 def resolved(self):
3809 3809 return self.resolved_by[0] if self.resolved_by else None
3810 3810
3811 3811 @property
3812 3812 def is_todo(self):
3813 3813 return self.comment_type == self.COMMENT_TYPE_TODO
3814 3814
3815 3815 @property
3816 3816 def is_inline(self):
3817 3817 return self.line_no and self.f_path
3818 3818
3819 3819 def get_index_version(self, versions):
3820 3820 return self.get_index_from_version(
3821 3821 self.pull_request_version_id, versions)
3822 3822
3823 3823 def __repr__(self):
3824 3824 if self.comment_id:
3825 3825 return '<DB:Comment #%s>' % self.comment_id
3826 3826 else:
3827 3827 return '<DB:Comment at %#x>' % id(self)
3828 3828
3829 3829 def get_api_data(self):
3830 3830 comment = self
3831 3831 data = {
3832 3832 'comment_id': comment.comment_id,
3833 3833 'comment_type': comment.comment_type,
3834 3834 'comment_text': comment.text,
3835 3835 'comment_status': comment.status_change,
3836 3836 'comment_f_path': comment.f_path,
3837 3837 'comment_lineno': comment.line_no,
3838 3838 'comment_author': comment.author,
3839 3839 'comment_created_on': comment.created_on,
3840 3840 'comment_resolved_by': self.resolved,
3841 3841 'comment_commit_id': comment.revision,
3842 3842 'comment_pull_request_id': comment.pull_request_id,
3843 3843 }
3844 3844 return data
3845 3845
3846 3846 def __json__(self):
3847 3847 data = dict()
3848 3848 data.update(self.get_api_data())
3849 3849 return data
3850 3850
3851 3851
3852 3852 class ChangesetStatus(Base, BaseModel):
3853 3853 __tablename__ = 'changeset_statuses'
3854 3854 __table_args__ = (
3855 3855 Index('cs_revision_idx', 'revision'),
3856 3856 Index('cs_version_idx', 'version'),
3857 3857 UniqueConstraint('repo_id', 'revision', 'version'),
3858 3858 base_table_args
3859 3859 )
3860 3860
3861 3861 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3862 3862 STATUS_APPROVED = 'approved'
3863 3863 STATUS_REJECTED = 'rejected'
3864 3864 STATUS_UNDER_REVIEW = 'under_review'
3865 3865
3866 3866 STATUSES = [
3867 3867 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3868 3868 (STATUS_APPROVED, _("Approved")),
3869 3869 (STATUS_REJECTED, _("Rejected")),
3870 3870 (STATUS_UNDER_REVIEW, _("Under Review")),
3871 3871 ]
3872 3872
3873 3873 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3874 3874 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3875 3875 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3876 3876 revision = Column('revision', String(40), nullable=False)
3877 3877 status = Column('status', String(128), nullable=False, default=DEFAULT)
3878 3878 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3879 3879 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3880 3880 version = Column('version', Integer(), nullable=False, default=0)
3881 3881 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3882 3882
3883 3883 author = relationship('User', lazy='joined')
3884 3884 repo = relationship('Repository')
3885 3885 comment = relationship('ChangesetComment', lazy='joined')
3886 3886 pull_request = relationship('PullRequest', lazy='joined')
3887 3887
3888 3888 def __unicode__(self):
3889 3889 return u"<%s('%s[v%s]:%s')>" % (
3890 3890 self.__class__.__name__,
3891 3891 self.status, self.version, self.author
3892 3892 )
3893 3893
3894 3894 @classmethod
3895 3895 def get_status_lbl(cls, value):
3896 3896 return dict(cls.STATUSES).get(value)
3897 3897
3898 3898 @property
3899 3899 def status_lbl(self):
3900 3900 return ChangesetStatus.get_status_lbl(self.status)
3901 3901
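# Illustrative sketch of the label helpers above:
#
#     ChangesetStatus.get_status_lbl(ChangesetStatus.STATUS_APPROVED)
#     # -> the translated "Approved" label from STATUSES
#     ChangesetStatus.get_status_lbl('no-such-status')  # -> None, not a KeyError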
3902 3902 def get_api_data(self):
3903 3903 status = self
3904 3904 data = {
3905 3905 'status_id': status.changeset_status_id,
3906 3906 'status': status.status,
3907 3907 }
3908 3908 return data
3909 3909
3910 3910 def __json__(self):
3911 3911 data = dict()
3912 3912 data.update(self.get_api_data())
3913 3913 return data
3914 3914
3915 3915
3916 3916 class _SetState(object):
3917 3917 """
3918 3918 Context manager for changing the state during sensitive operations such as
3919 3919 pull request update or merge
3920 3920 """
3921 3921
3922 3922 def __init__(self, pull_request, pr_state, back_state=None):
3923 3923 self._pr = pull_request
3924 3924 self._org_state = back_state or pull_request.pull_request_state
3925 3925 self._pr_state = pr_state
3926 3926 self._current_state = None
3927 3927
3928 3928 def __enter__(self):
3929 3929 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
3930 3930 self._pr, self._pr_state)
3931 3931 self.set_pr_state(self._pr_state)
3932 3932 return self
3933 3933
3934 3934 def __exit__(self, exc_type, exc_val, exc_tb):
3935 3935 if exc_val is not None:
3936 3936 log.error(traceback.format_exc(exc_tb))
3937 3937 return None
3938 3938
3939 3939 self.set_pr_state(self._org_state)
3940 3940 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
3941 3941 self._pr, self._org_state)
3942 3942
3943 3943 @property
3944 3944 def state(self):
3945 3945 return self._current_state
3946 3946
3947 3947 def set_pr_state(self, pr_state):
3948 3948 try:
3949 3949 self._pr.pull_request_state = pr_state
3950 3950 Session().add(self._pr)
3951 3951 Session().commit()
3952 3952 self._current_state = pr_state
3953 3953 except Exception:
3954 3954 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
3955 3955 raise
3956 3956
3957 3957
3958 3958 class _PullRequestBase(BaseModel):
3959 3959 """
3960 3960 Common attributes of pull request and version entries.
3961 3961 """
3962 3962
3963 3963 # .status values
3964 3964 STATUS_NEW = u'new'
3965 3965 STATUS_OPEN = u'open'
3966 3966 STATUS_CLOSED = u'closed'
3967 3967
3968 3968 # available states
3969 3969 STATE_CREATING = u'creating'
3970 3970 STATE_UPDATING = u'updating'
3971 3971 STATE_MERGING = u'merging'
3972 3972 STATE_CREATED = u'created'
3973 3973
3974 3974 title = Column('title', Unicode(255), nullable=True)
3975 3975 description = Column(
3976 3976 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3977 3977 nullable=True)
3978 3978 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3979 3979
3980 3980 # new/open/closed status of pull request (not approve/reject/etc)
3981 3981 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3982 3982 created_on = Column(
3983 3983 'created_on', DateTime(timezone=False), nullable=False,
3984 3984 default=datetime.datetime.now)
3985 3985 updated_on = Column(
3986 3986 'updated_on', DateTime(timezone=False), nullable=False,
3987 3987 default=datetime.datetime.now)
3988 3988
3989 3989 pull_request_state = Column("pull_request_state", String(255), nullable=True)
3990 3990
3991 3991 @declared_attr
3992 3992 def user_id(cls):
3993 3993 return Column(
3994 3994 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3995 3995 unique=None)
3996 3996
3997 3997 # 500 revisions max
3998 3998 _revisions = Column(
3999 3999 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
4000 4000
4001 4001 common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)
4002 4002
4003 4003 @declared_attr
4004 4004 def source_repo_id(cls):
4005 4005 # TODO: dan: rename column to source_repo_id
4006 4006 return Column(
4007 4007 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4008 4008 nullable=False)
4009 4009
4010 4010 _source_ref = Column('org_ref', Unicode(255), nullable=False)
4011 4011
4012 4012 @hybrid_property
4013 4013 def source_ref(self):
4014 4014 return self._source_ref
4015 4015
4016 4016 @source_ref.setter
4017 4017 def source_ref(self, val):
4018 4018 parts = (val or '').split(':')
4019 4019 if len(parts) != 3:
4020 4020 raise ValueError(
4021 4021 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4022 4022 self._source_ref = safe_unicode(val)
4023 4023
4024 4024 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4025 4025
4026 4026 @hybrid_property
4027 4027 def target_ref(self):
4028 4028 return self._target_ref
4029 4029
4030 4030 @target_ref.setter
4031 4031 def target_ref(self, val):
4032 4032 parts = (val or '').split(':')
4033 4033 if len(parts) != 3:
4034 4034 raise ValueError(
4035 4035 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4036 4036 self._target_ref = safe_unicode(val)
4037 4037
4038 4038 @declared_attr
4039 4039 def target_repo_id(cls):
4040 4040 # TODO: dan: rename column to target_repo_id
4041 4041 return Column(
4042 4042 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4043 4043 nullable=False)
4044 4044
4045 4045 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4046 4046
4047 4047 # TODO: dan: rename column to last_merge_source_rev
4048 4048 _last_merge_source_rev = Column(
4049 4049 'last_merge_org_rev', String(40), nullable=True)
4050 4050 # TODO: dan: rename column to last_merge_target_rev
4051 4051 _last_merge_target_rev = Column(
4052 4052 'last_merge_other_rev', String(40), nullable=True)
4053 4053 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4054 4054 last_merge_metadata = Column(
4055 4055 'last_merge_metadata', MutationObj.as_mutable(
4056 4056 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4057 4057
4058 4058 merge_rev = Column('merge_rev', String(40), nullable=True)
4059 4059
4060 4060 reviewer_data = Column(
4061 4061 'reviewer_data_json', MutationObj.as_mutable(
4062 4062 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4063 4063
4064 4064 @property
4065 4065 def reviewer_data_json(self):
4066 4066 return json.dumps(self.reviewer_data)
4067 4067
4068 4068 @property
4069 4069 def work_in_progress(self):
4070 4070 """checks if pull request is work in progress by checking the title"""
4071 4071 title = self.title.upper()
4072 4072 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4073 4073 return True
4074 4074 return False
4075 4075
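# Illustrative examples for the WIP check above, derived from the regex rather
# than from any test suite:
#
#     '[WIP] add feature'  -> True
#     'WIP: add feature'   -> True
#     'WIP add feature'    -> True
#     'wip: add feature'   -> True   (the title is upper-cased before matching)
#     'add WIP feature'    -> False  (only a WIP prefix counts)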
4076 4076 @hybrid_property
4077 4077 def description_safe(self):
4078 4078 from rhodecode.lib import helpers as h
4079 4079 return h.escape(self.description)
4080 4080
4081 4081 @hybrid_property
4082 4082 def revisions(self):
4083 4083 return self._revisions.split(':') if self._revisions else []
4084 4084
4085 4085 @revisions.setter
4086 4086 def revisions(self, val):
4087 4087 self._revisions = u':'.join(val)
4088 4088
4089 4089 @hybrid_property
4090 4090 def last_merge_status(self):
4091 4091 return safe_int(self._last_merge_status)
4092 4092
4093 4093 @last_merge_status.setter
4094 4094 def last_merge_status(self, val):
4095 4095 self._last_merge_status = val
4096 4096
4097 4097 @declared_attr
4098 4098 def author(cls):
4099 4099 return relationship('User', lazy='joined')
4100 4100
4101 4101 @declared_attr
4102 4102 def source_repo(cls):
4103 4103 return relationship(
4104 4104 'Repository',
4105 4105 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
4106 4106
4107 4107 @property
4108 4108 def source_ref_parts(self):
4109 4109 return self.unicode_to_reference(self.source_ref)
4110 4110
4111 4111 @declared_attr
4112 4112 def target_repo(cls):
4113 4113 return relationship(
4114 4114 'Repository',
4115 4115 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
4116 4116
4117 4117 @property
4118 4118 def target_ref_parts(self):
4119 4119 return self.unicode_to_reference(self.target_ref)
4120 4120
4121 4121 @property
4122 4122 def shadow_merge_ref(self):
4123 4123 return self.unicode_to_reference(self._shadow_merge_ref)
4124 4124
4125 4125 @shadow_merge_ref.setter
4126 4126 def shadow_merge_ref(self, ref):
4127 4127 self._shadow_merge_ref = self.reference_to_unicode(ref)
4128 4128
4129 4129 @staticmethod
4130 4130 def unicode_to_reference(raw):
4131 4131 """
4132 4132 Convert a unicode (or string) value to a Reference object.
4133 4133 If the value evaluates to False it returns None.
4134 4134 """
4135 4135 if raw:
4136 4136 refs = raw.split(':')
4137 4137 return Reference(*refs)
4138 4138 else:
4139 4139 return None
4140 4140
4141 4141 @staticmethod
4142 4142 def reference_to_unicode(ref):
4143 4143 """
4144 4144 Convert a reference object to unicode.
4145 4145 If reference is None it returns None.
4146 4146 """
4147 4147 if ref:
4148 4148 return u':'.join(ref)
4149 4149 else:
4150 4150 return None
4151 4151
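# Illustrative round-trip sketch for the two helpers above; the commit hash is
# a made-up value:
#
#     ref = _PullRequestBase.unicode_to_reference(u'branch:default:deadbeefcafe')
#     ref.type, ref.name, ref.commit_id       # ('branch', 'default', 'deadbeefcafe')
#     _PullRequestBase.reference_to_unicode(ref)   # u'branch:default:deadbeefcafe'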
4152 4152 def get_api_data(self, with_merge_state=True):
4153 4153 from rhodecode.model.pull_request import PullRequestModel
4154 4154
4155 4155 pull_request = self
4156 4156 if with_merge_state:
4157 4157 merge_response, merge_status, msg = \
4158 4158 PullRequestModel().merge_status(pull_request)
4159 4159 merge_state = {
4160 4160 'status': merge_status,
4161 4161 'message': safe_unicode(msg),
4162 4162 }
4163 4163 else:
4164 4164 merge_state = {'status': 'not_available',
4165 4165 'message': 'not_available'}
4166 4166
4167 4167 merge_data = {
4168 4168 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4169 4169 'reference': (
4170 4170 pull_request.shadow_merge_ref._asdict()
4171 4171 if pull_request.shadow_merge_ref else None),
4172 4172 }
4173 4173
4174 4174 data = {
4175 4175 'pull_request_id': pull_request.pull_request_id,
4176 4176 'url': PullRequestModel().get_url(pull_request),
4177 4177 'title': pull_request.title,
4178 4178 'description': pull_request.description,
4179 4179 'status': pull_request.status,
4180 4180 'state': pull_request.pull_request_state,
4181 4181 'created_on': pull_request.created_on,
4182 4182 'updated_on': pull_request.updated_on,
4183 4183 'commit_ids': pull_request.revisions,
4184 4184 'review_status': pull_request.calculated_review_status(),
4185 4185 'mergeable': merge_state,
4186 4186 'source': {
4187 4187 'clone_url': pull_request.source_repo.clone_url(),
4188 4188 'repository': pull_request.source_repo.repo_name,
4189 4189 'reference': {
4190 4190 'name': pull_request.source_ref_parts.name,
4191 4191 'type': pull_request.source_ref_parts.type,
4192 4192 'commit_id': pull_request.source_ref_parts.commit_id,
4193 4193 },
4194 4194 },
4195 4195 'target': {
4196 4196 'clone_url': pull_request.target_repo.clone_url(),
4197 4197 'repository': pull_request.target_repo.repo_name,
4198 4198 'reference': {
4199 4199 'name': pull_request.target_ref_parts.name,
4200 4200 'type': pull_request.target_ref_parts.type,
4201 4201 'commit_id': pull_request.target_ref_parts.commit_id,
4202 4202 },
4203 4203 },
4204 4204 'merge': merge_data,
4205 4205 'author': pull_request.author.get_api_data(include_secrets=False,
4206 4206 details='basic'),
4207 4207 'reviewers': [
4208 4208 {
4209 4209 'user': reviewer.get_api_data(include_secrets=False,
4210 4210 details='basic'),
4211 4211 'reasons': reasons,
4212 4212 'review_status': st[0][1].status if st else 'not_reviewed',
4213 4213 }
4214 4214 for obj, reviewer, reasons, mandatory, st in
4215 4215 pull_request.reviewers_statuses()
4216 4216 ]
4217 4217 }
4218 4218
4219 4219 return data
4220 4220
4221 4221 def set_state(self, pull_request_state, final_state=None):
4222 4222 """
4223 4223 # goes from the initial state, to `updating`, and back to the initial state.
4224 4224 # the state restored afterwards can be changed by specifying final_state=
4225 4225 with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
4226 4226 pull_request_obj.merge()
4227 4227
4228 4228 :param pull_request_state:
4229 4229 :param final_state:
4230 4230
4231 4231 """
4232 4232
4233 4233 return _SetState(self, pull_request_state, back_state=final_state)
4234 4234
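# Illustrative sketch of the state guard above; `pull_request` is a hypothetical
# PullRequest instance:
#
#     with pull_request.set_state(PullRequest.STATE_UPDATING):
#         ...  # perform the sensitive update here
#
# On a clean exit the original state is restored (or `final_state`, if given);
# if an exception is raised inside the block, _SetState logs it, leaves the
# state as-is and lets the exception propagate.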
4235 4235
4236 4236 class PullRequest(Base, _PullRequestBase):
4237 4237 __tablename__ = 'pull_requests'
4238 4238 __table_args__ = (
4239 4239 base_table_args,
4240 4240 )
4241 4241
4242 4242 pull_request_id = Column(
4243 4243 'pull_request_id', Integer(), nullable=False, primary_key=True)
4244 4244
4245 4245 def __repr__(self):
4246 4246 if self.pull_request_id:
4247 4247 return '<DB:PullRequest #%s>' % self.pull_request_id
4248 4248 else:
4249 4249 return '<DB:PullRequest at %#x>' % id(self)
4250 4250
4251 4251 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
4252 4252 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
4253 4253 comments = relationship('ChangesetComment', cascade="all, delete-orphan")
4254 4254 versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
4255 4255 lazy='dynamic')
4256 4256
4257 4257 @classmethod
4258 4258 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4259 4259 internal_methods=None):
4260 4260
4261 4261 class PullRequestDisplay(object):
4262 4262 """
4263 4263 Special object wrapper for showing PullRequest data via Versions.
4264 4264 It mimics the PR object as closely as possible. This is a read-only
4265 4265 object, just for display.
4266 4266 """
4267 4267
4268 4268 def __init__(self, attrs, internal=None):
4269 4269 self.attrs = attrs
4270 4270 # internal attributes have priority over the ones given via attrs
4271 4271 self.internal = internal or ['versions']
4272 4272
4273 4273 def __getattr__(self, item):
4274 4274 if item in self.internal:
4275 4275 return getattr(self, item)
4276 4276 try:
4277 4277 return self.attrs[item]
4278 4278 except KeyError:
4279 4279 raise AttributeError(
4280 4280 '%s object has no attribute %s' % (self, item))
4281 4281
4282 4282 def __repr__(self):
4283 4283 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
4284 4284
4285 4285 def versions(self):
4286 4286 return pull_request_obj.versions.order_by(
4287 4287 PullRequestVersion.pull_request_version_id).all()
4288 4288
4289 4289 def is_closed(self):
4290 4290 return pull_request_obj.is_closed()
4291 4291
4292 4292 def is_state_changing(self):
4293 4293 return pull_request_obj.is_state_changing()
4294 4294
4295 4295 @property
4296 4296 def pull_request_version_id(self):
4297 4297 return getattr(pull_request_obj, 'pull_request_version_id', None)
4298 4298
4299 4299 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4300 4300
4301 4301 attrs.author = StrictAttributeDict(
4302 4302 pull_request_obj.author.get_api_data())
4303 4303 if pull_request_obj.target_repo:
4304 4304 attrs.target_repo = StrictAttributeDict(
4305 4305 pull_request_obj.target_repo.get_api_data())
4306 4306 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4307 4307
4308 4308 if pull_request_obj.source_repo:
4309 4309 attrs.source_repo = StrictAttributeDict(
4310 4310 pull_request_obj.source_repo.get_api_data())
4311 4311 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4312 4312
4313 4313 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4314 4314 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4315 4315 attrs.revisions = pull_request_obj.revisions
4316 4316 attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
4317 4317 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4318 4318 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4319 4319 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4320 4320
4321 4321 return PullRequestDisplay(attrs, internal=internal_methods)
4322 4322
4323 4323 def is_closed(self):
4324 4324 return self.status == self.STATUS_CLOSED
4325 4325
4326 4326 def is_state_changing(self):
4327 4327 return self.pull_request_state != PullRequest.STATE_CREATED
4328 4328
4329 4329 def __json__(self):
4330 4330 return {
4331 4331 'revisions': self.revisions,
4332 4332 'versions': self.versions_count
4333 4333 }
4334 4334
4335 4335 def calculated_review_status(self):
4336 4336 from rhodecode.model.changeset_status import ChangesetStatusModel
4337 4337 return ChangesetStatusModel().calculated_review_status(self)
4338 4338
4339 4339 def reviewers_statuses(self):
4340 4340 from rhodecode.model.changeset_status import ChangesetStatusModel
4341 4341 return ChangesetStatusModel().reviewers_statuses(self)
4342 4342
4343 4343 @property
4344 4344 def workspace_id(self):
4345 4345 from rhodecode.model.pull_request import PullRequestModel
4346 4346 return PullRequestModel()._workspace_id(self)
4347 4347
4348 4348 def get_shadow_repo(self):
4349 4349 workspace_id = self.workspace_id
4350 4350 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4351 4351 if os.path.isdir(shadow_repository_path):
4352 4352 vcs_obj = self.target_repo.scm_instance()
4353 4353 return vcs_obj.get_shadow_instance(shadow_repository_path)
4354 4354
4355 4355 @property
4356 4356 def versions_count(self):
4357 4357 """
4358 4358 return the number of versions this PR has, e.g. a PR that has been
4359 4359 updated once will have 2 versions
4360 4360 """
4361 4361 return self.versions.count() + 1
4362 4362
4363 4363
4364 4364 class PullRequestVersion(Base, _PullRequestBase):
4365 4365 __tablename__ = 'pull_request_versions'
4366 4366 __table_args__ = (
4367 4367 base_table_args,
4368 4368 )
4369 4369
4370 4370 pull_request_version_id = Column(
4371 4371 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
4372 4372 pull_request_id = Column(
4373 4373 'pull_request_id', Integer(),
4374 4374 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4375 4375 pull_request = relationship('PullRequest')
4376 4376
4377 4377 def __repr__(self):
4378 4378 if self.pull_request_version_id:
4379 4379 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
4380 4380 else:
4381 4381 return '<DB:PullRequestVersion at %#x>' % id(self)
4382 4382
4383 4383 @property
4384 4384 def reviewers(self):
4385 4385 return self.pull_request.reviewers
4386 4386
4387 4387 @property
4388 4388 def versions(self):
4389 4389 return self.pull_request.versions
4390 4390
4391 4391 def is_closed(self):
4392 4392 # calculate from original
4393 4393 return self.pull_request.status == self.STATUS_CLOSED
4394 4394
4395 4395 def is_state_changing(self):
4396 4396 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4397 4397
4398 4398 def calculated_review_status(self):
4399 4399 return self.pull_request.calculated_review_status()
4400 4400
4401 4401 def reviewers_statuses(self):
4402 4402 return self.pull_request.reviewers_statuses()
4403 4403
4404 4404
4405 4405 class PullRequestReviewers(Base, BaseModel):
4406 4406 __tablename__ = 'pull_request_reviewers'
4407 4407 __table_args__ = (
4408 4408 base_table_args,
4409 4409 )
4410 4410
4411 4411 @hybrid_property
4412 4412 def reasons(self):
4413 4413 if not self._reasons:
4414 4414 return []
4415 4415 return self._reasons
4416 4416
4417 4417 @reasons.setter
4418 4418 def reasons(self, val):
4419 4419 val = val or []
4420 4420 if any(not isinstance(x, compat.string_types) for x in val):
4421 4421 raise Exception('invalid reasons type, must be list of strings')
4422 4422 self._reasons = val
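
The reasons value is persisted as JSON and validated to be a list of strings;
a minimal sketch (the reviewer object and the reason texts are placeholders):

reviewer = PullRequestReviewers()
reviewer.reasons = [u'default reviewer', u'added manually']
# reviewer.reasons = [1, 2]   # would raise: invalid reasons type, must be list of strings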
4423 4423
4424 4424 pull_requests_reviewers_id = Column(
4425 4425 'pull_requests_reviewers_id', Integer(), nullable=False,
4426 4426 primary_key=True)
4427 4427 pull_request_id = Column(
4428 4428 "pull_request_id", Integer(),
4429 4429 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4430 4430 user_id = Column(
4431 4431 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4432 4432 _reasons = Column(
4433 4433 'reason', MutationList.as_mutable(
4434 4434 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4435 4435
4436 4436 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4437 4437 user = relationship('User')
4438 4438 pull_request = relationship('PullRequest')
4439 4439
4440 4440 rule_data = Column(
4441 4441 'rule_data_json',
4442 4442 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4443 4443
4444 4444 def rule_user_group_data(self):
4445 4445 """
4446 4446 Returns the voting user group rule data for this reviewer
4447 4447 """
4448 4448
4449 4449 if self.rule_data and 'vote_rule' in self.rule_data:
4450 4450 user_group_data = {}
4451 4451 if 'rule_user_group_entry_id' in self.rule_data:
4452 4452 # means a group with voting rules !
4453 4453 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4454 4454 user_group_data['name'] = self.rule_data['rule_name']
4455 4455 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4456 4456
4457 4457 return user_group_data
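
For illustration, a reviewer entry created from a user-group voting rule might
carry rule_data shaped as below (keys as read by the method above, values made up):

reviewer.rule_data = {
    'vote_rule': 2,                    # minimum votes required from the group
    'rule_name': 'qa-team',            # hypothetical user group name
    'rule_user_group_entry_id': 7,     # id of the voting user-group rule entry
}
reviewer.rule_user_group_data()
# -> {'id': 7, 'name': 'qa-team', 'vote_rule': 2}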
4458 4458
4459 4459 def __unicode__(self):
4460 4460 return u"<%s('id:%s')>" % (self.__class__.__name__,
4461 4461 self.pull_requests_reviewers_id)
4462 4462
4463 4463
4464 4464 class Notification(Base, BaseModel):
4465 4465 __tablename__ = 'notifications'
4466 4466 __table_args__ = (
4467 4467 Index('notification_type_idx', 'type'),
4468 4468 base_table_args,
4469 4469 )
4470 4470
4471 4471 TYPE_CHANGESET_COMMENT = u'cs_comment'
4472 4472 TYPE_MESSAGE = u'message'
4473 4473 TYPE_MENTION = u'mention'
4474 4474 TYPE_REGISTRATION = u'registration'
4475 4475 TYPE_PULL_REQUEST = u'pull_request'
4476 4476 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
4477 4477 TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'
4478 4478
4479 4479 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4480 4480 subject = Column('subject', Unicode(512), nullable=True)
4481 4481 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4482 4482 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4483 4483 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4484 4484 type_ = Column('type', Unicode(255))
4485 4485
4486 4486 created_by_user = relationship('User')
4487 4487 notifications_to_users = relationship('UserNotification', lazy='joined',
4488 4488 cascade="all, delete-orphan")
4489 4489
4490 4490 @property
4491 4491 def recipients(self):
4492 4492 return [x.user for x in UserNotification.query()\
4493 4493 .filter(UserNotification.notification == self)\
4494 4494 .order_by(UserNotification.user_id.asc()).all()]
4495 4495
4496 4496 @classmethod
4497 4497 def create(cls, created_by, subject, body, recipients, type_=None):
4498 4498 if type_ is None:
4499 4499 type_ = Notification.TYPE_MESSAGE
4500 4500
4501 4501 notification = cls()
4502 4502 notification.created_by_user = created_by
4503 4503 notification.subject = subject
4504 4504 notification.body = body
4505 4505 notification.type_ = type_
4506 4506 notification.created_on = datetime.datetime.now()
4507 4507
4508 4508 # For each recipient, link the created notification to their account
4509 4509 for u in recipients:
4510 4510 assoc = UserNotification()
4511 4511 assoc.user_id = u.user_id
4512 4512 assoc.notification = notification
4513 4513
4514 4514 # if created_by is among the recipients, mark their notification
4515 4515 # as read
4516 4516 if u.user_id == created_by.user_id:
4517 4517 assoc.read = True
4518 4518 Session().add(assoc)
4519 4519
4520 4520 Session().add(notification)
4521 4521
4522 4522 return notification
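
A hedged usage sketch of the factory above; the username and the message are
placeholders, and a commit is still required to persist the records:

from rhodecode.model.db import Notification, User
from rhodecode.model.meta import Session

admin = User.get_by_username('admin')    # placeholder account
notification = Notification.create(
    created_by=admin,
    subject=u'maintenance window',
    body=u'the service will be restarted at 22:00 UTC',
    recipients=[admin],
    type_=Notification.TYPE_MESSAGE)
Session().commit()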
4523 4523
4524 4524
4525 4525 class UserNotification(Base, BaseModel):
4526 4526 __tablename__ = 'user_to_notification'
4527 4527 __table_args__ = (
4528 4528 UniqueConstraint('user_id', 'notification_id'),
4529 4529 base_table_args
4530 4530 )
4531 4531
4532 4532 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4533 4533 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4534 4534 read = Column('read', Boolean, default=False)
4535 4535 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4536 4536
4537 4537 user = relationship('User', lazy="joined")
4538 4538 notification = relationship('Notification', lazy="joined",
4539 4539 order_by=lambda: Notification.created_on.desc(),)
4540 4540
4541 4541 def mark_as_read(self):
4542 4542 self.read = True
4543 4543 Session().add(self)
4544 4544
4545 4545
4546 4546 class UserNotice(Base, BaseModel):
4547 4547 __tablename__ = 'user_notices'
4548 4548 __table_args__ = (
4549 4549 base_table_args
4550 4550 )
4551 4551
4552 4552 NOTIFICATION_TYPE_MESSAGE = 'message'
4553 4553 NOTIFICATION_TYPE_NOTICE = 'notice'
4554 4554
4555 4555 NOTIFICATION_LEVEL_INFO = 'info'
4556 4556 NOTIFICATION_LEVEL_WARNING = 'warning'
4557 4557 NOTIFICATION_LEVEL_ERROR = 'error'
4558 4558
4559 4559 user_notice_id = Column('gist_id', Integer(), primary_key=True)
4560 4560
4561 4561 notice_subject = Column('notice_subject', Unicode(512), nullable=True)
4562 4562 notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4563 4563
4564 4564 notice_read = Column('notice_read', Boolean, default=False)
4565 4565
4566 4566 notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
4567 4567 notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
4568 4568
4569 4569 notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
4570 4570 notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4571 4571
4572 4572 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
4573 4573 user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
4574 4574
4575 4575 @classmethod
4576 4576 def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
4577 4577
4578 4578 if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
4579 4579 cls.NOTIFICATION_LEVEL_WARNING,
4580 4580 cls.NOTIFICATION_LEVEL_INFO]:
4581 4581 return
4582 4582
4583 4583 from rhodecode.model.user import UserModel
4584 4584 user = UserModel().get_user(user)
4585 4585
4586 4586 new_notice = UserNotice()
4587 4587 if not allow_duplicate:
4588 4588 existing_msg = UserNotice().query() \
4589 4589 .filter(UserNotice.user == user) \
4590 4590 .filter(UserNotice.notice_body == body) \
4591 4591 .filter(UserNotice.notice_read == false()) \
4592 4592 .scalar()
4593 4593 if existing_msg:
4594 4594 log.warning('Ignoring duplicate notice for user %s', user)
4595 4595 return
4596 4596
4597 4597 new_notice.user = user
4598 4598 new_notice.notice_subject = subject
4599 4599 new_notice.notice_body = body
4600 4600 new_notice.notification_level = notice_level
4601 4601 Session().add(new_notice)
4602 4602 Session().commit()
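
A short, hypothetical call of the helper above; it resolves the user, skips
unread duplicates and commits on its own:

user = User.get_by_username('admin')     # placeholder account
UserNotice.create_for_user(
    user,
    subject=u'license expires soon',
    body=u'please renew the license before the end of the month',
    notice_level=UserNotice.NOTIFICATION_LEVEL_WARNING)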
4603 4603
4604 4604
4605 4605 class Gist(Base, BaseModel):
4606 4606 __tablename__ = 'gists'
4607 4607 __table_args__ = (
4608 4608 Index('g_gist_access_id_idx', 'gist_access_id'),
4609 4609 Index('g_created_on_idx', 'created_on'),
4610 4610 base_table_args
4611 4611 )
4612 4612
4613 4613 GIST_PUBLIC = u'public'
4614 4614 GIST_PRIVATE = u'private'
4615 4615 DEFAULT_FILENAME = u'gistfile1.txt'
4616 4616
4617 4617 ACL_LEVEL_PUBLIC = u'acl_public'
4618 4618 ACL_LEVEL_PRIVATE = u'acl_private'
4619 4619
4620 4620 gist_id = Column('gist_id', Integer(), primary_key=True)
4621 4621 gist_access_id = Column('gist_access_id', Unicode(250))
4622 4622 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4623 4623 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4624 4624 gist_expires = Column('gist_expires', Float(53), nullable=False)
4625 4625 gist_type = Column('gist_type', Unicode(128), nullable=False)
4626 4626 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4627 4627 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4628 4628 acl_level = Column('acl_level', Unicode(128), nullable=True)
4629 4629
4630 4630 owner = relationship('User')
4631 4631
4632 4632 def __repr__(self):
4633 4633 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
4634 4634
4635 4635 @hybrid_property
4636 4636 def description_safe(self):
4637 4637 from rhodecode.lib import helpers as h
4638 4638 return h.escape(self.gist_description)
4639 4639
4640 4640 @classmethod
4641 4641 def get_or_404(cls, id_):
4642 4642 from pyramid.httpexceptions import HTTPNotFound
4643 4643
4644 4644 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4645 4645 if not res:
4646 4646 raise HTTPNotFound()
4647 4647 return res
4648 4648
4649 4649 @classmethod
4650 4650 def get_by_access_id(cls, gist_access_id):
4651 4651 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4652 4652
4653 4653 def gist_url(self):
4654 4654 from rhodecode.model.gist import GistModel
4655 4655 return GistModel().get_url(self)
4656 4656
4657 4657 @classmethod
4658 4658 def base_path(cls):
4659 4659 """
4660 4660 Returns the base path where all gists are stored
4661 4661
4662 4662 :param cls:
4663 4663 """
4664 4664 from rhodecode.model.gist import GIST_STORE_LOC
4665 4665 q = Session().query(RhodeCodeUi)\
4666 4666 .filter(RhodeCodeUi.ui_key == URL_SEP)
4667 4667 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4668 4668 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4669 4669
4670 4670 def get_api_data(self):
4671 4671 """
4672 4672 Common function for generating gist related data for API
4673 4673 """
4674 4674 gist = self
4675 4675 data = {
4676 4676 'gist_id': gist.gist_id,
4677 4677 'type': gist.gist_type,
4678 4678 'access_id': gist.gist_access_id,
4679 4679 'description': gist.gist_description,
4680 4680 'url': gist.gist_url(),
4681 4681 'expires': gist.gist_expires,
4682 4682 'created_on': gist.created_on,
4683 4683 'modified_at': gist.modified_at,
4684 4684 'content': None,
4685 4685 'acl_level': gist.acl_level,
4686 4686 }
4687 4687 return data
4688 4688
4689 4689 def __json__(self):
4690 4690 data = dict(
4691 4691 )
4692 4692 data.update(self.get_api_data())
4693 4693 return data
4694 4694 # SCM functions
4695 4695
4696 4696 def scm_instance(self, **kwargs):
4697 4697 """
4698 4698 Get an instance of VCS Repository
4699 4699
4700 4700 :param kwargs:
4701 4701 """
4702 4702 from rhodecode.model.gist import GistModel
4703 4703 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4704 4704 return get_vcs_instance(
4705 4705 repo_path=safe_str(full_repo_path), create=False,
4706 4706 _vcs_alias=GistModel.vcs_backend)
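
A small sketch, assuming a gist with access id 'abcdef' already exists, of how
the helpers above fit together:

gist = Gist.get_by_access_id('abcdef')   # placeholder access id
if gist is not None:
    api_data = gist.get_api_data()       # plain dict, 'content' left as None
    vcs_repo = gist.scm_instance()       # VCS repository backing the gist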
4707 4707
4708 4708
4709 4709 class ExternalIdentity(Base, BaseModel):
4710 4710 __tablename__ = 'external_identities'
4711 4711 __table_args__ = (
4712 4712 Index('local_user_id_idx', 'local_user_id'),
4713 4713 Index('external_id_idx', 'external_id'),
4714 4714 base_table_args
4715 4715 )
4716 4716
4717 4717 external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
4718 4718 external_username = Column('external_username', Unicode(1024), default=u'')
4719 4719 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4720 4720 provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
4721 4721 access_token = Column('access_token', String(1024), default=u'')
4722 4722 alt_token = Column('alt_token', String(1024), default=u'')
4723 4723 token_secret = Column('token_secret', String(1024), default=u'')
4724 4724
4725 4725 @classmethod
4726 4726 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4727 4727 """
4728 4728 Returns ExternalIdentity instance based on search params
4729 4729
4730 4730 :param external_id:
4731 4731 :param provider_name:
4732 4732 :return: ExternalIdentity
4733 4733 """
4734 4734 query = cls.query()
4735 4735 query = query.filter(cls.external_id == external_id)
4736 4736 query = query.filter(cls.provider_name == provider_name)
4737 4737 if local_user_id:
4738 4738 query = query.filter(cls.local_user_id == local_user_id)
4739 4739 return query.first()
4740 4740
4741 4741 @classmethod
4742 4742 def user_by_external_id_and_provider(cls, external_id, provider_name):
4743 4743 """
4744 4744 Returns User instance based on search params
4745 4745
4746 4746 :param external_id:
4747 4747 :param provider_name:
4748 4748 :return: User
4749 4749 """
4750 4750 query = User.query()
4751 4751 query = query.filter(cls.external_id == external_id)
4752 4752 query = query.filter(cls.provider_name == provider_name)
4753 4753 query = query.filter(User.user_id == cls.local_user_id)
4754 4754 return query.first()
4755 4755
4756 4756 @classmethod
4757 4757 def by_local_user_id(cls, local_user_id):
4758 4758 """
4759 4759 Returns all tokens for user
4760 4760
4761 4761 :param local_user_id:
4762 4762 :return: ExternalIdentity
4763 4763 """
4764 4764 query = cls.query()
4765 4765 query = query.filter(cls.local_user_id == local_user_id)
4766 4766 return query
4767 4767
4768 4768 @classmethod
4769 4769 def load_provider_plugin(cls, plugin_id):
4770 4770 from rhodecode.authentication.base import loadplugin
4771 4771 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
4772 4772 auth_plugin = loadplugin(_plugin_id)
4773 4773 return auth_plugin
4774 4774
4775 4775
4776 4776 class Integration(Base, BaseModel):
4777 4777 __tablename__ = 'integrations'
4778 4778 __table_args__ = (
4779 4779 base_table_args
4780 4780 )
4781 4781
4782 4782 integration_id = Column('integration_id', Integer(), primary_key=True)
4783 4783 integration_type = Column('integration_type', String(255))
4784 4784 enabled = Column('enabled', Boolean(), nullable=False)
4785 4785 name = Column('name', String(255), nullable=False)
4786 4786 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4787 4787 default=False)
4788 4788
4789 4789 settings = Column(
4790 4790 'settings_json', MutationObj.as_mutable(
4791 4791 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4792 4792 repo_id = Column(
4793 4793 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4794 4794 nullable=True, unique=None, default=None)
4795 4795 repo = relationship('Repository', lazy='joined')
4796 4796
4797 4797 repo_group_id = Column(
4798 4798 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4799 4799 nullable=True, unique=None, default=None)
4800 4800 repo_group = relationship('RepoGroup', lazy='joined')
4801 4801
4802 4802 @property
4803 4803 def scope(self):
4804 4804 if self.repo:
4805 4805 return repr(self.repo)
4806 4806 if self.repo_group:
4807 4807 if self.child_repos_only:
4808 4808 return repr(self.repo_group) + ' (child repos only)'
4809 4809 else:
4810 4810 return repr(self.repo_group) + ' (recursive)'
4811 4811 if self.child_repos_only:
4812 4812 return 'root_repos'
4813 4813 return 'global'
4814 4814
4815 4815 def __repr__(self):
4816 4816 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4817 4817
4818 4818
4819 4819 class RepoReviewRuleUser(Base, BaseModel):
4820 4820 __tablename__ = 'repo_review_rules_users'
4821 4821 __table_args__ = (
4822 4822 base_table_args
4823 4823 )
4824 4824
4825 4825 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4826 4826 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4827 4827 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4828 4828 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4829 4829 user = relationship('User')
4830 4830
4831 4831 def rule_data(self):
4832 4832 return {
4833 4833 'mandatory': self.mandatory
4834 4834 }
4835 4835
4836 4836
4837 4837 class RepoReviewRuleUserGroup(Base, BaseModel):
4838 4838 __tablename__ = 'repo_review_rules_users_groups'
4839 4839 __table_args__ = (
4840 4840 base_table_args
4841 4841 )
4842 4842
4843 4843 VOTE_RULE_ALL = -1
4844 4844
4845 4845 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4846 4846 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4847 4847 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4848 4848 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4849 4849 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4850 4850 users_group = relationship('UserGroup')
4851 4851
4852 4852 def rule_data(self):
4853 4853 return {
4854 4854 'mandatory': self.mandatory,
4855 4855 'vote_rule': self.vote_rule
4856 4856 }
4857 4857
4858 4858 @property
4859 4859 def vote_rule_label(self):
4860 4860 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4861 4861 return 'all must vote'
4862 4862 else:
4863 4863 return 'min. vote {}'.format(self.vote_rule)
4864 4864
4865 4865
4866 4866 class RepoReviewRule(Base, BaseModel):
4867 4867 __tablename__ = 'repo_review_rules'
4868 4868 __table_args__ = (
4869 4869 base_table_args
4870 4870 )
4871 4871
4872 4872 repo_review_rule_id = Column(
4873 4873 'repo_review_rule_id', Integer(), primary_key=True)
4874 4874 repo_id = Column(
4875 4875 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4876 4876 repo = relationship('Repository', backref='review_rules')
4877 4877
4878 4878 review_rule_name = Column('review_rule_name', String(255))
4879 4879 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4880 4880 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4881 4881 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4882 4882
4883 4883 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4884 4884 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4885 4885 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4886 4886 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4887 4887
4888 4888 rule_users = relationship('RepoReviewRuleUser')
4889 4889 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4890 4890
4891 4891 def _validate_pattern(self, value):
4892 4892 re.compile('^' + glob2re(value) + '$')
4893 4893
4894 4894 @hybrid_property
4895 4895 def source_branch_pattern(self):
4896 4896 return self._branch_pattern or '*'
4897 4897
4898 4898 @source_branch_pattern.setter
4899 4899 def source_branch_pattern(self, value):
4900 4900 self._validate_pattern(value)
4901 4901 self._branch_pattern = value or '*'
4902 4902
4903 4903 @hybrid_property
4904 4904 def target_branch_pattern(self):
4905 4905 return self._target_branch_pattern or '*'
4906 4906
4907 4907 @target_branch_pattern.setter
4908 4908 def target_branch_pattern(self, value):
4909 4909 self._validate_pattern(value)
4910 4910 self._target_branch_pattern = value or '*'
4911 4911
4912 4912 @hybrid_property
4913 4913 def file_pattern(self):
4914 4914 return self._file_pattern or '*'
4915 4915
4916 4916 @file_pattern.setter
4917 4917 def file_pattern(self, value):
4918 4918 self._validate_pattern(value)
4919 4919 self._file_pattern = value or '*'
4920 4920
4921 4921 def matches(self, source_branch, target_branch, files_changed):
4922 4922 """
4923 4923 Check if this review rule matches a branch/files in a pull request
4924 4924
4925 4925 :param source_branch: source branch name for the commit
4926 4926 :param target_branch: target branch name for the commit
4927 4927 :param files_changed: list of file paths changed in the pull request
4928 4928 """
4929 4929
4930 4930 source_branch = source_branch or ''
4931 4931 target_branch = target_branch or ''
4932 4932 files_changed = files_changed or []
4933 4933
4934 4934 branch_matches = True
4935 4935 if source_branch or target_branch:
4936 4936 if self.source_branch_pattern == '*':
4937 4937 source_branch_match = True
4938 4938 else:
4939 4939 if self.source_branch_pattern.startswith('re:'):
4940 4940 source_pattern = self.source_branch_pattern[3:]
4941 4941 else:
4942 4942 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4943 4943 source_branch_regex = re.compile(source_pattern)
4944 4944 source_branch_match = bool(source_branch_regex.search(source_branch))
4945 4945 if self.target_branch_pattern == '*':
4946 4946 target_branch_match = True
4947 4947 else:
4948 4948 if self.target_branch_pattern.startswith('re:'):
4949 4949 target_pattern = self.target_branch_pattern[3:]
4950 4950 else:
4951 4951 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4952 4952 target_branch_regex = re.compile(target_pattern)
4953 4953 target_branch_match = bool(target_branch_regex.search(target_branch))
4954 4954
4955 4955 branch_matches = source_branch_match and target_branch_match
4956 4956
4957 4957 files_matches = True
4958 4958 if self.file_pattern != '*':
4959 4959 files_matches = False
4960 4960 if self.file_pattern.startswith('re:'):
4961 4961 file_pattern = self.file_pattern[3:]
4962 4962 else:
4963 4963 file_pattern = glob2re(self.file_pattern)
4964 4964 file_regex = re.compile(file_pattern)
4965 for filename in files_changed:
4965 for file_data in files_changed:
4966 filename = file_data.get('filename')
4967
4966 4968 if file_regex.search(filename):
4967 4969 files_matches = True
4968 4970 break
4969 4971
4970 4972 return branch_matches and files_matches
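
With the new diff format, files_changed is a list of dicts carrying at least a
'filename' key rather than a flat list of paths. A hedged sketch of evaluating
a rule against such data (patterns and file names are made up):

rule = RepoReviewRule()
rule.source_branch_pattern = 'feature/*'
rule.target_branch_pattern = 'master'
rule.file_pattern = 're:.*\.py$'

files_changed = [
    {'filename': 'rhodecode/model/db.py', 'stats': {'added': 2, 'deleted': 1}},
]
rule.matches('feature/new-diff', 'master', files_changed)   # -> True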
4971 4973
4972 4974 @property
4973 4975 def review_users(self):
4974 4976 """ Returns the users which this rule applies to """
4975 4977
4976 4978 users = collections.OrderedDict()
4977 4979
4978 4980 for rule_user in self.rule_users:
4979 4981 if rule_user.user.active:
4980 4982 if rule_user.user not in users:
4981 4983 users[rule_user.user.username] = {
4982 4984 'user': rule_user.user,
4983 4985 'source': 'user',
4984 4986 'source_data': {},
4985 4987 'data': rule_user.rule_data()
4986 4988 }
4987 4989
4988 4990 for rule_user_group in self.rule_user_groups:
4989 4991 source_data = {
4990 4992 'user_group_id': rule_user_group.users_group.users_group_id,
4991 4993 'name': rule_user_group.users_group.users_group_name,
4992 4994 'members': len(rule_user_group.users_group.members)
4993 4995 }
4994 4996 for member in rule_user_group.users_group.members:
4995 4997 if member.user.active:
4996 4998 key = member.user.username
4997 4999 if key in users:
4998 5000 # skip this member as we already have them;
4999 5001 # this prevents overriding the "first" matched
5000 5002 # users with duplicates from multiple groups
5001 5003 continue
5002 5004
5003 5005 users[key] = {
5004 5006 'user': member.user,
5005 5007 'source': 'user_group',
5006 5008 'source_data': source_data,
5007 5009 'data': rule_user_group.rule_data()
5008 5010 }
5009 5011
5010 5012 return users
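
The mapping returned above is keyed by username; a sketch of consuming it
(rule is assumed to be an existing RepoReviewRule):

for username, entry in rule.review_users.items():
    # entry['source'] is 'user' or 'user_group', entry['data'] carries the
    # mandatory / vote_rule flags produced by rule_data()
    log.debug('default reviewer %s via %s: %s',
              username, entry['source'], entry['data'])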
5011 5013
5012 5014 def user_group_vote_rule(self, user_id):
5013 5015
5014 5016 rules = []
5015 5017 if not self.rule_user_groups:
5016 5018 return rules
5017 5019
5018 5020 for user_group in self.rule_user_groups:
5019 5021 user_group_members = [x.user_id for x in user_group.users_group.members]
5020 5022 if user_id in user_group_members:
5021 5023 rules.append(user_group)
5022 5024 return rules
5023 5025
5024 5026 def __repr__(self):
5025 5027 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
5026 5028 self.repo_review_rule_id, self.repo)
5027 5029
5028 5030
5029 5031 class ScheduleEntry(Base, BaseModel):
5030 5032 __tablename__ = 'schedule_entries'
5031 5033 __table_args__ = (
5032 5034 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5033 5035 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5034 5036 base_table_args,
5035 5037 )
5036 5038
5037 5039 schedule_types = ['crontab', 'timedelta', 'integer']
5038 5040 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5039 5041
5040 5042 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5041 5043 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5042 5044 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5043 5045
5044 5046 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5045 5047 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5046 5048
5047 5049 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5048 5050 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5049 5051
5050 5052 # task
5051 5053 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5052 5054 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5053 5055 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5054 5056 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5055 5057
5056 5058 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5057 5059 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5058 5060
5059 5061 @hybrid_property
5060 5062 def schedule_type(self):
5061 5063 return self._schedule_type
5062 5064
5063 5065 @schedule_type.setter
5064 5066 def schedule_type(self, val):
5065 5067 if val not in self.schedule_types:
5066 5068 raise ValueError('Value must be one of `{}` and got `{}`'.format(
5067 5069 self.schedule_types, val))
5068 5070
5069 5071 self._schedule_type = val
5070 5072
5071 5073 @classmethod
5072 5074 def get_uid(cls, obj):
5073 5075 args = obj.task_args
5074 5076 kwargs = obj.task_kwargs
5075 5077 if isinstance(args, JsonRaw):
5076 5078 try:
5077 5079 args = json.loads(args)
5078 5080 except ValueError:
5079 5081 args = tuple()
5080 5082
5081 5083 if isinstance(kwargs, JsonRaw):
5082 5084 try:
5083 5085 kwargs = json.loads(kwargs)
5084 5086 except ValueError:
5085 5087 kwargs = dict()
5086 5088
5087 5089 dot_notation = obj.task_dot_notation
5088 5090 val = '.'.join(map(safe_str, [
5089 5091 sorted(dot_notation), args, sorted(kwargs.items())]))
5090 5092 return hashlib.sha1(val).hexdigest()
5091 5093
5092 5094 @classmethod
5093 5095 def get_by_schedule_name(cls, schedule_name):
5094 5096 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5095 5097
5096 5098 @classmethod
5097 5099 def get_by_schedule_id(cls, schedule_id):
5098 5100 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5099 5101
5100 5102 @property
5101 5103 def task(self):
5102 5104 return self.task_dot_notation
5103 5105
5104 5106 @property
5105 5107 def schedule(self):
5106 5108 from rhodecode.lib.celerylib.utils import raw_2_schedule
5107 5109 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5108 5110 return schedule
5109 5111
5110 5112 @property
5111 5113 def args(self):
5112 5114 try:
5113 5115 return list(self.task_args or [])
5114 5116 except ValueError:
5115 5117 return list()
5116 5118
5117 5119 @property
5118 5120 def kwargs(self):
5119 5121 try:
5120 5122 return dict(self.task_kwargs or {})
5121 5123 except ValueError:
5122 5124 return dict()
5123 5125
5124 5126 def _as_raw(self, val):
5125 5127 if hasattr(val, 'de_coerce'):
5126 5128 val = val.de_coerce()
5127 5129 if val:
5128 5130 val = json.dumps(val)
5129 5131
5130 5132 return val
5131 5133
5132 5134 @property
5133 5135 def schedule_definition_raw(self):
5134 5136 return self._as_raw(self.schedule_definition)
5135 5137
5136 5138 @property
5137 5139 def args_raw(self):
5138 5140 return self._as_raw(self.task_args)
5139 5141
5140 5142 @property
5141 5143 def kwargs_raw(self):
5142 5144 return self._as_raw(self.task_kwargs)
5143 5145
5144 5146 def __repr__(self):
5145 5147 return '<DB:ScheduleEntry({}:{})>'.format(
5146 5148 self.schedule_entry_id, self.schedule_name)
5147 5149
5148 5150
5149 5151 @event.listens_for(ScheduleEntry, 'before_update')
5150 5152 def update_task_uid(mapper, connection, target):
5151 5153 target.task_uid = ScheduleEntry.get_uid(target)
5152 5154
5153 5155
5154 5156 @event.listens_for(ScheduleEntry, 'before_insert')
5155 5157 def set_task_uid(mapper, connection, target):
5156 5158 target.task_uid = ScheduleEntry.get_uid(target)
5157 5159
5158 5160
5159 5161 class _BaseBranchPerms(BaseModel):
5160 5162 @classmethod
5161 5163 def compute_hash(cls, value):
5162 5164 return sha1_safe(value)
5163 5165
5164 5166 @hybrid_property
5165 5167 def branch_pattern(self):
5166 5168 return self._branch_pattern or '*'
5167 5169
5168 5170 @hybrid_property
5169 5171 def branch_hash(self):
5170 5172 return self._branch_hash
5171 5173
5172 5174 def _validate_glob(self, value):
5173 5175 re.compile('^' + glob2re(value) + '$')
5174 5176
5175 5177 @branch_pattern.setter
5176 5178 def branch_pattern(self, value):
5177 5179 self._validate_glob(value)
5178 5180 self._branch_pattern = value or '*'
5179 5181 # set the Hash when setting the branch pattern
5180 5182 self._branch_hash = self.compute_hash(self._branch_pattern)
5181 5183
5182 5184 def matches(self, branch):
5183 5185 """
5185 5187 Check if the given branch matches this entry
5185 5187
5186 5188 :param branch: branch name for the commit
5187 5189 """
5188 5190
5189 5191 branch = branch or ''
5190 5192
5191 5193 branch_matches = True
5192 5194 if branch:
5193 5195 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5194 5196 branch_matches = bool(branch_regex.search(branch))
5195 5197
5196 5198 return branch_matches
5197 5199
5198 5200
5199 5201 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5200 5202 __tablename__ = 'user_to_repo_branch_permissions'
5201 5203 __table_args__ = (
5202 5204 base_table_args
5203 5205 )
5204 5206
5205 5207 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5206 5208
5207 5209 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5208 5210 repo = relationship('Repository', backref='user_branch_perms')
5209 5211
5210 5212 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5211 5213 permission = relationship('Permission')
5212 5214
5213 5215 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5214 5216 user_repo_to_perm = relationship('UserRepoToPerm')
5215 5217
5216 5218 rule_order = Column('rule_order', Integer(), nullable=False)
5217 5219 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5218 5220 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5219 5221
5220 5222 def __unicode__(self):
5221 5223 return u'<UserBranchPermission(%s => %r)>' % (
5222 5224 self.user_repo_to_perm, self.branch_pattern)
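
A brief sketch of the glob matching defined in _BaseBranchPerms (the pattern
and branch names are illustrative):

perm = UserToRepoBranchPermission()
perm.branch_pattern = 'release/*'    # setter validates the glob and stores its hash
perm.matches('release/4.18')         # -> True
perm.matches('feature/new-ui')       # -> False
perm.matches('')                     # empty branch always matches -> True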
5223 5225
5224 5226
5225 5227 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5226 5228 __tablename__ = 'user_group_to_repo_branch_permissions'
5227 5229 __table_args__ = (
5228 5230 base_table_args
5229 5231 )
5230 5232
5231 5233 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5232 5234
5233 5235 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5234 5236 repo = relationship('Repository', backref='user_group_branch_perms')
5235 5237
5236 5238 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5237 5239 permission = relationship('Permission')
5238 5240
5239 5241 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5240 5242 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
5241 5243
5242 5244 rule_order = Column('rule_order', Integer(), nullable=False)
5243 5245 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5244 5246 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5245 5247
5246 5248 def __unicode__(self):
5247 5249 return u'<UserBranchPermission(%s => %r)>' % (
5248 5250 self.user_group_repo_to_perm, self.branch_pattern)
5249 5251
5250 5252
5251 5253 class UserBookmark(Base, BaseModel):
5252 5254 __tablename__ = 'user_bookmarks'
5253 5255 __table_args__ = (
5254 5256 UniqueConstraint('user_id', 'bookmark_repo_id'),
5255 5257 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5256 5258 UniqueConstraint('user_id', 'bookmark_position'),
5257 5259 base_table_args
5258 5260 )
5259 5261
5260 5262 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5261 5263 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5262 5264 position = Column("bookmark_position", Integer(), nullable=False)
5263 5265 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5264 5266 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5265 5267 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5266 5268
5267 5269 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5268 5270 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5269 5271
5270 5272 user = relationship("User")
5271 5273
5272 5274 repository = relationship("Repository")
5273 5275 repository_group = relationship("RepoGroup")
5274 5276
5275 5277 @classmethod
5276 5278 def get_by_position_for_user(cls, position, user_id):
5277 5279 return cls.query() \
5278 5280 .filter(UserBookmark.user_id == user_id) \
5279 5281 .filter(UserBookmark.position == position).scalar()
5280 5282
5281 5283 @classmethod
5282 5284 def get_bookmarks_for_user(cls, user_id, cache=True):
5283 5285 bookmarks = cls.query() \
5284 5286 .filter(UserBookmark.user_id == user_id) \
5285 5287 .options(joinedload(UserBookmark.repository)) \
5286 5288 .options(joinedload(UserBookmark.repository_group)) \
5287 5289 .order_by(UserBookmark.position.asc())
5288 5290
5289 5291 if cache:
5290 5292 bookmarks = bookmarks.options(
5291 5293 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5292 5294 )
5293 5295
5294 5296 return bookmarks.all()
5295 5297
5296 5298 def __unicode__(self):
5297 5299 return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5298 5300
5299 5301
5300 5302 class FileStore(Base, BaseModel):
5301 5303 __tablename__ = 'file_store'
5302 5304 __table_args__ = (
5303 5305 base_table_args
5304 5306 )
5305 5307
5306 5308 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5307 5309 file_uid = Column('file_uid', String(1024), nullable=False)
5308 5310 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5309 5311 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5310 5312 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5311 5313
5312 5314 # sha256 hash
5313 5315 file_hash = Column('file_hash', String(512), nullable=False)
5314 5316 file_size = Column('file_size', BigInteger(), nullable=False)
5315 5317
5316 5318 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5317 5319 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5318 5320 accessed_count = Column('accessed_count', Integer(), default=0)
5319 5321
5320 5322 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5321 5323
5322 5324 # if repo/repo_group reference is set, check for permissions
5323 5325 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5324 5326
5326 5328 # hidden marks an attachment that should not be shown in the artifact listing
5326 5328 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5327 5329
5328 5330 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5329 5331 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
5330 5332
5331 5333 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5332 5334
5334 5336 # scope limited to a user the requester has access to
5334 5336 scope_user_id = Column(
5335 5337 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5336 5338 nullable=True, unique=None, default=None)
5337 5339 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
5338 5340
5340 5342 # scope limited to a user group the requester has access to
5340 5342 scope_user_group_id = Column(
5341 5343 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5342 5344 nullable=True, unique=None, default=None)
5343 5345 user_group = relationship('UserGroup', lazy='joined')
5344 5346
5346 5348 # scope limited to a repo the requester has access to
5346 5348 scope_repo_id = Column(
5347 5349 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5348 5350 nullable=True, unique=None, default=None)
5349 5351 repo = relationship('Repository', lazy='joined')
5350 5352
5352 5354 # scope limited to a repo group the requester has access to
5352 5354 scope_repo_group_id = Column(
5353 5355 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5354 5356 nullable=True, unique=None, default=None)
5355 5357 repo_group = relationship('RepoGroup', lazy='joined')
5356 5358
5357 5359 @classmethod
5358 5360 def get_by_store_uid(cls, file_store_uid):
5359 5361 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5360 5362
5361 5363 @classmethod
5362 5364 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5363 5365 file_description='', enabled=True, hidden=False, check_acl=True,
5364 5366 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5365 5367
5366 5368 store_entry = FileStore()
5367 5369 store_entry.file_uid = file_uid
5368 5370 store_entry.file_display_name = file_display_name
5369 5371 store_entry.file_org_name = filename
5370 5372 store_entry.file_size = file_size
5371 5373 store_entry.file_hash = file_hash
5372 5374 store_entry.file_description = file_description
5373 5375
5374 5376 store_entry.check_acl = check_acl
5375 5377 store_entry.enabled = enabled
5376 5378 store_entry.hidden = hidden
5377 5379
5378 5380 store_entry.user_id = user_id
5379 5381 store_entry.scope_user_id = scope_user_id
5380 5382 store_entry.scope_repo_id = scope_repo_id
5381 5383 store_entry.scope_repo_group_id = scope_repo_group_id
5382 5384
5383 5385 return store_entry
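
A hypothetical sketch of registering an artifact with the factory above; the
uid, hash and size are placeholders, and the entry still has to be added to
the session and committed:

store_entry = FileStore.create(
    file_uid=u'deadbeefcafe',            # placeholder uid from the storage backend
    filename=u'build.log',
    file_hash=u'0' * 64,                 # placeholder sha256
    file_size=1024,
    file_description=u'CI build log',
    user_id=admin.user_id)               # admin assumed to be a User instance
Session().add(store_entry)
Session().commit()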
5384 5386
5385 5387 @classmethod
5386 5388 def store_metadata(cls, file_store_id, args, commit=True):
5387 5389 file_store = FileStore.get(file_store_id)
5388 5390 if file_store is None:
5389 5391 return
5390 5392
5391 5393 for section, key, value, value_type in args:
5392 5394 has_key = FileStoreMetadata().query() \
5393 5395 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5394 5396 .filter(FileStoreMetadata.file_store_meta_section == section) \
5395 5397 .filter(FileStoreMetadata.file_store_meta_key == key) \
5396 5398 .scalar()
5397 5399 if has_key:
5398 5400 msg = 'key `{}` already defined under section `{}` for this file.'\
5399 5401 .format(key, section)
5400 5402 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5401 5403
5402 5404 # NOTE(marcink): raises ArtifactMetadataBadValueType
5403 5405 FileStoreMetadata.valid_value_type(value_type)
5404 5406
5405 5407 meta_entry = FileStoreMetadata()
5406 5408 meta_entry.file_store = file_store
5407 5409 meta_entry.file_store_meta_section = section
5408 5410 meta_entry.file_store_meta_key = key
5409 5411 meta_entry.file_store_meta_value_type = value_type
5410 5412 meta_entry.file_store_meta_value = value
5411 5413
5412 5414 Session().add(meta_entry)
5413 5415
5414 5416 try:
5415 5417 if commit:
5416 5418 Session().commit()
5417 5419 except IntegrityError:
5418 5420 Session().rollback()
5419 5421 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
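
The args parameter above is a list of (section, key, value, value_type)
tuples, where value_type has to resolve to one of
FileStoreMetadata.SETTINGS_TYPES; a hedged example for the entry created in
the previous sketch:

FileStore.store_metadata(
    file_store_id=store_entry.file_store_id,
    args=[
        ('tags', 'pipeline', 'nightly', 'unicode'),
        ('tags', 'retention_days', '30', 'int'),
    ])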
5420 5422
5421 5423 @classmethod
5422 5424 def bump_access_counter(cls, file_uid, commit=True):
5423 5425 FileStore().query()\
5424 5426 .filter(FileStore.file_uid == file_uid)\
5425 5427 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5426 5428 FileStore.accessed_on: datetime.datetime.now()})
5427 5429 if commit:
5428 5430 Session().commit()
5429 5431
5430 5432 def __json__(self):
5431 5433 data = {
5432 5434 'filename': self.file_display_name,
5433 5435 'filename_org': self.file_org_name,
5434 5436 'file_uid': self.file_uid,
5435 5437 'description': self.file_description,
5436 5438 'hidden': self.hidden,
5437 5439 'size': self.file_size,
5438 5440 'created_on': self.created_on,
5439 5441 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5440 5442 'downloaded_times': self.accessed_count,
5441 5443 'sha256': self.file_hash,
5442 5444 'metadata': self.file_metadata,
5443 5445 }
5444 5446
5445 5447 return data
5446 5448
5447 5449 def __repr__(self):
5448 5450 return '<FileStore({})>'.format(self.file_store_id)
5449 5451
5450 5452
5451 5453 class FileStoreMetadata(Base, BaseModel):
5452 5454 __tablename__ = 'file_store_metadata'
5453 5455 __table_args__ = (
5454 5456 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5455 5457 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5456 5458 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5457 5459 base_table_args
5458 5460 )
5459 5461 SETTINGS_TYPES = {
5460 5462 'str': safe_str,
5461 5463 'int': safe_int,
5462 5464 'unicode': safe_unicode,
5463 5465 'bool': str2bool,
5464 5466 'list': functools.partial(aslist, sep=',')
5465 5467 }
5466 5468
5467 5469 file_store_meta_id = Column(
5468 5470 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5469 5471 primary_key=True)
5470 5472 _file_store_meta_section = Column(
5471 5473 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5472 5474 nullable=True, unique=None, default=None)
5473 5475 _file_store_meta_section_hash = Column(
5474 5476 "file_store_meta_section_hash", String(255),
5475 5477 nullable=True, unique=None, default=None)
5476 5478 _file_store_meta_key = Column(
5477 5479 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5478 5480 nullable=True, unique=None, default=None)
5479 5481 _file_store_meta_key_hash = Column(
5480 5482 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5481 5483 _file_store_meta_value = Column(
5482 5484 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5483 5485 nullable=True, unique=None, default=None)
5484 5486 _file_store_meta_value_type = Column(
5485 5487 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5486 5488 default='unicode')
5487 5489
5488 5490 file_store_id = Column(
5489 5491 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5490 5492 nullable=True, unique=None, default=None)
5491 5493
5492 5494 file_store = relationship('FileStore', lazy='joined')
5493 5495
5494 5496 @classmethod
5495 5497 def valid_value_type(cls, value):
5496 5498 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5497 5499 raise ArtifactMetadataBadValueType(
5498 5500 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5499 5501
5500 5502 @hybrid_property
5501 5503 def file_store_meta_section(self):
5502 5504 return self._file_store_meta_section
5503 5505
5504 5506 @file_store_meta_section.setter
5505 5507 def file_store_meta_section(self, value):
5506 5508 self._file_store_meta_section = value
5507 5509 self._file_store_meta_section_hash = _hash_key(value)
5508 5510
5509 5511 @hybrid_property
5510 5512 def file_store_meta_key(self):
5511 5513 return self._file_store_meta_key
5512 5514
5513 5515 @file_store_meta_key.setter
5514 5516 def file_store_meta_key(self, value):
5515 5517 self._file_store_meta_key = value
5516 5518 self._file_store_meta_key_hash = _hash_key(value)
5517 5519
5518 5520 @hybrid_property
5519 5521 def file_store_meta_value(self):
5520 5522 val = self._file_store_meta_value
5521 5523
5522 5524 if self._file_store_meta_value_type:
5523 5525 # e.g unicode.encrypted == unicode
5524 5526 _type = self._file_store_meta_value_type.split('.')[0]
5525 5527 # decode the encrypted value if it's encrypted field type
5526 5528 if '.encrypted' in self._file_store_meta_value_type:
5527 5529 cipher = EncryptedTextValue()
5528 5530 val = safe_unicode(cipher.process_result_value(val, None))
5529 5531 # do final type conversion
5530 5532 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5531 5533 val = converter(val)
5532 5534
5533 5535 return val
5534 5536
5535 5537 @file_store_meta_value.setter
5536 5538 def file_store_meta_value(self, val):
5537 5539 val = safe_unicode(val)
5538 5540 # encode the encrypted value
5539 5541 if '.encrypted' in self.file_store_meta_value_type:
5540 5542 cipher = EncryptedTextValue()
5541 5543 val = safe_unicode(cipher.process_bind_param(val, None))
5542 5544 self._file_store_meta_value = val
5543 5545
5544 5546 @hybrid_property
5545 5547 def file_store_meta_value_type(self):
5546 5548 return self._file_store_meta_value_type
5547 5549
5548 5550 @file_store_meta_value_type.setter
5549 5551 def file_store_meta_value_type(self, val):
5550 5552 # e.g unicode.encrypted
5551 5553 self.valid_value_type(val)
5552 5554 self._file_store_meta_value_type = val
5553 5555
5554 5556 def __json__(self):
5555 5557 data = {
5556 5558 'artifact': self.file_store.file_uid,
5557 5559 'section': self.file_store_meta_section,
5558 5560 'key': self.file_store_meta_key,
5559 5561 'value': self.file_store_meta_value,
5560 5562 }
5561 5563
5562 5564 return data
5563 5565
5564 5566 def __repr__(self):
5565 5567 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section,
5566 5568 self.file_store_meta_key, self.file_store_meta_value)
5567 5569
5568 5570
5569 5571 class DbMigrateVersion(Base, BaseModel):
5570 5572 __tablename__ = 'db_migrate_version'
5571 5573 __table_args__ = (
5572 5574 base_table_args,
5573 5575 )
5574 5576
5575 5577 repository_id = Column('repository_id', String(250), primary_key=True)
5576 5578 repository_path = Column('repository_path', Text)
5577 5579 version = Column('version', Integer)
5578 5580
5579 5581 @classmethod
5580 5582 def set_version(cls, version):
5581 5583 """
5582 5584 Helper for forcing a different version, usually for debugging purposes via ishell.
5583 5585 """
5584 5586 ver = DbMigrateVersion.query().first()
5585 5587 ver.version = version
5586 5588 Session().commit()
5587 5589
5588 5590
5589 5591 class DbSession(Base, BaseModel):
5590 5592 __tablename__ = 'db_session'
5591 5593 __table_args__ = (
5592 5594 base_table_args,
5593 5595 )
5594 5596
5595 5597 def __repr__(self):
5596 5598 return '<DB:DbSession({})>'.format(self.id)
5597 5599
5598 5600 id = Column('id', Integer())
5599 5601 namespace = Column('namespace', String(255), primary_key=True)
5600 5602 accessed = Column('accessed', DateTime, nullable=False)
5601 5603 created = Column('created', DateTime, nullable=False)
5602 5604 data = Column('data', PickleType, nullable=False)
@@ -1,2072 +1,2072 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
71 71 # Data structure to hold the response data when updating commits during a pull
72 72 # request update.
73 73 class UpdateResponse(object):
74 74
75 75 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 76 commit_changes, source_changed, target_changed):
77 77
78 78 self.executed = executed
79 79 self.reason = reason
80 80 self.new = new
81 81 self.old = old
82 82 self.common_ancestor_id = common_ancestor_id
83 83 self.changes = commit_changes
84 84 self.source_changed = source_changed
85 85 self.target_changed = target_changed
86 86
87 87
88 88 def get_diff_info(
89 89 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 90 get_commit_authors=True):
91 91 """
92 92 Calculates detailed diff information for use in previewing the creation of a pull request.
93 93 This is also used for the default reviewers logic.
94 94 """
95 95
96 96 source_scm = source_repo.scm_instance()
97 97 target_scm = target_repo.scm_instance()
98 98
99 99 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 100 if not ancestor_id:
101 101 raise ValueError(
102 102 'cannot calculate diff info without a common ancestor. '
103 103 'Make sure both repositories are related, and have a common forking commit.')
104 104
105 105 # the case here is that we want a simple diff without incoming commits,
106 106 # previewing what will be merged based only on commits in the source.
107 107 log.debug('Using ancestor %s as source_ref instead of %s',
108 108 ancestor_id, source_ref)
109 109
110 110 # source of changes now is the common ancestor
111 111 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112 112 # the target commit becomes the source ref, as it is the last commit;
113 113 # for diff generation this logic gives a proper diff
114 114 target_commit = source_scm.get_commit(commit_id=source_ref)
115 115
116 116 vcs_diff = \
117 117 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 118 ignore_whitespace=False, context=3)
119 119
120 120 diff_processor = diffs.DiffProcessor(
121 121 vcs_diff, format='newdiff', diff_limit=None,
122 122 file_limit=None, show_full_diff=True)
123 123
124 124 _parsed = diff_processor.prepare()
125 125
126 126 all_files = []
127 127 all_files_changes = []
128 128 changed_lines = {}
129 129 stats = [0, 0]
130 130 for f in _parsed:
131 131 all_files.append(f['filename'])
132 132 all_files_changes.append({
133 133 'filename': f['filename'],
134 134 'stats': f['stats']
135 135 })
136 136 stats[0] += f['stats']['added']
137 137 stats[1] += f['stats']['deleted']
138 138
139 139 changed_lines[f['filename']] = []
140 140 if len(f['chunks']) < 2:
141 141 continue
142 142 # the first chunk is "context" information
143 143 for chunks in f['chunks'][1:]:
144 144 for chunk in chunks['lines']:
145 145 if chunk['action'] not in ('del', 'mod'):
146 146 continue
147 147 changed_lines[f['filename']].append(chunk['old_lineno'])
148 148
149 149 commit_authors = []
150 150 user_counts = {}
151 151 email_counts = {}
152 152 author_counts = {}
153 153 _commit_cache = {}
154 154
155 155 commits = []
156 156 if get_commit_authors:
157 157 commits = target_scm.compare(
158 158 target_ref, source_ref, source_scm, merge=True,
159 159 pre_load=["author"])
160 160
161 161 for commit in commits:
162 162 user = User.get_from_cs_author(commit.author)
163 163 if user and user not in commit_authors:
164 164 commit_authors.append(user)
165 165
166 166 # lines
167 167 if get_authors:
168 168 target_commit = source_repo.get_commit(ancestor_id)
169 169
170 170 for fname, lines in changed_lines.items():
171 171 try:
172 172 node = target_commit.get_node(fname)
173 173 except Exception:
174 174 continue
175 175
176 176 if not isinstance(node, FileNode):
177 177 continue
178 178
179 179 for annotation in node.annotate:
180 180 line_no, commit_id, get_commit_func, line_text = annotation
181 181 if line_no in lines:
182 182 if commit_id not in _commit_cache:
183 183 _commit_cache[commit_id] = get_commit_func()
184 184 commit = _commit_cache[commit_id]
185 185 author = commit.author
186 186 email = commit.author_email
187 187 user = User.get_from_cs_author(author)
188 188 if user:
189 user_counts[user] = user_counts.get(user, 0) + 1
189 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
190 190 author_counts[author] = author_counts.get(author, 0) + 1
191 191 email_counts[email] = email_counts.get(email, 0) + 1
192 192
193 193 return {
194 194 'commits': commits,
195 195 'files': all_files_changes,
196 196 'stats': stats,
197 197 'ancestor': ancestor_id,
198 198 # original authors of modified files
199 199 'original_authors': {
200 200 'users': user_counts,
201 201 'authors': author_counts,
202 202 'emails': email_counts,
203 203 },
204 204 'commit_authors': commit_authors
205 205 }
206 206
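# Illustrative sketch (not part of the original module): one way the dictionary
# returned by get_diff_info() above could be consumed for default-reviewer
# matching. `source_repo`/`target_repo` are assumed to be Repository instances
# and the refs valid commit ids in those repositories.
#
#   diff_info = get_diff_info(
#       source_repo, source_ref, target_repo, target_ref, get_authors=True)
#   touched_files = [entry['filename'] for entry in diff_info['files']]
#   # user ids ranked by how many of the changed lines they originally authored
#   ranked_user_ids = sorted(
#       diff_info['original_authors']['users'].items(),
#       key=lambda item: item[1], reverse=True)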
207 207
208 208 class PullRequestModel(BaseModel):
209 209
210 210 cls = PullRequest
211 211
212 212 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
213 213
214 214 UPDATE_STATUS_MESSAGES = {
215 215 UpdateFailureReason.NONE: lazy_ugettext(
216 216 'Pull request update successful.'),
217 217 UpdateFailureReason.UNKNOWN: lazy_ugettext(
218 218 'Pull request update failed because of an unknown error.'),
219 219 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
220 220 'No update needed because the source and target have not changed.'),
221 221 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
222 222 'Pull request cannot be updated because the reference type is '
223 223 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
224 224 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
225 225 'This pull request cannot be updated because the target '
226 226 'reference is missing.'),
227 227 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
228 228 'This pull request cannot be updated because the source '
229 229 'reference is missing.'),
230 230 }
231 231 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
232 232 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
233 233
234 234 def __get_pull_request(self, pull_request):
235 235 return self._get_instance((
236 236 PullRequest, PullRequestVersion), pull_request)
237 237
238 238 def _check_perms(self, perms, pull_request, user, api=False):
239 239 if not api:
240 240 return h.HasRepoPermissionAny(*perms)(
241 241 user=user, repo_name=pull_request.target_repo.repo_name)
242 242 else:
243 243 return h.HasRepoPermissionAnyApi(*perms)(
244 244 user=user, repo_name=pull_request.target_repo.repo_name)
245 245
246 246 def check_user_read(self, pull_request, user, api=False):
247 247 _perms = ('repository.admin', 'repository.write', 'repository.read',)
248 248 return self._check_perms(_perms, pull_request, user, api)
249 249
250 250 def check_user_merge(self, pull_request, user, api=False):
251 251 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
252 252 return self._check_perms(_perms, pull_request, user, api)
253 253
254 254 def check_user_update(self, pull_request, user, api=False):
255 255 owner = user.user_id == pull_request.user_id
256 256 return self.check_user_merge(pull_request, user, api) or owner
257 257
258 258 def check_user_delete(self, pull_request, user):
259 259 owner = user.user_id == pull_request.user_id
260 260 _perms = ('repository.admin',)
261 261 return self._check_perms(_perms, pull_request, user) or owner
262 262
263 263 def check_user_change_status(self, pull_request, user, api=False):
264 264 reviewer = user.user_id in [x.user_id for x in
265 265 pull_request.reviewers]
266 266 return self.check_user_update(pull_request, user, api) or reviewer
267 267
268 268 def check_user_comment(self, pull_request, user):
269 269 owner = user.user_id == pull_request.user_id
270 270 return self.check_user_read(pull_request, user) or owner
271 271
272 272 def get(self, pull_request):
273 273 return self.__get_pull_request(pull_request)
274 274
275 275 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
276 276 statuses=None, opened_by=None, order_by=None,
277 277 order_dir='desc', only_created=False):
278 278 repo = None
279 279 if repo_name:
280 280 repo = self._get_repo(repo_name)
281 281
282 282 q = PullRequest.query()
283 283
284 284 if search_q:
285 285 like_expression = u'%{}%'.format(safe_unicode(search_q))
286 286 q = q.join(User)
287 287 q = q.filter(or_(
288 288 cast(PullRequest.pull_request_id, String).ilike(like_expression),
289 289 User.username.ilike(like_expression),
290 290 PullRequest.title.ilike(like_expression),
291 291 PullRequest.description.ilike(like_expression),
292 292 ))
293 293
294 294 # source or target
295 295 if repo and source:
296 296 q = q.filter(PullRequest.source_repo == repo)
297 297 elif repo:
298 298 q = q.filter(PullRequest.target_repo == repo)
299 299
300 300 # closed,opened
301 301 if statuses:
302 302 q = q.filter(PullRequest.status.in_(statuses))
303 303
304 304 # opened by filter
305 305 if opened_by:
306 306 q = q.filter(PullRequest.user_id.in_(opened_by))
307 307
308 308 # only get those that are in "created" state
309 309 if only_created:
310 310 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
311 311
312 312 if order_by:
313 313 order_map = {
314 314 'name_raw': PullRequest.pull_request_id,
315 315 'id': PullRequest.pull_request_id,
316 316 'title': PullRequest.title,
317 317 'updated_on_raw': PullRequest.updated_on,
318 318 'target_repo': PullRequest.target_repo_id
319 319 }
320 320 if order_dir == 'asc':
321 321 q = q.order_by(order_map[order_by].asc())
322 322 else:
323 323 q = q.order_by(order_map[order_by].desc())
324 324
325 325 return q
326 326
327 327 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
328 328 opened_by=None):
329 329 """
330 330 Count the number of pull requests for a specific repository.
331 331
332 332 :param repo_name: target or source repo
333 333 :param search_q: filter by text
334 334 :param source: boolean flag to specify if repo_name refers to source
335 335 :param statuses: list of pull request statuses
336 336 :param opened_by: author user of the pull request
337 337 :returns: int number of pull requests
338 338 """
339 339 q = self._prepare_get_all_query(
340 340 repo_name, search_q=search_q, source=source, statuses=statuses,
341 341 opened_by=opened_by)
342 342
343 343 return q.count()
344 344
345 345 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
346 346 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
347 347 """
348 348 Get all pull requests for a specific repository.
349 349
350 350 :param repo_name: target or source repo
351 351 :param search_q: filter by text
352 352 :param source: boolean flag to specify if repo_name refers to source
353 353 :param statuses: list of pull request statuses
354 354 :param opened_by: author user of the pull request
355 355 :param offset: pagination offset
356 356 :param length: length of returned list
357 357 :param order_by: order of the returned list
358 358 :param order_dir: 'asc' or 'desc' ordering direction
359 359 :returns: list of pull requests
360 360 """
361 361 q = self._prepare_get_all_query(
362 362 repo_name, search_q=search_q, source=source, statuses=statuses,
363 363 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
364 364
365 365 if length:
366 366 pull_requests = q.limit(length).offset(offset).all()
367 367 else:
368 368 pull_requests = q.all()
369 369
370 370 return pull_requests
371 371
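# Illustrative usage sketch for get_all() above; 'some/repo' and the search
# term are placeholders, not values taken from the original code.
#
#   model = PullRequestModel()
#   recent_prs = model.get_all(
#       'some/repo', search_q='fix', offset=0, length=20,
#       order_by='updated_on_raw', order_dir='desc')
#   total = model.count_all('some/repo', search_q='fix')
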
372 372 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
373 373 opened_by=None):
374 374 """
375 375 Count the number of pull requests for a specific repository that are
376 376 awaiting review.
377 377
378 378 :param repo_name: target or source repo
379 379 :param search_q: filter by text
380 380 :param source: boolean flag to specify if repo_name refers to source
381 381 :param statuses: list of pull request statuses
382 382 :param opened_by: author user of the pull request
383 383 :returns: int number of pull requests
384 384 """
385 385 pull_requests = self.get_awaiting_review(
386 386 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
387 387
388 388 return len(pull_requests)
389 389
390 390 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
391 391 opened_by=None, offset=0, length=None,
392 392 order_by=None, order_dir='desc'):
393 393 """
394 394 Get all pull requests for a specific repository that are awaiting
395 395 review.
396 396
397 397 :param repo_name: target or source repo
398 398 :param search_q: filter by text
399 399 :param source: boolean flag to specify if repo_name refers to source
400 400 :param statuses: list of pull request statuses
401 401 :param opened_by: author user of the pull request
402 402 :param offset: pagination offset
403 403 :param length: length of returned list
404 404 :param order_by: order of the returned list
405 405 :param order_dir: 'asc' or 'desc' ordering direction
406 406 :returns: list of pull requests
407 407 """
408 408 pull_requests = self.get_all(
409 409 repo_name, search_q=search_q, source=source, statuses=statuses,
410 410 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
411 411
412 412 _filtered_pull_requests = []
413 413 for pr in pull_requests:
414 414 status = pr.calculated_review_status()
415 415 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
416 416 ChangesetStatus.STATUS_UNDER_REVIEW]:
417 417 _filtered_pull_requests.append(pr)
418 418 if length:
419 419 return _filtered_pull_requests[offset:offset+length]
420 420 else:
421 421 return _filtered_pull_requests
422 422
423 423 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
424 424 opened_by=None, user_id=None):
425 425 """
426 426 Count the number of pull requests for a specific repository that are
427 427 awaiting review from a specific user.
428 428
429 429 :param repo_name: target or source repo
430 430 :param search_q: filter by text
431 431 :param source: boolean flag to specify if repo_name refers to source
432 432 :param statuses: list of pull request statuses
433 433 :param opened_by: author user of the pull request
434 434 :param user_id: reviewer user of the pull request
435 435 :returns: int number of pull requests
436 436 """
437 437 pull_requests = self.get_awaiting_my_review(
438 438 repo_name, search_q=search_q, source=source, statuses=statuses,
439 439 opened_by=opened_by, user_id=user_id)
440 440
441 441 return len(pull_requests)
442 442
443 443 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
444 444 opened_by=None, user_id=None, offset=0,
445 445 length=None, order_by=None, order_dir='desc'):
446 446 """
447 447 Get all pull requests for a specific repository that are awaiting
448 448 review from a specific user.
449 449
450 450 :param repo_name: target or source repo
451 451 :param search_q: filter by text
452 452 :param source: boolean flag to specify if repo_name refers to source
453 453 :param statuses: list of pull request statuses
454 454 :param opened_by: author user of the pull request
455 455 :param user_id: reviewer user of the pull request
456 456 :param offset: pagination offset
457 457 :param length: length of returned list
458 458 :param order_by: order of the returned list
459 459 :param order_dir: 'asc' or 'desc' ordering direction
460 460 :returns: list of pull requests
461 461 """
462 462 pull_requests = self.get_all(
463 463 repo_name, search_q=search_q, source=source, statuses=statuses,
464 464 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
465 465
466 466 _my = PullRequestModel().get_not_reviewed(user_id)
467 467 my_participation = []
468 468 for pr in pull_requests:
469 469 if pr in _my:
470 470 my_participation.append(pr)
471 471 _filtered_pull_requests = my_participation
472 472 if length:
473 473 return _filtered_pull_requests[offset:offset+length]
474 474 else:
475 475 return _filtered_pull_requests
476 476
477 477 def get_not_reviewed(self, user_id):
478 478 return [
479 479 x.pull_request for x in PullRequestReviewers.query().filter(
480 480 PullRequestReviewers.user_id == user_id).all()
481 481 ]
482 482
483 483 def _prepare_participating_query(self, user_id=None, statuses=None, query='',
484 484 order_by=None, order_dir='desc'):
485 485 q = PullRequest.query()
486 486 if user_id:
487 487 reviewers_subquery = Session().query(
488 488 PullRequestReviewers.pull_request_id).filter(
489 489 PullRequestReviewers.user_id == user_id).subquery()
490 490 user_filter = or_(
491 491 PullRequest.user_id == user_id,
492 492 PullRequest.pull_request_id.in_(reviewers_subquery)
493 493 )
494 494 q = PullRequest.query().filter(user_filter)
495 495
496 496 # closed,opened
497 497 if statuses:
498 498 q = q.filter(PullRequest.status.in_(statuses))
499 499
500 500 if query:
501 501 like_expression = u'%{}%'.format(safe_unicode(query))
502 502 q = q.join(User)
503 503 q = q.filter(or_(
504 504 cast(PullRequest.pull_request_id, String).ilike(like_expression),
505 505 User.username.ilike(like_expression),
506 506 PullRequest.title.ilike(like_expression),
507 507 PullRequest.description.ilike(like_expression),
508 508 ))
509 509 if order_by:
510 510 order_map = {
511 511 'name_raw': PullRequest.pull_request_id,
512 512 'title': PullRequest.title,
513 513 'updated_on_raw': PullRequest.updated_on,
514 514 'target_repo': PullRequest.target_repo_id
515 515 }
516 516 if order_dir == 'asc':
517 517 q = q.order_by(order_map[order_by].asc())
518 518 else:
519 519 q = q.order_by(order_map[order_by].desc())
520 520
521 521 return q
522 522
523 523 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
524 524 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
525 525 return q.count()
526 526
527 527 def get_im_participating_in(
528 528 self, user_id=None, statuses=None, query='', offset=0,
529 529 length=None, order_by=None, order_dir='desc'):
530 530 """
531 531 Get all pull requests that I'm participating in, or that I have opened
532 532 """
533 533
534 534 q = self._prepare_participating_query(
535 535 user_id, statuses=statuses, query=query, order_by=order_by,
536 536 order_dir=order_dir)
537 537
538 538 if length:
539 539 pull_requests = q.limit(length).offset(offset).all()
540 540 else:
541 541 pull_requests = q.all()
542 542
543 543 return pull_requests
544 544
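# Illustrative sketch: paging through the pull requests a user participates in
# (the user id 2 is a placeholder, not from the original code).
#
#   model = PullRequestModel()
#   total = model.count_im_participating_in(user_id=2)
#   page = model.get_im_participating_in(
#       user_id=2, offset=0, length=10,
#       order_by='updated_on_raw', order_dir='desc')
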
545 545 def get_versions(self, pull_request):
546 546 """
547 547 returns the versions of this pull request, sorted by ID in ascending order
548 548 """
549 549 return PullRequestVersion.query()\
550 550 .filter(PullRequestVersion.pull_request == pull_request)\
551 551 .order_by(PullRequestVersion.pull_request_version_id.asc())\
552 552 .all()
553 553
554 554 def get_pr_version(self, pull_request_id, version=None):
555 555 at_version = None
556 556
557 557 if version and version == 'latest':
558 558 pull_request_ver = PullRequest.get(pull_request_id)
559 559 pull_request_obj = pull_request_ver
560 560 _org_pull_request_obj = pull_request_obj
561 561 at_version = 'latest'
562 562 elif version:
563 563 pull_request_ver = PullRequestVersion.get_or_404(version)
564 564 pull_request_obj = pull_request_ver
565 565 _org_pull_request_obj = pull_request_ver.pull_request
566 566 at_version = pull_request_ver.pull_request_version_id
567 567 else:
568 568 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
569 569 pull_request_id)
570 570
571 571 pull_request_display_obj = PullRequest.get_pr_display_object(
572 572 pull_request_obj, _org_pull_request_obj)
573 573
574 574 return _org_pull_request_obj, pull_request_obj, \
575 575 pull_request_display_obj, at_version
576 576
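# Illustrative sketch of unpacking the 4-tuple returned by get_pr_version()
# above (the pull request id 1 is a placeholder):
#
#   (pr_latest, pr_at_version, pr_display,
#    at_version) = PullRequestModel().get_pr_version(1, version=None)
#   # pr_display is the object meant for rendering; at_version is None,
#   # 'latest', or a pull request version id
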
577 577 def create(self, created_by, source_repo, source_ref, target_repo,
578 578 target_ref, revisions, reviewers, title, description=None,
579 579 common_ancestor_id=None,
580 580 description_renderer=None,
581 581 reviewer_data=None, translator=None, auth_user=None):
582 582 translator = translator or get_current_request().translate
583 583
584 584 created_by_user = self._get_user(created_by)
585 585 auth_user = auth_user or created_by_user.AuthUser()
586 586 source_repo = self._get_repo(source_repo)
587 587 target_repo = self._get_repo(target_repo)
588 588
589 589 pull_request = PullRequest()
590 590 pull_request.source_repo = source_repo
591 591 pull_request.source_ref = source_ref
592 592 pull_request.target_repo = target_repo
593 593 pull_request.target_ref = target_ref
594 594 pull_request.revisions = revisions
595 595 pull_request.title = title
596 596 pull_request.description = description
597 597 pull_request.description_renderer = description_renderer
598 598 pull_request.author = created_by_user
599 599 pull_request.reviewer_data = reviewer_data
600 600 pull_request.pull_request_state = pull_request.STATE_CREATING
601 601 pull_request.common_ancestor_id = common_ancestor_id
602 602
603 603 Session().add(pull_request)
604 604 Session().flush()
605 605
606 606 reviewer_ids = set()
607 607 # members / reviewers
608 608 for reviewer_object in reviewers:
609 609 user_id, reasons, mandatory, rules = reviewer_object
610 610 user = self._get_user(user_id)
611 611
612 612 # skip duplicates
613 613 if user.user_id in reviewer_ids:
614 614 continue
615 615
616 616 reviewer_ids.add(user.user_id)
617 617
618 618 reviewer = PullRequestReviewers()
619 619 reviewer.user = user
620 620 reviewer.pull_request = pull_request
621 621 reviewer.reasons = reasons
622 622 reviewer.mandatory = mandatory
623 623
624 624 # NOTE(marcink): pick only first rule for now
625 625 rule_id = list(rules)[0] if rules else None
626 626 rule = RepoReviewRule.get(rule_id) if rule_id else None
627 627 if rule:
628 628 review_group = rule.user_group_vote_rule(user_id)
629 629 # we check if this particular reviewer is a member of a voting group
630 630 if review_group:
631 631 # NOTE(marcink):
632 632 # it can be that the user is a member of more than one group, but we pick
633 633 # the first one, same as the default reviewers algo
634 634 review_group = review_group[0]
635 635
636 636 rule_data = {
637 637 'rule_name':
638 638 rule.review_rule_name,
639 639 'rule_user_group_entry_id':
640 640 review_group.repo_review_rule_users_group_id,
641 641 'rule_user_group_name':
642 642 review_group.users_group.users_group_name,
643 643 'rule_user_group_members':
644 644 [x.user.username for x in review_group.users_group.members],
645 645 'rule_user_group_members_id':
646 646 [x.user.user_id for x in review_group.users_group.members],
647 647 }
648 648 # e.g {'vote_rule': -1, 'mandatory': True}
649 649 rule_data.update(review_group.rule_data())
650 650
651 651 reviewer.rule_data = rule_data
652 652
653 653 Session().add(reviewer)
654 654 Session().flush()
655 655
656 656 # Set approval status to "Under Review" for all commits which are
657 657 # part of this pull request.
658 658 ChangesetStatusModel().set_status(
659 659 repo=target_repo,
660 660 status=ChangesetStatus.STATUS_UNDER_REVIEW,
661 661 user=created_by_user,
662 662 pull_request=pull_request
663 663 )
664 664 # we commit early at this point. This has to do with the fact
665 665 # that the queries before do some row-locking, and because of that
666 666 # we need to commit and finish the transaction before the validate call below,
667 667 # which for large repos could take long, resulting in long row locks
668 668 Session().commit()
669 669
670 670 # prepare workspace, and run initial merge simulation. Set state during that
671 671 # operation
672 672 pull_request = PullRequest.get(pull_request.pull_request_id)
673 673
674 674 # set the state to merging for the merge simulation, and if it finishes, to created,
675 675 # so we mark that the simulation is working fine
676 676 with pull_request.set_state(PullRequest.STATE_MERGING,
677 677 final_state=PullRequest.STATE_CREATED) as state_obj:
678 678 MergeCheck.validate(
679 679 pull_request, auth_user=auth_user, translator=translator)
680 680
681 681 self.notify_reviewers(pull_request, reviewer_ids)
682 682 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
683 683
684 684 creation_data = pull_request.get_api_data(with_merge_state=False)
685 685 self._log_audit_action(
686 686 'repo.pull_request.create', {'data': creation_data},
687 687 auth_user, pull_request)
688 688
689 689 return pull_request
690 690
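# Illustrative sketch of the `reviewers` structure expected by create() above;
# the ids, repo names and refs are placeholders, not from the original code.
#
#   reviewers = [
#       # (user id or username, reasons, mandatory flag, matched rule ids)
#       (2, ['Default reviewer rule match'], True, []),
#       ('some_username', [], False, []),
#   ]
#   pr = PullRequestModel().create(
#       created_by=2, source_repo='fork/repo', source_ref='branch:feature:abc1',
#       target_repo='upstream/repo', target_ref='branch:master:def2',
#       revisions=['abc1'], reviewers=reviewers, title='My change')
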
691 691 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
692 692 pull_request = self.__get_pull_request(pull_request)
693 693 target_scm = pull_request.target_repo.scm_instance()
694 694 if action == 'create':
695 695 trigger_hook = hooks_utils.trigger_create_pull_request_hook
696 696 elif action == 'merge':
697 697 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
698 698 elif action == 'close':
699 699 trigger_hook = hooks_utils.trigger_close_pull_request_hook
700 700 elif action == 'review_status_change':
701 701 trigger_hook = hooks_utils.trigger_review_pull_request_hook
702 702 elif action == 'update':
703 703 trigger_hook = hooks_utils.trigger_update_pull_request_hook
704 704 elif action == 'comment':
705 705 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
706 706 else:
707 707 return
708 708
709 709 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
710 710 pull_request, action, trigger_hook)
711 711 trigger_hook(
712 712 username=user.username,
713 713 repo_name=pull_request.target_repo.repo_name,
714 714 repo_type=target_scm.alias,
715 715 pull_request=pull_request,
716 716 data=data)
717 717
718 718 def _get_commit_ids(self, pull_request):
719 719 """
720 720 Return the commit ids of the merged pull request.
721 721
722 722 This method does not yet deal correctly with the lack of autoupdates
723 723 nor with implicit target updates.
724 724 For example: if a commit in the source repo is already in the target, it
725 725 will be reported anyway.
726 726 """
727 727 merge_rev = pull_request.merge_rev
728 728 if merge_rev is None:
729 729 raise ValueError('This pull request was not merged yet')
730 730
731 731 commit_ids = list(pull_request.revisions)
732 732 if merge_rev not in commit_ids:
733 733 commit_ids.append(merge_rev)
734 734
735 735 return commit_ids
736 736
737 737 def merge_repo(self, pull_request, user, extras):
738 738 log.debug("Merging pull request %s", pull_request.pull_request_id)
739 739 extras['user_agent'] = 'internal-merge'
740 740 merge_state = self._merge_pull_request(pull_request, user, extras)
741 741 if merge_state.executed:
742 742 log.debug("Merge was successful, updating the pull request comments.")
743 743 self._comment_and_close_pr(pull_request, user, merge_state)
744 744
745 745 self._log_audit_action(
746 746 'repo.pull_request.merge',
747 747 {'merge_state': merge_state.__dict__},
748 748 user, pull_request)
749 749
750 750 else:
751 751 log.warn("Merge failed, not updating the pull request.")
752 752 return merge_state
753 753
754 754 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
755 755 target_vcs = pull_request.target_repo.scm_instance()
756 756 source_vcs = pull_request.source_repo.scm_instance()
757 757
758 758 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
759 759 pr_id=pull_request.pull_request_id,
760 760 pr_title=pull_request.title,
761 761 source_repo=source_vcs.name,
762 762 source_ref_name=pull_request.source_ref_parts.name,
763 763 target_repo=target_vcs.name,
764 764 target_ref_name=pull_request.target_ref_parts.name,
765 765 )
766 766
767 767 workspace_id = self._workspace_id(pull_request)
768 768 repo_id = pull_request.target_repo.repo_id
769 769 use_rebase = self._use_rebase_for_merging(pull_request)
770 770 close_branch = self._close_branch_before_merging(pull_request)
771 771 user_name = self._user_name_for_merging(pull_request, user)
772 772
773 773 target_ref = self._refresh_reference(
774 774 pull_request.target_ref_parts, target_vcs)
775 775
776 776 callback_daemon, extras = prepare_callback_daemon(
777 777 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
778 778 host=vcs_settings.HOOKS_HOST,
779 779 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
780 780
781 781 with callback_daemon:
782 782 # TODO: johbo: Implement a clean way to run a config_override
783 783 # for a single call.
784 784 target_vcs.config.set(
785 785 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
786 786
787 787 merge_state = target_vcs.merge(
788 788 repo_id, workspace_id, target_ref, source_vcs,
789 789 pull_request.source_ref_parts,
790 790 user_name=user_name, user_email=user.email,
791 791 message=message, use_rebase=use_rebase,
792 792 close_branch=close_branch)
793 793 return merge_state
794 794
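# Illustrative sketch: a custom `merge_msg` passed to _merge_pull_request()
# above may use the placeholders that are formatted into the message; the
# template text itself is an example, not the shipped MERGE_MESSAGE_TMPL.
#
#   custom_msg = (
#       u'Merged pull request !{pr_id} "{pr_title}" from '
#       u'{source_repo}:{source_ref_name} into {target_repo}:{target_ref_name}')
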
795 795 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
796 796 pull_request.merge_rev = merge_state.merge_ref.commit_id
797 797 pull_request.updated_on = datetime.datetime.now()
798 798 close_msg = close_msg or 'Pull request merged and closed'
799 799
800 800 CommentsModel().create(
801 801 text=safe_unicode(close_msg),
802 802 repo=pull_request.target_repo.repo_id,
803 803 user=user.user_id,
804 804 pull_request=pull_request.pull_request_id,
805 805 f_path=None,
806 806 line_no=None,
807 807 closing_pr=True
808 808 )
809 809
810 810 Session().add(pull_request)
811 811 Session().flush()
812 812 # TODO: paris: replace invalidation with less radical solution
813 813 ScmModel().mark_for_invalidation(
814 814 pull_request.target_repo.repo_name)
815 815 self.trigger_pull_request_hook(pull_request, user, 'merge')
816 816
817 817 def has_valid_update_type(self, pull_request):
818 818 source_ref_type = pull_request.source_ref_parts.type
819 819 return source_ref_type in self.REF_TYPES
820 820
821 821 def get_flow_commits(self, pull_request):
822 822
823 823 # source repo
824 824 source_ref_name = pull_request.source_ref_parts.name
825 825 source_ref_type = pull_request.source_ref_parts.type
826 826 source_ref_id = pull_request.source_ref_parts.commit_id
827 827 source_repo = pull_request.source_repo.scm_instance()
828 828
829 829 try:
830 830 if source_ref_type in self.REF_TYPES:
831 831 source_commit = source_repo.get_commit(source_ref_name)
832 832 else:
833 833 source_commit = source_repo.get_commit(source_ref_id)
834 834 except CommitDoesNotExistError:
835 835 raise SourceRefMissing()
836 836
837 837 # target repo
838 838 target_ref_name = pull_request.target_ref_parts.name
839 839 target_ref_type = pull_request.target_ref_parts.type
840 840 target_ref_id = pull_request.target_ref_parts.commit_id
841 841 target_repo = pull_request.target_repo.scm_instance()
842 842
843 843 try:
844 844 if target_ref_type in self.REF_TYPES:
845 845 target_commit = target_repo.get_commit(target_ref_name)
846 846 else:
847 847 target_commit = target_repo.get_commit(target_ref_id)
848 848 except CommitDoesNotExistError:
849 849 raise TargetRefMissing()
850 850
851 851 return source_commit, target_commit
852 852
853 853 def update_commits(self, pull_request, updating_user):
854 854 """
855 855 Get the updated list of commits for the pull request
856 856 and return the new pull request version and the list
857 857 of commits processed by this update action
858 858
859 859 updating_user is the user_object who triggered the update
860 860 """
861 861 pull_request = self.__get_pull_request(pull_request)
862 862 source_ref_type = pull_request.source_ref_parts.type
863 863 source_ref_name = pull_request.source_ref_parts.name
864 864 source_ref_id = pull_request.source_ref_parts.commit_id
865 865
866 866 target_ref_type = pull_request.target_ref_parts.type
867 867 target_ref_name = pull_request.target_ref_parts.name
868 868 target_ref_id = pull_request.target_ref_parts.commit_id
869 869
870 870 if not self.has_valid_update_type(pull_request):
871 871 log.debug("Skipping update of pull request %s due to ref type: %s",
872 872 pull_request, source_ref_type)
873 873 return UpdateResponse(
874 874 executed=False,
875 875 reason=UpdateFailureReason.WRONG_REF_TYPE,
876 876 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
877 877 source_changed=False, target_changed=False)
878 878
879 879 try:
880 880 source_commit, target_commit = self.get_flow_commits(pull_request)
881 881 except SourceRefMissing:
882 882 return UpdateResponse(
883 883 executed=False,
884 884 reason=UpdateFailureReason.MISSING_SOURCE_REF,
885 885 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
886 886 source_changed=False, target_changed=False)
887 887 except TargetRefMissing:
888 888 return UpdateResponse(
889 889 executed=False,
890 890 reason=UpdateFailureReason.MISSING_TARGET_REF,
891 891 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
892 892 source_changed=False, target_changed=False)
893 893
894 894 source_changed = source_ref_id != source_commit.raw_id
895 895 target_changed = target_ref_id != target_commit.raw_id
896 896
897 897 if not (source_changed or target_changed):
898 898 log.debug("Nothing changed in pull request %s", pull_request)
899 899 return UpdateResponse(
900 900 executed=False,
901 901 reason=UpdateFailureReason.NO_CHANGE,
902 902 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
903 903 source_changed=source_changed, target_changed=target_changed)
904 904
905 905 change_in_found = 'target repo' if target_changed else 'source repo'
906 906 log.debug('Updating pull request because of change in %s detected',
907 907 change_in_found)
908 908
909 909 # Finally there is a need for an update; in case of a source change
910 910 # we create a new version, else just an update
911 911 if source_changed:
912 912 pull_request_version = self._create_version_from_snapshot(pull_request)
913 913 self._link_comments_to_version(pull_request_version)
914 914 else:
915 915 try:
916 916 ver = pull_request.versions[-1]
917 917 except IndexError:
918 918 ver = None
919 919
920 920 pull_request.pull_request_version_id = \
921 921 ver.pull_request_version_id if ver else None
922 922 pull_request_version = pull_request
923 923
924 924 source_repo = pull_request.source_repo.scm_instance()
925 925 target_repo = pull_request.target_repo.scm_instance()
926 926
927 927 # re-compute commit ids
928 928 old_commit_ids = pull_request.revisions
929 929 pre_load = ["author", "date", "message", "branch"]
930 930 commit_ranges = target_repo.compare(
931 931 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
932 932 pre_load=pre_load)
933 933
934 934 target_ref = target_commit.raw_id
935 935 source_ref = source_commit.raw_id
936 936 ancestor_commit_id = target_repo.get_common_ancestor(
937 937 target_ref, source_ref, source_repo)
938 938
939 939 if not ancestor_commit_id:
940 940 raise ValueError(
941 941 'cannot calculate diff info without a common ancestor. '
942 942 'Make sure both repositories are related, and have a common forking commit.')
943 943
944 944 pull_request.common_ancestor_id = ancestor_commit_id
945 945
946 946 pull_request.source_ref = '%s:%s:%s' % (
947 947 source_ref_type, source_ref_name, source_commit.raw_id)
948 948 pull_request.target_ref = '%s:%s:%s' % (
949 949 target_ref_type, target_ref_name, ancestor_commit_id)
950 950
951 951 pull_request.revisions = [
952 952 commit.raw_id for commit in reversed(commit_ranges)]
953 953 pull_request.updated_on = datetime.datetime.now()
954 954 Session().add(pull_request)
955 955 new_commit_ids = pull_request.revisions
956 956
957 957 old_diff_data, new_diff_data = self._generate_update_diffs(
958 958 pull_request, pull_request_version)
959 959
960 960 # calculate commit and file changes
961 961 commit_changes = self._calculate_commit_id_changes(
962 962 old_commit_ids, new_commit_ids)
963 963 file_changes = self._calculate_file_changes(
964 964 old_diff_data, new_diff_data)
965 965
966 966 # set comments as outdated if DIFFS changed
967 967 CommentsModel().outdate_comments(
968 968 pull_request, old_diff_data=old_diff_data,
969 969 new_diff_data=new_diff_data)
970 970
971 971 valid_commit_changes = (commit_changes.added or commit_changes.removed)
972 972 file_node_changes = (
973 973 file_changes.added or file_changes.modified or file_changes.removed)
974 974 pr_has_changes = valid_commit_changes or file_node_changes
975 975
976 976 # Add an automatic comment to the pull request, in case
977 977 # anything has changed
978 978 if pr_has_changes:
979 979 update_comment = CommentsModel().create(
980 980 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
981 981 repo=pull_request.target_repo,
982 982 user=pull_request.author,
983 983 pull_request=pull_request,
984 984 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
985 985
986 986 # Update status to "Under Review" for added commits
987 987 for commit_id in commit_changes.added:
988 988 ChangesetStatusModel().set_status(
989 989 repo=pull_request.source_repo,
990 990 status=ChangesetStatus.STATUS_UNDER_REVIEW,
991 991 comment=update_comment,
992 992 user=pull_request.author,
993 993 pull_request=pull_request,
994 994 revision=commit_id)
995 995
996 996 # send update email to users
997 997 try:
998 998 self.notify_users(pull_request=pull_request, updating_user=updating_user,
999 999 ancestor_commit_id=ancestor_commit_id,
1000 1000 commit_changes=commit_changes,
1001 1001 file_changes=file_changes)
1002 1002 except Exception:
1003 1003 log.exception('Failed to send email notification to users')
1004 1004
1005 1005 log.debug(
1006 1006 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1007 1007 'removed_ids: %s', pull_request.pull_request_id,
1008 1008 commit_changes.added, commit_changes.common, commit_changes.removed)
1009 1009 log.debug(
1010 1010 'Updated pull request with the following file changes: %s',
1011 1011 file_changes)
1012 1012
1013 1013 log.info(
1014 1014 "Updated pull request %s from commit %s to commit %s, "
1015 1015 "stored new version %s of this pull request.",
1016 1016 pull_request.pull_request_id, source_ref_id,
1017 1017 pull_request.source_ref_parts.commit_id,
1018 1018 pull_request_version.pull_request_version_id)
1019 1019 Session().commit()
1020 1020 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1021 1021
1022 1022 return UpdateResponse(
1023 1023 executed=True, reason=UpdateFailureReason.NONE,
1024 1024 old=pull_request, new=pull_request_version,
1025 1025 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1026 1026 source_changed=source_changed, target_changed=target_changed)
1027 1027
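# Illustrative sketch: consuming the UpdateResponse returned by update_commits()
# above and mapping a failure reason to the user-facing text defined in
# UPDATE_STATUS_MESSAGES.
#
#   resp = PullRequestModel().update_commits(pull_request, updating_user)
#   if not resp.executed:
#       msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
#   else:
#       added, removed = resp.changes.added, resp.changes.removed
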
1028 1028 def _create_version_from_snapshot(self, pull_request):
1029 1029 version = PullRequestVersion()
1030 1030 version.title = pull_request.title
1031 1031 version.description = pull_request.description
1032 1032 version.status = pull_request.status
1033 1033 version.pull_request_state = pull_request.pull_request_state
1034 1034 version.created_on = datetime.datetime.now()
1035 1035 version.updated_on = pull_request.updated_on
1036 1036 version.user_id = pull_request.user_id
1037 1037 version.source_repo = pull_request.source_repo
1038 1038 version.source_ref = pull_request.source_ref
1039 1039 version.target_repo = pull_request.target_repo
1040 1040 version.target_ref = pull_request.target_ref
1041 1041
1042 1042 version._last_merge_source_rev = pull_request._last_merge_source_rev
1043 1043 version._last_merge_target_rev = pull_request._last_merge_target_rev
1044 1044 version.last_merge_status = pull_request.last_merge_status
1045 1045 version.last_merge_metadata = pull_request.last_merge_metadata
1046 1046 version.shadow_merge_ref = pull_request.shadow_merge_ref
1047 1047 version.merge_rev = pull_request.merge_rev
1048 1048 version.reviewer_data = pull_request.reviewer_data
1049 1049
1050 1050 version.revisions = pull_request.revisions
1051 1051 version.common_ancestor_id = pull_request.common_ancestor_id
1052 1052 version.pull_request = pull_request
1053 1053 Session().add(version)
1054 1054 Session().flush()
1055 1055
1056 1056 return version
1057 1057
1058 1058 def _generate_update_diffs(self, pull_request, pull_request_version):
1059 1059
1060 1060 diff_context = (
1061 1061 self.DIFF_CONTEXT +
1062 1062 CommentsModel.needed_extra_diff_context())
1063 1063 hide_whitespace_changes = False
1064 1064 source_repo = pull_request_version.source_repo
1065 1065 source_ref_id = pull_request_version.source_ref_parts.commit_id
1066 1066 target_ref_id = pull_request_version.target_ref_parts.commit_id
1067 1067 old_diff = self._get_diff_from_pr_or_version(
1068 1068 source_repo, source_ref_id, target_ref_id,
1069 1069 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1070 1070
1071 1071 source_repo = pull_request.source_repo
1072 1072 source_ref_id = pull_request.source_ref_parts.commit_id
1073 1073 target_ref_id = pull_request.target_ref_parts.commit_id
1074 1074
1075 1075 new_diff = self._get_diff_from_pr_or_version(
1076 1076 source_repo, source_ref_id, target_ref_id,
1077 1077 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1078 1078
1079 1079 old_diff_data = diffs.DiffProcessor(old_diff)
1080 1080 old_diff_data.prepare()
1081 1081 new_diff_data = diffs.DiffProcessor(new_diff)
1082 1082 new_diff_data.prepare()
1083 1083
1084 1084 return old_diff_data, new_diff_data
1085 1085
1086 1086 def _link_comments_to_version(self, pull_request_version):
1087 1087 """
1088 1088 Link all unlinked comments of this pull request to the given version.
1089 1089
1090 1090 :param pull_request_version: The `PullRequestVersion` to which
1091 1091 the comments shall be linked.
1092 1092
1093 1093 """
1094 1094 pull_request = pull_request_version.pull_request
1095 1095 comments = ChangesetComment.query()\
1096 1096 .filter(
1097 1097 # TODO: johbo: Should we query for the repo at all here?
1098 1098 # Pending decision on how comments of PRs are to be related
1099 1099 # to either the source repo, the target repo or no repo at all.
1100 1100 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1101 1101 ChangesetComment.pull_request == pull_request,
1102 1102 ChangesetComment.pull_request_version == None)\
1103 1103 .order_by(ChangesetComment.comment_id.asc())
1104 1104
1105 1105 # TODO: johbo: Find out why this breaks if it is done in a bulk
1106 1106 # operation.
1107 1107 for comment in comments:
1108 1108 comment.pull_request_version_id = (
1109 1109 pull_request_version.pull_request_version_id)
1110 1110 Session().add(comment)
1111 1111
1112 1112 def _calculate_commit_id_changes(self, old_ids, new_ids):
1113 1113 added = [x for x in new_ids if x not in old_ids]
1114 1114 common = [x for x in new_ids if x in old_ids]
1115 1115 removed = [x for x in old_ids if x not in new_ids]
1116 1116 total = new_ids
1117 1117 return ChangeTuple(added, common, removed, total)
1118 1118
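# Worked example (illustrative) for _calculate_commit_id_changes() above:
#
#   old_ids = ['a1', 'b2', 'c3']
#   new_ids = ['b2', 'c3', 'd4']
#   # -> added=['d4'], common=['b2', 'c3'], removed=['a1'], total=new_ids
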
1119 1119 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1120 1120
1121 1121 old_files = OrderedDict()
1122 1122 for diff_data in old_diff_data.parsed_diff:
1123 1123 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1124 1124
1125 1125 added_files = []
1126 1126 modified_files = []
1127 1127 removed_files = []
1128 1128 for diff_data in new_diff_data.parsed_diff:
1129 1129 new_filename = diff_data['filename']
1130 1130 new_hash = md5_safe(diff_data['raw_diff'])
1131 1131
1132 1132 old_hash = old_files.get(new_filename)
1133 1133 if not old_hash:
1134 1134 # the file is not present in the old diff; we have to figure out from the parsed diff
1135 1135 # whether the operation is ADD or REMOVE
1136 1136 operations_dict = diff_data['stats']['ops']
1137 1137 if diffs.DEL_FILENODE in operations_dict:
1138 1138 removed_files.append(new_filename)
1139 1139 else:
1140 1140 added_files.append(new_filename)
1141 1141 else:
1142 1142 if new_hash != old_hash:
1143 1143 modified_files.append(new_filename)
1144 1144 # now remove a file from old, since we have seen it already
1145 1145 del old_files[new_filename]
1146 1146
1147 1147 # removed files are those present in the old diff but not in the NEW one;
1148 1148 # since we remove old files that are present in the new diff, any left-overs
1149 1149 # should be the removed files
1150 1150 removed_files.extend(old_files.keys())
1151 1151
1152 1152 return FileChangeTuple(added_files, modified_files, removed_files)
1153 1153
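# Note (added for clarity): in _calculate_file_changes() above a file whose
# raw-diff hash differs between the old and new diff ends up in `modified`,
# a file present only in the new diff in `added` or `removed` depending on its
# diff operation, and files left over from the old diff in `removed`.
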
1154 1154 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1155 1155 """
1156 1156 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1157 1157 so it always looks the same regardless of which default
1158 1158 renderer the system is using.
1159 1159
1160 1160 :param ancestor_commit_id: ancestor raw_id
1161 1161 :param changes: changes named tuple
1162 1162 :param file_changes: file changes named tuple
1163 1163
1164 1164 """
1165 1165 new_status = ChangesetStatus.get_status_lbl(
1166 1166 ChangesetStatus.STATUS_UNDER_REVIEW)
1167 1167
1168 1168 changed_files = (
1169 1169 file_changes.added + file_changes.modified + file_changes.removed)
1170 1170
1171 1171 params = {
1172 1172 'under_review_label': new_status,
1173 1173 'added_commits': changes.added,
1174 1174 'removed_commits': changes.removed,
1175 1175 'changed_files': changed_files,
1176 1176 'added_files': file_changes.added,
1177 1177 'modified_files': file_changes.modified,
1178 1178 'removed_files': file_changes.removed,
1179 1179 'ancestor_commit_id': ancestor_commit_id
1180 1180 }
1181 1181 renderer = RstTemplateRenderer()
1182 1182 return renderer.render('pull_request_update.mako', **params)
1183 1183
1184 1184 def edit(self, pull_request, title, description, description_renderer, user):
1185 1185 pull_request = self.__get_pull_request(pull_request)
1186 1186 old_data = pull_request.get_api_data(with_merge_state=False)
1187 1187 if pull_request.is_closed():
1188 1188 raise ValueError('This pull request is closed')
1189 1189 if title:
1190 1190 pull_request.title = title
1191 1191 pull_request.description = description
1192 1192 pull_request.updated_on = datetime.datetime.now()
1193 1193 pull_request.description_renderer = description_renderer
1194 1194 Session().add(pull_request)
1195 1195 self._log_audit_action(
1196 1196 'repo.pull_request.edit', {'old_data': old_data},
1197 1197 user, pull_request)
1198 1198
1199 1199 def update_reviewers(self, pull_request, reviewer_data, user):
1200 1200 """
1201 1201 Update the reviewers in the pull request
1202 1202
1203 1203 :param pull_request: the pr to update
1204 1204 :param reviewer_data: list of tuples
1205 1205 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1206 1206 """
1207 1207 pull_request = self.__get_pull_request(pull_request)
1208 1208 if pull_request.is_closed():
1209 1209 raise ValueError('This pull request is closed')
1210 1210
1211 1211 reviewers = {}
1212 1212 for user_id, reasons, mandatory, rules in reviewer_data:
1213 1213 if isinstance(user_id, (int, compat.string_types)):
1214 1214 user_id = self._get_user(user_id).user_id
1215 1215 reviewers[user_id] = {
1216 1216 'reasons': reasons, 'mandatory': mandatory}
1217 1217
1218 1218 reviewers_ids = set(reviewers.keys())
1219 1219 current_reviewers = PullRequestReviewers.query()\
1220 1220 .filter(PullRequestReviewers.pull_request ==
1221 1221 pull_request).all()
1222 1222 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1223 1223
1224 1224 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1225 1225 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1226 1226
1227 1227 log.debug("Adding %s reviewers", ids_to_add)
1228 1228 log.debug("Removing %s reviewers", ids_to_remove)
1229 1229 changed = False
1230 1230 added_audit_reviewers = []
1231 1231 removed_audit_reviewers = []
1232 1232
1233 1233 for uid in ids_to_add:
1234 1234 changed = True
1235 1235 _usr = self._get_user(uid)
1236 1236 reviewer = PullRequestReviewers()
1237 1237 reviewer.user = _usr
1238 1238 reviewer.pull_request = pull_request
1239 1239 reviewer.reasons = reviewers[uid]['reasons']
1240 1240 # NOTE(marcink): mandatory shouldn't be changed now
1241 1241 # reviewer.mandatory = reviewers[uid]['reasons']
1242 1242 Session().add(reviewer)
1243 1243 added_audit_reviewers.append(reviewer.get_dict())
1244 1244
1245 1245 for uid in ids_to_remove:
1246 1246 changed = True
1247 1247 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1248 1248 # that prevents and fixes cases where we added the same reviewer twice.
1249 1249 # this CAN happen due to the lack of DB checks
1250 1250 reviewers = PullRequestReviewers.query()\
1251 1251 .filter(PullRequestReviewers.user_id == uid,
1252 1252 PullRequestReviewers.pull_request == pull_request)\
1253 1253 .all()
1254 1254
1255 1255 for obj in reviewers:
1256 1256 removed_audit_reviewers.append(obj.get_dict())
1257 1257 Session().delete(obj)
1258 1258
1259 1259 if changed:
1260 1260 Session().expire_all()
1261 1261 pull_request.updated_on = datetime.datetime.now()
1262 1262 Session().add(pull_request)
1263 1263
1264 1264 # finally store audit logs
1265 1265 for user_data in added_audit_reviewers:
1266 1266 self._log_audit_action(
1267 1267 'repo.pull_request.reviewer.add', {'data': user_data},
1268 1268 user, pull_request)
1269 1269 for user_data in removed_audit_reviewers:
1270 1270 self._log_audit_action(
1271 1271 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1272 1272 user, pull_request)
1273 1273
1274 1274 self.notify_reviewers(pull_request, ids_to_add)
1275 1275 return ids_to_add, ids_to_remove
1276 1276
1277 1277 def get_url(self, pull_request, request=None, permalink=False):
1278 1278 if not request:
1279 1279 request = get_current_request()
1280 1280
1281 1281 if permalink:
1282 1282 return request.route_url(
1283 1283 'pull_requests_global',
1284 1284 pull_request_id=pull_request.pull_request_id,)
1285 1285 else:
1286 1286 return request.route_url('pullrequest_show',
1287 1287 repo_name=safe_str(pull_request.target_repo.repo_name),
1288 1288 pull_request_id=pull_request.pull_request_id,)
1289 1289
1290 1290 def get_shadow_clone_url(self, pull_request, request=None):
1291 1291 """
1292 1292 Returns qualified url pointing to the shadow repository. If this pull
1293 1293 request is closed there is no shadow repository and ``None`` will be
1294 1294 returned.
1295 1295 """
1296 1296 if pull_request.is_closed():
1297 1297 return None
1298 1298 else:
1299 1299 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1300 1300 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1301 1301
1302 1302 def notify_reviewers(self, pull_request, reviewers_ids):
1303 1303 # notification to reviewers
1304 1304 if not reviewers_ids:
1305 1305 return
1306 1306
1307 1307 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1308 1308
1309 1309 pull_request_obj = pull_request
1310 1310 # get the current participants of this pull request
1311 1311 recipients = reviewers_ids
1312 1312 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1313 1313
1314 1314 pr_source_repo = pull_request_obj.source_repo
1315 1315 pr_target_repo = pull_request_obj.target_repo
1316 1316
1317 1317 pr_url = h.route_url('pullrequest_show',
1318 1318 repo_name=pr_target_repo.repo_name,
1319 1319 pull_request_id=pull_request_obj.pull_request_id,)
1320 1320
1321 1321 # set some variables for email notification
1322 1322 pr_target_repo_url = h.route_url(
1323 1323 'repo_summary', repo_name=pr_target_repo.repo_name)
1324 1324
1325 1325 pr_source_repo_url = h.route_url(
1326 1326 'repo_summary', repo_name=pr_source_repo.repo_name)
1327 1327
1328 1328 # pull request specifics
1329 1329 pull_request_commits = [
1330 1330 (x.raw_id, x.message)
1331 1331 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1332 1332
1333 1333 kwargs = {
1334 1334 'user': pull_request.author,
1335 1335 'pull_request': pull_request_obj,
1336 1336 'pull_request_commits': pull_request_commits,
1337 1337
1338 1338 'pull_request_target_repo': pr_target_repo,
1339 1339 'pull_request_target_repo_url': pr_target_repo_url,
1340 1340
1341 1341 'pull_request_source_repo': pr_source_repo,
1342 1342 'pull_request_source_repo_url': pr_source_repo_url,
1343 1343
1344 1344 'pull_request_url': pr_url,
1345 1345 }
1346 1346
1347 1347 # pre-generate the subject for notification itself
1348 1348 (subject,
1349 1349 _h, _e, # we don't care about those
1350 1350 body_plaintext) = EmailNotificationModel().render_email(
1351 1351 notification_type, **kwargs)
1352 1352
1353 1353 # create notification objects, and emails
1354 1354 NotificationModel().create(
1355 1355 created_by=pull_request.author,
1356 1356 notification_subject=subject,
1357 1357 notification_body=body_plaintext,
1358 1358 notification_type=notification_type,
1359 1359 recipients=recipients,
1360 1360 email_kwargs=kwargs,
1361 1361 )
1362 1362
1363 1363 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1364 1364 commit_changes, file_changes):
1365 1365
1366 1366 updating_user_id = updating_user.user_id
1367 1367 reviewers = set([x.user.user_id for x in pull_request.reviewers])
1368 1368 # NOTE(marcink): send notification to all other users except the
1369 1369 # person who updated the PR
1370 1370 recipients = reviewers.difference(set([updating_user_id]))
1371 1371
1372 1372 log.debug('Notify following recipients about pull-request update %s', recipients)
1373 1373
1374 1374 pull_request_obj = pull_request
1375 1375
1376 1376 # send email about the update
1377 1377 changed_files = (
1378 1378 file_changes.added + file_changes.modified + file_changes.removed)
1379 1379
1380 1380 pr_source_repo = pull_request_obj.source_repo
1381 1381 pr_target_repo = pull_request_obj.target_repo
1382 1382
1383 1383 pr_url = h.route_url('pullrequest_show',
1384 1384 repo_name=pr_target_repo.repo_name,
1385 1385 pull_request_id=pull_request_obj.pull_request_id,)
1386 1386
1387 1387 # set some variables for email notification
1388 1388 pr_target_repo_url = h.route_url(
1389 1389 'repo_summary', repo_name=pr_target_repo.repo_name)
1390 1390
1391 1391 pr_source_repo_url = h.route_url(
1392 1392 'repo_summary', repo_name=pr_source_repo.repo_name)
1393 1393
1394 1394 email_kwargs = {
1395 1395 'date': datetime.datetime.now(),
1396 1396 'updating_user': updating_user,
1397 1397
1398 1398 'pull_request': pull_request_obj,
1399 1399
1400 1400 'pull_request_target_repo': pr_target_repo,
1401 1401 'pull_request_target_repo_url': pr_target_repo_url,
1402 1402
1403 1403 'pull_request_source_repo': pr_source_repo,
1404 1404 'pull_request_source_repo_url': pr_source_repo_url,
1405 1405
1406 1406 'pull_request_url': pr_url,
1407 1407
1408 1408 'ancestor_commit_id': ancestor_commit_id,
1409 1409 'added_commits': commit_changes.added,
1410 1410 'removed_commits': commit_changes.removed,
1411 1411 'changed_files': changed_files,
1412 1412 'added_files': file_changes.added,
1413 1413 'modified_files': file_changes.modified,
1414 1414 'removed_files': file_changes.removed,
1415 1415 }
1416 1416
1417 1417 (subject,
1418 1418 _h, _e, # we don't care about those
1419 1419 body_plaintext) = EmailNotificationModel().render_email(
1420 1420 EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)
1421 1421
1422 1422 # create notification objects, and emails
1423 1423 NotificationModel().create(
1424 1424 created_by=updating_user,
1425 1425 notification_subject=subject,
1426 1426 notification_body=body_plaintext,
1427 1427 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1428 1428 recipients=recipients,
1429 1429 email_kwargs=email_kwargs,
1430 1430 )
1431 1431
1432 1432 def delete(self, pull_request, user=None):
1433 1433 if not user:
1434 1434 user = getattr(get_current_rhodecode_user(), 'username', None)
1435 1435
1436 1436 pull_request = self.__get_pull_request(pull_request)
1437 1437 old_data = pull_request.get_api_data(with_merge_state=False)
1438 1438 self._cleanup_merge_workspace(pull_request)
1439 1439 self._log_audit_action(
1440 1440 'repo.pull_request.delete', {'old_data': old_data},
1441 1441 user, pull_request)
1442 1442 Session().delete(pull_request)
1443 1443
1444 1444 def close_pull_request(self, pull_request, user):
1445 1445 pull_request = self.__get_pull_request(pull_request)
1446 1446 self._cleanup_merge_workspace(pull_request)
1447 1447 pull_request.status = PullRequest.STATUS_CLOSED
1448 1448 pull_request.updated_on = datetime.datetime.now()
1449 1449 Session().add(pull_request)
1450 1450 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1451 1451
1452 1452 pr_data = pull_request.get_api_data(with_merge_state=False)
1453 1453 self._log_audit_action(
1454 1454 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1455 1455
1456 1456 def close_pull_request_with_comment(
1457 1457 self, pull_request, user, repo, message=None, auth_user=None):
1458 1458
1459 1459 pull_request_review_status = pull_request.calculated_review_status()
1460 1460
1461 1461 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1462 1462 # approved only if we have voting consent
1463 1463 status = ChangesetStatus.STATUS_APPROVED
1464 1464 else:
1465 1465 status = ChangesetStatus.STATUS_REJECTED
1466 1466 status_lbl = ChangesetStatus.get_status_lbl(status)
1467 1467
1468 1468 default_message = (
1469 1469 'Closing with status change {transition_icon} {status}.'
1470 1470 ).format(transition_icon='>', status=status_lbl)
1471 1471 text = message or default_message
1472 1472
1473 1473 # create a comment, and link it to new status
1474 1474 comment = CommentsModel().create(
1475 1475 text=text,
1476 1476 repo=repo.repo_id,
1477 1477 user=user.user_id,
1478 1478 pull_request=pull_request.pull_request_id,
1479 1479 status_change=status_lbl,
1480 1480 status_change_type=status,
1481 1481 closing_pr=True,
1482 1482 auth_user=auth_user,
1483 1483 )
1484 1484
1485 1485 # calculate old status before we change it
1486 1486 old_calculated_status = pull_request.calculated_review_status()
1487 1487 ChangesetStatusModel().set_status(
1488 1488 repo.repo_id,
1489 1489 status,
1490 1490 user.user_id,
1491 1491 comment=comment,
1492 1492 pull_request=pull_request.pull_request_id
1493 1493 )
1494 1494
1495 1495 Session().flush()
1496 1496
1497 1497 self.trigger_pull_request_hook(pull_request, user, 'comment',
1498 1498 data={'comment': comment})
1499 1499
1500 1500 # we now calculate the status of the pull request again, and based on that
1501 1501 # calculation trigger a status change. This can happen when a non-reviewer
1502 1502 # admin closes a PR: their vote doesn't change the status, while a
1503 1503 # reviewer's vote might.
1504 1504 calculated_status = pull_request.calculated_review_status()
1505 1505 if old_calculated_status != calculated_status:
1506 1506 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1507 1507 data={'status': calculated_status})
1508 1508
1509 1509 # finally close the PR
1510 1510 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1511 1511
1512 1512 return comment, status
1513 1513
1514 1514 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1515 1515 _ = translator or get_current_request().translate
1516 1516
1517 1517 if not self._is_merge_enabled(pull_request):
1518 1518 return None, False, _('Server-side pull request merging is disabled.')
1519 1519
1520 1520 if pull_request.is_closed():
1521 1521 return None, False, _('This pull request is closed.')
1522 1522
1523 1523 merge_possible, msg = self._check_repo_requirements(
1524 1524 target=pull_request.target_repo, source=pull_request.source_repo,
1525 1525 translator=_)
1526 1526 if not merge_possible:
1527 1527 return None, merge_possible, msg
1528 1528
1529 1529 try:
1530 1530 merge_response = self._try_merge(
1531 1531 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1532 1532 log.debug("Merge response: %s", merge_response)
1533 1533 return merge_response, merge_response.possible, merge_response.merge_status_message
1534 1534 except NotImplementedError:
1535 1535 return None, False, _('Pull request merging is not supported.')
1536 1536
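# NOTE (editor sketch): illustrative, hypothetical use of merge_status() above;
# the variable names are placeholders, but the three-element return value
# (merge response object, boolean possibility flag, status message) mirrors
# the method's return statements:
#
#   merge_response, possible, msg = PullRequestModel().merge_status(
#       pull_request, translator=_)
#   if not possible:
#       log.debug('Cannot merge: %s', msg)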
1537 1537 def _check_repo_requirements(self, target, source, translator):
1538 1538 """
1539 1539 Check if `target` and `source` have compatible requirements.
1540 1540
1541 1541 Currently this is just checking for largefiles.
1542 1542 """
1543 1543 _ = translator
1544 1544 target_has_largefiles = self._has_largefiles(target)
1545 1545 source_has_largefiles = self._has_largefiles(source)
1546 1546 merge_possible = True
1547 1547 message = u''
1548 1548
1549 1549 if target_has_largefiles != source_has_largefiles:
1550 1550 merge_possible = False
1551 1551 if source_has_largefiles:
1552 1552 message = _(
1553 1553 'Target repository large files support is disabled.')
1554 1554 else:
1555 1555 message = _(
1556 1556 'Source repository large files support is disabled.')
1557 1557
1558 1558 return merge_possible, message
1559 1559
1560 1560 def _has_largefiles(self, repo):
1561 1561 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1562 1562 'extensions', 'largefiles')
1563 1563 return largefiles_ui and largefiles_ui[0].active
1564 1564
1565 1565 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1566 1566 """
1567 1567 Try to merge the pull request and return the merge status.
1568 1568 """
1569 1569 log.debug(
1570 1570 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1571 1571 pull_request.pull_request_id, force_shadow_repo_refresh)
1572 1572 target_vcs = pull_request.target_repo.scm_instance()
1573 1573 # Refresh the target reference.
1574 1574 try:
1575 1575 target_ref = self._refresh_reference(
1576 1576 pull_request.target_ref_parts, target_vcs)
1577 1577 except CommitDoesNotExistError:
1578 1578 merge_state = MergeResponse(
1579 1579 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1580 1580 metadata={'target_ref': pull_request.target_ref_parts})
1581 1581 return merge_state
1582 1582
1583 1583 target_locked = pull_request.target_repo.locked
1584 1584 if target_locked and target_locked[0]:
1585 1585 locked_by = 'user:{}'.format(target_locked[0])
1586 1586 log.debug("The target repository is locked by %s.", locked_by)
1587 1587 merge_state = MergeResponse(
1588 1588 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1589 1589 metadata={'locked_by': locked_by})
1590 1590 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1591 1591 pull_request, target_ref):
1592 1592 log.debug("Refreshing the merge status of the repository.")
1593 1593 merge_state = self._refresh_merge_state(
1594 1594 pull_request, target_vcs, target_ref)
1595 1595 else:
1596 1596 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1597 1597 metadata = {
1598 1598 'unresolved_files': '',
1599 1599 'target_ref': pull_request.target_ref_parts,
1600 1600 'source_ref': pull_request.source_ref_parts,
1601 1601 }
1602 1602 if pull_request.last_merge_metadata:
1603 1603 metadata.update(pull_request.last_merge_metadata)
1604 1604
1605 1605 if not possible and target_ref.type == 'branch':
1606 1606 # NOTE(marcink): case for mercurial multiple heads on branch
1607 1607 heads = target_vcs._heads(target_ref.name)
1608 1608 if len(heads) != 1:
1609 1609 heads = ',\n'.join(heads)
1610 1610 metadata.update({
1611 1611 'heads': heads
1612 1612 })
1613 1613
1614 1614 merge_state = MergeResponse(
1615 1615 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1616 1616
1617 1617 return merge_state
1618 1618
1619 1619 def _refresh_reference(self, reference, vcs_repository):
1620 1620 if reference.type in self.UPDATABLE_REF_TYPES:
1621 1621 name_or_id = reference.name
1622 1622 else:
1623 1623 name_or_id = reference.commit_id
1624 1624
1625 1625 refreshed_commit = vcs_repository.get_commit(name_or_id)
1626 1626 refreshed_reference = Reference(
1627 1627 reference.type, reference.name, refreshed_commit.raw_id)
1628 1628 return refreshed_reference
1629 1629
1630 1630 def _needs_merge_state_refresh(self, pull_request, target_reference):
1631 1631 return not (
1632 1632 pull_request.revisions and
1633 1633 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1634 1634 target_reference.commit_id == pull_request._last_merge_target_rev)
1635 1635
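# NOTE (editor sketch): descriptive summary of _needs_merge_state_refresh()
# above: it returns True whenever the cached merge simulation is stale, i.e.
# when the pull request has no revisions yet, when its newest source revision
# differs from the one used in the last dry-run merge, or when the target
# reference has moved since that dry-run.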
1636 1636 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1637 1637 workspace_id = self._workspace_id(pull_request)
1638 1638 source_vcs = pull_request.source_repo.scm_instance()
1639 1639 repo_id = pull_request.target_repo.repo_id
1640 1640 use_rebase = self._use_rebase_for_merging(pull_request)
1641 1641 close_branch = self._close_branch_before_merging(pull_request)
1642 1642 merge_state = target_vcs.merge(
1643 1643 repo_id, workspace_id,
1644 1644 target_reference, source_vcs, pull_request.source_ref_parts,
1645 1645 dry_run=True, use_rebase=use_rebase,
1646 1646 close_branch=close_branch)
1647 1647
1648 1648 # Do not store the response if there was an unknown error.
1649 1649 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1650 1650 pull_request._last_merge_source_rev = \
1651 1651 pull_request.source_ref_parts.commit_id
1652 1652 pull_request._last_merge_target_rev = target_reference.commit_id
1653 1653 pull_request.last_merge_status = merge_state.failure_reason
1654 1654 pull_request.last_merge_metadata = merge_state.metadata
1655 1655
1656 1656 pull_request.shadow_merge_ref = merge_state.merge_ref
1657 1657 Session().add(pull_request)
1658 1658 Session().commit()
1659 1659
1660 1660 return merge_state
1661 1661
1662 1662 def _workspace_id(self, pull_request):
1663 1663 workspace_id = 'pr-%s' % pull_request.pull_request_id
1664 1664 return workspace_id
1665 1665
1666 1666 def generate_repo_data(self, repo, commit_id=None, branch=None,
1667 1667 bookmark=None, translator=None):
1668 1668 from rhodecode.model.repo import RepoModel
1669 1669
1670 1670 all_refs, selected_ref = \
1671 1671 self._get_repo_pullrequest_sources(
1672 1672 repo.scm_instance(), commit_id=commit_id,
1673 1673 branch=branch, bookmark=bookmark, translator=translator)
1674 1674
1675 1675 refs_select2 = []
1676 1676 for element in all_refs:
1677 1677 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1678 1678 refs_select2.append({'text': element[1], 'children': children})
1679 1679
1680 1680 return {
1681 1681 'user': {
1682 1682 'user_id': repo.user.user_id,
1683 1683 'username': repo.user.username,
1684 1684 'firstname': repo.user.first_name,
1685 1685 'lastname': repo.user.last_name,
1686 1686 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1687 1687 },
1688 1688 'name': repo.repo_name,
1689 1689 'link': RepoModel().get_url(repo),
1690 1690 'description': h.chop_at_smart(repo.description_safe, '\n'),
1691 1691 'refs': {
1692 1692 'all_refs': all_refs,
1693 1693 'selected_ref': selected_ref,
1694 1694 'select2_refs': refs_select2
1695 1695 }
1696 1696 }
1697 1697
1698 1698 def generate_pullrequest_title(self, source, source_ref, target):
1699 1699 return u'{source}#{at_ref} to {target}'.format(
1700 1700 source=source,
1701 1701 at_ref=source_ref,
1702 1702 target=target,
1703 1703 )
1704 1704
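# NOTE (editor sketch): hypothetical example of the title produced by
# generate_pullrequest_title() above:
#
#   generate_pullrequest_title('my-fork', 'feature-x', 'upstream-repo')
#   -> u'my-fork#feature-x to upstream-repo'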
1705 1705 def _cleanup_merge_workspace(self, pull_request):
1706 1706 # Merging related cleanup
1707 1707 repo_id = pull_request.target_repo.repo_id
1708 1708 target_scm = pull_request.target_repo.scm_instance()
1709 1709 workspace_id = self._workspace_id(pull_request)
1710 1710
1711 1711 try:
1712 1712 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1713 1713 except NotImplementedError:
1714 1714 pass
1715 1715
1716 1716 def _get_repo_pullrequest_sources(
1717 1717 self, repo, commit_id=None, branch=None, bookmark=None,
1718 1718 translator=None):
1719 1719 """
1720 1720 Return a structure with the repo's interesting commits, suitable for
1721 1721 the selectors in the pull request controller
1722 1722
1723 1723 :param commit_id: a commit that must be in the list somehow
1724 1724 and selected by default
1725 1725 :param branch: a branch that must be in the list and selected
1726 1726 by default - even if closed
1727 1727 :param bookmark: a bookmark that must be in the list and selected by default
1728 1728 """
1729 1729 _ = translator or get_current_request().translate
1730 1730
1731 1731 commit_id = safe_str(commit_id) if commit_id else None
1732 1732 branch = safe_unicode(branch) if branch else None
1733 1733 bookmark = safe_unicode(bookmark) if bookmark else None
1734 1734
1735 1735 selected = None
1736 1736
1737 1737 # order matters: first source that has commit_id in it will be selected
1738 1738 sources = []
1739 1739 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1740 1740 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1741 1741
1742 1742 if commit_id:
1743 1743 ref_commit = (h.short_id(commit_id), commit_id)
1744 1744 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1745 1745
1746 1746 sources.append(
1747 1747 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1748 1748 )
1749 1749
1750 1750 groups = []
1751 1751
1752 1752 for group_key, ref_list, group_name, match in sources:
1753 1753 group_refs = []
1754 1754 for ref_name, ref_id in ref_list:
1755 1755 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1756 1756 group_refs.append((ref_key, ref_name))
1757 1757
1758 1758 if not selected:
1759 1759 if set([commit_id, match]) & set([ref_id, ref_name]):
1760 1760 selected = ref_key
1761 1761
1762 1762 if group_refs:
1763 1763 groups.append((group_refs, group_name))
1764 1764
1765 1765 if not selected:
1766 1766 ref = commit_id or branch or bookmark
1767 1767 if ref:
1768 1768 raise CommitDoesNotExistError(
1769 1769 u'No commit refs could be found matching: {}'.format(ref))
1770 1770 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1771 1771 selected = u'branch:{}:{}'.format(
1772 1772 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1773 1773 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1774 1774 )
1775 1775 elif repo.commit_ids:
1776 1776 # make the user select in this case
1777 1777 selected = None
1778 1778 else:
1779 1779 raise EmptyRepositoryError()
1780 1780 return groups, selected
1781 1781
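# NOTE (editor sketch): hypothetical shape of the (groups, selected) value
# returned by _get_repo_pullrequest_sources() above; each group is a
# (refs, label) pair and each ref key follows the 'type:name:commit_id'
# pattern built in the loop:
#
#   groups = [
#       ([(u'branch:default:deadbeef...', u'default')], u'Branches'),
#   ]
#   selected = u'branch:default:deadbeef...'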
1782 1782 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1783 1783 hide_whitespace_changes, diff_context):
1784 1784
1785 1785 return self._get_diff_from_pr_or_version(
1786 1786 source_repo, source_ref_id, target_ref_id,
1787 1787 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1788 1788
1789 1789 def _get_diff_from_pr_or_version(
1790 1790 self, source_repo, source_ref_id, target_ref_id,
1791 1791 hide_whitespace_changes, diff_context):
1792 1792
1793 1793 target_commit = source_repo.get_commit(
1794 1794 commit_id=safe_str(target_ref_id))
1795 1795 source_commit = source_repo.get_commit(
1796 1796 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1797 1797 if isinstance(source_repo, Repository):
1798 1798 vcs_repo = source_repo.scm_instance()
1799 1799 else:
1800 1800 vcs_repo = source_repo
1801 1801
1802 1802 # TODO: johbo: In the context of an update, we cannot reach
1803 1803 # the old commit anymore with our normal mechanisms. It needs
1804 1804 # some sort of special support in the vcs layer to avoid this
1805 1805 # workaround.
1806 1806 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1807 1807 vcs_repo.alias == 'git'):
1808 1808 source_commit.raw_id = safe_str(source_ref_id)
1809 1809
1810 1810 log.debug('calculating diff between '
1811 1811 'target_ref:%s and source_ref:%s for repo `%s`',
1812 1812 target_ref_id, source_ref_id,
1813 1813 safe_unicode(vcs_repo.path))
1814 1814
1815 1815 vcs_diff = vcs_repo.get_diff(
1816 1816 commit1=target_commit, commit2=source_commit,
1817 1817 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1818 1818 return vcs_diff
1819 1819
1820 1820 def _is_merge_enabled(self, pull_request):
1821 1821 return self._get_general_setting(
1822 1822 pull_request, 'rhodecode_pr_merge_enabled')
1823 1823
1824 1824 def _use_rebase_for_merging(self, pull_request):
1825 1825 repo_type = pull_request.target_repo.repo_type
1826 1826 if repo_type == 'hg':
1827 1827 return self._get_general_setting(
1828 1828 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1829 1829 elif repo_type == 'git':
1830 1830 return self._get_general_setting(
1831 1831 pull_request, 'rhodecode_git_use_rebase_for_merging')
1832 1832
1833 1833 return False
1834 1834
1835 1835 def _user_name_for_merging(self, pull_request, user):
1836 1836 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
1837 1837 if env_user_name_attr and hasattr(user, env_user_name_attr):
1838 1838 user_name_attr = env_user_name_attr
1839 1839 else:
1840 1840 user_name_attr = 'short_contact'
1841 1841
1842 1842 user_name = getattr(user, user_name_attr)
1843 1843 return user_name
1844 1844
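# NOTE (editor sketch): the attribute used for the merge user name can be
# overridden via the RC_MERGE_USER_NAME_ATTR environment variable read above;
# hypothetical example:
#
#   os.environ['RC_MERGE_USER_NAME_ATTR'] = 'username'
#   # _user_name_for_merging() then returns user.username instead of the
#   # default user.short_contact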
1845 1845 def _close_branch_before_merging(self, pull_request):
1846 1846 repo_type = pull_request.target_repo.repo_type
1847 1847 if repo_type == 'hg':
1848 1848 return self._get_general_setting(
1849 1849 pull_request, 'rhodecode_hg_close_branch_before_merging')
1850 1850 elif repo_type == 'git':
1851 1851 return self._get_general_setting(
1852 1852 pull_request, 'rhodecode_git_close_branch_before_merging')
1853 1853
1854 1854 return False
1855 1855
1856 1856 def _get_general_setting(self, pull_request, settings_key, default=False):
1857 1857 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1858 1858 settings = settings_model.get_general_settings()
1859 1859 return settings.get(settings_key, default)
1860 1860
1861 1861 def _log_audit_action(self, action, action_data, user, pull_request):
1862 1862 audit_logger.store(
1863 1863 action=action,
1864 1864 action_data=action_data,
1865 1865 user=user,
1866 1866 repo=pull_request.target_repo)
1867 1867
1868 1868 def get_reviewer_functions(self):
1869 1869 """
1870 1870 Fetches the functions used to validate and fetch default reviewers.
1871 1871 If available we use the EE package, otherwise we fall back to the CE
1872 1872 package functions.
1873 1873 """
1874 1874 try:
1875 1875 from rc_reviewers.utils import get_default_reviewers_data
1876 1876 from rc_reviewers.utils import validate_default_reviewers
1877 1877 except ImportError:
1878 1878 from rhodecode.apps.repository.utils import get_default_reviewers_data
1879 1879 from rhodecode.apps.repository.utils import validate_default_reviewers
1880 1880
1881 1881 return get_default_reviewers_data, validate_default_reviewers
1882 1882
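# NOTE (editor sketch): hypothetical usage of get_reviewer_functions(); the
# EE/CE import fallback above is transparent to the caller:
#
#   get_default_reviewers_data, validate_default_reviewers = \
#       PullRequestModel().get_reviewer_functions()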
1883 1883
1884 1884 class MergeCheck(object):
1885 1885 """
1886 1886 Performs merge checks and returns a check object which stores information
1887 1887 about merge errors and merge conditions
1888 1888 """
1889 1889 TODO_CHECK = 'todo'
1890 1890 PERM_CHECK = 'perm'
1891 1891 REVIEW_CHECK = 'review'
1892 1892 MERGE_CHECK = 'merge'
1893 1893 WIP_CHECK = 'wip'
1894 1894
1895 1895 def __init__(self):
1896 1896 self.review_status = None
1897 1897 self.merge_possible = None
1898 1898 self.merge_msg = ''
1899 1899 self.merge_response = None
1900 1900 self.failed = None
1901 1901 self.errors = []
1902 1902 self.error_details = OrderedDict()
1903 1903 self.source_commit = AttributeDict()
1904 1904 self.target_commit = AttributeDict()
1905 1905
1906 1906 def __repr__(self):
1907 1907 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
1908 1908 self.merge_possible, self.failed, self.errors)
1909 1909
1910 1910 def push_error(self, error_type, message, error_key, details):
1911 1911 self.failed = True
1912 1912 self.errors.append([error_type, message])
1913 1913 self.error_details[error_key] = dict(
1914 1914 details=details,
1915 1915 error_type=error_type,
1916 1916 message=message
1917 1917 )
1918 1918
1919 1919 @classmethod
1920 1920 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1921 1921 force_shadow_repo_refresh=False):
1922 1922 _ = translator
1923 1923 merge_check = cls()
1924 1924
1925 1925 # title has WIP:
1926 1926 if pull_request.work_in_progress:
1927 1927 log.debug("MergeCheck: cannot merge, title has wip: marker.")
1928 1928
1929 1929 msg = _('WIP marker in title prevents an accidental merge.')
1930 1930 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
1931 1931 if fail_early:
1932 1932 return merge_check
1933 1933
1934 1934 # permissions to merge
1935 1935 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
1936 1936 if not user_allowed_to_merge:
1937 1937 log.debug("MergeCheck: cannot merge, user not allowed to perform merge.")
1938 1938
1939 1939 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1940 1940 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1941 1941 if fail_early:
1942 1942 return merge_check
1943 1943
1944 1944 # permission to merge into the target branch
1945 1945 target_commit_id = pull_request.target_ref_parts.commit_id
1946 1946 if pull_request.target_ref_parts.type == 'branch':
1947 1947 branch_name = pull_request.target_ref_parts.name
1948 1948 else:
1949 1949 # for mercurial we can always figure out the branch from the commit
1950 1950 # in the case of a bookmark
1951 1951 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1952 1952 branch_name = target_commit.branch
1953 1953
1954 1954 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1955 1955 pull_request.target_repo.repo_name, branch_name)
1956 1956 if branch_perm and branch_perm == 'branch.none':
1957 1957 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1958 1958 branch_name, rule)
1959 1959 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1960 1960 if fail_early:
1961 1961 return merge_check
1962 1962
1963 1963 # review status, must be always present
1964 1964 review_status = pull_request.calculated_review_status()
1965 1965 merge_check.review_status = review_status
1966 1966
1967 1967 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1968 1968 if not status_approved:
1969 1969 log.debug("MergeCheck: cannot merge, approval is pending.")
1970 1970
1971 1971 msg = _('Pull request reviewer approval is pending.')
1972 1972
1973 1973 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1974 1974
1975 1975 if fail_early:
1976 1976 return merge_check
1977 1977
1978 1978 # left over TODOs
1979 1979 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1980 1980 if todos:
1981 1981 log.debug("MergeCheck: cannot merge, {} "
1982 1982 "unresolved TODOs left.".format(len(todos)))
1983 1983
1984 1984 if len(todos) == 1:
1985 1985 msg = _('Cannot merge, {} TODO still not resolved.').format(
1986 1986 len(todos))
1987 1987 else:
1988 1988 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1989 1989 len(todos))
1990 1990
1991 1991 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1992 1992
1993 1993 if fail_early:
1994 1994 return merge_check
1995 1995
1996 1996 # merge possibility check: this runs the filesystem simulation using the shadow repo
1997 1997 merge_response, merge_status, msg = PullRequestModel().merge_status(
1998 1998 pull_request, translator=translator,
1999 1999 force_shadow_repo_refresh=force_shadow_repo_refresh)
2000 2000
2001 2001 merge_check.merge_possible = merge_status
2002 2002 merge_check.merge_msg = msg
2003 2003 merge_check.merge_response = merge_response
2004 2004
2005 2005 source_ref_id = pull_request.source_ref_parts.commit_id
2006 2006 target_ref_id = pull_request.target_ref_parts.commit_id
2007 2007
2008 2008 try:
2009 2009 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2010 2010 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2011 2011 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2012 2012 merge_check.source_commit.current_raw_id = source_commit.raw_id
2013 2013 merge_check.source_commit.previous_raw_id = source_ref_id
2014 2014
2015 2015 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2016 2016 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2017 2017 merge_check.target_commit.current_raw_id = target_commit.raw_id
2018 2018 merge_check.target_commit.previous_raw_id = target_ref_id
2019 2019 except (SourceRefMissing, TargetRefMissing):
2020 2020 pass
2021 2021
2022 2022 if not merge_status:
2023 2023 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2024 2024 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2025 2025
2026 2026 if fail_early:
2027 2027 return merge_check
2028 2028
2029 2029 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2030 2030 return merge_check
2031 2031
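# NOTE (editor sketch): hypothetical usage of MergeCheck.validate() above;
# fail_early=True stops at the first failed check instead of collecting all
# errors:
#
#   merge_check = MergeCheck.validate(
#       pull_request, auth_user=auth_user, translator=_, fail_early=True)
#   if merge_check.failed:
#       for error_type, message in merge_check.errors:
#           log.debug('%s: %s', error_type, message)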
2032 2032 @classmethod
2033 2033 def get_merge_conditions(cls, pull_request, translator):
2034 2034 _ = translator
2035 2035 merge_details = {}
2036 2036
2037 2037 model = PullRequestModel()
2038 2038 use_rebase = model._use_rebase_for_merging(pull_request)
2039 2039
2040 2040 if use_rebase:
2041 2041 merge_details['merge_strategy'] = dict(
2042 2042 details={},
2043 2043 message=_('Merge strategy: rebase')
2044 2044 )
2045 2045 else:
2046 2046 merge_details['merge_strategy'] = dict(
2047 2047 details={},
2048 2048 message=_('Merge strategy: explicit merge commit')
2049 2049 )
2050 2050
2051 2051 close_branch = model._close_branch_before_merging(pull_request)
2052 2052 if close_branch:
2053 2053 repo_type = pull_request.target_repo.repo_type
2054 2054 close_msg = ''
2055 2055 if repo_type == 'hg':
2056 2056 close_msg = _('Source branch will be closed after merge.')
2057 2057 elif repo_type == 'git':
2058 2058 close_msg = _('Source branch will be deleted after merge.')
2059 2059
2060 2060 merge_details['close_branch'] = dict(
2061 2061 details={},
2062 2062 message=close_msg
2063 2063 )
2064 2064
2065 2065 return merge_details
2066 2066
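# NOTE (editor sketch): hypothetical shape of the dictionary returned by
# get_merge_conditions() above ('close_branch' is only present when the
# corresponding setting is enabled):
#
#   {
#       'merge_strategy': {'details': {}, 'message': u'Merge strategy: rebase'},
#       'close_branch': {'details': {}, 'message': u'Source branch will be closed after merge.'},
#   }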
2067 2067
2068 2068 ChangeTuple = collections.namedtuple(
2069 2069 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2070 2070
2071 2071 FileChangeTuple = collections.namedtuple(
2072 2072 'FileChangeTuple', ['added', 'modified', 'removed'])
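# NOTE (editor sketch): hypothetical instances of the tuples above; the field
# values are placeholders, matching how added/removed commits and
# added/modified/removed files are passed into the update-notification
# email_kwargs earlier in this module:
#
#   commit_changes = ChangeTuple(
#       added=['new_commit_id'], common=[], removed=[], total=['new_commit_id'])
#   file_changes = FileChangeTuple(
#       added=['docs/new.rst'], modified=['setup.py'], removed=[])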