security: use safe escaped version of description for repo and repo group to potentially...
ergo - r1830:d786fdd7 default
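The change below adds an HTML-escaped accessor (description_safe) next to each raw description column so templates can render user-supplied text safely. A minimal, self-contained sketch of the pattern, using markupsafe.escape as a stand-in for rhodecode.lib.helpers.escape (the class and names below are illustrative, not from the diff):

from markupsafe import escape


class RepoStub(object):
    """Stand-in for a model with a user-controlled description column."""

    def __init__(self, description):
        self.description = description

    @property
    def description_safe(self):
        # escape &, <, >, " and ' before the value reaches a template
        return escape(self.description)


repo = RepoStub(u'<img src=x onerror=alert(1)>')
print(repo.description_safe)  # &lt;img src=x onerror=alert(1)&gt;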
@@ -1,4092 +1,4127 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37
38 38 from sqlalchemy import *
39 39 from sqlalchemy.ext.declarative import declared_attr
40 40 from sqlalchemy.ext.hybrid import hybrid_property
41 41 from sqlalchemy.orm import (
42 42 relationship, joinedload, class_mapper, validates, aliased)
43 43 from sqlalchemy.sql.expression import true
44 44 from beaker.cache import cache_region
45 45 from zope.cachedescriptors.property import Lazy as LazyProperty
46 46
47 47 from pylons.i18n.translation import lazy_ugettext as _
48 48 from pyramid.threadlocal import get_current_request
49 49
50 50 from rhodecode.lib.vcs import get_vcs_instance
51 51 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
52 52 from rhodecode.lib.utils2 import (
53 53 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
54 54 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
55 55 glob2re, StrictAttributeDict, cleaned_uri)
56 56 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType
57 57 from rhodecode.lib.ext_json import json
58 58 from rhodecode.lib.caching_query import FromCache
59 59 from rhodecode.lib.encrypt import AESCipher
60 60
61 61 from rhodecode.model.meta import Base, Session
62 62
63 63 URL_SEP = '/'
64 64 log = logging.getLogger(__name__)
65 65
66 66 # =============================================================================
67 67 # BASE CLASSES
68 68 # =============================================================================
69 69
70 70 # this is propagated from the .ini file setting rhodecode.encrypted_values.secret,
71 71 # or beaker.session.secret if the first is not set,
72 72 # and is initialized in environment.py
73 73 ENCRYPTION_KEY = None
74 74
75 75 # used to sort permissions by type; '#' is used here because it is not allowed
76 76 # in usernames and sorts very early in the string.printable table.
77 77 PERMISSION_TYPE_SORT = {
78 78 'admin': '####',
79 79 'write': '###',
80 80 'read': '##',
81 81 'none': '#',
82 82 }
83 83
84 84
85 85 def display_sort(obj):
86 86 """
87 87 Sort function used to sort permissions in the .permissions() function of
88 88 Repository, RepoGroup and UserGroup. It also puts the default user in front
89 89 of all other resources.
90 90 """
91 91
92 92 if obj.username == User.DEFAULT_USER:
93 93 return '#####'
94 94 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
95 95 return prefix + obj.username
96 96
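# Illustrative ordering (assumed rows): given permission rows for the default
# user and for users holding admin/write/read/none permissions,
# sorted(rows, key=display_sort) yields the default user first ('#####'),
# then admins ('####' + username), then write, read and none holders,
# each group sorted alphabetically by username.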
97 97
98 98 def _hash_key(k):
99 99 return md5_safe(k)
100 100
101 101
102 102 class EncryptedTextValue(TypeDecorator):
103 103 """
104 104 Special column for encrypted long text data, use like::
105 105
106 106 value = Column("encrypted_value", EncryptedValue(), nullable=False)
107 107
108 108 This column is intelligent: if the value is in unencrypted form it returns
109 109 the unencrypted form, but on save it always encrypts.
110 110 """
111 111 impl = Text
112 112
113 113 def process_bind_param(self, value, dialect):
114 114 if not value:
115 115 return value
116 116 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
117 117 # protect against double encrypting if someone manually starts
118 118 # doing it
119 119 raise ValueError('value needs to be in unencrypted format, ie. '
120 120 'not starting with enc$aes')
121 121 return 'enc$aes_hmac$%s' % AESCipher(
122 122 ENCRYPTION_KEY, hmac=True).encrypt(value)
123 123
124 124 def process_result_value(self, value, dialect):
125 125 import rhodecode
126 126
127 127 if not value:
128 128 return value
129 129
130 130 parts = value.split('$', 3)
131 131 if not len(parts) == 3:
132 132 # probably not encrypted values
133 133 return value
134 134 else:
135 135 if parts[0] != 'enc':
136 136 # parts ok but without our header ?
137 137 return value
138 138 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
139 139 'rhodecode.encrypted_values.strict') or True)
140 140 # at that stage we know it's our encryption
141 141 if parts[1] == 'aes':
142 142 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
143 143 elif parts[1] == 'aes_hmac':
144 144 decrypted_data = AESCipher(
145 145 ENCRYPTION_KEY, hmac=True,
146 146 strict_verification=enc_strict_mode).decrypt(parts[2])
147 147 else:
148 148 raise ValueError(
149 149 'Encryption type part is wrong, must be `aes` '
150 150 'or `aes_hmac`, got `%s` instead' % (parts[1]))
151 151 return decrypted_data
152 152
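# Usage sketch (assumed column setup, mirroring clone_uri further below):
#
#     clone_uri = Column("clone_uri", EncryptedTextValue(), nullable=True)
#
# On save, the plain value is stored as 'enc$aes_hmac$<ciphertext>' using
# ENCRYPTION_KEY; on load, process_result_value() transparently returns the
# decrypted text, and values without the 'enc$' header pass through unchanged.
# Passing an already-encrypted value to process_bind_param() raises ValueError
# to guard against double encryption.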
153 153
154 154 class BaseModel(object):
155 155 """
156 156 Base Model for all classes
157 157 """
158 158
159 159 @classmethod
160 160 def _get_keys(cls):
161 161 """return column names for this model """
162 162 return class_mapper(cls).c.keys()
163 163
164 164 def get_dict(self):
165 165 """
166 166 return dict with keys and values corresponding
167 167 to this model data """
168 168
169 169 d = {}
170 170 for k in self._get_keys():
171 171 d[k] = getattr(self, k)
172 172
173 173 # also use __json__() if present to get additional fields
174 174 _json_attr = getattr(self, '__json__', None)
175 175 if _json_attr:
176 176 # update with attributes from __json__
177 177 if callable(_json_attr):
178 178 _json_attr = _json_attr()
179 179 for k, val in _json_attr.iteritems():
180 180 d[k] = val
181 181 return d
182 182
183 183 def get_appstruct(self):
184 184 """return list with keys and values tuples corresponding
185 185 to this model data """
186 186
187 187 l = []
188 188 for k in self._get_keys():
189 189 l.append((k, getattr(self, k),))
190 190 return l
191 191
192 192 def populate_obj(self, populate_dict):
193 193 """populate model with data from given populate_dict"""
194 194
195 195 for k in self._get_keys():
196 196 if k in populate_dict:
197 197 setattr(self, k, populate_dict[k])
198 198
199 199 @classmethod
200 200 def query(cls):
201 201 return Session().query(cls)
202 202
203 203 @classmethod
204 204 def get(cls, id_):
205 205 if id_:
206 206 return cls.query().get(id_)
207 207
208 208 @classmethod
209 209 def get_or_404(cls, id_, pyramid_exc=False):
210 210 if pyramid_exc:
211 211 # NOTE(marcink): backward compat; once migrated to pyramid,
212 212 # this should only use pyramid exceptions
213 213 from pyramid.httpexceptions import HTTPNotFound
214 214 else:
215 215 from webob.exc import HTTPNotFound
216 216
217 217 try:
218 218 id_ = int(id_)
219 219 except (TypeError, ValueError):
220 220 raise HTTPNotFound
221 221
222 222 res = cls.query().get(id_)
223 223 if not res:
224 224 raise HTTPNotFound
225 225 return res
226 226
227 227 @classmethod
228 228 def getAll(cls):
229 229 # deprecated and left for backward compatibility
230 230 return cls.get_all()
231 231
232 232 @classmethod
233 233 def get_all(cls):
234 234 return cls.query().all()
235 235
236 236 @classmethod
237 237 def delete(cls, id_):
238 238 obj = cls.query().get(id_)
239 239 Session().delete(obj)
240 240
241 241 @classmethod
242 242 def identity_cache(cls, session, attr_name, value):
243 243 exist_in_session = []
244 244 for (item_cls, pkey), instance in session.identity_map.items():
245 245 if cls == item_cls and getattr(instance, attr_name) == value:
246 246 exist_in_session.append(instance)
247 247 if exist_in_session:
248 248 if len(exist_in_session) == 1:
249 249 return exist_in_session[0]
250 250 log.exception(
251 251 'multiple objects with attr %s and '
252 252 'value %s found with same name: %r',
253 253 attr_name, value, exist_in_session)
254 254
255 255 def __repr__(self):
256 256 if hasattr(self, '__unicode__'):
257 257 # python repr needs to return str
258 258 try:
259 259 return safe_str(self.__unicode__())
260 260 except UnicodeDecodeError:
261 261 pass
262 262 return '<DB:%s>' % (self.__class__.__name__)
263 263
264 264
265 265 class RhodeCodeSetting(Base, BaseModel):
266 266 __tablename__ = 'rhodecode_settings'
267 267 __table_args__ = (
268 268 UniqueConstraint('app_settings_name'),
269 269 {'extend_existing': True, 'mysql_engine': 'InnoDB',
270 270 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
271 271 )
272 272
273 273 SETTINGS_TYPES = {
274 274 'str': safe_str,
275 275 'int': safe_int,
276 276 'unicode': safe_unicode,
277 277 'bool': str2bool,
278 278 'list': functools.partial(aslist, sep=',')
279 279 }
280 280 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
281 281 GLOBAL_CONF_KEY = 'app_settings'
282 282
283 283 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
284 284 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
285 285 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
286 286 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
287 287
288 288 def __init__(self, key='', val='', type='unicode'):
289 289 self.app_settings_name = key
290 290 self.app_settings_type = type
291 291 self.app_settings_value = val
292 292
293 293 @validates('_app_settings_value')
294 294 def validate_settings_value(self, key, val):
295 295 assert type(val) == unicode
296 296 return val
297 297
298 298 @hybrid_property
299 299 def app_settings_value(self):
300 300 v = self._app_settings_value
301 301 _type = self.app_settings_type
302 302 if _type:
303 303 _type = self.app_settings_type.split('.')[0]
304 304 # decode the encrypted value
305 305 if 'encrypted' in self.app_settings_type:
306 306 cipher = EncryptedTextValue()
307 307 v = safe_unicode(cipher.process_result_value(v, None))
308 308
309 309 converter = self.SETTINGS_TYPES.get(_type) or \
310 310 self.SETTINGS_TYPES['unicode']
311 311 return converter(v)
312 312
313 313 @app_settings_value.setter
314 314 def app_settings_value(self, val):
315 315 """
316 316 Setter that will always make sure we use unicode in app_settings_value
317 317
318 318 :param val:
319 319 """
320 320 val = safe_unicode(val)
321 321 # encode the encrypted value
322 322 if 'encrypted' in self.app_settings_type:
323 323 cipher = EncryptedTextValue()
324 324 val = safe_unicode(cipher.process_bind_param(val, None))
325 325 self._app_settings_value = val
326 326
327 327 @hybrid_property
328 328 def app_settings_type(self):
329 329 return self._app_settings_type
330 330
331 331 @app_settings_type.setter
332 332 def app_settings_type(self, val):
333 333 if val.split('.')[0] not in self.SETTINGS_TYPES:
334 334 raise Exception('type must be one of %s got %s'
335 335 % (self.SETTINGS_TYPES.keys(), val))
336 336 self._app_settings_type = val
337 337
338 338 def __unicode__(self):
339 339 return u"<%s('%s:%s[%s]')>" % (
340 340 self.__class__.__name__,
341 341 self.app_settings_name, self.app_settings_value,
342 342 self.app_settings_type
343 343 )
344 344
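# Typed-settings round trip, sketched with assumed values:
#
#     setting = RhodeCodeSetting('show_version', 'True', 'bool')
#     setting.app_settings_value  # -> True, via SETTINGS_TYPES['bool'] (str2bool)
#
# Values are always stored as unicode; types suffixed with '.encrypted'
# (e.g. 'unicode.encrypted') are additionally run through EncryptedTextValue
# in the getter/setter above.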
345 345
346 346 class RhodeCodeUi(Base, BaseModel):
347 347 __tablename__ = 'rhodecode_ui'
348 348 __table_args__ = (
349 349 UniqueConstraint('ui_key'),
350 350 {'extend_existing': True, 'mysql_engine': 'InnoDB',
351 351 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
352 352 )
353 353
354 354 HOOK_REPO_SIZE = 'changegroup.repo_size'
355 355 # HG
356 356 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
357 357 HOOK_PULL = 'outgoing.pull_logger'
358 358 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
359 359 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
360 360 HOOK_PUSH = 'changegroup.push_logger'
361 361 HOOK_PUSH_KEY = 'pushkey.key_push'
362 362
363 363 # TODO: johbo: Unify way how hooks are configured for git and hg,
364 364 # git part is currently hardcoded.
365 365
366 366 # SVN PATTERNS
367 367 SVN_BRANCH_ID = 'vcs_svn_branch'
368 368 SVN_TAG_ID = 'vcs_svn_tag'
369 369
370 370 ui_id = Column(
371 371 "ui_id", Integer(), nullable=False, unique=True, default=None,
372 372 primary_key=True)
373 373 ui_section = Column(
374 374 "ui_section", String(255), nullable=True, unique=None, default=None)
375 375 ui_key = Column(
376 376 "ui_key", String(255), nullable=True, unique=None, default=None)
377 377 ui_value = Column(
378 378 "ui_value", String(255), nullable=True, unique=None, default=None)
379 379 ui_active = Column(
380 380 "ui_active", Boolean(), nullable=True, unique=None, default=True)
381 381
382 382 def __repr__(self):
383 383 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
384 384 self.ui_key, self.ui_value)
385 385
386 386
387 387 class RepoRhodeCodeSetting(Base, BaseModel):
388 388 __tablename__ = 'repo_rhodecode_settings'
389 389 __table_args__ = (
390 390 UniqueConstraint(
391 391 'app_settings_name', 'repository_id',
392 392 name='uq_repo_rhodecode_setting_name_repo_id'),
393 393 {'extend_existing': True, 'mysql_engine': 'InnoDB',
394 394 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
395 395 )
396 396
397 397 repository_id = Column(
398 398 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
399 399 nullable=False)
400 400 app_settings_id = Column(
401 401 "app_settings_id", Integer(), nullable=False, unique=True,
402 402 default=None, primary_key=True)
403 403 app_settings_name = Column(
404 404 "app_settings_name", String(255), nullable=True, unique=None,
405 405 default=None)
406 406 _app_settings_value = Column(
407 407 "app_settings_value", String(4096), nullable=True, unique=None,
408 408 default=None)
409 409 _app_settings_type = Column(
410 410 "app_settings_type", String(255), nullable=True, unique=None,
411 411 default=None)
412 412
413 413 repository = relationship('Repository')
414 414
415 415 def __init__(self, repository_id, key='', val='', type='unicode'):
416 416 self.repository_id = repository_id
417 417 self.app_settings_name = key
418 418 self.app_settings_type = type
419 419 self.app_settings_value = val
420 420
421 421 @validates('_app_settings_value')
422 422 def validate_settings_value(self, key, val):
423 423 assert type(val) == unicode
424 424 return val
425 425
426 426 @hybrid_property
427 427 def app_settings_value(self):
428 428 v = self._app_settings_value
429 429 type_ = self.app_settings_type
430 430 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
431 431 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
432 432 return converter(v)
433 433
434 434 @app_settings_value.setter
435 435 def app_settings_value(self, val):
436 436 """
437 437 Setter that will always make sure we use unicode in app_settings_value
438 438
439 439 :param val:
440 440 """
441 441 self._app_settings_value = safe_unicode(val)
442 442
443 443 @hybrid_property
444 444 def app_settings_type(self):
445 445 return self._app_settings_type
446 446
447 447 @app_settings_type.setter
448 448 def app_settings_type(self, val):
449 449 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
450 450 if val not in SETTINGS_TYPES:
451 451 raise Exception('type must be one of %s got %s'
452 452 % (SETTINGS_TYPES.keys(), val))
453 453 self._app_settings_type = val
454 454
455 455 def __unicode__(self):
456 456 return u"<%s('%s:%s:%s[%s]')>" % (
457 457 self.__class__.__name__, self.repository.repo_name,
458 458 self.app_settings_name, self.app_settings_value,
459 459 self.app_settings_type
460 460 )
461 461
462 462
463 463 class RepoRhodeCodeUi(Base, BaseModel):
464 464 __tablename__ = 'repo_rhodecode_ui'
465 465 __table_args__ = (
466 466 UniqueConstraint(
467 467 'repository_id', 'ui_section', 'ui_key',
468 468 name='uq_repo_rhodecode_ui_repository_id_section_key'),
469 469 {'extend_existing': True, 'mysql_engine': 'InnoDB',
470 470 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
471 471 )
472 472
473 473 repository_id = Column(
474 474 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
475 475 nullable=False)
476 476 ui_id = Column(
477 477 "ui_id", Integer(), nullable=False, unique=True, default=None,
478 478 primary_key=True)
479 479 ui_section = Column(
480 480 "ui_section", String(255), nullable=True, unique=None, default=None)
481 481 ui_key = Column(
482 482 "ui_key", String(255), nullable=True, unique=None, default=None)
483 483 ui_value = Column(
484 484 "ui_value", String(255), nullable=True, unique=None, default=None)
485 485 ui_active = Column(
486 486 "ui_active", Boolean(), nullable=True, unique=None, default=True)
487 487
488 488 repository = relationship('Repository')
489 489
490 490 def __repr__(self):
491 491 return '<%s[%s:%s]%s=>%s]>' % (
492 492 self.__class__.__name__, self.repository.repo_name,
493 493 self.ui_section, self.ui_key, self.ui_value)
494 494
495 495
496 496 class User(Base, BaseModel):
497 497 __tablename__ = 'users'
498 498 __table_args__ = (
499 499 UniqueConstraint('username'), UniqueConstraint('email'),
500 500 Index('u_username_idx', 'username'),
501 501 Index('u_email_idx', 'email'),
502 502 {'extend_existing': True, 'mysql_engine': 'InnoDB',
503 503 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
504 504 )
505 505 DEFAULT_USER = 'default'
506 506 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
507 507 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
508 508
509 509 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
510 510 username = Column("username", String(255), nullable=True, unique=None, default=None)
511 511 password = Column("password", String(255), nullable=True, unique=None, default=None)
512 512 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
513 513 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
514 514 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
515 515 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
516 516 _email = Column("email", String(255), nullable=True, unique=None, default=None)
517 517 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
518 518 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
519 519
520 520 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
521 521 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
522 522 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
523 523 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
524 524 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
525 525 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
526 526
527 527 user_log = relationship('UserLog')
528 528 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
529 529
530 530 repositories = relationship('Repository')
531 531 repository_groups = relationship('RepoGroup')
532 532 user_groups = relationship('UserGroup')
533 533
534 534 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
535 535 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
536 536
537 537 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
538 538 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
539 539 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
540 540
541 541 group_member = relationship('UserGroupMember', cascade='all')
542 542
543 543 notifications = relationship('UserNotification', cascade='all')
544 544 # notifications assigned to this user
545 545 user_created_notifications = relationship('Notification', cascade='all')
546 546 # comments created by this user
547 547 user_comments = relationship('ChangesetComment', cascade='all')
548 548 # user profile extra info
549 549 user_emails = relationship('UserEmailMap', cascade='all')
550 550 user_ip_map = relationship('UserIpMap', cascade='all')
551 551 user_auth_tokens = relationship('UserApiKeys', cascade='all')
552 552 # gists
553 553 user_gists = relationship('Gist', cascade='all')
554 554 # user pull requests
555 555 user_pull_requests = relationship('PullRequest', cascade='all')
556 556 # external identities
557 557 extenal_identities = relationship(
558 558 'ExternalIdentity',
559 559 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
560 560 cascade='all')
561 561
562 562 def __unicode__(self):
563 563 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
564 564 self.user_id, self.username)
565 565
566 566 @hybrid_property
567 567 def email(self):
568 568 return self._email
569 569
570 570 @email.setter
571 571 def email(self, val):
572 572 self._email = val.lower() if val else None
573 573
574 574 @hybrid_property
575 575 def first_name(self):
576 576 from rhodecode.lib import helpers as h
577 577 if self.name:
578 578 return h.escape(self.name)
579 579 return self.name
580 580
581 581 @hybrid_property
582 582 def last_name(self):
583 583 from rhodecode.lib import helpers as h
584 584 if self.lastname:
585 585 return h.escape(self.lastname)
586 586 return self.lastname
587 587
588 588 @hybrid_property
589 589 def api_key(self):
590 590 """
591 591 Fetch the auth-token with role ALL connected to this user, if one exists
592 592 """
593 593 user_auth_token = UserApiKeys.query()\
594 594 .filter(UserApiKeys.user_id == self.user_id)\
595 595 .filter(or_(UserApiKeys.expires == -1,
596 596 UserApiKeys.expires >= time.time()))\
597 597 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
598 598 if user_auth_token:
599 599 user_auth_token = user_auth_token.api_key
600 600
601 601 return user_auth_token
602 602
603 603 @api_key.setter
604 604 def api_key(self, val):
605 605 # don't allow setting the API key; this is deprecated for now
606 606 self._api_key = None
607 607
608 608 @property
609 609 def firstname(self):
610 610 # alias for future
611 611 return self.name
612 612
613 613 @property
614 614 def emails(self):
615 615 other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
616 616 return [self.email] + [x.email for x in other]
617 617
618 618 @property
619 619 def auth_tokens(self):
620 620 return [x.api_key for x in self.extra_auth_tokens]
621 621
622 622 @property
623 623 def extra_auth_tokens(self):
624 624 return UserApiKeys.query().filter(UserApiKeys.user == self).all()
625 625
626 626 @property
627 627 def feed_token(self):
628 628 return self.get_feed_token()
629 629
630 630 def get_feed_token(self):
631 631 feed_tokens = UserApiKeys.query()\
632 632 .filter(UserApiKeys.user == self)\
633 633 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
634 634 .all()
635 635 if feed_tokens:
636 636 return feed_tokens[0].api_key
637 637 return 'NO_FEED_TOKEN_AVAILABLE'
638 638
639 639 @classmethod
640 640 def extra_valid_auth_tokens(cls, user, role=None):
641 641 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
642 642 .filter(or_(UserApiKeys.expires == -1,
643 643 UserApiKeys.expires >= time.time()))
644 644 if role:
645 645 tokens = tokens.filter(or_(UserApiKeys.role == role,
646 646 UserApiKeys.role == UserApiKeys.ROLE_ALL))
647 647 return tokens.all()
648 648
649 649 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
650 650 from rhodecode.lib import auth
651 651
652 652 log.debug('Trying to authenticate user: %s via auth-token, '
653 653 'and roles: %s', self, roles)
654 654
655 655 if not auth_token:
656 656 return False
657 657
658 658 crypto_backend = auth.crypto_backend()
659 659
660 660 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
661 661 tokens_q = UserApiKeys.query()\
662 662 .filter(UserApiKeys.user_id == self.user_id)\
663 663 .filter(or_(UserApiKeys.expires == -1,
664 664 UserApiKeys.expires >= time.time()))
665 665
666 666 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
667 667
668 668 plain_tokens = []
669 669 hash_tokens = []
670 670
671 671 for token in tokens_q.all():
672 672 # verify scope first
673 673 if token.repo_id:
674 674 # token has a scope, we need to verify it
675 675 if scope_repo_id != token.repo_id:
676 676 log.debug(
677 677 'Scope mismatch: token has a set repo scope: %s, '
678 678 'and calling scope is:%s, skipping further checks',
679 679 token.repo, scope_repo_id)
680 680 # token has a scope, and it doesn't match, skip token
681 681 continue
682 682
683 683 if token.api_key.startswith(crypto_backend.ENC_PREF):
684 684 hash_tokens.append(token.api_key)
685 685 else:
686 686 plain_tokens.append(token.api_key)
687 687
688 688 is_plain_match = auth_token in plain_tokens
689 689 if is_plain_match:
690 690 return True
691 691
692 692 for hashed in hash_tokens:
693 693 # TODO(marcink): this is expensive to calculate, but most secure
694 694 match = crypto_backend.hash_check(auth_token, hashed)
695 695 if match:
696 696 return True
697 697
698 698 return False
699 699
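# Token matching above is two-phase: non-expired tokens for the requested
# roles are split into plain-text and hashed (crypto_backend.ENC_PREF-prefixed)
# buckets; plain tokens are compared directly, hashed ones via
# crypto_backend.hash_check(), and repo-scoped tokens are skipped unless
# scope_repo_id matches the token's repo_id.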
700 700 @property
701 701 def ip_addresses(self):
702 702 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
703 703 return [x.ip_addr for x in ret]
704 704
705 705 @property
706 706 def username_and_name(self):
707 707 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
708 708
709 709 @property
710 710 def username_or_name_or_email(self):
711 711 full_name = self.full_name if self.full_name != ' ' else None
712 712 return self.username or full_name or self.email
713 713
714 714 @property
715 715 def full_name(self):
716 716 return '%s %s' % (self.first_name, self.last_name)
717 717
718 718 @property
719 719 def full_name_or_username(self):
720 720 return ('%s %s' % (self.first_name, self.last_name)
721 721 if (self.first_name and self.last_name) else self.username)
722 722
723 723 @property
724 724 def full_contact(self):
725 725 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
726 726
727 727 @property
728 728 def short_contact(self):
729 729 return '%s %s' % (self.first_name, self.last_name)
730 730
731 731 @property
732 732 def is_admin(self):
733 733 return self.admin
734 734
735 735 @property
736 736 def AuthUser(self):
737 737 """
738 738 Returns instance of AuthUser for this user
739 739 """
740 740 from rhodecode.lib.auth import AuthUser
741 741 return AuthUser(user_id=self.user_id, username=self.username)
742 742
743 743 @hybrid_property
744 744 def user_data(self):
745 745 if not self._user_data:
746 746 return {}
747 747
748 748 try:
749 749 return json.loads(self._user_data)
750 750 except TypeError:
751 751 return {}
752 752
753 753 @user_data.setter
754 754 def user_data(self, val):
755 755 if not isinstance(val, dict):
756 756 raise Exception('user_data must be dict, got %s' % type(val))
757 757 try:
758 758 self._user_data = json.dumps(val)
759 759 except Exception:
760 760 log.error(traceback.format_exc())
761 761
762 762 @classmethod
763 763 def get_by_username(cls, username, case_insensitive=False,
764 764 cache=False, identity_cache=False):
765 765 session = Session()
766 766
767 767 if case_insensitive:
768 768 q = cls.query().filter(
769 769 func.lower(cls.username) == func.lower(username))
770 770 else:
771 771 q = cls.query().filter(cls.username == username)
772 772
773 773 if cache:
774 774 if identity_cache:
775 775 val = cls.identity_cache(session, 'username', username)
776 776 if val:
777 777 return val
778 778 else:
779 779 cache_key = "get_user_by_name_%s" % _hash_key(username)
780 780 q = q.options(
781 781 FromCache("sql_cache_short", cache_key))
782 782
783 783 return q.scalar()
784 784
785 785 @classmethod
786 786 def get_by_auth_token(cls, auth_token, cache=False):
787 787 q = UserApiKeys.query()\
788 788 .filter(UserApiKeys.api_key == auth_token)\
789 789 .filter(or_(UserApiKeys.expires == -1,
790 790 UserApiKeys.expires >= time.time()))
791 791 if cache:
792 792 q = q.options(
793 793 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
794 794
795 795 match = q.first()
796 796 if match:
797 797 return match.user
798 798
799 799 @classmethod
800 800 def get_by_email(cls, email, case_insensitive=False, cache=False):
801 801
802 802 if case_insensitive:
803 803 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
804 804
805 805 else:
806 806 q = cls.query().filter(cls.email == email)
807 807
808 808 email_key = _hash_key(email)
809 809 if cache:
810 810 q = q.options(
811 811 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
812 812
813 813 ret = q.scalar()
814 814 if ret is None:
815 815 q = UserEmailMap.query()
816 816 # try fetching in alternate email map
817 817 if case_insensitive:
818 818 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
819 819 else:
820 820 q = q.filter(UserEmailMap.email == email)
821 821 q = q.options(joinedload(UserEmailMap.user))
822 822 if cache:
823 823 q = q.options(
824 824 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
825 825 ret = getattr(q.scalar(), 'user', None)
826 826
827 827 return ret
828 828
829 829 @classmethod
830 830 def get_from_cs_author(cls, author):
831 831 """
832 832 Tries to get User objects out of commit author string
833 833
834 834 :param author:
835 835 """
836 836 from rhodecode.lib.helpers import email, author_name
837 837 # If a valid email is in the attribute passed, see if it's in the system
838 838 _email = email(author)
839 839 if _email:
840 840 user = cls.get_by_email(_email, case_insensitive=True)
841 841 if user:
842 842 return user
843 843 # Maybe we can match by username?
844 844 _author = author_name(author)
845 845 user = cls.get_by_username(_author, case_insensitive=True)
846 846 if user:
847 847 return user
848 848
849 849 def update_userdata(self, **kwargs):
850 850 usr = self
851 851 old = usr.user_data
852 852 old.update(**kwargs)
853 853 usr.user_data = old
854 854 Session().add(usr)
855 855 log.debug('updated userdata with %s', kwargs)
856 856
857 857 def update_lastlogin(self):
858 858 """Update user lastlogin"""
859 859 self.last_login = datetime.datetime.now()
860 860 Session().add(self)
861 861 log.debug('updated user %s lastlogin', self.username)
862 862
863 863 def update_lastactivity(self):
864 864 """Update user lastactivity"""
865 865 self.last_activity = datetime.datetime.now()
866 866 Session().add(self)
867 867 log.debug('updated user %s lastactivity', self.username)
868 868
869 869 def update_password(self, new_password):
870 870 from rhodecode.lib.auth import get_crypt_password
871 871
872 872 self.password = get_crypt_password(new_password)
873 873 Session().add(self)
874 874
875 875 @classmethod
876 876 def get_first_super_admin(cls):
877 877 user = User.query().filter(User.admin == true()).first()
878 878 if user is None:
879 879 raise Exception('FATAL: Missing administrative account!')
880 880 return user
881 881
882 882 @classmethod
883 883 def get_all_super_admins(cls):
884 884 """
885 885 Returns all admin accounts sorted by username
886 886 """
887 887 return User.query().filter(User.admin == true())\
888 888 .order_by(User.username.asc()).all()
889 889
890 890 @classmethod
891 891 def get_default_user(cls, cache=False, refresh=False):
892 892 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
893 893 if user is None:
894 894 raise Exception('FATAL: Missing default account!')
895 895 if refresh:
896 896 # The default user might be based on outdated state which
897 897 # has been loaded from the cache.
898 898 # A call to refresh() ensures that the
899 899 # latest state from the database is used.
900 900 Session().refresh(user)
901 901 return user
902 902
903 903 def _get_default_perms(self, user, suffix=''):
904 904 from rhodecode.model.permission import PermissionModel
905 905 return PermissionModel().get_default_perms(user.user_perms, suffix)
906 906
907 907 def get_default_perms(self, suffix=''):
908 908 return self._get_default_perms(self, suffix)
909 909
910 910 def get_api_data(self, include_secrets=False, details='full'):
911 911 """
912 912 Common function for generating user related data for API
913 913
914 914 :param include_secrets: By default secrets in the API data will be replaced
915 915 by a placeholder value to prevent exposing this data by accident. In case
916 916 this data shall be exposed, set this flag to ``True``.
917 917
918 918 :param details: details can be 'basic' or 'full'; 'basic' gives only a subset of
919 919 the available user information, including user_id, name and emails.
920 920 """
921 921 user = self
922 922 user_data = self.user_data
923 923 data = {
924 924 'user_id': user.user_id,
925 925 'username': user.username,
926 926 'firstname': user.name,
927 927 'lastname': user.lastname,
928 928 'email': user.email,
929 929 'emails': user.emails,
930 930 }
931 931 if details == 'basic':
932 932 return data
933 933
934 934 api_key_length = 40
935 935 api_key_replacement = '*' * api_key_length
936 936
937 937 extras = {
938 938 'api_keys': [api_key_replacement],
939 939 'auth_tokens': [api_key_replacement],
940 940 'active': user.active,
941 941 'admin': user.admin,
942 942 'extern_type': user.extern_type,
943 943 'extern_name': user.extern_name,
944 944 'last_login': user.last_login,
945 945 'last_activity': user.last_activity,
946 946 'ip_addresses': user.ip_addresses,
947 947 'language': user_data.get('language')
948 948 }
949 949 data.update(extras)
950 950
951 951 if include_secrets:
952 952 data['api_keys'] = user.auth_tokens
953 953 data['auth_tokens'] = user.extra_auth_tokens
954 954 return data
955 955
956 956 def __json__(self):
957 957 data = {
958 958 'full_name': self.full_name,
959 959 'full_name_or_username': self.full_name_or_username,
960 960 'short_contact': self.short_contact,
961 961 'full_contact': self.full_contact,
962 962 }
963 963 data.update(self.get_api_data())
964 964 return data
965 965
966 966
967 967 class UserApiKeys(Base, BaseModel):
968 968 __tablename__ = 'user_api_keys'
969 969 __table_args__ = (
970 970 Index('uak_api_key_idx', 'api_key'),
971 971 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
972 972 UniqueConstraint('api_key'),
973 973 {'extend_existing': True, 'mysql_engine': 'InnoDB',
974 974 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
975 975 )
976 976 __mapper_args__ = {}
977 977
978 978 # ApiKey role
979 979 ROLE_ALL = 'token_role_all'
980 980 ROLE_HTTP = 'token_role_http'
981 981 ROLE_VCS = 'token_role_vcs'
982 982 ROLE_API = 'token_role_api'
983 983 ROLE_FEED = 'token_role_feed'
984 984 ROLE_PASSWORD_RESET = 'token_password_reset'
985 985
986 986 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
987 987
988 988 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
989 989 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
990 990 api_key = Column("api_key", String(255), nullable=False, unique=True)
991 991 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
992 992 expires = Column('expires', Float(53), nullable=False)
993 993 role = Column('role', String(255), nullable=True)
994 994 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
995 995
996 996 # scope columns
997 997 repo_id = Column(
998 998 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
999 999 nullable=True, unique=None, default=None)
1000 1000 repo = relationship('Repository', lazy='joined')
1001 1001
1002 1002 repo_group_id = Column(
1003 1003 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1004 1004 nullable=True, unique=None, default=None)
1005 1005 repo_group = relationship('RepoGroup', lazy='joined')
1006 1006
1007 1007 user = relationship('User', lazy='joined')
1008 1008
1009 1009 def __unicode__(self):
1010 1010 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1011 1011
1012 1012 def __json__(self):
1013 1013 data = {
1014 1014 'auth_token': self.api_key,
1015 1015 'role': self.role,
1016 1016 'scope': self.scope_humanized,
1017 1017 'expired': self.expired
1018 1018 }
1019 1019 return data
1020 1020
1021 1021 def get_api_data(self, include_secrets=False):
1022 1022 data = self.__json__()
1023 1023 if include_secrets:
1024 1024 return data
1025 1025 else:
1026 1026 data['auth_token'] = self.token_obfuscated
1027 1027 return data
1028 1028
1029 @hybrid_property
1030 def description_safe(self):
1031 from rhodecode.lib import helpers as h
1032 return h.escape(self.description)
1033
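# description_safe (added by this commit here and on UserIpMap, UserGroup,
# Repository and the repo group model) lets callers render the user-supplied
# description without XSS risk: h.escape turns e.g. u'<script>' into
# u'&lt;script&gt;'.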
1029 1034 @property
1030 1035 def expired(self):
1031 1036 if self.expires == -1:
1032 1037 return False
1033 1038 return time.time() > self.expires
1034 1039
1035 1040 @classmethod
1036 1041 def _get_role_name(cls, role):
1037 1042 return {
1038 1043 cls.ROLE_ALL: _('all'),
1039 1044 cls.ROLE_HTTP: _('http/web interface'),
1040 1045 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1041 1046 cls.ROLE_API: _('api calls'),
1042 1047 cls.ROLE_FEED: _('feed access'),
1043 1048 }.get(role, role)
1044 1049
1045 1050 @property
1046 1051 def role_humanized(self):
1047 1052 return self._get_role_name(self.role)
1048 1053
1049 1054 def _get_scope(self):
1050 1055 if self.repo:
1051 1056 return repr(self.repo)
1052 1057 if self.repo_group:
1053 1058 return repr(self.repo_group) + ' (recursive)'
1054 1059 return 'global'
1055 1060
1056 1061 @property
1057 1062 def scope_humanized(self):
1058 1063 return self._get_scope()
1059 1064
1060 1065 @property
1061 1066 def token_obfuscated(self):
1062 1067 if self.api_key:
1063 1068 return self.api_key[:4] + "****"
1064 1069
1065 1070
1066 1071 class UserEmailMap(Base, BaseModel):
1067 1072 __tablename__ = 'user_email_map'
1068 1073 __table_args__ = (
1069 1074 Index('uem_email_idx', 'email'),
1070 1075 UniqueConstraint('email'),
1071 1076 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1072 1077 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1073 1078 )
1074 1079 __mapper_args__ = {}
1075 1080
1076 1081 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1077 1082 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1078 1083 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1079 1084 user = relationship('User', lazy='joined')
1080 1085
1081 1086 @validates('_email')
1082 1087 def validate_email(self, key, email):
1083 1088 # check that this email is not the main one
1084 1089 main_email = Session().query(User).filter(User.email == email).scalar()
1085 1090 if main_email is not None:
1086 1091 raise AttributeError('email %s is already present in user table' % email)
1087 1092 return email
1088 1093
1089 1094 @hybrid_property
1090 1095 def email(self):
1091 1096 return self._email
1092 1097
1093 1098 @email.setter
1094 1099 def email(self, val):
1095 1100 self._email = val.lower() if val else None
1096 1101
1097 1102
1098 1103 class UserIpMap(Base, BaseModel):
1099 1104 __tablename__ = 'user_ip_map'
1100 1105 __table_args__ = (
1101 1106 UniqueConstraint('user_id', 'ip_addr'),
1102 1107 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1103 1108 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1104 1109 )
1105 1110 __mapper_args__ = {}
1106 1111
1107 1112 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1108 1113 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1109 1114 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1110 1115 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1111 1116 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1112 1117 user = relationship('User', lazy='joined')
1113 1118
1119 @hybrid_property
1120 def description_safe(self):
1121 from rhodecode.lib import helpers as h
1122 return h.escape(self.description)
1123
1114 1124 @classmethod
1115 1125 def _get_ip_range(cls, ip_addr):
1116 1126 net = ipaddress.ip_network(ip_addr, strict=False)
1117 1127 return [str(net.network_address), str(net.broadcast_address)]
1118 1128
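# Example (stdlib ipaddress behaviour): _get_ip_range(u'192.168.1.0/24')
# returns ['192.168.1.0', '192.168.1.255']; a single address such as
# u'10.0.0.1' is treated as a /32 and yields ['10.0.0.1', '10.0.0.1'].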
1119 1129 def __json__(self):
1120 1130 return {
1121 1131 'ip_addr': self.ip_addr,
1122 1132 'ip_range': self._get_ip_range(self.ip_addr),
1123 1133 }
1124 1134
1125 1135 def __unicode__(self):
1126 1136 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1127 1137 self.user_id, self.ip_addr)
1128 1138
1129 1139
1130 1140 class UserLog(Base, BaseModel):
1131 1141 __tablename__ = 'user_logs'
1132 1142 __table_args__ = (
1133 1143 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1134 1144 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1135 1145 )
1136 1146 VERSION_1 = 'v1'
1137 1147 VERSION_2 = 'v2'
1138 1148 VERSIONS = [VERSION_1, VERSION_2]
1139 1149
1140 1150 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1141 1151 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1142 1152 username = Column("username", String(255), nullable=True, unique=None, default=None)
1143 1153 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
1144 1154 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1145 1155 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1146 1156 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1147 1157 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1148 1158
1149 1159 version = Column("version", String(255), nullable=True, default=VERSION_1)
1150 1160 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
1151 1161 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
1152 1162
1153 1163 def __unicode__(self):
1154 1164 return u"<%s('id:%s:%s')>" % (
1155 1165 self.__class__.__name__, self.repository_name, self.action)
1156 1166
1157 1167 def __json__(self):
1158 1168 return {
1159 1169 'user_id': self.user_id,
1160 1170 'username': self.username,
1161 1171 'repository_id': self.repository_id,
1162 1172 'repository_name': self.repository_name,
1163 1173 'user_ip': self.user_ip,
1164 1174 'action_date': self.action_date,
1165 1175 'action': self.action,
1166 1176 }
1167 1177
1168 1178 @property
1169 1179 def action_as_day(self):
1170 1180 return datetime.date(*self.action_date.timetuple()[:3])
1171 1181
1172 1182 user = relationship('User')
1173 1183 repository = relationship('Repository', cascade='')
1174 1184
1175 1185
1176 1186 class UserGroup(Base, BaseModel):
1177 1187 __tablename__ = 'users_groups'
1178 1188 __table_args__ = (
1179 1189 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1180 1190 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1181 1191 )
1182 1192
1183 1193 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1184 1194 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1185 1195 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1186 1196 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1187 1197 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1188 1198 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1189 1199 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1190 1200 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1191 1201
1192 1202 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1193 1203 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1194 1204 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1195 1205 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1196 1206 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1197 1207 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1198 1208
1199 1209 user = relationship('User')
1200 1210
1201 1211 @hybrid_property
1212 def description_safe(self):
1213 from rhodecode.lib import helpers as h
1214 return h.escape(self.user_group_description)
1215
1216 @hybrid_property
1202 1217 def group_data(self):
1203 1218 if not self._group_data:
1204 1219 return {}
1205 1220
1206 1221 try:
1207 1222 return json.loads(self._group_data)
1208 1223 except TypeError:
1209 1224 return {}
1210 1225
1211 1226 @group_data.setter
1212 1227 def group_data(self, val):
1213 1228 try:
1214 1229 self._group_data = json.dumps(val)
1215 1230 except Exception:
1216 1231 log.error(traceback.format_exc())
1217 1232
1218 1233 def __unicode__(self):
1219 1234 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1220 1235 self.users_group_id,
1221 1236 self.users_group_name)
1222 1237
1223 1238 @classmethod
1224 1239 def get_by_group_name(cls, group_name, cache=False,
1225 1240 case_insensitive=False):
1226 1241 if case_insensitive:
1227 1242 q = cls.query().filter(func.lower(cls.users_group_name) ==
1228 1243 func.lower(group_name))
1229 1244
1230 1245 else:
1231 1246 q = cls.query().filter(cls.users_group_name == group_name)
1232 1247 if cache:
1233 1248 q = q.options(
1234 1249 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1235 1250 return q.scalar()
1236 1251
1237 1252 @classmethod
1238 1253 def get(cls, user_group_id, cache=False):
1239 1254 user_group = cls.query()
1240 1255 if cache:
1241 1256 user_group = user_group.options(
1242 1257 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1243 1258 return user_group.get(user_group_id)
1244 1259
1245 1260 def permissions(self, with_admins=True, with_owner=True):
1246 1261 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1247 1262 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1248 1263 joinedload(UserUserGroupToPerm.user),
1249 1264 joinedload(UserUserGroupToPerm.permission),)
1250 1265
1251 1266 # get owners, admins and their permissions. We re-write the objects
1252 1267 # from sqlalchemy into detached AttributeDict copies because the
1253 1268 # sqlalchemy session keeps a single shared instance, so changing one
1254 1269 # object would propagate to all others. Without the copy, if an admin
1255 1270 # is also the owner, setting admin_row would affect both entries
1256 1271 perm_rows = []
1257 1272 for _usr in q.all():
1258 1273 usr = AttributeDict(_usr.user.get_dict())
1259 1274 usr.permission = _usr.permission.permission_name
1260 1275 perm_rows.append(usr)
1261 1276
1262 1277 # filter the perm rows by 'default' first and then sort them by
1263 1278 # admin,write,read,none permissions sorted again alphabetically in
1264 1279 # each group
1265 1280 perm_rows = sorted(perm_rows, key=display_sort)
1266 1281
1267 1282 _admin_perm = 'usergroup.admin'
1268 1283 owner_row = []
1269 1284 if with_owner:
1270 1285 usr = AttributeDict(self.user.get_dict())
1271 1286 usr.owner_row = True
1272 1287 usr.permission = _admin_perm
1273 1288 owner_row.append(usr)
1274 1289
1275 1290 super_admin_rows = []
1276 1291 if with_admins:
1277 1292 for usr in User.get_all_super_admins():
1278 1293 # if this admin is also owner, don't double the record
1279 1294 if usr.user_id == owner_row[0].user_id:
1280 1295 owner_row[0].admin_row = True
1281 1296 else:
1282 1297 usr = AttributeDict(usr.get_dict())
1283 1298 usr.admin_row = True
1284 1299 usr.permission = _admin_perm
1285 1300 super_admin_rows.append(usr)
1286 1301
1287 1302 return super_admin_rows + owner_row + perm_rows
1288 1303
1289 1304 def permission_user_groups(self):
1290 1305 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1291 1306 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1292 1307 joinedload(UserGroupUserGroupToPerm.target_user_group),
1293 1308 joinedload(UserGroupUserGroupToPerm.permission),)
1294 1309
1295 1310 perm_rows = []
1296 1311 for _user_group in q.all():
1297 1312 usr = AttributeDict(_user_group.user_group.get_dict())
1298 1313 usr.permission = _user_group.permission.permission_name
1299 1314 perm_rows.append(usr)
1300 1315
1301 1316 return perm_rows
1302 1317
1303 1318 def _get_default_perms(self, user_group, suffix=''):
1304 1319 from rhodecode.model.permission import PermissionModel
1305 1320 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1306 1321
1307 1322 def get_default_perms(self, suffix=''):
1308 1323 return self._get_default_perms(self, suffix)
1309 1324
1310 1325 def get_api_data(self, with_group_members=True, include_secrets=False):
1311 1326 """
1312 1327 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1313 1328 basically forwarded.
1314 1329
1315 1330 """
1316 1331 user_group = self
1317 1332 data = {
1318 1333 'users_group_id': user_group.users_group_id,
1319 1334 'group_name': user_group.users_group_name,
1320 1335 'group_description': user_group.user_group_description,
1321 1336 'active': user_group.users_group_active,
1322 1337 'owner': user_group.user.username,
1323 1338 'owner_email': user_group.user.email,
1324 1339 }
1325 1340
1326 1341 if with_group_members:
1327 1342 users = []
1328 1343 for user in user_group.members:
1329 1344 user = user.user
1330 1345 users.append(user.get_api_data(include_secrets=include_secrets))
1331 1346 data['users'] = users
1332 1347
1333 1348 return data
1334 1349
1335 1350
1336 1351 class UserGroupMember(Base, BaseModel):
1337 1352 __tablename__ = 'users_groups_members'
1338 1353 __table_args__ = (
1339 1354 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1340 1355 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1341 1356 )
1342 1357
1343 1358 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1344 1359 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1345 1360 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1346 1361
1347 1362 user = relationship('User', lazy='joined')
1348 1363 users_group = relationship('UserGroup')
1349 1364
1350 1365 def __init__(self, gr_id='', u_id=''):
1351 1366 self.users_group_id = gr_id
1352 1367 self.user_id = u_id
1353 1368
1354 1369
1355 1370 class RepositoryField(Base, BaseModel):
1356 1371 __tablename__ = 'repositories_fields'
1357 1372 __table_args__ = (
1358 1373 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1359 1374 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1360 1375 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1361 1376 )
1362 1377 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1363 1378
1364 1379 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1365 1380 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1366 1381 field_key = Column("field_key", String(250))
1367 1382 field_label = Column("field_label", String(1024), nullable=False)
1368 1383 field_value = Column("field_value", String(10000), nullable=False)
1369 1384 field_desc = Column("field_desc", String(1024), nullable=False)
1370 1385 field_type = Column("field_type", String(255), nullable=False, unique=None)
1371 1386 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1372 1387
1373 1388 repository = relationship('Repository')
1374 1389
1375 1390 @property
1376 1391 def field_key_prefixed(self):
1377 1392 return 'ex_%s' % self.field_key
1378 1393
1379 1394 @classmethod
1380 1395 def un_prefix_key(cls, key):
1381 1396 if key.startswith(cls.PREFIX):
1382 1397 return key[len(cls.PREFIX):]
1383 1398 return key
1384 1399
1385 1400 @classmethod
1386 1401 def get_by_key_name(cls, key, repo):
1387 1402 row = cls.query()\
1388 1403 .filter(cls.repository == repo)\
1389 1404 .filter(cls.field_key == key).scalar()
1390 1405 return row
1391 1406
1392 1407
1393 1408 class Repository(Base, BaseModel):
1394 1409 __tablename__ = 'repositories'
1395 1410 __table_args__ = (
1396 1411 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1397 1412 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1398 1413 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1399 1414 )
1400 1415 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1401 1416 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1402 1417
1403 1418 STATE_CREATED = 'repo_state_created'
1404 1419 STATE_PENDING = 'repo_state_pending'
1405 1420 STATE_ERROR = 'repo_state_error'
1406 1421
1407 1422 LOCK_AUTOMATIC = 'lock_auto'
1408 1423 LOCK_API = 'lock_api'
1409 1424 LOCK_WEB = 'lock_web'
1410 1425 LOCK_PULL = 'lock_pull'
1411 1426
1412 1427 NAME_SEP = URL_SEP
1413 1428
1414 1429 repo_id = Column(
1415 1430 "repo_id", Integer(), nullable=False, unique=True, default=None,
1416 1431 primary_key=True)
1417 1432 _repo_name = Column(
1418 1433 "repo_name", Text(), nullable=False, default=None)
1419 1434 _repo_name_hash = Column(
1420 1435 "repo_name_hash", String(255), nullable=False, unique=True)
1421 1436 repo_state = Column("repo_state", String(255), nullable=True)
1422 1437
1423 1438 clone_uri = Column(
1424 1439 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1425 1440 default=None)
1426 1441 repo_type = Column(
1427 1442 "repo_type", String(255), nullable=False, unique=False, default=None)
1428 1443 user_id = Column(
1429 1444 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1430 1445 unique=False, default=None)
1431 1446 private = Column(
1432 1447 "private", Boolean(), nullable=True, unique=None, default=None)
1433 1448 enable_statistics = Column(
1434 1449 "statistics", Boolean(), nullable=True, unique=None, default=True)
1435 1450 enable_downloads = Column(
1436 1451 "downloads", Boolean(), nullable=True, unique=None, default=True)
1437 1452 description = Column(
1438 1453 "description", String(10000), nullable=True, unique=None, default=None)
1439 1454 created_on = Column(
1440 1455 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1441 1456 default=datetime.datetime.now)
1442 1457 updated_on = Column(
1443 1458 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1444 1459 default=datetime.datetime.now)
1445 1460 _landing_revision = Column(
1446 1461 "landing_revision", String(255), nullable=False, unique=False,
1447 1462 default=None)
1448 1463 enable_locking = Column(
1449 1464 "enable_locking", Boolean(), nullable=False, unique=None,
1450 1465 default=False)
1451 1466 _locked = Column(
1452 1467 "locked", String(255), nullable=True, unique=False, default=None)
1453 1468 _changeset_cache = Column(
1454 1469 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1455 1470
1456 1471 fork_id = Column(
1457 1472 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1458 1473 nullable=True, unique=False, default=None)
1459 1474 group_id = Column(
1460 1475 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1461 1476 unique=False, default=None)
1462 1477
1463 1478 user = relationship('User', lazy='joined')
1464 1479 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1465 1480 group = relationship('RepoGroup', lazy='joined')
1466 1481 repo_to_perm = relationship(
1467 1482 'UserRepoToPerm', cascade='all',
1468 1483 order_by='UserRepoToPerm.repo_to_perm_id')
1469 1484 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1470 1485 stats = relationship('Statistics', cascade='all', uselist=False)
1471 1486
1472 1487 followers = relationship(
1473 1488 'UserFollowing',
1474 1489 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1475 1490 cascade='all')
1476 1491 extra_fields = relationship(
1477 1492 'RepositoryField', cascade="all, delete, delete-orphan")
1478 1493 logs = relationship('UserLog')
1479 1494 comments = relationship(
1480 1495 'ChangesetComment', cascade="all, delete, delete-orphan")
1481 1496 pull_requests_source = relationship(
1482 1497 'PullRequest',
1483 1498 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1484 1499 cascade="all, delete, delete-orphan")
1485 1500 pull_requests_target = relationship(
1486 1501 'PullRequest',
1487 1502 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1488 1503 cascade="all, delete, delete-orphan")
1489 1504 ui = relationship('RepoRhodeCodeUi', cascade="all")
1490 1505 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1491 1506 integrations = relationship('Integration',
1492 1507 cascade="all, delete, delete-orphan")
1493 1508
1494 1509 def __unicode__(self):
1495 1510 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1496 1511 safe_unicode(self.repo_name))
1497 1512
1498 1513 @hybrid_property
1514 def description_safe(self):
1515 from rhodecode.lib import helpers as h
1516 return h.escape(self.description)
1517
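# Illustrative sketch only (not part of the model): description_safe exists so
# that HTML metacharacters in a user-supplied description are escaped before
# rendering. Hypothetical values:
#
#   >>> repo.description = u'<script>alert(1)</script> main repo'
#   >>> repo.description_safe
#   u'&lt;script&gt;alert(1)&lt;/script&gt; main repo'   # roughly; exact return
#                                                        # type depends on h.escape
#
# Templates should prefer description_safe over the raw description field.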
1518 @hybrid_property
1499 1519 def landing_rev(self):
1500 1520 # always should return [rev_type, rev]
1501 1521 if self._landing_revision:
1502 1522 _rev_info = self._landing_revision.split(':')
1503 1523 if len(_rev_info) < 2:
1504 1524 _rev_info.insert(0, 'rev')
1505 1525 return [_rev_info[0], _rev_info[1]]
1506 1526 return [None, None]
1507 1527
1508 1528 @landing_rev.setter
1509 1529 def landing_rev(self, val):
1510 1530 if ':' not in val:
1511 1531 raise ValueError('value must be delimited with `:` and consist '
1512 1532 'of <rev_type>:<rev>, got %s instead' % val)
1513 1533 self._landing_revision = val
1514 1534
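# Illustrative sketch of the landing_rev storage format (hypothetical values):
#
#   >>> repo.landing_rev = 'branch:default'    # stored as '<rev_type>:<rev>'
#   >>> repo.landing_rev
#   ['branch', 'default']
#   >>> repo.landing_rev = 'default'           # no ':' -> rejected by the setter
#   ValueError: value must be delimited with `:` and consist of <rev_type>:<rev>, got default instead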
1515 1535 @hybrid_property
1516 1536 def locked(self):
1517 1537 if self._locked:
1518 1538 user_id, timelocked, reason = self._locked.split(':')
1519 1539 lock_values = int(user_id), timelocked, reason
1520 1540 else:
1521 1541 lock_values = [None, None, None]
1522 1542 return lock_values
1523 1543
1524 1544 @locked.setter
1525 1545 def locked(self, val):
1526 1546 if val and isinstance(val, (list, tuple)):
1527 1547 self._locked = ':'.join(map(str, val))
1528 1548 else:
1529 1549 self._locked = None
1530 1550
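# Illustrative sketch of the lock storage format (hypothetical values):
#
#   >>> repo.locked = [2, 1484147633.4, Repository.LOCK_API]
#   >>> repo._locked                  # stored as 'user_id:unix_time:reason'
#   '2:1484147633.4:lock_api'
#   >>> repo.locked
#   (2, '1484147633.4', 'lock_api')   # [None, None, None] when not locked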
1531 1551 @hybrid_property
1532 1552 def changeset_cache(self):
1533 1553 from rhodecode.lib.vcs.backends.base import EmptyCommit
1534 1554 dummy = EmptyCommit().__json__()
1535 1555 if not self._changeset_cache:
1536 1556 return dummy
1537 1557 try:
1538 1558 return json.loads(self._changeset_cache)
1539 1559 except TypeError:
1540 1560 return dummy
1541 1561 except Exception:
1542 1562 log.error(traceback.format_exc())
1543 1563 return dummy
1544 1564
1545 1565 @changeset_cache.setter
1546 1566 def changeset_cache(self, val):
1547 1567 try:
1548 1568 self._changeset_cache = json.dumps(val)
1549 1569 except Exception:
1550 1570 log.error(traceback.format_exc())
1551 1571
1552 1572 @hybrid_property
1553 1573 def repo_name(self):
1554 1574 return self._repo_name
1555 1575
1556 1576 @repo_name.setter
1557 1577 def repo_name(self, value):
1558 1578 self._repo_name = value
1559 1579 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1560 1580
1561 1581 @classmethod
1562 1582 def normalize_repo_name(cls, repo_name):
1563 1583 """
1564 1584 Normalizes an OS-specific repo_name to the format stored internally in the
1565 1585 database, using URL_SEP
1566 1586
1567 1587 :param cls:
1568 1588 :param repo_name:
1569 1589 """
1570 1590 return cls.NAME_SEP.join(repo_name.split(os.sep))
1571 1591
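# Illustrative sketch (hypothetical path; Windows-style os.sep assumed):
#
#   >>> Repository.normalize_repo_name('group\\subgroup\\my-repo')
#   'group/subgroup/my-repo'
#
# On POSIX systems os.sep is already '/', so the name passes through unchanged.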
1572 1592 @classmethod
1573 1593 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1574 1594 session = Session()
1575 1595 q = session.query(cls).filter(cls.repo_name == repo_name)
1576 1596
1577 1597 if cache:
1578 1598 if identity_cache:
1579 1599 val = cls.identity_cache(session, 'repo_name', repo_name)
1580 1600 if val:
1581 1601 return val
1582 1602 else:
1583 1603 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1584 1604 q = q.options(
1585 1605 FromCache("sql_cache_short", cache_key))
1586 1606
1587 1607 return q.scalar()
1588 1608
1589 1609 @classmethod
1590 1610 def get_by_full_path(cls, repo_full_path):
1591 1611 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1592 1612 repo_name = cls.normalize_repo_name(repo_name)
1593 1613 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1594 1614
1595 1615 @classmethod
1596 1616 def get_repo_forks(cls, repo_id):
1597 1617 return cls.query().filter(Repository.fork_id == repo_id)
1598 1618
1599 1619 @classmethod
1600 1620 def base_path(cls):
1601 1621 """
1602 1622 Returns the base path where all repos are stored
1603 1623
1604 1624 :param cls:
1605 1625 """
1606 1626 q = Session().query(RhodeCodeUi)\
1607 1627 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1608 1628 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1609 1629 return q.one().ui_value
1610 1630
1611 1631 @classmethod
1612 1632 def is_valid(cls, repo_name):
1613 1633 """
1614 1634 returns True if given repo name is a valid filesystem repository
1615 1635
1616 1636 :param cls:
1617 1637 :param repo_name:
1618 1638 """
1619 1639 from rhodecode.lib.utils import is_valid_repo
1620 1640
1621 1641 return is_valid_repo(repo_name, cls.base_path())
1622 1642
1623 1643 @classmethod
1624 1644 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1625 1645 case_insensitive=True):
1626 1646 q = Repository.query()
1627 1647
1628 1648 if not isinstance(user_id, Optional):
1629 1649 q = q.filter(Repository.user_id == user_id)
1630 1650
1631 1651 if not isinstance(group_id, Optional):
1632 1652 q = q.filter(Repository.group_id == group_id)
1633 1653
1634 1654 if case_insensitive:
1635 1655 q = q.order_by(func.lower(Repository.repo_name))
1636 1656 else:
1637 1657 q = q.order_by(Repository.repo_name)
1638 1658 return q.all()
1639 1659
1640 1660 @property
1641 1661 def forks(self):
1642 1662 """
1643 1663 Return forks of this repo
1644 1664 """
1645 1665 return Repository.get_repo_forks(self.repo_id)
1646 1666
1647 1667 @property
1648 1668 def parent(self):
1649 1669 """
1650 1670 Returns fork parent
1651 1671 """
1652 1672 return self.fork
1653 1673
1654 1674 @property
1655 1675 def just_name(self):
1656 1676 return self.repo_name.split(self.NAME_SEP)[-1]
1657 1677
1658 1678 @property
1659 1679 def groups_with_parents(self):
1660 1680 groups = []
1661 1681 if self.group is None:
1662 1682 return groups
1663 1683
1664 1684 cur_gr = self.group
1665 1685 groups.insert(0, cur_gr)
1666 1686 while 1:
1667 1687 gr = getattr(cur_gr, 'parent_group', None)
1668 1688 cur_gr = cur_gr.parent_group
1669 1689 if gr is None:
1670 1690 break
1671 1691 groups.insert(0, gr)
1672 1692
1673 1693 return groups
1674 1694
1675 1695 @property
1676 1696 def groups_and_repo(self):
1677 1697 return self.groups_with_parents, self
1678 1698
1679 1699 @LazyProperty
1680 1700 def repo_path(self):
1681 1701 """
1682 1702 Returns the base full path for this repository, i.e. where it actually
1683 1703 exists on the filesystem
1684 1704 """
1685 1705 q = Session().query(RhodeCodeUi).filter(
1686 1706 RhodeCodeUi.ui_key == self.NAME_SEP)
1687 1707 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1688 1708 return q.one().ui_value
1689 1709
1690 1710 @property
1691 1711 def repo_full_path(self):
1692 1712 p = [self.repo_path]
1693 1713 # we need to split the name by / since this is how we store the
1694 1714 # names in the database, but that eventually needs to be converted
1695 1715 # into a valid system path
1696 1716 p += self.repo_name.split(self.NAME_SEP)
1697 1717 return os.path.join(*map(safe_unicode, p))
1698 1718
1699 1719 @property
1700 1720 def cache_keys(self):
1701 1721 """
1702 1722 Returns associated cache keys for that repo
1703 1723 """
1704 1724 return CacheKey.query()\
1705 1725 .filter(CacheKey.cache_args == self.repo_name)\
1706 1726 .order_by(CacheKey.cache_key)\
1707 1727 .all()
1708 1728
1709 1729 def get_new_name(self, repo_name):
1710 1730 """
1711 1731 returns new full repository name based on the assigned group and the new name
1712 1732
1713 1733 :param repo_name:
1714 1734 """
1715 1735 path_prefix = self.group.full_path_splitted if self.group else []
1716 1736 return self.NAME_SEP.join(path_prefix + [repo_name])
1717 1737
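# Illustrative sketch (hypothetical repo living inside a 'team/tools' group):
#
#   >>> repo.group.full_path_splitted
#   ['team', 'tools']
#   >>> repo.get_new_name('renamed-repo')
#   'team/tools/renamed-repo'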
1718 1738 @property
1719 1739 def _config(self):
1720 1740 """
1721 1741 Returns db based config object.
1722 1742 """
1723 1743 from rhodecode.lib.utils import make_db_config
1724 1744 return make_db_config(clear_session=False, repo=self)
1725 1745
1726 1746 def permissions(self, with_admins=True, with_owner=True):
1727 1747 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1728 1748 q = q.options(joinedload(UserRepoToPerm.repository),
1729 1749 joinedload(UserRepoToPerm.user),
1730 1750 joinedload(UserRepoToPerm.permission),)
1731 1751
1732 1752 # get owners, admins and their permissions. We do a trick of re-writing
1733 1753 # sqlalchemy objects into plain AttributeDict objects, because the sqlalchemy
1734 1754 # session keeps a global reference and changing one object would propagate to
1735 1755 # all others. This means that if an admin is also the owner, setting admin_row
1736 1756 # on one row would otherwise propagate to both objects
1737 1757 perm_rows = []
1738 1758 for _usr in q.all():
1739 1759 usr = AttributeDict(_usr.user.get_dict())
1740 1760 usr.permission = _usr.permission.permission_name
1741 1761 perm_rows.append(usr)
1742 1762
1743 1763 # sort the perm rows: the 'default' user first, then by admin, write,
1744 1764 # read, none permission, and alphabetically within each of those
1745 1765 # groups
1746 1766 perm_rows = sorted(perm_rows, key=display_sort)
1747 1767
1748 1768 _admin_perm = 'repository.admin'
1749 1769 owner_row = []
1750 1770 if with_owner:
1751 1771 usr = AttributeDict(self.user.get_dict())
1752 1772 usr.owner_row = True
1753 1773 usr.permission = _admin_perm
1754 1774 owner_row.append(usr)
1755 1775
1756 1776 super_admin_rows = []
1757 1777 if with_admins:
1758 1778 for usr in User.get_all_super_admins():
1759 1779 # if this admin is also owner, don't double the record
1760 1780 if usr.user_id == owner_row[0].user_id:
1761 1781 owner_row[0].admin_row = True
1762 1782 else:
1763 1783 usr = AttributeDict(usr.get_dict())
1764 1784 usr.admin_row = True
1765 1785 usr.permission = _admin_perm
1766 1786 super_admin_rows.append(usr)
1767 1787
1768 1788 return super_admin_rows + owner_row + perm_rows
1769 1789
1770 1790 def permission_user_groups(self):
1771 1791 q = UserGroupRepoToPerm.query().filter(
1772 1792 UserGroupRepoToPerm.repository == self)
1773 1793 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1774 1794 joinedload(UserGroupRepoToPerm.users_group),
1775 1795 joinedload(UserGroupRepoToPerm.permission),)
1776 1796
1777 1797 perm_rows = []
1778 1798 for _user_group in q.all():
1779 1799 usr = AttributeDict(_user_group.users_group.get_dict())
1780 1800 usr.permission = _user_group.permission.permission_name
1781 1801 perm_rows.append(usr)
1782 1802
1783 1803 return perm_rows
1784 1804
1785 1805 def get_api_data(self, include_secrets=False):
1786 1806 """
1787 1807 Common function for generating repo api data
1788 1808
1789 1809 :param include_secrets: See :meth:`User.get_api_data`.
1790 1810
1791 1811 """
1792 1812 # TODO: mikhail: Here there is an anti-pattern, we probably need to
1793 1813 # move these methods to the model level.
1794 1814 from rhodecode.model.settings import SettingsModel
1795 1815 from rhodecode.model.repo import RepoModel
1796 1816
1797 1817 repo = self
1798 1818 _user_id, _time, _reason = self.locked
1799 1819
1800 1820 data = {
1801 1821 'repo_id': repo.repo_id,
1802 1822 'repo_name': repo.repo_name,
1803 1823 'repo_type': repo.repo_type,
1804 1824 'clone_uri': repo.clone_uri or '',
1805 1825 'url': RepoModel().get_url(self),
1806 1826 'private': repo.private,
1807 1827 'created_on': repo.created_on,
1808 'description': repo.description,
1828 'description': repo.description_safe,
1809 1829 'landing_rev': repo.landing_rev,
1810 1830 'owner': repo.user.username,
1811 1831 'fork_of': repo.fork.repo_name if repo.fork else None,
1812 1832 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1813 1833 'enable_statistics': repo.enable_statistics,
1814 1834 'enable_locking': repo.enable_locking,
1815 1835 'enable_downloads': repo.enable_downloads,
1816 1836 'last_changeset': repo.changeset_cache,
1817 1837 'locked_by': User.get(_user_id).get_api_data(
1818 1838 include_secrets=include_secrets) if _user_id else None,
1819 1839 'locked_date': time_to_datetime(_time) if _time else None,
1820 1840 'lock_reason': _reason if _reason else None,
1821 1841 }
1822 1842
1823 1843 # TODO: mikhail: should be per-repo settings here
1824 1844 rc_config = SettingsModel().get_all_settings()
1825 1845 repository_fields = str2bool(
1826 1846 rc_config.get('rhodecode_repository_fields'))
1827 1847 if repository_fields:
1828 1848 for f in self.extra_fields:
1829 1849 data[f.field_key_prefixed] = f.field_value
1830 1850
1831 1851 return data
1832 1852
1833 1853 @classmethod
1834 1854 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1835 1855 if not lock_time:
1836 1856 lock_time = time.time()
1837 1857 if not lock_reason:
1838 1858 lock_reason = cls.LOCK_AUTOMATIC
1839 1859 repo.locked = [user_id, lock_time, lock_reason]
1840 1860 Session().add(repo)
1841 1861 Session().commit()
1842 1862
1843 1863 @classmethod
1844 1864 def unlock(cls, repo):
1845 1865 repo.locked = None
1846 1866 Session().add(repo)
1847 1867 Session().commit()
1848 1868
1849 1869 @classmethod
1850 1870 def getlock(cls, repo):
1851 1871 return repo.locked
1852 1872
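# Illustrative usage sketch of the locking helpers (hypothetical repo/user ids):
#
#   >>> repo = Repository.get_by_repo_name('team/tools/my-repo')
#   >>> Repository.lock(repo, user_id=2, lock_reason=Repository.LOCK_API)
#   >>> Repository.getlock(repo)
#   (2, '1484147633.4', 'lock_api')   # (user_id, lock time, reason)
#   >>> Repository.unlock(repo)       # clears the lock and commits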
1853 1873 def is_user_lock(self, user_id):
1854 1874 if self.lock[0]:
1855 1875 lock_user_id = safe_int(self.lock[0])
1856 1876 user_id = safe_int(user_id)
1857 1877 # both are ints, and they are equal
1858 1878 return all([lock_user_id, user_id]) and lock_user_id == user_id
1859 1879
1860 1880 return False
1861 1881
1862 1882 def get_locking_state(self, action, user_id, only_when_enabled=True):
1863 1883 """
1864 1884 Checks locking on this repository. If locking is enabled and a lock is
1865 1885 present, returns a tuple of (make_lock, locked, locked_by).
1866 1886 make_lock can have 3 states: None (do nothing), True (make a lock) and
1867 1887 False (release the lock). This value is later propagated to the hooks,
1868 1888 which do the actual locking; think of it as a signal telling the hooks what to do.
1869 1889
1870 1890 """
1871 1891 # TODO: johbo: This is part of the business logic and should be moved
1872 1892 # into the RepositoryModel.
1873 1893
1874 1894 if action not in ('push', 'pull'):
1875 1895 raise ValueError("Invalid action value: %s" % repr(action))
1876 1896
1877 1897 # defines if locked error should be thrown to user
1878 1898 currently_locked = False
1879 1899 # defines if new lock should be made, tri-state
1880 1900 make_lock = None
1881 1901 repo = self
1882 1902 user = User.get(user_id)
1883 1903
1884 1904 lock_info = repo.locked
1885 1905
1886 1906 if repo and (repo.enable_locking or not only_when_enabled):
1887 1907 if action == 'push':
1888 1908 # check if it's already locked; if it is, compare users
1889 1909 locked_by_user_id = lock_info[0]
1890 1910 if user.user_id == locked_by_user_id:
1891 1911 log.debug(
1892 1912 'Got `push` action from user %s, now unlocking', user)
1893 1913 # unlock if we have push from user who locked
1894 1914 make_lock = False
1895 1915 else:
1896 1916 # we're not the same user who locked the repo, reject with the
1897 1917 # code defined in settings (default is HTTP 423 Locked)
1898 1918 log.debug('Repo %s is currently locked by %s', repo, user)
1899 1919 currently_locked = True
1900 1920 elif action == 'pull':
1901 1921 # [0] user [1] date
1902 1922 if lock_info[0] and lock_info[1]:
1903 1923 log.debug('Repo %s is currently locked by %s', repo, user)
1904 1924 currently_locked = True
1905 1925 else:
1906 1926 log.debug('Setting lock on repo %s by %s', repo, user)
1907 1927 make_lock = True
1908 1928
1909 1929 else:
1910 1930 log.debug('Repository %s does not have locking enabled', repo)
1911 1931
1912 1932 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1913 1933 make_lock, currently_locked, lock_info)
1914 1934
1915 1935 from rhodecode.lib.auth import HasRepoPermissionAny
1916 1936 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1917 1937 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1918 1938 # if we don't have at least write permission we cannot make a lock
1919 1939 log.debug('lock state reset back to FALSE due to lack '
1920 1940 'of at least write permission')
1921 1941 make_lock = False
1922 1942
1923 1943 return make_lock, currently_locked, lock_info
1924 1944
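# Illustrative sketch of the tri-state result, assuming enable_locking is on
# and user 2 currently holds the lock (hypothetical values):
#
#   >>> repo.get_locking_state('push', user_id=2)
#   (False, False, (2, '1484147633.4', 'lock_api'))   # lock owner pushes -> release
#   >>> repo.get_locking_state('pull', user_id=3)
#   (None, True, (2, '1484147633.4', 'lock_api'))     # locked by someone else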
1925 1945 @property
1926 1946 def last_db_change(self):
1927 1947 return self.updated_on
1928 1948
1929 1949 @property
1930 1950 def clone_uri_hidden(self):
1931 1951 clone_uri = self.clone_uri
1932 1952 if clone_uri:
1933 1953 import urlobject
1934 1954 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
1935 1955 if url_obj.password:
1936 1956 clone_uri = url_obj.with_password('*****')
1937 1957 return clone_uri
1938 1958
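# Illustrative sketch (hypothetical clone URI with embedded credentials):
#
#   >>> repo.clone_uri
#   'https://bob:s3cret@code.example.com/upstream/my-repo'
#   >>> repo.clone_uri_hidden
#   'https://bob:*****@code.example.com/upstream/my-repo'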
1939 1959 def clone_url(self, **override):
1940 1960
1941 1961 uri_tmpl = None
1942 1962 if 'with_id' in override:
1943 1963 uri_tmpl = self.DEFAULT_CLONE_URI_ID
1944 1964 del override['with_id']
1945 1965
1946 1966 if 'uri_tmpl' in override:
1947 1967 uri_tmpl = override['uri_tmpl']
1948 1968 del override['uri_tmpl']
1949 1969
1950 1970 # we didn't override our tmpl from **overrides
1951 1971 if not uri_tmpl:
1952 1972 uri_tmpl = self.DEFAULT_CLONE_URI
1953 1973 try:
1954 1974 from pylons import tmpl_context as c
1955 1975 uri_tmpl = c.clone_uri_tmpl
1956 1976 except Exception:
1957 1977 # in any case if we call this outside of request context,
1958 1978 # ie, not having tmpl_context set up
1959 1979 pass
1960 1980
1961 1981 request = get_current_request()
1962 1982 return get_clone_url(request=request,
1963 1983 uri_tmpl=uri_tmpl,
1964 1984 repo_name=self.repo_name,
1965 1985 repo_id=self.repo_id, **override)
1966 1986
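# Illustrative sketch of template based clone URLs (hypothetical host/user),
# rendered from DEFAULT_CLONE_URI '{scheme}://{user}@{netloc}/{repo}':
#
#   >>> repo.clone_url()
#   'https://bob@code.example.com/team/tools/my-repo'
#   >>> repo.clone_url(with_id=True)          # uses DEFAULT_CLONE_URI_ID
#   'https://bob@code.example.com/_42'
#
# A custom template can be passed via the uri_tmpl override handled above.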
1967 1987 def set_state(self, state):
1968 1988 self.repo_state = state
1969 1989 Session().add(self)
1970 1990 #==========================================================================
1971 1991 # SCM PROPERTIES
1972 1992 #==========================================================================
1973 1993
1974 1994 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
1975 1995 return get_commit_safe(
1976 1996 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1977 1997
1978 1998 def get_changeset(self, rev=None, pre_load=None):
1979 1999 warnings.warn("Use get_commit", DeprecationWarning)
1980 2000 commit_id = None
1981 2001 commit_idx = None
1982 2002 if isinstance(rev, basestring):
1983 2003 commit_id = rev
1984 2004 else:
1985 2005 commit_idx = rev
1986 2006 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
1987 2007 pre_load=pre_load)
1988 2008
1989 2009 def get_landing_commit(self):
1990 2010 """
1991 2011 Returns landing commit, or if that doesn't exist returns the tip
1992 2012 """
1993 2013 _rev_type, _rev = self.landing_rev
1994 2014 commit = self.get_commit(_rev)
1995 2015 if isinstance(commit, EmptyCommit):
1996 2016 return self.get_commit()
1997 2017 return commit
1998 2018
1999 2019 def update_commit_cache(self, cs_cache=None, config=None):
2000 2020 """
2001 2021 Update cache of last changeset for repository, keys should be::
2002 2022
2003 2023 short_id
2004 2024 raw_id
2005 2025 revision
2006 2026 parents
2007 2027 message
2008 2028 date
2009 2029 author
2010 2030
2011 2031 :param cs_cache:
2012 2032 """
2013 2033 from rhodecode.lib.vcs.backends.base import BaseChangeset
2014 2034 if cs_cache is None:
2015 2035 # use no-cache version here
2016 2036 scm_repo = self.scm_instance(cache=False, config=config)
2017 2037 if scm_repo:
2018 2038 cs_cache = scm_repo.get_commit(
2019 2039 pre_load=["author", "date", "message", "parents"])
2020 2040 else:
2021 2041 cs_cache = EmptyCommit()
2022 2042
2023 2043 if isinstance(cs_cache, BaseChangeset):
2024 2044 cs_cache = cs_cache.__json__()
2025 2045
2026 2046 def is_outdated(new_cs_cache):
2027 2047 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2028 2048 new_cs_cache['revision'] != self.changeset_cache['revision']):
2029 2049 return True
2030 2050 return False
2031 2051
2032 2052 # check if we maybe already have the latest cached revision
2033 2053 if is_outdated(cs_cache) or not self.changeset_cache:
2034 2054 _default = datetime.datetime.fromtimestamp(0)
2035 2055 last_change = cs_cache.get('date') or _default
2036 2056 log.debug('updated repo %s with new cs cache %s',
2037 2057 self.repo_name, cs_cache)
2038 2058 self.updated_on = last_change
2039 2059 self.changeset_cache = cs_cache
2040 2060 Session().add(self)
2041 2061 Session().commit()
2042 2062 else:
2043 2063 log.debug('Skipping update_commit_cache for repo:`%s` '
2044 2064 'commit already with latest changes', self.repo_name)
2045 2065
2046 2066 @property
2047 2067 def tip(self):
2048 2068 return self.get_commit('tip')
2049 2069
2050 2070 @property
2051 2071 def author(self):
2052 2072 return self.tip.author
2053 2073
2054 2074 @property
2055 2075 def last_change(self):
2056 2076 return self.scm_instance().last_change
2057 2077
2058 2078 def get_comments(self, revisions=None):
2059 2079 """
2060 2080 Returns comments for this repository grouped by revisions
2061 2081
2062 2082 :param revisions: filter query by revisions only
2063 2083 """
2064 2084 cmts = ChangesetComment.query()\
2065 2085 .filter(ChangesetComment.repo == self)
2066 2086 if revisions:
2067 2087 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2068 2088 grouped = collections.defaultdict(list)
2069 2089 for cmt in cmts.all():
2070 2090 grouped[cmt.revision].append(cmt)
2071 2091 return grouped
2072 2092
2073 2093 def statuses(self, revisions=None):
2074 2094 """
2075 2095 Returns statuses for this repository
2076 2096
2077 2097 :param revisions: list of revisions to get statuses for
2078 2098 """
2079 2099 statuses = ChangesetStatus.query()\
2080 2100 .filter(ChangesetStatus.repo == self)\
2081 2101 .filter(ChangesetStatus.version == 0)
2082 2102
2083 2103 if revisions:
2084 2104 # Try doing the filtering in chunks to avoid hitting limits
2085 2105 size = 500
2086 2106 status_results = []
2087 2107 for chunk in xrange(0, len(revisions), size):
2088 2108 status_results += statuses.filter(
2089 2109 ChangesetStatus.revision.in_(
2090 2110 revisions[chunk: chunk+size])
2091 2111 ).all()
2092 2112 else:
2093 2113 status_results = statuses.all()
2094 2114
2095 2115 grouped = {}
2096 2116
2097 2117 # maybe we have an open pull request without a status yet?
2098 2118 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2099 2119 status_lbl = ChangesetStatus.get_status_lbl(stat)
2100 2120 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2101 2121 for rev in pr.revisions:
2102 2122 pr_id = pr.pull_request_id
2103 2123 pr_repo = pr.target_repo.repo_name
2104 2124 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2105 2125
2106 2126 for stat in status_results:
2107 2127 pr_id = pr_repo = None
2108 2128 if stat.pull_request:
2109 2129 pr_id = stat.pull_request.pull_request_id
2110 2130 pr_repo = stat.pull_request.target_repo.repo_name
2111 2131 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2112 2132 pr_id, pr_repo]
2113 2133 return grouped
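# Illustrative note on the chunked IN() filtering above: a hypothetical list
# of 1200 revision ids is queried as three IN() clauses of 500, 500 and 200
# items, keeping each statement below typical database parameter limits:
#
#   >>> [len(revs[c:c + 500]) for c in xrange(0, 1200, 500)]
#   [500, 500, 200]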
2114 2134
2115 2135 # ==========================================================================
2116 2136 # SCM CACHE INSTANCE
2117 2137 # ==========================================================================
2118 2138
2119 2139 def scm_instance(self, **kwargs):
2120 2140 import rhodecode
2121 2141
2122 2142 # Passing a config will not hit the cache; currently this is only used
2123 2143 # for repo2dbmapper
2124 2144 config = kwargs.pop('config', None)
2125 2145 cache = kwargs.pop('cache', None)
2126 2146 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2127 2147 # if cache is NOT defined, use the global default; otherwise we have full
2128 2148 # control over the cache behaviour
2129 2149 if cache is None and full_cache and not config:
2130 2150 return self._get_instance_cached()
2131 2151 return self._get_instance(cache=bool(cache), config=config)
2132 2152
2133 2153 def _get_instance_cached(self):
2134 2154 @cache_region('long_term')
2135 2155 def _get_repo(cache_key):
2136 2156 return self._get_instance()
2137 2157
2138 2158 invalidator_context = CacheKey.repo_context_cache(
2139 2159 _get_repo, self.repo_name, None, thread_scoped=True)
2140 2160
2141 2161 with invalidator_context as context:
2142 2162 context.invalidate()
2143 2163 repo = context.compute()
2144 2164
2145 2165 return repo
2146 2166
2147 2167 def _get_instance(self, cache=True, config=None):
2148 2168 config = config or self._config
2149 2169 custom_wire = {
2150 2170 'cache': cache # controls the vcs.remote cache
2151 2171 }
2152 2172 repo = get_vcs_instance(
2153 2173 repo_path=safe_str(self.repo_full_path),
2154 2174 config=config,
2155 2175 with_wire=custom_wire,
2156 2176 create=False,
2157 2177 _vcs_alias=self.repo_type)
2158 2178
2159 2179 return repo
2160 2180
2161 2181 def __json__(self):
2162 2182 return {'landing_rev': self.landing_rev}
2163 2183
2164 2184 def get_dict(self):
2165 2185
2166 2186 # Since we transformed `repo_name` to a hybrid property, we need to
2167 2187 # keep compatibility with the code which uses `repo_name` field.
2168 2188
2169 2189 result = super(Repository, self).get_dict()
2170 2190 result['repo_name'] = result.pop('_repo_name', None)
2171 2191 return result
2172 2192
2173 2193
2174 2194 class RepoGroup(Base, BaseModel):
2175 2195 __tablename__ = 'groups'
2176 2196 __table_args__ = (
2177 2197 UniqueConstraint('group_name', 'group_parent_id'),
2178 2198 CheckConstraint('group_id != group_parent_id'),
2179 2199 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2180 2200 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2181 2201 )
2182 2202 __mapper_args__ = {'order_by': 'group_name'}
2183 2203
2184 2204 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2185 2205
2186 2206 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2187 2207 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2188 2208 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2189 2209 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2190 2210 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2191 2211 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2192 2212 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2193 2213 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2194 2214
2195 2215 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2196 2216 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2197 2217 parent_group = relationship('RepoGroup', remote_side=group_id)
2198 2218 user = relationship('User')
2199 2219 integrations = relationship('Integration',
2200 2220 cascade="all, delete, delete-orphan")
2201 2221
2202 2222 def __init__(self, group_name='', parent_group=None):
2203 2223 self.group_name = group_name
2204 2224 self.parent_group = parent_group
2205 2225
2206 2226 def __unicode__(self):
2207 return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
2208 self.group_name)
2227 return u"<%s('id:%s:%s')>" % (
2228 self.__class__.__name__, self.group_id, self.group_name)
2229
2230 @hybrid_property
2231 def description_safe(self):
2232 from rhodecode.lib import helpers as h
2233 return h.escape(self.group_description)
2209 2234
2210 2235 @classmethod
2211 2236 def _generate_choice(cls, repo_group):
2212 2237 from webhelpers.html import literal as _literal
2213 2238 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2214 2239 return repo_group.group_id, _name(repo_group.full_path_splitted)
2215 2240
2216 2241 @classmethod
2217 2242 def groups_choices(cls, groups=None, show_empty_group=True):
2218 2243 if not groups:
2219 2244 groups = cls.query().all()
2220 2245
2221 2246 repo_groups = []
2222 2247 if show_empty_group:
2223 2248 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2224 2249
2225 2250 repo_groups.extend([cls._generate_choice(x) for x in groups])
2226 2251
2227 2252 repo_groups = sorted(
2228 2253 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2229 2254 return repo_groups
2230 2255
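# Illustrative sketch of the generated select2 choices (hypothetical groups):
#
#   >>> RepoGroup.groups_choices()
#   [(-1, u'-- No parent --'), (7, u'team'), (12, u'team/tools')]
#
# Each tuple is (group_id, full path joined with CHOICES_SEPARATOR); names are
# rendered through webhelpers' literal().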
2231 2256 @classmethod
2232 2257 def url_sep(cls):
2233 2258 return URL_SEP
2234 2259
2235 2260 @classmethod
2236 2261 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2237 2262 if case_insensitive:
2238 2263 gr = cls.query().filter(func.lower(cls.group_name)
2239 2264 == func.lower(group_name))
2240 2265 else:
2241 2266 gr = cls.query().filter(cls.group_name == group_name)
2242 2267 if cache:
2243 2268 name_key = _hash_key(group_name)
2244 2269 gr = gr.options(
2245 2270 FromCache("sql_cache_short", "get_group_%s" % name_key))
2246 2271 return gr.scalar()
2247 2272
2248 2273 @classmethod
2249 2274 def get_user_personal_repo_group(cls, user_id):
2250 2275 user = User.get(user_id)
2251 2276 if user.username == User.DEFAULT_USER:
2252 2277 return None
2253 2278
2254 2279 return cls.query()\
2255 2280 .filter(cls.personal == true()) \
2256 2281 .filter(cls.user == user).scalar()
2257 2282
2258 2283 @classmethod
2259 2284 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2260 2285 case_insensitive=True):
2261 2286 q = RepoGroup.query()
2262 2287
2263 2288 if not isinstance(user_id, Optional):
2264 2289 q = q.filter(RepoGroup.user_id == user_id)
2265 2290
2266 2291 if not isinstance(group_id, Optional):
2267 2292 q = q.filter(RepoGroup.group_parent_id == group_id)
2268 2293
2269 2294 if case_insensitive:
2270 2295 q = q.order_by(func.lower(RepoGroup.group_name))
2271 2296 else:
2272 2297 q = q.order_by(RepoGroup.group_name)
2273 2298 return q.all()
2274 2299
2275 2300 @property
2276 2301 def parents(self):
2277 2302 parents_recursion_limit = 10
2278 2303 groups = []
2279 2304 if self.parent_group is None:
2280 2305 return groups
2281 2306 cur_gr = self.parent_group
2282 2307 groups.insert(0, cur_gr)
2283 2308 cnt = 0
2284 2309 while 1:
2285 2310 cnt += 1
2286 2311 gr = getattr(cur_gr, 'parent_group', None)
2287 2312 cur_gr = cur_gr.parent_group
2288 2313 if gr is None:
2289 2314 break
2290 2315 if cnt == parents_recursion_limit:
2291 2316 # this will prevent accidental infinite loops
2292 2317 log.error(('more than %s parents found for group %s, stopping '
2293 2318 'recursive parent fetching' % (parents_recursion_limit, self)))
2294 2319 break
2295 2320
2296 2321 groups.insert(0, gr)
2297 2322 return groups
2298 2323
2299 2324 @property
2300 2325 def children(self):
2301 2326 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2302 2327
2303 2328 @property
2304 2329 def name(self):
2305 2330 return self.group_name.split(RepoGroup.url_sep())[-1]
2306 2331
2307 2332 @property
2308 2333 def full_path(self):
2309 2334 return self.group_name
2310 2335
2311 2336 @property
2312 2337 def full_path_splitted(self):
2313 2338 return self.group_name.split(RepoGroup.url_sep())
2314 2339
2315 2340 @property
2316 2341 def repositories(self):
2317 2342 return Repository.query()\
2318 2343 .filter(Repository.group == self)\
2319 2344 .order_by(Repository.repo_name)
2320 2345
2321 2346 @property
2322 2347 def repositories_recursive_count(self):
2323 2348 cnt = self.repositories.count()
2324 2349
2325 2350 def children_count(group):
2326 2351 cnt = 0
2327 2352 for child in group.children:
2328 2353 cnt += child.repositories.count()
2329 2354 cnt += children_count(child)
2330 2355 return cnt
2331 2356
2332 2357 return cnt + children_count(self)
2333 2358
2334 2359 def _recursive_objects(self, include_repos=True):
2335 2360 all_ = []
2336 2361
2337 2362 def _get_members(root_gr):
2338 2363 if include_repos:
2339 2364 for r in root_gr.repositories:
2340 2365 all_.append(r)
2341 2366 childs = root_gr.children.all()
2342 2367 if childs:
2343 2368 for gr in childs:
2344 2369 all_.append(gr)
2345 2370 _get_members(gr)
2346 2371
2347 2372 _get_members(self)
2348 2373 return [self] + all_
2349 2374
2350 2375 def recursive_groups_and_repos(self):
2351 2376 """
2352 2377 Recursively returns all groups, with the repositories in those groups
2353 2378 """
2354 2379 return self._recursive_objects()
2355 2380
2356 2381 def recursive_groups(self):
2357 2382 """
2358 2383 Returns all children groups for this group including children of children
2359 2384 """
2360 2385 return self._recursive_objects(include_repos=False)
2361 2386
2362 2387 def get_new_name(self, group_name):
2363 2388 """
2364 2389 returns new full group name based on parent and new name
2365 2390
2366 2391 :param group_name:
2367 2392 """
2368 2393 path_prefix = (self.parent_group.full_path_splitted if
2369 2394 self.parent_group else [])
2370 2395 return RepoGroup.url_sep().join(path_prefix + [group_name])
2371 2396
2372 2397 def permissions(self, with_admins=True, with_owner=True):
2373 2398 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2374 2399 q = q.options(joinedload(UserRepoGroupToPerm.group),
2375 2400 joinedload(UserRepoGroupToPerm.user),
2376 2401 joinedload(UserRepoGroupToPerm.permission),)
2377 2402
2378 2403 # get owners, admins and their permissions. We do a trick of re-writing
2379 2404 # sqlalchemy objects into plain AttributeDict objects, because the sqlalchemy
2380 2405 # session keeps a global reference and changing one object would propagate to
2381 2406 # all others. This means that if an admin is also the owner, setting admin_row
2382 2407 # on one row would otherwise propagate to both objects
2383 2408 perm_rows = []
2384 2409 for _usr in q.all():
2385 2410 usr = AttributeDict(_usr.user.get_dict())
2386 2411 usr.permission = _usr.permission.permission_name
2387 2412 perm_rows.append(usr)
2388 2413
2389 2414 # sort the perm rows: the 'default' user first, then by admin, write,
2390 2415 # read, none permission, and alphabetically within each of those
2391 2416 # groups
2392 2417 perm_rows = sorted(perm_rows, key=display_sort)
2393 2418
2394 2419 _admin_perm = 'group.admin'
2395 2420 owner_row = []
2396 2421 if with_owner:
2397 2422 usr = AttributeDict(self.user.get_dict())
2398 2423 usr.owner_row = True
2399 2424 usr.permission = _admin_perm
2400 2425 owner_row.append(usr)
2401 2426
2402 2427 super_admin_rows = []
2403 2428 if with_admins:
2404 2429 for usr in User.get_all_super_admins():
2405 2430 # if this admin is also owner, don't double the record
2406 2431 if usr.user_id == owner_row[0].user_id:
2407 2432 owner_row[0].admin_row = True
2408 2433 else:
2409 2434 usr = AttributeDict(usr.get_dict())
2410 2435 usr.admin_row = True
2411 2436 usr.permission = _admin_perm
2412 2437 super_admin_rows.append(usr)
2413 2438
2414 2439 return super_admin_rows + owner_row + perm_rows
2415 2440
2416 2441 def permission_user_groups(self):
2417 2442 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2418 2443 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2419 2444 joinedload(UserGroupRepoGroupToPerm.users_group),
2420 2445 joinedload(UserGroupRepoGroupToPerm.permission),)
2421 2446
2422 2447 perm_rows = []
2423 2448 for _user_group in q.all():
2424 2449 usr = AttributeDict(_user_group.users_group.get_dict())
2425 2450 usr.permission = _user_group.permission.permission_name
2426 2451 perm_rows.append(usr)
2427 2452
2428 2453 return perm_rows
2429 2454
2430 2455 def get_api_data(self):
2431 2456 """
2432 2457 Common function for generating api data
2433 2458
2434 2459 """
2435 2460 group = self
2436 2461 data = {
2437 2462 'group_id': group.group_id,
2438 2463 'group_name': group.group_name,
2439 'group_description': group.group_description,
2464 'group_description': group.description_safe,
2440 2465 'parent_group': group.parent_group.group_name if group.parent_group else None,
2441 2466 'repositories': [x.repo_name for x in group.repositories],
2442 2467 'owner': group.user.username,
2443 2468 }
2444 2469 return data
2445 2470
2446 2471
2447 2472 class Permission(Base, BaseModel):
2448 2473 __tablename__ = 'permissions'
2449 2474 __table_args__ = (
2450 2475 Index('p_perm_name_idx', 'permission_name'),
2451 2476 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2452 2477 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2453 2478 )
2454 2479 PERMS = [
2455 2480 ('hg.admin', _('RhodeCode Super Administrator')),
2456 2481
2457 2482 ('repository.none', _('Repository no access')),
2458 2483 ('repository.read', _('Repository read access')),
2459 2484 ('repository.write', _('Repository write access')),
2460 2485 ('repository.admin', _('Repository admin access')),
2461 2486
2462 2487 ('group.none', _('Repository group no access')),
2463 2488 ('group.read', _('Repository group read access')),
2464 2489 ('group.write', _('Repository group write access')),
2465 2490 ('group.admin', _('Repository group admin access')),
2466 2491
2467 2492 ('usergroup.none', _('User group no access')),
2468 2493 ('usergroup.read', _('User group read access')),
2469 2494 ('usergroup.write', _('User group write access')),
2470 2495 ('usergroup.admin', _('User group admin access')),
2471 2496
2472 2497 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2473 2498 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2474 2499
2475 2500 ('hg.usergroup.create.false', _('User Group creation disabled')),
2476 2501 ('hg.usergroup.create.true', _('User Group creation enabled')),
2477 2502
2478 2503 ('hg.create.none', _('Repository creation disabled')),
2479 2504 ('hg.create.repository', _('Repository creation enabled')),
2480 2505 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2481 2506 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2482 2507
2483 2508 ('hg.fork.none', _('Repository forking disabled')),
2484 2509 ('hg.fork.repository', _('Repository forking enabled')),
2485 2510
2486 2511 ('hg.register.none', _('Registration disabled')),
2487 2512 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2488 2513 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2489 2514
2490 2515 ('hg.password_reset.enabled', _('Password reset enabled')),
2491 2516 ('hg.password_reset.hidden', _('Password reset hidden')),
2492 2517 ('hg.password_reset.disabled', _('Password reset disabled')),
2493 2518
2494 2519 ('hg.extern_activate.manual', _('Manual activation of external account')),
2495 2520 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2496 2521
2497 2522 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2498 2523 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2499 2524 ]
2500 2525
2501 2526 # definition of system default permissions for DEFAULT user
2502 2527 DEFAULT_USER_PERMISSIONS = [
2503 2528 'repository.read',
2504 2529 'group.read',
2505 2530 'usergroup.read',
2506 2531 'hg.create.repository',
2507 2532 'hg.repogroup.create.false',
2508 2533 'hg.usergroup.create.false',
2509 2534 'hg.create.write_on_repogroup.true',
2510 2535 'hg.fork.repository',
2511 2536 'hg.register.manual_activate',
2512 2537 'hg.password_reset.enabled',
2513 2538 'hg.extern_activate.auto',
2514 2539 'hg.inherit_default_perms.true',
2515 2540 ]
2516 2541
2517 2542 # Weight defines which permissions are more important;
2518 2543 # the higher the number, the more important the permission.
2520 2545 PERM_WEIGHTS = {
2521 2546 'repository.none': 0,
2522 2547 'repository.read': 1,
2523 2548 'repository.write': 3,
2524 2549 'repository.admin': 4,
2525 2550
2526 2551 'group.none': 0,
2527 2552 'group.read': 1,
2528 2553 'group.write': 3,
2529 2554 'group.admin': 4,
2530 2555
2531 2556 'usergroup.none': 0,
2532 2557 'usergroup.read': 1,
2533 2558 'usergroup.write': 3,
2534 2559 'usergroup.admin': 4,
2535 2560
2536 2561 'hg.repogroup.create.false': 0,
2537 2562 'hg.repogroup.create.true': 1,
2538 2563
2539 2564 'hg.usergroup.create.false': 0,
2540 2565 'hg.usergroup.create.true': 1,
2541 2566
2542 2567 'hg.fork.none': 0,
2543 2568 'hg.fork.repository': 1,
2544 2569 'hg.create.none': 0,
2545 2570 'hg.create.repository': 1
2546 2571 }
2547 2572
2548 2573 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2549 2574 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2550 2575 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2551 2576
2552 2577 def __unicode__(self):
2553 2578 return u"<%s('%s:%s')>" % (
2554 2579 self.__class__.__name__, self.permission_id, self.permission_name
2555 2580 )
2556 2581
2557 2582 @classmethod
2558 2583 def get_by_key(cls, key):
2559 2584 return cls.query().filter(cls.permission_name == key).scalar()
2560 2585
2561 2586 @classmethod
2562 2587 def get_default_repo_perms(cls, user_id, repo_id=None):
2563 2588 q = Session().query(UserRepoToPerm, Repository, Permission)\
2564 2589 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2565 2590 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2566 2591 .filter(UserRepoToPerm.user_id == user_id)
2567 2592 if repo_id:
2568 2593 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2569 2594 return q.all()
2570 2595
2571 2596 @classmethod
2572 2597 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2573 2598 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2574 2599 .join(
2575 2600 Permission,
2576 2601 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2577 2602 .join(
2578 2603 Repository,
2579 2604 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2580 2605 .join(
2581 2606 UserGroup,
2582 2607 UserGroupRepoToPerm.users_group_id ==
2583 2608 UserGroup.users_group_id)\
2584 2609 .join(
2585 2610 UserGroupMember,
2586 2611 UserGroupRepoToPerm.users_group_id ==
2587 2612 UserGroupMember.users_group_id)\
2588 2613 .filter(
2589 2614 UserGroupMember.user_id == user_id,
2590 2615 UserGroup.users_group_active == true())
2591 2616 if repo_id:
2592 2617 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2593 2618 return q.all()
2594 2619
2595 2620 @classmethod
2596 2621 def get_default_group_perms(cls, user_id, repo_group_id=None):
2597 2622 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2598 2623 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2599 2624 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2600 2625 .filter(UserRepoGroupToPerm.user_id == user_id)
2601 2626 if repo_group_id:
2602 2627 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2603 2628 return q.all()
2604 2629
2605 2630 @classmethod
2606 2631 def get_default_group_perms_from_user_group(
2607 2632 cls, user_id, repo_group_id=None):
2608 2633 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2609 2634 .join(
2610 2635 Permission,
2611 2636 UserGroupRepoGroupToPerm.permission_id ==
2612 2637 Permission.permission_id)\
2613 2638 .join(
2614 2639 RepoGroup,
2615 2640 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2616 2641 .join(
2617 2642 UserGroup,
2618 2643 UserGroupRepoGroupToPerm.users_group_id ==
2619 2644 UserGroup.users_group_id)\
2620 2645 .join(
2621 2646 UserGroupMember,
2622 2647 UserGroupRepoGroupToPerm.users_group_id ==
2623 2648 UserGroupMember.users_group_id)\
2624 2649 .filter(
2625 2650 UserGroupMember.user_id == user_id,
2626 2651 UserGroup.users_group_active == true())
2627 2652 if repo_group_id:
2628 2653 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2629 2654 return q.all()
2630 2655
2631 2656 @classmethod
2632 2657 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2633 2658 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2634 2659 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2635 2660 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2636 2661 .filter(UserUserGroupToPerm.user_id == user_id)
2637 2662 if user_group_id:
2638 2663 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2639 2664 return q.all()
2640 2665
2641 2666 @classmethod
2642 2667 def get_default_user_group_perms_from_user_group(
2643 2668 cls, user_id, user_group_id=None):
2644 2669 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2645 2670 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2646 2671 .join(
2647 2672 Permission,
2648 2673 UserGroupUserGroupToPerm.permission_id ==
2649 2674 Permission.permission_id)\
2650 2675 .join(
2651 2676 TargetUserGroup,
2652 2677 UserGroupUserGroupToPerm.target_user_group_id ==
2653 2678 TargetUserGroup.users_group_id)\
2654 2679 .join(
2655 2680 UserGroup,
2656 2681 UserGroupUserGroupToPerm.user_group_id ==
2657 2682 UserGroup.users_group_id)\
2658 2683 .join(
2659 2684 UserGroupMember,
2660 2685 UserGroupUserGroupToPerm.user_group_id ==
2661 2686 UserGroupMember.users_group_id)\
2662 2687 .filter(
2663 2688 UserGroupMember.user_id == user_id,
2664 2689 UserGroup.users_group_active == true())
2665 2690 if user_group_id:
2666 2691 q = q.filter(
2667 2692 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2668 2693
2669 2694 return q.all()
2670 2695
2671 2696
2672 2697 class UserRepoToPerm(Base, BaseModel):
2673 2698 __tablename__ = 'repo_to_perm'
2674 2699 __table_args__ = (
2675 2700 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2676 2701 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2677 2702 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2678 2703 )
2679 2704 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2680 2705 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2681 2706 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2682 2707 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2683 2708
2684 2709 user = relationship('User')
2685 2710 repository = relationship('Repository')
2686 2711 permission = relationship('Permission')
2687 2712
2688 2713 @classmethod
2689 2714 def create(cls, user, repository, permission):
2690 2715 n = cls()
2691 2716 n.user = user
2692 2717 n.repository = repository
2693 2718 n.permission = permission
2694 2719 Session().add(n)
2695 2720 return n
2696 2721
2697 2722 def __unicode__(self):
2698 2723 return u'<%s => %s >' % (self.user, self.repository)
2699 2724
2700 2725
2701 2726 class UserUserGroupToPerm(Base, BaseModel):
2702 2727 __tablename__ = 'user_user_group_to_perm'
2703 2728 __table_args__ = (
2704 2729 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2705 2730 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2706 2731 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2707 2732 )
2708 2733 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2709 2734 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2710 2735 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2711 2736 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2712 2737
2713 2738 user = relationship('User')
2714 2739 user_group = relationship('UserGroup')
2715 2740 permission = relationship('Permission')
2716 2741
2717 2742 @classmethod
2718 2743 def create(cls, user, user_group, permission):
2719 2744 n = cls()
2720 2745 n.user = user
2721 2746 n.user_group = user_group
2722 2747 n.permission = permission
2723 2748 Session().add(n)
2724 2749 return n
2725 2750
2726 2751 def __unicode__(self):
2727 2752 return u'<%s => %s >' % (self.user, self.user_group)
2728 2753
2729 2754
2730 2755 class UserToPerm(Base, BaseModel):
2731 2756 __tablename__ = 'user_to_perm'
2732 2757 __table_args__ = (
2733 2758 UniqueConstraint('user_id', 'permission_id'),
2734 2759 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2735 2760 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2736 2761 )
2737 2762 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2738 2763 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2739 2764 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2740 2765
2741 2766 user = relationship('User')
2742 2767 permission = relationship('Permission', lazy='joined')
2743 2768
2744 2769 def __unicode__(self):
2745 2770 return u'<%s => %s >' % (self.user, self.permission)
2746 2771
2747 2772
2748 2773 class UserGroupRepoToPerm(Base, BaseModel):
2749 2774 __tablename__ = 'users_group_repo_to_perm'
2750 2775 __table_args__ = (
2751 2776 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2752 2777 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2753 2778 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2754 2779 )
2755 2780 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2756 2781 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2757 2782 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2758 2783 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2759 2784
2760 2785 users_group = relationship('UserGroup')
2761 2786 permission = relationship('Permission')
2762 2787 repository = relationship('Repository')
2763 2788
2764 2789 @classmethod
2765 2790 def create(cls, users_group, repository, permission):
2766 2791 n = cls()
2767 2792 n.users_group = users_group
2768 2793 n.repository = repository
2769 2794 n.permission = permission
2770 2795 Session().add(n)
2771 2796 return n
2772 2797
2773 2798 def __unicode__(self):
2774 2799 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2775 2800
2776 2801
2777 2802 class UserGroupUserGroupToPerm(Base, BaseModel):
2778 2803 __tablename__ = 'user_group_user_group_to_perm'
2779 2804 __table_args__ = (
2780 2805 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2781 2806 CheckConstraint('target_user_group_id != user_group_id'),
2782 2807 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2783 2808 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2784 2809 )
2785 2810 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2786 2811 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2787 2812 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2788 2813 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2789 2814
2790 2815 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2791 2816 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2792 2817 permission = relationship('Permission')
2793 2818
2794 2819 @classmethod
2795 2820 def create(cls, target_user_group, user_group, permission):
2796 2821 n = cls()
2797 2822 n.target_user_group = target_user_group
2798 2823 n.user_group = user_group
2799 2824 n.permission = permission
2800 2825 Session().add(n)
2801 2826 return n
2802 2827
2803 2828 def __unicode__(self):
2804 2829 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2805 2830
2806 2831
2807 2832 class UserGroupToPerm(Base, BaseModel):
2808 2833 __tablename__ = 'users_group_to_perm'
2809 2834 __table_args__ = (
2810 2835 UniqueConstraint('users_group_id', 'permission_id',),
2811 2836 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2812 2837 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2813 2838 )
2814 2839 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2815 2840 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2816 2841 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2817 2842
2818 2843 users_group = relationship('UserGroup')
2819 2844 permission = relationship('Permission')
2820 2845
2821 2846
2822 2847 class UserRepoGroupToPerm(Base, BaseModel):
2823 2848 __tablename__ = 'user_repo_group_to_perm'
2824 2849 __table_args__ = (
2825 2850 UniqueConstraint('user_id', 'group_id', 'permission_id'),
2826 2851 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2827 2852 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2828 2853 )
2829 2854
2830 2855 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2831 2856 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2832 2857 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2833 2858 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2834 2859
2835 2860 user = relationship('User')
2836 2861 group = relationship('RepoGroup')
2837 2862 permission = relationship('Permission')
2838 2863
2839 2864 @classmethod
2840 2865 def create(cls, user, repository_group, permission):
2841 2866 n = cls()
2842 2867 n.user = user
2843 2868 n.group = repository_group
2844 2869 n.permission = permission
2845 2870 Session().add(n)
2846 2871 return n
2847 2872
2848 2873
2849 2874 class UserGroupRepoGroupToPerm(Base, BaseModel):
2850 2875 __tablename__ = 'users_group_repo_group_to_perm'
2851 2876 __table_args__ = (
2852 2877 UniqueConstraint('users_group_id', 'group_id'),
2853 2878 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2854 2879 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2855 2880 )
2856 2881
2857 2882 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2858 2883 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2859 2884 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2860 2885 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2861 2886
2862 2887 users_group = relationship('UserGroup')
2863 2888 permission = relationship('Permission')
2864 2889 group = relationship('RepoGroup')
2865 2890
2866 2891 @classmethod
2867 2892 def create(cls, user_group, repository_group, permission):
2868 2893 n = cls()
2869 2894 n.users_group = user_group
2870 2895 n.group = repository_group
2871 2896 n.permission = permission
2872 2897 Session().add(n)
2873 2898 return n
2874 2899
2875 2900 def __unicode__(self):
2876 2901 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
2877 2902
2878 2903
2879 2904 class Statistics(Base, BaseModel):
2880 2905 __tablename__ = 'statistics'
2881 2906 __table_args__ = (
2882 2907 UniqueConstraint('repository_id'),
2883 2908 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2884 2909 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2885 2910 )
2886 2911 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2887 2912 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
2888 2913 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
2889 2914     commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
2890 2915     commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
2891 2916     languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
2892 2917
2893 2918 repository = relationship('Repository', single_parent=True)
2894 2919
2895 2920
2896 2921 class UserFollowing(Base, BaseModel):
2897 2922 __tablename__ = 'user_followings'
2898 2923 __table_args__ = (
2899 2924 UniqueConstraint('user_id', 'follows_repository_id'),
2900 2925 UniqueConstraint('user_id', 'follows_user_id'),
2901 2926 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2902 2927 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2903 2928 )
2904 2929
2905 2930 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2906 2931 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2907 2932 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
2908 2933 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
2909 2934 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2910 2935
2911 2936 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
2912 2937
2913 2938 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
2914 2939 follows_repository = relationship('Repository', order_by='Repository.repo_name')
2915 2940
2916 2941 @classmethod
2917 2942 def get_repo_followers(cls, repo_id):
2918 2943 return cls.query().filter(cls.follows_repo_id == repo_id)
2919 2944
2920 2945
2921 2946 class CacheKey(Base, BaseModel):
2922 2947 __tablename__ = 'cache_invalidation'
2923 2948 __table_args__ = (
2924 2949 UniqueConstraint('cache_key'),
2925 2950 Index('key_idx', 'cache_key'),
2926 2951 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2927 2952 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2928 2953 )
2929 2954 CACHE_TYPE_ATOM = 'ATOM'
2930 2955 CACHE_TYPE_RSS = 'RSS'
2931 2956 CACHE_TYPE_README = 'README'
2932 2957
2933 2958 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2934 2959 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
2935 2960 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
2936 2961 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
2937 2962
2938 2963 def __init__(self, cache_key, cache_args=''):
2939 2964 self.cache_key = cache_key
2940 2965 self.cache_args = cache_args
2941 2966 self.cache_active = False
2942 2967
2943 2968 def __unicode__(self):
2944 2969 return u"<%s('%s:%s[%s]')>" % (
2945 2970 self.__class__.__name__,
2946 2971 self.cache_id, self.cache_key, self.cache_active)
2947 2972
2948 2973 def _cache_key_partition(self):
2949 2974 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
2950 2975 return prefix, repo_name, suffix
2951 2976
2952 2977 def get_prefix(self):
2953 2978 """
2954 2979 Try to extract prefix from existing cache key. The key could consist
2955 2980 of prefix, repo_name, suffix
2956 2981 """
2957 2982 # this returns prefix, repo_name, suffix
2958 2983 return self._cache_key_partition()[0]
2959 2984
2960 2985 def get_suffix(self):
2961 2986 """
2962 2987         Get the suffix that might have been used in _get_cache_key to
2963 2988 generate self.cache_key. Only used for informational purposes
2964 2989 in repo_edit.mako.
2965 2990 """
2966 2991 # prefix, repo_name, suffix
2967 2992 return self._cache_key_partition()[2]
2968 2993
2969 2994 @classmethod
2970 2995 def delete_all_cache(cls):
2971 2996 """
2972 2997 Delete all cache keys from database.
2973 2998 Should only be run when all instances are down and all entries
2974 2999 thus stale.
2975 3000 """
2976 3001 cls.query().delete()
2977 3002 Session().commit()
2978 3003
2979 3004 @classmethod
2980 3005 def get_cache_key(cls, repo_name, cache_type):
2981 3006 """
2982 3007
2983 3008         Generate a cache key for this process of the RhodeCode instance.
2984 3009         The prefix will most likely be the process id, or an explicitly
2985 3010         set instance_id from the .ini file.
2986 3011 """
2987 3012 import rhodecode
2988 3013 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
2989 3014
2990 3015 repo_as_unicode = safe_unicode(repo_name)
2991 3016 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
2992 3017 if cache_type else repo_as_unicode
2993 3018
2994 3019 return u'{}{}'.format(prefix, key)
2995 3020
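    # A minimal illustration of the key construction above (the instance_id
    # and repository name are hypothetical; instance_id comes from the .ini
    # file):
    #
    #   # with instance_id = 'host1'
    #   CacheKey.get_cache_key('group/repo', CacheKey.CACHE_TYPE_README)
    #   # -> u'host1group/repo_README'
    #   CacheKey.get_cache_key('group/repo', None)
    #   # -> u'host1group/repo'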
2996 3021 @classmethod
2997 3022 def set_invalidate(cls, repo_name, delete=False):
2998 3023 """
2999 3024 Mark all caches of a repo as invalid in the database.
3000 3025 """
3001 3026
3002 3027 try:
3003 3028 qry = Session().query(cls).filter(cls.cache_args == repo_name)
3004 3029 if delete:
3005 3030 log.debug('cache objects deleted for repo %s',
3006 3031 safe_str(repo_name))
3007 3032 qry.delete()
3008 3033 else:
3009 3034 log.debug('cache objects marked as invalid for repo %s',
3010 3035 safe_str(repo_name))
3011 3036 qry.update({"cache_active": False})
3012 3037
3013 3038 Session().commit()
3014 3039 except Exception:
3015 3040 log.exception(
3016 3041 'Cache key invalidation failed for repository %s',
3017 3042 safe_str(repo_name))
3018 3043 Session().rollback()
3019 3044
3020 3045 @classmethod
3021 3046 def get_active_cache(cls, cache_key):
3022 3047 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3023 3048 if inv_obj:
3024 3049 return inv_obj
3025 3050 return None
3026 3051
3027 3052 @classmethod
3028 3053 def repo_context_cache(cls, compute_func, repo_name, cache_type,
3029 3054 thread_scoped=False):
3030 3055 """
3031 3056 @cache_region('long_term')
3032 3057 def _heavy_calculation(cache_key):
3033 3058 return 'result'
3034 3059
3035 3060 cache_context = CacheKey.repo_context_cache(
3036 3061 _heavy_calculation, repo_name, cache_type)
3037 3062
3038 3063 with cache_context as context:
3039 3064 context.invalidate()
3040 3065 computed = context.compute()
3041 3066
3042 3067 assert computed == 'result'
3043 3068 """
3044 3069 from rhodecode.lib import caches
3045 3070 return caches.InvalidationContext(
3046 3071 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
3047 3072
3048 3073
3049 3074 class ChangesetComment(Base, BaseModel):
3050 3075 __tablename__ = 'changeset_comments'
3051 3076 __table_args__ = (
3052 3077 Index('cc_revision_idx', 'revision'),
3053 3078 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3054 3079 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3055 3080 )
3056 3081
3057 3082 COMMENT_OUTDATED = u'comment_outdated'
3058 3083 COMMENT_TYPE_NOTE = u'note'
3059 3084 COMMENT_TYPE_TODO = u'todo'
3060 3085 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3061 3086
3062 3087 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3063 3088 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3064 3089 revision = Column('revision', String(40), nullable=True)
3065 3090 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3066 3091 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3067 3092 line_no = Column('line_no', Unicode(10), nullable=True)
3068 3093 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3069 3094 f_path = Column('f_path', Unicode(1000), nullable=True)
3070 3095 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3071 3096 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3072 3097 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3073 3098 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3074 3099 renderer = Column('renderer', Unicode(64), nullable=True)
3075 3100 display_state = Column('display_state', Unicode(128), nullable=True)
3076 3101
3077 3102 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3078 3103 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3079 3104 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3080 3105 author = relationship('User', lazy='joined')
3081 3106 repo = relationship('Repository')
3082 3107 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3083 3108 pull_request = relationship('PullRequest', lazy='joined')
3084 3109 pull_request_version = relationship('PullRequestVersion')
3085 3110
3086 3111 @classmethod
3087 3112 def get_users(cls, revision=None, pull_request_id=None):
3088 3113 """
3089 3114         Returns users associated with this ChangesetComment, i.e. those
3090 3115         who actually commented.
3091 3116
3092 3117 :param cls:
3093 3118 :param revision:
3094 3119 """
3095 3120 q = Session().query(User)\
3096 3121 .join(ChangesetComment.author)
3097 3122 if revision:
3098 3123 q = q.filter(cls.revision == revision)
3099 3124 elif pull_request_id:
3100 3125 q = q.filter(cls.pull_request_id == pull_request_id)
3101 3126 return q.all()
3102 3127
3103 3128 @classmethod
3104 3129 def get_index_from_version(cls, pr_version, versions):
3105 3130 num_versions = [x.pull_request_version_id for x in versions]
3106 3131 try:
3107 3132             return num_versions.index(pr_version) + 1
3108 3133 except (IndexError, ValueError):
3109 3134 return
3110 3135
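    # A minimal illustration of the indexing above (ids are hypothetical):
    # if the versions have pull_request_version_id values [10, 11, 12], then
    # get_index_from_version(11, versions) returns 2 (a 1-based position),
    # while an id that is not present returns None.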
3111 3136 @property
3112 3137 def outdated(self):
3113 3138 return self.display_state == self.COMMENT_OUTDATED
3114 3139
3115 3140 def outdated_at_version(self, version):
3116 3141 """
3117 3142 Checks if comment is outdated for given pull request version
3118 3143 """
3119 3144 return self.outdated and self.pull_request_version_id != version
3120 3145
3121 3146 def older_than_version(self, version):
3122 3147 """
3123 3148         Checks if the comment was made on an earlier version than the given one
3124 3149 """
3125 3150 if version is None:
3126 3151 return self.pull_request_version_id is not None
3127 3152
3128 3153 return self.pull_request_version_id < version
3129 3154
3130 3155 @property
3131 3156 def resolved(self):
3132 3157 return self.resolved_by[0] if self.resolved_by else None
3133 3158
3134 3159 @property
3135 3160 def is_todo(self):
3136 3161 return self.comment_type == self.COMMENT_TYPE_TODO
3137 3162
3138 3163 @property
3139 3164 def is_inline(self):
3140 3165 return self.line_no and self.f_path
3141 3166
3142 3167 def get_index_version(self, versions):
3143 3168 return self.get_index_from_version(
3144 3169 self.pull_request_version_id, versions)
3145 3170
3146 3171 def __repr__(self):
3147 3172 if self.comment_id:
3148 3173 return '<DB:Comment #%s>' % self.comment_id
3149 3174 else:
3150 3175 return '<DB:Comment at %#x>' % id(self)
3151 3176
3152 3177 def get_api_data(self):
3153 3178 comment = self
3154 3179 data = {
3155 3180 'comment_id': comment.comment_id,
3156 3181 'comment_type': comment.comment_type,
3157 3182 'comment_text': comment.text,
3158 3183 'comment_status': comment.status_change,
3159 3184 'comment_f_path': comment.f_path,
3160 3185 'comment_lineno': comment.line_no,
3161 3186 'comment_author': comment.author,
3162 3187 'comment_created_on': comment.created_on
3163 3188 }
3164 3189 return data
3165 3190
3166 3191 def __json__(self):
3167 3192 data = dict()
3168 3193 data.update(self.get_api_data())
3169 3194 return data
3170 3195
3171 3196
3172 3197 class ChangesetStatus(Base, BaseModel):
3173 3198 __tablename__ = 'changeset_statuses'
3174 3199 __table_args__ = (
3175 3200 Index('cs_revision_idx', 'revision'),
3176 3201 Index('cs_version_idx', 'version'),
3177 3202 UniqueConstraint('repo_id', 'revision', 'version'),
3178 3203 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3179 3204 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3180 3205 )
3181 3206 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3182 3207 STATUS_APPROVED = 'approved'
3183 3208 STATUS_REJECTED = 'rejected'
3184 3209 STATUS_UNDER_REVIEW = 'under_review'
3185 3210
3186 3211 STATUSES = [
3187 3212 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3188 3213 (STATUS_APPROVED, _("Approved")),
3189 3214 (STATUS_REJECTED, _("Rejected")),
3190 3215 (STATUS_UNDER_REVIEW, _("Under Review")),
3191 3216 ]
3192 3217
3193 3218 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3194 3219 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3195 3220 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3196 3221 revision = Column('revision', String(40), nullable=False)
3197 3222 status = Column('status', String(128), nullable=False, default=DEFAULT)
3198 3223 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3199 3224 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3200 3225 version = Column('version', Integer(), nullable=False, default=0)
3201 3226 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3202 3227
3203 3228 author = relationship('User', lazy='joined')
3204 3229 repo = relationship('Repository')
3205 3230 comment = relationship('ChangesetComment', lazy='joined')
3206 3231 pull_request = relationship('PullRequest', lazy='joined')
3207 3232
3208 3233 def __unicode__(self):
3209 3234 return u"<%s('%s[v%s]:%s')>" % (
3210 3235 self.__class__.__name__,
3211 3236 self.status, self.version, self.author
3212 3237 )
3213 3238
3214 3239 @classmethod
3215 3240 def get_status_lbl(cls, value):
3216 3241 return dict(cls.STATUSES).get(value)
3217 3242
3218 3243 @property
3219 3244 def status_lbl(self):
3220 3245 return ChangesetStatus.get_status_lbl(self.status)
3221 3246
3222 3247 def get_api_data(self):
3223 3248 status = self
3224 3249 data = {
3225 3250 'status_id': status.changeset_status_id,
3226 3251 'status': status.status,
3227 3252 }
3228 3253 return data
3229 3254
3230 3255 def __json__(self):
3231 3256 data = dict()
3232 3257 data.update(self.get_api_data())
3233 3258 return data
3234 3259
3235 3260
3236 3261 class _PullRequestBase(BaseModel):
3237 3262 """
3238 3263 Common attributes of pull request and version entries.
3239 3264 """
3240 3265
3241 3266 # .status values
3242 3267 STATUS_NEW = u'new'
3243 3268 STATUS_OPEN = u'open'
3244 3269 STATUS_CLOSED = u'closed'
3245 3270
3246 3271 title = Column('title', Unicode(255), nullable=True)
3247 3272 description = Column(
3248 3273 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3249 3274 nullable=True)
3250 3275 # new/open/closed status of pull request (not approve/reject/etc)
3251 3276 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3252 3277 created_on = Column(
3253 3278 'created_on', DateTime(timezone=False), nullable=False,
3254 3279 default=datetime.datetime.now)
3255 3280 updated_on = Column(
3256 3281 'updated_on', DateTime(timezone=False), nullable=False,
3257 3282 default=datetime.datetime.now)
3258 3283
3259 3284 @declared_attr
3260 3285 def user_id(cls):
3261 3286 return Column(
3262 3287 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3263 3288 unique=None)
3264 3289
3265 3290 # 500 revisions max
3266 3291 _revisions = Column(
3267 3292 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3268 3293
3269 3294 @declared_attr
3270 3295 def source_repo_id(cls):
3271 3296 # TODO: dan: rename column to source_repo_id
3272 3297 return Column(
3273 3298 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3274 3299 nullable=False)
3275 3300
3276 3301 source_ref = Column('org_ref', Unicode(255), nullable=False)
3277 3302
3278 3303 @declared_attr
3279 3304 def target_repo_id(cls):
3280 3305 # TODO: dan: rename column to target_repo_id
3281 3306 return Column(
3282 3307 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3283 3308 nullable=False)
3284 3309
3285 3310 target_ref = Column('other_ref', Unicode(255), nullable=False)
3286 3311 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3287 3312
3288 3313 # TODO: dan: rename column to last_merge_source_rev
3289 3314 _last_merge_source_rev = Column(
3290 3315 'last_merge_org_rev', String(40), nullable=True)
3291 3316 # TODO: dan: rename column to last_merge_target_rev
3292 3317 _last_merge_target_rev = Column(
3293 3318 'last_merge_other_rev', String(40), nullable=True)
3294 3319 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3295 3320 merge_rev = Column('merge_rev', String(40), nullable=True)
3296 3321
3297 3322 reviewer_data = Column(
3298 3323 'reviewer_data_json', MutationObj.as_mutable(
3299 3324 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3300 3325
3301 3326 @property
3302 3327 def reviewer_data_json(self):
3303 3328 return json.dumps(self.reviewer_data)
3304 3329
3305 3330 @hybrid_property
3331 def description_safe(self):
3332 from rhodecode.lib import helpers as h
3333 return h.escape(self.description)
3334
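    # A minimal sketch of the property above, assuming h.escape() performs
    # standard HTML escaping (the description value is hypothetical):
    #
    #   pull_request.description = u'<b>docs</b> & <script>...</script>'
    #   pull_request.description_safe
    #   # -> roughly u'&lt;b&gt;docs&lt;/b&gt; &amp; &lt;script&gt;...&lt;/script&gt;'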
3335 @hybrid_property
3306 3336 def revisions(self):
3307 3337 return self._revisions.split(':') if self._revisions else []
3308 3338
3309 3339 @revisions.setter
3310 3340 def revisions(self, val):
3311 3341 self._revisions = ':'.join(val)
3312 3342
3313 3343 @declared_attr
3314 3344 def author(cls):
3315 3345 return relationship('User', lazy='joined')
3316 3346
3317 3347 @declared_attr
3318 3348 def source_repo(cls):
3319 3349 return relationship(
3320 3350 'Repository',
3321 3351 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3322 3352
3323 3353 @property
3324 3354 def source_ref_parts(self):
3325 3355 return self.unicode_to_reference(self.source_ref)
3326 3356
3327 3357 @declared_attr
3328 3358 def target_repo(cls):
3329 3359 return relationship(
3330 3360 'Repository',
3331 3361 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3332 3362
3333 3363 @property
3334 3364 def target_ref_parts(self):
3335 3365 return self.unicode_to_reference(self.target_ref)
3336 3366
3337 3367 @property
3338 3368 def shadow_merge_ref(self):
3339 3369 return self.unicode_to_reference(self._shadow_merge_ref)
3340 3370
3341 3371 @shadow_merge_ref.setter
3342 3372 def shadow_merge_ref(self, ref):
3343 3373 self._shadow_merge_ref = self.reference_to_unicode(ref)
3344 3374
3345 3375 def unicode_to_reference(self, raw):
3346 3376 """
3347 3377 Convert a unicode (or string) to a reference object.
3348 3378 If unicode evaluates to False it returns None.
3349 3379 """
3350 3380 if raw:
3351 3381 refs = raw.split(':')
3352 3382 return Reference(*refs)
3353 3383 else:
3354 3384 return None
3355 3385
3356 3386 def reference_to_unicode(self, ref):
3357 3387 """
3358 3388 Convert a reference object to unicode.
3359 3389 If reference is None it returns None.
3360 3390 """
3361 3391 if ref:
3362 3392 return u':'.join(ref)
3363 3393 else:
3364 3394 return None
3365 3395
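    # A minimal sketch of the round trip above, assuming the usual
    # 'type:name:commit_id' encoding (the values are hypothetical):
    #
    #   ref = self.unicode_to_reference(u'branch:default:deadbeefcafe')
    #   # -> Reference(type=u'branch', name=u'default', commit_id=u'deadbeefcafe')
    #   self.reference_to_unicode(ref)
    #   # -> u'branch:default:deadbeefcafe'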
3366 3396 def get_api_data(self, with_merge_state=True):
3367 3397 from rhodecode.model.pull_request import PullRequestModel
3368 3398
3369 3399 pull_request = self
3370 3400 if with_merge_state:
3371 3401 merge_status = PullRequestModel().merge_status(pull_request)
3372 3402 merge_state = {
3373 3403 'status': merge_status[0],
3374 3404 'message': safe_unicode(merge_status[1]),
3375 3405 }
3376 3406 else:
3377 3407 merge_state = {'status': 'not_available',
3378 3408 'message': 'not_available'}
3379 3409
3380 3410 merge_data = {
3381 3411 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3382 3412 'reference': (
3383 3413 pull_request.shadow_merge_ref._asdict()
3384 3414 if pull_request.shadow_merge_ref else None),
3385 3415 }
3386 3416
3387 3417 data = {
3388 3418 'pull_request_id': pull_request.pull_request_id,
3389 3419 'url': PullRequestModel().get_url(pull_request),
3390 3420 'title': pull_request.title,
3391 3421 'description': pull_request.description,
3392 3422 'status': pull_request.status,
3393 3423 'created_on': pull_request.created_on,
3394 3424 'updated_on': pull_request.updated_on,
3395 3425 'commit_ids': pull_request.revisions,
3396 3426 'review_status': pull_request.calculated_review_status(),
3397 3427 'mergeable': merge_state,
3398 3428 'source': {
3399 3429 'clone_url': pull_request.source_repo.clone_url(),
3400 3430 'repository': pull_request.source_repo.repo_name,
3401 3431 'reference': {
3402 3432 'name': pull_request.source_ref_parts.name,
3403 3433 'type': pull_request.source_ref_parts.type,
3404 3434 'commit_id': pull_request.source_ref_parts.commit_id,
3405 3435 },
3406 3436 },
3407 3437 'target': {
3408 3438 'clone_url': pull_request.target_repo.clone_url(),
3409 3439 'repository': pull_request.target_repo.repo_name,
3410 3440 'reference': {
3411 3441 'name': pull_request.target_ref_parts.name,
3412 3442 'type': pull_request.target_ref_parts.type,
3413 3443 'commit_id': pull_request.target_ref_parts.commit_id,
3414 3444 },
3415 3445 },
3416 3446 'merge': merge_data,
3417 3447 'author': pull_request.author.get_api_data(include_secrets=False,
3418 3448 details='basic'),
3419 3449 'reviewers': [
3420 3450 {
3421 3451 'user': reviewer.get_api_data(include_secrets=False,
3422 3452 details='basic'),
3423 3453 'reasons': reasons,
3424 3454 'review_status': st[0][1].status if st else 'not_reviewed',
3425 3455 }
3426 3456 for reviewer, reasons, mandatory, st in
3427 3457 pull_request.reviewers_statuses()
3428 3458 ]
3429 3459 }
3430 3460
3431 3461 return data
3432 3462
3433 3463
3434 3464 class PullRequest(Base, _PullRequestBase):
3435 3465 __tablename__ = 'pull_requests'
3436 3466 __table_args__ = (
3437 3467 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3438 3468 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3439 3469 )
3440 3470
3441 3471 pull_request_id = Column(
3442 3472 'pull_request_id', Integer(), nullable=False, primary_key=True)
3443 3473
3444 3474 def __repr__(self):
3445 3475 if self.pull_request_id:
3446 3476 return '<DB:PullRequest #%s>' % self.pull_request_id
3447 3477 else:
3448 3478 return '<DB:PullRequest at %#x>' % id(self)
3449 3479
3450 3480 reviewers = relationship('PullRequestReviewers',
3451 3481 cascade="all, delete, delete-orphan")
3452 3482 statuses = relationship('ChangesetStatus',
3453 3483 cascade="all, delete, delete-orphan")
3454 3484 comments = relationship('ChangesetComment',
3455 3485 cascade="all, delete, delete-orphan")
3456 3486 versions = relationship('PullRequestVersion',
3457 3487 cascade="all, delete, delete-orphan",
3458 3488 lazy='dynamic')
3459 3489
3460 3490 @classmethod
3461 3491 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3462 3492 internal_methods=None):
3463 3493
3464 3494 class PullRequestDisplay(object):
3465 3495 """
3466 3496             Special object wrapper for showing PullRequest data via Versions.
3467 3497             It mimics the PR object as closely as possible. This is a read-only
3468 3498             object, used just for display.
3469 3499 """
3470 3500
3471 3501 def __init__(self, attrs, internal=None):
3472 3502 self.attrs = attrs
3473 3503 # internal have priority over the given ones via attrs
3474 3504 self.internal = internal or ['versions']
3475 3505
3476 3506 def __getattr__(self, item):
3477 3507 if item in self.internal:
3478 3508 return getattr(self, item)
3479 3509 try:
3480 3510 return self.attrs[item]
3481 3511 except KeyError:
3482 3512 raise AttributeError(
3483 3513 '%s object has no attribute %s' % (self, item))
3484 3514
3485 3515 def __repr__(self):
3486 3516 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3487 3517
3488 3518 def versions(self):
3489 3519 return pull_request_obj.versions.order_by(
3490 3520 PullRequestVersion.pull_request_version_id).all()
3491 3521
3492 3522 def is_closed(self):
3493 3523 return pull_request_obj.is_closed()
3494 3524
3495 3525 @property
3496 3526 def pull_request_version_id(self):
3497 3527 return getattr(pull_request_obj, 'pull_request_version_id', None)
3498 3528
3499 3529 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3500 3530
3501 3531 attrs.author = StrictAttributeDict(
3502 3532 pull_request_obj.author.get_api_data())
3503 3533 if pull_request_obj.target_repo:
3504 3534 attrs.target_repo = StrictAttributeDict(
3505 3535 pull_request_obj.target_repo.get_api_data())
3506 3536 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3507 3537
3508 3538 if pull_request_obj.source_repo:
3509 3539 attrs.source_repo = StrictAttributeDict(
3510 3540 pull_request_obj.source_repo.get_api_data())
3511 3541 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3512 3542
3513 3543 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3514 3544 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3515 3545 attrs.revisions = pull_request_obj.revisions
3516 3546
3517 3547 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3518 3548 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3519 3549 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3520 3550
3521 3551 return PullRequestDisplay(attrs, internal=internal_methods)
3522 3552
3523 3553 def is_closed(self):
3524 3554 return self.status == self.STATUS_CLOSED
3525 3555
3526 3556 def __json__(self):
3527 3557 return {
3528 3558 'revisions': self.revisions,
3529 3559 }
3530 3560
3531 3561 def calculated_review_status(self):
3532 3562 from rhodecode.model.changeset_status import ChangesetStatusModel
3533 3563 return ChangesetStatusModel().calculated_review_status(self)
3534 3564
3535 3565 def reviewers_statuses(self):
3536 3566 from rhodecode.model.changeset_status import ChangesetStatusModel
3537 3567 return ChangesetStatusModel().reviewers_statuses(self)
3538 3568
3539 3569 @property
3540 3570 def workspace_id(self):
3541 3571 from rhodecode.model.pull_request import PullRequestModel
3542 3572 return PullRequestModel()._workspace_id(self)
3543 3573
3544 3574 def get_shadow_repo(self):
3545 3575 workspace_id = self.workspace_id
3546 3576 vcs_obj = self.target_repo.scm_instance()
3547 3577 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3548 3578 workspace_id)
3549 3579 return vcs_obj._get_shadow_instance(shadow_repository_path)
3550 3580
3551 3581
3552 3582 class PullRequestVersion(Base, _PullRequestBase):
3553 3583 __tablename__ = 'pull_request_versions'
3554 3584 __table_args__ = (
3555 3585 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3556 3586 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3557 3587 )
3558 3588
3559 3589 pull_request_version_id = Column(
3560 3590 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3561 3591 pull_request_id = Column(
3562 3592 'pull_request_id', Integer(),
3563 3593 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3564 3594 pull_request = relationship('PullRequest')
3565 3595
3566 3596 def __repr__(self):
3567 3597 if self.pull_request_version_id:
3568 3598 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3569 3599 else:
3570 3600 return '<DB:PullRequestVersion at %#x>' % id(self)
3571 3601
3572 3602 @property
3573 3603 def reviewers(self):
3574 3604 return self.pull_request.reviewers
3575 3605
3576 3606 @property
3577 3607 def versions(self):
3578 3608 return self.pull_request.versions
3579 3609
3580 3610 def is_closed(self):
3581 3611 # calculate from original
3582 3612 return self.pull_request.status == self.STATUS_CLOSED
3583 3613
3584 3614 def calculated_review_status(self):
3585 3615 return self.pull_request.calculated_review_status()
3586 3616
3587 3617 def reviewers_statuses(self):
3588 3618 return self.pull_request.reviewers_statuses()
3589 3619
3590 3620
3591 3621 class PullRequestReviewers(Base, BaseModel):
3592 3622 __tablename__ = 'pull_request_reviewers'
3593 3623 __table_args__ = (
3594 3624 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3595 3625 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3596 3626 )
3597 3627
3598 3628 @hybrid_property
3599 3629 def reasons(self):
3600 3630 if not self._reasons:
3601 3631 return []
3602 3632 return self._reasons
3603 3633
3604 3634 @reasons.setter
3605 3635 def reasons(self, val):
3606 3636 val = val or []
3607 3637 if any(not isinstance(x, basestring) for x in val):
3608 3638 raise Exception('invalid reasons type, must be list of strings')
3609 3639 self._reasons = val
3610 3640
3611 3641 pull_requests_reviewers_id = Column(
3612 3642 'pull_requests_reviewers_id', Integer(), nullable=False,
3613 3643 primary_key=True)
3614 3644 pull_request_id = Column(
3615 3645 "pull_request_id", Integer(),
3616 3646 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3617 3647 user_id = Column(
3618 3648 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3619 3649 _reasons = Column(
3620 3650 'reason', MutationList.as_mutable(
3621 3651 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3622 3652 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3623 3653 user = relationship('User')
3624 3654 pull_request = relationship('PullRequest')
3625 3655
3626 3656
3627 3657 class Notification(Base, BaseModel):
3628 3658 __tablename__ = 'notifications'
3629 3659 __table_args__ = (
3630 3660 Index('notification_type_idx', 'type'),
3631 3661 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3632 3662 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3633 3663 )
3634 3664
3635 3665 TYPE_CHANGESET_COMMENT = u'cs_comment'
3636 3666 TYPE_MESSAGE = u'message'
3637 3667 TYPE_MENTION = u'mention'
3638 3668 TYPE_REGISTRATION = u'registration'
3639 3669 TYPE_PULL_REQUEST = u'pull_request'
3640 3670 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3641 3671
3642 3672 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3643 3673 subject = Column('subject', Unicode(512), nullable=True)
3644 3674 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3645 3675 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3646 3676 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3647 3677 type_ = Column('type', Unicode(255))
3648 3678
3649 3679 created_by_user = relationship('User')
3650 3680 notifications_to_users = relationship('UserNotification', lazy='joined',
3651 3681 cascade="all, delete, delete-orphan")
3652 3682
3653 3683 @property
3654 3684 def recipients(self):
3655 3685 return [x.user for x in UserNotification.query()\
3656 3686 .filter(UserNotification.notification == self)\
3657 3687 .order_by(UserNotification.user_id.asc()).all()]
3658 3688
3659 3689 @classmethod
3660 3690 def create(cls, created_by, subject, body, recipients, type_=None):
3661 3691 if type_ is None:
3662 3692 type_ = Notification.TYPE_MESSAGE
3663 3693
3664 3694 notification = cls()
3665 3695 notification.created_by_user = created_by
3666 3696 notification.subject = subject
3667 3697 notification.body = body
3668 3698 notification.type_ = type_
3669 3699 notification.created_on = datetime.datetime.now()
3670 3700
3671 3701 for u in recipients:
3672 3702 assoc = UserNotification()
3673 3703 assoc.notification = notification
3674 3704
3675 3705 # if created_by is inside recipients mark his notification
3676 3706 # as read
3677 3707 if u.user_id == created_by.user_id:
3678 3708 assoc.read = True
3679 3709
3680 3710 u.notifications.append(assoc)
3681 3711 Session().add(notification)
3682 3712
3683 3713 return notification
3684 3714
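    # A minimal usage sketch of the factory above (the user objects are
    # hypothetical):
    #
    #   notification = Notification.create(
    #       created_by=admin_user, subject=u'New pull request',
    #       body=u'please review', recipients=[reviewer, admin_user],
    #       type_=Notification.TYPE_PULL_REQUEST)
    #   # the creator's own UserNotification association is marked read=True,
    #   # every other recipient gets an unread one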
3685 3715 @property
3686 3716 def description(self):
3687 3717 from rhodecode.model.notification import NotificationModel
3688 3718 return NotificationModel().make_description(self)
3689 3719
3690 3720
3691 3721 class UserNotification(Base, BaseModel):
3692 3722 __tablename__ = 'user_to_notification'
3693 3723 __table_args__ = (
3694 3724 UniqueConstraint('user_id', 'notification_id'),
3695 3725 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3696 3726 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3697 3727 )
3698 3728 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3699 3729 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3700 3730 read = Column('read', Boolean, default=False)
3701 3731 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3702 3732
3703 3733 user = relationship('User', lazy="joined")
3704 3734 notification = relationship('Notification', lazy="joined",
3705 3735 order_by=lambda: Notification.created_on.desc(),)
3706 3736
3707 3737 def mark_as_read(self):
3708 3738 self.read = True
3709 3739 Session().add(self)
3710 3740
3711 3741
3712 3742 class Gist(Base, BaseModel):
3713 3743 __tablename__ = 'gists'
3714 3744 __table_args__ = (
3715 3745 Index('g_gist_access_id_idx', 'gist_access_id'),
3716 3746 Index('g_created_on_idx', 'created_on'),
3717 3747 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3718 3748 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3719 3749 )
3720 3750 GIST_PUBLIC = u'public'
3721 3751 GIST_PRIVATE = u'private'
3722 3752 DEFAULT_FILENAME = u'gistfile1.txt'
3723 3753
3724 3754 ACL_LEVEL_PUBLIC = u'acl_public'
3725 3755 ACL_LEVEL_PRIVATE = u'acl_private'
3726 3756
3727 3757 gist_id = Column('gist_id', Integer(), primary_key=True)
3728 3758 gist_access_id = Column('gist_access_id', Unicode(250))
3729 3759 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3730 3760 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3731 3761 gist_expires = Column('gist_expires', Float(53), nullable=False)
3732 3762 gist_type = Column('gist_type', Unicode(128), nullable=False)
3733 3763 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3734 3764 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3735 3765 acl_level = Column('acl_level', Unicode(128), nullable=True)
3736 3766
3737 3767 owner = relationship('User')
3738 3768
3739 3769 def __repr__(self):
3740 3770 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3741 3771
3772 @hybrid_property
3773 def description_safe(self):
3774 from rhodecode.lib import helpers as h
3775 return h.escape(self.gist_description)
3776
3742 3777 @classmethod
3743 3778 def get_or_404(cls, id_, pyramid_exc=False):
3744 3779
3745 3780 if pyramid_exc:
3746 3781 from pyramid.httpexceptions import HTTPNotFound
3747 3782 else:
3748 3783 from webob.exc import HTTPNotFound
3749 3784
3750 3785 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3751 3786 if not res:
3752 3787 raise HTTPNotFound
3753 3788 return res
3754 3789
3755 3790 @classmethod
3756 3791 def get_by_access_id(cls, gist_access_id):
3757 3792 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3758 3793
3759 3794 def gist_url(self):
3760 3795 import rhodecode
3761 3796 from pylons import url
3762 3797
3763 3798 alias_url = rhodecode.CONFIG.get('gist_alias_url')
3764 3799 if alias_url:
3765 3800 return alias_url.replace('{gistid}', self.gist_access_id)
3766 3801
3767 3802 return url('gist', gist_id=self.gist_access_id, qualified=True)
3768 3803
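    # A minimal sketch of the alias handling above, with a hypothetical
    # gist_alias_url set in the configuration:
    #
    #   # gist_alias_url = https://gist.example.com/{gistid}
    #   gist.gist_url()
    #   # -> 'https://gist.example.com/<gist_access_id>'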
3769 3804 @classmethod
3770 3805 def base_path(cls):
3771 3806 """
3772 3807         Returns the base path where all gists are stored
3773 3808
3774 3809 :param cls:
3775 3810 """
3776 3811 from rhodecode.model.gist import GIST_STORE_LOC
3777 3812 q = Session().query(RhodeCodeUi)\
3778 3813 .filter(RhodeCodeUi.ui_key == URL_SEP)
3779 3814 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3780 3815 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3781 3816
3782 3817 def get_api_data(self):
3783 3818 """
3784 3819 Common function for generating gist related data for API
3785 3820 """
3786 3821 gist = self
3787 3822 data = {
3788 3823 'gist_id': gist.gist_id,
3789 3824 'type': gist.gist_type,
3790 3825 'access_id': gist.gist_access_id,
3791 3826 'description': gist.gist_description,
3792 3827 'url': gist.gist_url(),
3793 3828 'expires': gist.gist_expires,
3794 3829 'created_on': gist.created_on,
3795 3830 'modified_at': gist.modified_at,
3796 3831 'content': None,
3797 3832 'acl_level': gist.acl_level,
3798 3833 }
3799 3834 return data
3800 3835
3801 3836 def __json__(self):
3802 3837 data = dict(
3803 3838 )
3804 3839 data.update(self.get_api_data())
3805 3840 return data
3806 3841 # SCM functions
3807 3842
3808 3843 def scm_instance(self, **kwargs):
3809 3844 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
3810 3845 return get_vcs_instance(
3811 3846 repo_path=safe_str(full_repo_path), create=False)
3812 3847
3813 3848
3814 3849 class ExternalIdentity(Base, BaseModel):
3815 3850 __tablename__ = 'external_identities'
3816 3851 __table_args__ = (
3817 3852 Index('local_user_id_idx', 'local_user_id'),
3818 3853 Index('external_id_idx', 'external_id'),
3819 3854 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3820 3855 'mysql_charset': 'utf8'})
3821 3856
3822 3857 external_id = Column('external_id', Unicode(255), default=u'',
3823 3858 primary_key=True)
3824 3859 external_username = Column('external_username', Unicode(1024), default=u'')
3825 3860 local_user_id = Column('local_user_id', Integer(),
3826 3861 ForeignKey('users.user_id'), primary_key=True)
3827 3862 provider_name = Column('provider_name', Unicode(255), default=u'',
3828 3863 primary_key=True)
3829 3864 access_token = Column('access_token', String(1024), default=u'')
3830 3865 alt_token = Column('alt_token', String(1024), default=u'')
3831 3866 token_secret = Column('token_secret', String(1024), default=u'')
3832 3867
3833 3868 @classmethod
3834 3869 def by_external_id_and_provider(cls, external_id, provider_name,
3835 3870 local_user_id=None):
3836 3871 """
3837 3872 Returns ExternalIdentity instance based on search params
3838 3873
3839 3874 :param external_id:
3840 3875 :param provider_name:
3841 3876 :return: ExternalIdentity
3842 3877 """
3843 3878 query = cls.query()
3844 3879 query = query.filter(cls.external_id == external_id)
3845 3880 query = query.filter(cls.provider_name == provider_name)
3846 3881 if local_user_id:
3847 3882 query = query.filter(cls.local_user_id == local_user_id)
3848 3883 return query.first()
3849 3884
3850 3885 @classmethod
3851 3886 def user_by_external_id_and_provider(cls, external_id, provider_name):
3852 3887 """
3853 3888 Returns User instance based on search params
3854 3889
3855 3890 :param external_id:
3856 3891 :param provider_name:
3857 3892 :return: User
3858 3893 """
3859 3894 query = User.query()
3860 3895 query = query.filter(cls.external_id == external_id)
3861 3896 query = query.filter(cls.provider_name == provider_name)
3862 3897 query = query.filter(User.user_id == cls.local_user_id)
3863 3898 return query.first()
3864 3899
3865 3900 @classmethod
3866 3901 def by_local_user_id(cls, local_user_id):
3867 3902 """
3868 3903 Returns all tokens for user
3869 3904
3870 3905 :param local_user_id:
3871 3906 :return: ExternalIdentity
3872 3907 """
3873 3908 query = cls.query()
3874 3909 query = query.filter(cls.local_user_id == local_user_id)
3875 3910 return query
3876 3911
3877 3912
3878 3913 class Integration(Base, BaseModel):
3879 3914 __tablename__ = 'integrations'
3880 3915 __table_args__ = (
3881 3916 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3882 3917 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3883 3918 )
3884 3919
3885 3920 integration_id = Column('integration_id', Integer(), primary_key=True)
3886 3921 integration_type = Column('integration_type', String(255))
3887 3922 enabled = Column('enabled', Boolean(), nullable=False)
3888 3923 name = Column('name', String(255), nullable=False)
3889 3924 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
3890 3925 default=False)
3891 3926
3892 3927 settings = Column(
3893 3928 'settings_json', MutationObj.as_mutable(
3894 3929 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3895 3930 repo_id = Column(
3896 3931 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
3897 3932 nullable=True, unique=None, default=None)
3898 3933 repo = relationship('Repository', lazy='joined')
3899 3934
3900 3935 repo_group_id = Column(
3901 3936 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
3902 3937 nullable=True, unique=None, default=None)
3903 3938 repo_group = relationship('RepoGroup', lazy='joined')
3904 3939
3905 3940 @property
3906 3941 def scope(self):
3907 3942 if self.repo:
3908 3943 return repr(self.repo)
3909 3944 if self.repo_group:
3910 3945 if self.child_repos_only:
3911 3946 return repr(self.repo_group) + ' (child repos only)'
3912 3947 else:
3913 3948 return repr(self.repo_group) + ' (recursive)'
3914 3949 if self.child_repos_only:
3915 3950 return 'root_repos'
3916 3951 return 'global'
3917 3952
3918 3953 def __repr__(self):
3919 3954 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
3920 3955
3921 3956
3922 3957 class RepoReviewRuleUser(Base, BaseModel):
3923 3958 __tablename__ = 'repo_review_rules_users'
3924 3959 __table_args__ = (
3925 3960 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3926 3961 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3927 3962 )
3928 3963 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
3929 3964 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
3930 3965 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
3931 3966 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3932 3967 user = relationship('User')
3933 3968
3934 3969 def rule_data(self):
3935 3970 return {
3936 3971 'mandatory': self.mandatory
3937 3972 }
3938 3973
3939 3974
3940 3975 class RepoReviewRuleUserGroup(Base, BaseModel):
3941 3976 __tablename__ = 'repo_review_rules_users_groups'
3942 3977 __table_args__ = (
3943 3978 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3944 3979 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3945 3980 )
3946 3981 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
3947 3982 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
3948 3983 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
3949 3984 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3950 3985 users_group = relationship('UserGroup')
3951 3986
3952 3987 def rule_data(self):
3953 3988 return {
3954 3989 'mandatory': self.mandatory
3955 3990 }
3956 3991
3957 3992
3958 3993 class RepoReviewRule(Base, BaseModel):
3959 3994 __tablename__ = 'repo_review_rules'
3960 3995 __table_args__ = (
3961 3996 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3962 3997 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3963 3998 )
3964 3999
3965 4000 repo_review_rule_id = Column(
3966 4001 'repo_review_rule_id', Integer(), primary_key=True)
3967 4002 repo_id = Column(
3968 4003 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
3969 4004 repo = relationship('Repository', backref='review_rules')
3970 4005
3971 4006 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
3972 4007 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
3973 4008
3974 4009 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
3975 4010 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
3976 4011 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
3977 4012 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
3978 4013
3979 4014 rule_users = relationship('RepoReviewRuleUser')
3980 4015 rule_user_groups = relationship('RepoReviewRuleUserGroup')
3981 4016
3982 4017 @hybrid_property
3983 4018 def branch_pattern(self):
3984 4019 return self._branch_pattern or '*'
3985 4020
3986 4021 def _validate_glob(self, value):
3987 4022 re.compile('^' + glob2re(value) + '$')
3988 4023
3989 4024 @branch_pattern.setter
3990 4025 def branch_pattern(self, value):
3991 4026 self._validate_glob(value)
3992 4027 self._branch_pattern = value or '*'
3993 4028
3994 4029 @hybrid_property
3995 4030 def file_pattern(self):
3996 4031 return self._file_pattern or '*'
3997 4032
3998 4033 @file_pattern.setter
3999 4034 def file_pattern(self, value):
4000 4035 self._validate_glob(value)
4001 4036 self._file_pattern = value or '*'
4002 4037
4003 4038 def matches(self, branch, files_changed):
4004 4039 """
4005 4040 Check if this review rule matches a branch/files in a pull request
4006 4041
4007 4042 :param branch: branch name for the commit
4008 4043 :param files_changed: list of file paths changed in the pull request
4009 4044 """
4010 4045
4011 4046 branch = branch or ''
4012 4047 files_changed = files_changed or []
4013 4048
4014 4049 branch_matches = True
4015 4050 if branch:
4016 4051 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
4017 4052 branch_matches = bool(branch_regex.search(branch))
4018 4053
4019 4054 files_matches = True
4020 4055 if self.file_pattern != '*':
4021 4056 files_matches = False
4022 4057 file_regex = re.compile(glob2re(self.file_pattern))
4023 4058 for filename in files_changed:
4024 4059 if file_regex.search(filename):
4025 4060 files_matches = True
4026 4061 break
4027 4062
4028 4063 return branch_matches and files_matches
4029 4064
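    # A minimal sketch of the matching above (patterns and paths are
    # hypothetical): with branch_pattern u'release/*' and file_pattern
    # u'*/templates/*',
    #
    #   rule.matches(u'release/4.7', [u'rhodecode/templates/base.mako'])
    #   # -> True
    #   rule.matches(u'default', [u'docs/index.rst'])
    #   # -> False, the branch regex does not match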
4030 4065 @property
4031 4066 def review_users(self):
4032 4067         """ Returns the users to which this rule applies """
4033 4068
4034 4069 users = collections.OrderedDict()
4035 4070
4036 4071 for rule_user in self.rule_users:
4037 4072 if rule_user.user.active:
4038 4073 if rule_user.user not in users:
4039 4074 users[rule_user.user.username] = {
4040 4075 'user': rule_user.user,
4041 4076 'source': 'user',
4042 4077 'source_data': {},
4043 4078 'data': rule_user.rule_data()
4044 4079 }
4045 4080
4046 4081 for rule_user_group in self.rule_user_groups:
4047 4082 source_data = {
4048 4083 'name': rule_user_group.users_group.users_group_name,
4049 4084 'members': len(rule_user_group.users_group.members)
4050 4085 }
4051 4086 for member in rule_user_group.users_group.members:
4052 4087 if member.user.active:
4053 4088 users[member.user.username] = {
4054 4089 'user': member.user,
4055 4090 'source': 'user_group',
4056 4091 'source_data': source_data,
4057 4092 'data': rule_user_group.rule_data()
4058 4093 }
4059 4094
4060 4095 return users
4061 4096
4062 4097 def __repr__(self):
4063 4098 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4064 4099 self.repo_review_rule_id, self.repo)
4065 4100
4066 4101
4067 4102 class DbMigrateVersion(Base, BaseModel):
4068 4103 __tablename__ = 'db_migrate_version'
4069 4104 __table_args__ = (
4070 4105 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4071 4106 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4072 4107 )
4073 4108 repository_id = Column('repository_id', String(250), primary_key=True)
4074 4109 repository_path = Column('repository_path', Text)
4075 4110 version = Column('version', Integer)
4076 4111
4077 4112
4078 4113 class DbSession(Base, BaseModel):
4079 4114 __tablename__ = 'db_session'
4080 4115 __table_args__ = (
4081 4116 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4082 4117 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4083 4118 )
4084 4119
4085 4120 def __repr__(self):
4086 4121 return '<DB:DbSession({})>'.format(self.id)
4087 4122
4088 4123 id = Column('id', Integer())
4089 4124 namespace = Column('namespace', String(255), primary_key=True)
4090 4125 accessed = Column('accessed', DateTime, nullable=False)
4091 4126 created = Column('created', DateTime, nullable=False)
4092 4127 data = Column('data', PickleType, nullable=False)
@@ -1,1551 +1,1551 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31
32 32 from pylons.i18n.translation import _
33 33 from pylons.i18n.translation import lazy_ugettext
34 34 from pyramid.threadlocal import get_current_request
35 35 from sqlalchemy import or_
36 36
37 37 from rhodecode import events
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 68 UpdateResponse = namedtuple('UpdateResponse', [
69 69 'executed', 'reason', 'new', 'old', 'changes',
70 70 'source_changed', 'target_changed'])
71 71
72 72
73 73 class PullRequestModel(BaseModel):
74 74
75 75 cls = PullRequest
76 76
77 77 DIFF_CONTEXT = 3
78 78
79 79 MERGE_STATUS_MESSAGES = {
80 80 MergeFailureReason.NONE: lazy_ugettext(
81 81 'This pull request can be automatically merged.'),
82 82 MergeFailureReason.UNKNOWN: lazy_ugettext(
83 83 'This pull request cannot be merged because of an unhandled'
84 84 ' exception.'),
85 85 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
86 86 'This pull request cannot be merged because of merge conflicts.'),
87 87 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
88 88 'This pull request could not be merged because push to target'
89 89 ' failed.'),
90 90 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
91 91 'This pull request cannot be merged because the target is not a'
92 92 ' head.'),
93 93 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
94 94 'This pull request cannot be merged because the source contains'
95 95 ' more branches than the target.'),
96 96 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
97 97 'This pull request cannot be merged because the target has'
98 98 ' multiple heads.'),
99 99 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
100 100 'This pull request cannot be merged because the target repository'
101 101 ' is locked.'),
102 102 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
103 103 'This pull request cannot be merged because the target or the '
104 104 'source reference is missing.'),
105 105 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
106 106 'This pull request cannot be merged because the target '
107 107 'reference is missing.'),
108 108 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
109 109 'This pull request cannot be merged because the source '
110 110 'reference is missing.'),
111 111 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
112 112 'This pull request cannot be merged because of conflicts related '
113 113 'to sub repositories.'),
114 114 }
115 115
116 116 UPDATE_STATUS_MESSAGES = {
117 117 UpdateFailureReason.NONE: lazy_ugettext(
118 118 'Pull request update successful.'),
119 119 UpdateFailureReason.UNKNOWN: lazy_ugettext(
120 120 'Pull request update failed because of an unknown error.'),
121 121 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
122 122 'No update needed because the source and target have not changed.'),
123 123 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
124 124 'Pull request cannot be updated because the reference type is '
125 125 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
126 126 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
127 127 'This pull request cannot be updated because the target '
128 128 'reference is missing.'),
129 129 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
130 130 'This pull request cannot be updated because the source '
131 131 'reference is missing.'),
132 132 }
133 133
134 134 def __get_pull_request(self, pull_request):
135 135 return self._get_instance((
136 136 PullRequest, PullRequestVersion), pull_request)
137 137
138 138 def _check_perms(self, perms, pull_request, user, api=False):
139 139 if not api:
140 140 return h.HasRepoPermissionAny(*perms)(
141 141 user=user, repo_name=pull_request.target_repo.repo_name)
142 142 else:
143 143 return h.HasRepoPermissionAnyApi(*perms)(
144 144 user=user, repo_name=pull_request.target_repo.repo_name)
145 145
146 146 def check_user_read(self, pull_request, user, api=False):
147 147 _perms = ('repository.admin', 'repository.write', 'repository.read',)
148 148 return self._check_perms(_perms, pull_request, user, api)
149 149
150 150 def check_user_merge(self, pull_request, user, api=False):
151 151 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
152 152 return self._check_perms(_perms, pull_request, user, api)
153 153
154 154 def check_user_update(self, pull_request, user, api=False):
155 155 owner = user.user_id == pull_request.user_id
156 156 return self.check_user_merge(pull_request, user, api) or owner
157 157
158 158 def check_user_delete(self, pull_request, user):
159 159 owner = user.user_id == pull_request.user_id
160 160 _perms = ('repository.admin',)
161 161 return self._check_perms(_perms, pull_request, user) or owner
162 162
163 163 def check_user_change_status(self, pull_request, user, api=False):
164 164 reviewer = user.user_id in [x.user_id for x in
165 165 pull_request.reviewers]
166 166 return self.check_user_update(pull_request, user, api) or reviewer
167 167
168 168 def get(self, pull_request):
169 169 return self.__get_pull_request(pull_request)
170 170
171 171 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
172 172 opened_by=None, order_by=None,
173 173 order_dir='desc'):
174 174 repo = None
175 175 if repo_name:
176 176 repo = self._get_repo(repo_name)
177 177
178 178 q = PullRequest.query()
179 179
180 180 # source or target
181 181 if repo and source:
182 182 q = q.filter(PullRequest.source_repo == repo)
183 183 elif repo:
184 184 q = q.filter(PullRequest.target_repo == repo)
185 185
186 186 # closed,opened
187 187 if statuses:
188 188 q = q.filter(PullRequest.status.in_(statuses))
189 189
190 190 # opened by filter
191 191 if opened_by:
192 192 q = q.filter(PullRequest.user_id.in_(opened_by))
193 193
194 194 if order_by:
195 195 order_map = {
196 196 'name_raw': PullRequest.pull_request_id,
197 197 'title': PullRequest.title,
198 198 'updated_on_raw': PullRequest.updated_on,
199 199 'target_repo': PullRequest.target_repo_id
200 200 }
201 201 if order_dir == 'asc':
202 202 q = q.order_by(order_map[order_by].asc())
203 203 else:
204 204 q = q.order_by(order_map[order_by].desc())
205 205
206 206 return q
207 207
208 208 def count_all(self, repo_name, source=False, statuses=None,
209 209 opened_by=None):
210 210 """
211 211 Count the number of pull requests for a specific repository.
212 212
213 213 :param repo_name: target or source repo
214 214 :param source: boolean flag to specify if repo_name refers to source
215 215 :param statuses: list of pull request statuses
216 216 :param opened_by: author user of the pull request
217 217 :returns: int number of pull requests
218 218 """
219 219 q = self._prepare_get_all_query(
220 220 repo_name, source=source, statuses=statuses, opened_by=opened_by)
221 221
222 222 return q.count()
223 223
224 224 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
225 225 offset=0, length=None, order_by=None, order_dir='desc'):
226 226 """
227 227 Get all pull requests for a specific repository.
228 228
229 229 :param repo_name: target or source repo
230 230 :param source: boolean flag to specify if repo_name refers to source
231 231 :param statuses: list of pull request statuses
232 232 :param opened_by: author user of the pull request
233 233 :param offset: pagination offset
234 234 :param length: length of returned list
235 235 :param order_by: order of the returned list
236 236 :param order_dir: 'asc' or 'desc' ordering direction
237 237 :returns: list of pull requests
238 238 """
239 239 q = self._prepare_get_all_query(
240 240 repo_name, source=source, statuses=statuses, opened_by=opened_by,
241 241 order_by=order_by, order_dir=order_dir)
242 242
243 243 if length:
244 244 pull_requests = q.limit(length).offset(offset).all()
245 245 else:
246 246 pull_requests = q.all()
247 247
248 248 return pull_requests
249 249
250 250 def count_awaiting_review(self, repo_name, source=False, statuses=None,
251 251 opened_by=None):
252 252 """
253 253 Count the number of pull requests for a specific repository that are
254 254 awaiting review.
255 255
256 256 :param repo_name: target or source repo
257 257 :param source: boolean flag to specify if repo_name refers to source
258 258 :param statuses: list of pull request statuses
259 259 :param opened_by: author user of the pull request
260 260 :returns: int number of pull requests
261 261 """
262 262 pull_requests = self.get_awaiting_review(
263 263 repo_name, source=source, statuses=statuses, opened_by=opened_by)
264 264
265 265 return len(pull_requests)
266 266
267 267 def get_awaiting_review(self, repo_name, source=False, statuses=None,
268 268 opened_by=None, offset=0, length=None,
269 269 order_by=None, order_dir='desc'):
270 270 """
271 271 Get all pull requests for a specific repository that are awaiting
272 272 review.
273 273
274 274 :param repo_name: target or source repo
275 275 :param source: boolean flag to specify if repo_name refers to source
276 276 :param statuses: list of pull request statuses
277 277 :param opened_by: author user of the pull request
278 278 :param offset: pagination offset
279 279 :param length: length of returned list
280 280 :param order_by: order of the returned list
281 281 :param order_dir: 'asc' or 'desc' ordering direction
282 282 :returns: list of pull requests
283 283 """
284 284 pull_requests = self.get_all(
285 285 repo_name, source=source, statuses=statuses, opened_by=opened_by,
286 286 order_by=order_by, order_dir=order_dir)
287 287
288 288 _filtered_pull_requests = []
289 289 for pr in pull_requests:
290 290 status = pr.calculated_review_status()
291 291 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
292 292 ChangesetStatus.STATUS_UNDER_REVIEW]:
293 293 _filtered_pull_requests.append(pr)
294 294 if length:
295 295 return _filtered_pull_requests[offset:offset+length]
296 296 else:
297 297 return _filtered_pull_requests
298 298
299 299 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
300 300 opened_by=None, user_id=None):
301 301 """
302 302 Count the number of pull requests for a specific repository that are
303 303 awaiting review from a specific user.
304 304
305 305 :param repo_name: target or source repo
306 306 :param source: boolean flag to specify if repo_name refers to source
307 307 :param statuses: list of pull request statuses
308 308 :param opened_by: author user of the pull request
309 309 :param user_id: reviewer user of the pull request
310 310 :returns: int number of pull requests
311 311 """
312 312 pull_requests = self.get_awaiting_my_review(
313 313 repo_name, source=source, statuses=statuses, opened_by=opened_by,
314 314 user_id=user_id)
315 315
316 316 return len(pull_requests)
317 317
318 318 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
319 319 opened_by=None, user_id=None, offset=0,
320 320 length=None, order_by=None, order_dir='desc'):
321 321 """
322 322 Get all pull requests for a specific repository that are awaiting
323 323 review from a specific user.
324 324
325 325 :param repo_name: target or source repo
326 326 :param source: boolean flag to specify if repo_name refers to source
327 327 :param statuses: list of pull request statuses
328 328 :param opened_by: author user of the pull request
329 329 :param user_id: reviewer user of the pull request
330 330 :param offset: pagination offset
331 331 :param length: length of returned list
332 332 :param order_by: order of the returned list
333 333 :param order_dir: 'asc' or 'desc' ordering direction
334 334 :returns: list of pull requests
335 335 """
336 336 pull_requests = self.get_all(
337 337 repo_name, source=source, statuses=statuses, opened_by=opened_by,
338 338 order_by=order_by, order_dir=order_dir)
339 339
340 340 _my = PullRequestModel().get_not_reviewed(user_id)
341 341 my_participation = []
342 342 for pr in pull_requests:
343 343 if pr in _my:
344 344 my_participation.append(pr)
345 345 _filtered_pull_requests = my_participation
346 346 if length:
347 347 return _filtered_pull_requests[offset:offset+length]
348 348 else:
349 349 return _filtered_pull_requests
350 350
351 351 def get_not_reviewed(self, user_id):
352 352 return [
353 353 x.pull_request for x in PullRequestReviewers.query().filter(
354 354 PullRequestReviewers.user_id == user_id).all()
355 355 ]
356 356
357 357 def _prepare_participating_query(self, user_id=None, statuses=None,
358 358 order_by=None, order_dir='desc'):
359 359 q = PullRequest.query()
360 360 if user_id:
361 361 reviewers_subquery = Session().query(
362 362 PullRequestReviewers.pull_request_id).filter(
363 363 PullRequestReviewers.user_id == user_id).subquery()
364 364 user_filter = or_(
365 365 PullRequest.user_id == user_id,
366 366 PullRequest.pull_request_id.in_(reviewers_subquery)
367 367 )
368 368 q = PullRequest.query().filter(user_filter)
369 369
370 370 # closed,opened
371 371 if statuses:
372 372 q = q.filter(PullRequest.status.in_(statuses))
373 373
374 374 if order_by:
375 375 order_map = {
376 376 'name_raw': PullRequest.pull_request_id,
377 377 'title': PullRequest.title,
378 378 'updated_on_raw': PullRequest.updated_on,
379 379 'target_repo': PullRequest.target_repo_id
380 380 }
381 381 if order_dir == 'asc':
382 382 q = q.order_by(order_map[order_by].asc())
383 383 else:
384 384 q = q.order_by(order_map[order_by].desc())
385 385
386 386 return q
387 387
388 388 def count_im_participating_in(self, user_id=None, statuses=None):
389 389 q = self._prepare_participating_query(user_id, statuses=statuses)
390 390 return q.count()
391 391
392 392 def get_im_participating_in(
393 393 self, user_id=None, statuses=None, offset=0,
394 394 length=None, order_by=None, order_dir='desc'):
395 395 """
396 396 Get all pull requests that I'm participating in, or that I have opened
397 397 """
398 398
399 399 q = self._prepare_participating_query(
400 400 user_id, statuses=statuses, order_by=order_by,
401 401 order_dir=order_dir)
402 402
403 403 if length:
404 404 pull_requests = q.limit(length).offset(offset).all()
405 405 else:
406 406 pull_requests = q.all()
407 407
408 408 return pull_requests
409 409
410 410 def get_versions(self, pull_request):
411 411 """
412 412 returns versions of the pull request, sorted by version ID ascending
413 413 """
414 414 return PullRequestVersion.query()\
415 415 .filter(PullRequestVersion.pull_request == pull_request)\
416 416 .order_by(PullRequestVersion.pull_request_version_id.asc())\
417 417 .all()
418 418
419 419 def create(self, created_by, source_repo, source_ref, target_repo,
420 420 target_ref, revisions, reviewers, title, description=None,
421 421 reviewer_data=None):
422 422
423 423 created_by_user = self._get_user(created_by)
424 424 source_repo = self._get_repo(source_repo)
425 425 target_repo = self._get_repo(target_repo)
426 426
427 427 pull_request = PullRequest()
428 428 pull_request.source_repo = source_repo
429 429 pull_request.source_ref = source_ref
430 430 pull_request.target_repo = target_repo
431 431 pull_request.target_ref = target_ref
432 432 pull_request.revisions = revisions
433 433 pull_request.title = title
434 434 pull_request.description = description
435 435 pull_request.author = created_by_user
436 436 pull_request.reviewer_data = reviewer_data
437 437
438 438 Session().add(pull_request)
439 439 Session().flush()
440 440
441 441 reviewer_ids = set()
442 442 # members / reviewers
443 443 for reviewer_object in reviewers:
444 444 user_id, reasons, mandatory = reviewer_object
445 445 user = self._get_user(user_id)
446 446
447 447 # skip duplicates
448 448 if user.user_id in reviewer_ids:
449 449 continue
450 450
451 451 reviewer_ids.add(user.user_id)
452 452
453 453 reviewer = PullRequestReviewers()
454 454 reviewer.user = user
455 455 reviewer.pull_request = pull_request
456 456 reviewer.reasons = reasons
457 457 reviewer.mandatory = mandatory
458 458 Session().add(reviewer)
459 459
460 460 # Set approval status to "Under Review" for all commits which are
461 461 # part of this pull request.
462 462 ChangesetStatusModel().set_status(
463 463 repo=target_repo,
464 464 status=ChangesetStatus.STATUS_UNDER_REVIEW,
465 465 user=created_by_user,
466 466 pull_request=pull_request
467 467 )
468 468
469 469 self.notify_reviewers(pull_request, reviewer_ids)
470 470 self._trigger_pull_request_hook(
471 471 pull_request, created_by_user, 'create')
472 472
473 473 creation_data = pull_request.get_api_data(with_merge_state=False)
474 474 self._log_audit_action(
475 475 'repo.pull_request.create', {'data': creation_data},
476 476 created_by_user, pull_request)
477 477
478 478 return pull_request
479 479
480 480 def _trigger_pull_request_hook(self, pull_request, user, action):
481 481 pull_request = self.__get_pull_request(pull_request)
482 482 target_scm = pull_request.target_repo.scm_instance()
483 483 if action == 'create':
484 484 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
485 485 elif action == 'merge':
486 486 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
487 487 elif action == 'close':
488 488 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
489 489 elif action == 'review_status_change':
490 490 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
491 491 elif action == 'update':
492 492 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
493 493 else:
494 494 return
495 495
496 496 trigger_hook(
497 497 username=user.username,
498 498 repo_name=pull_request.target_repo.repo_name,
499 499 repo_alias=target_scm.alias,
500 500 pull_request=pull_request)
501 501
502 502 def _get_commit_ids(self, pull_request):
503 503 """
504 504 Return the commit ids of the merged pull request.
505 505
506 506 This method does not yet deal correctly with the lack of autoupdates
507 507 or with implicit target updates.
508 508 For example: if a commit in the source repo is already in the target, it
509 509 will still be reported.
510 510 """
511 511 merge_rev = pull_request.merge_rev
512 512 if merge_rev is None:
513 513 raise ValueError('This pull request was not merged yet')
514 514
515 515 commit_ids = list(pull_request.revisions)
516 516 if merge_rev not in commit_ids:
517 517 commit_ids.append(merge_rev)
518 518
519 519 return commit_ids
520 520
521 521 def merge(self, pull_request, user, extras):
522 522 log.debug("Merging pull request %s", pull_request.pull_request_id)
523 523 merge_state = self._merge_pull_request(pull_request, user, extras)
524 524 if merge_state.executed:
525 525 log.debug(
526 526 "Merge was successful, updating the pull request comments.")
527 527 self._comment_and_close_pr(pull_request, user, merge_state)
528 528
529 529 self._log_audit_action(
530 530 'repo.pull_request.merge',
531 531 {'merge_state': merge_state.__dict__},
532 532 user, pull_request)
533 533
534 534 else:
535 535 log.warn("Merge failed, not updating the pull request.")
536 536 return merge_state
537 537
538 538 def _merge_pull_request(self, pull_request, user, extras):
539 539 target_vcs = pull_request.target_repo.scm_instance()
540 540 source_vcs = pull_request.source_repo.scm_instance()
541 541 target_ref = self._refresh_reference(
542 542 pull_request.target_ref_parts, target_vcs)
543 543
544 544 message = _(
545 545 'Merge pull request #%(pr_id)s from '
546 546 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
547 547 'pr_id': pull_request.pull_request_id,
548 548 'source_repo': source_vcs.name,
549 549 'source_ref_name': pull_request.source_ref_parts.name,
550 550 'pr_title': pull_request.title
551 551 }
552 552
553 553 workspace_id = self._workspace_id(pull_request)
554 554 use_rebase = self._use_rebase_for_merging(pull_request)
555 555
556 556 callback_daemon, extras = prepare_callback_daemon(
557 557 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
558 558 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
559 559
560 560 with callback_daemon:
561 561 # TODO: johbo: Implement a clean way to run a config_override
562 562 # for a single call.
563 563 target_vcs.config.set(
564 564 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
565 565 merge_state = target_vcs.merge(
566 566 target_ref, source_vcs, pull_request.source_ref_parts,
567 567 workspace_id, user_name=user.username,
568 568 user_email=user.email, message=message, use_rebase=use_rebase)
569 569 return merge_state
570 570
571 571 def _comment_and_close_pr(self, pull_request, user, merge_state):
572 572 pull_request.merge_rev = merge_state.merge_ref.commit_id
573 573 pull_request.updated_on = datetime.datetime.now()
574 574
575 575 CommentsModel().create(
576 576 text=unicode(_('Pull request merged and closed')),
577 577 repo=pull_request.target_repo.repo_id,
578 578 user=user.user_id,
579 579 pull_request=pull_request.pull_request_id,
580 580 f_path=None,
581 581 line_no=None,
582 582 closing_pr=True
583 583 )
584 584
585 585 Session().add(pull_request)
586 586 Session().flush()
587 587 # TODO: paris: replace invalidation with less radical solution
588 588 ScmModel().mark_for_invalidation(
589 589 pull_request.target_repo.repo_name)
590 590 self._trigger_pull_request_hook(pull_request, user, 'merge')
591 591
592 592 def has_valid_update_type(self, pull_request):
593 593 source_ref_type = pull_request.source_ref_parts.type
594 594 return source_ref_type in ['book', 'branch', 'tag']
595 595
596 596 def update_commits(self, pull_request):
597 597 """
598 598 Get the updated list of commits for the pull request
599 599 and return the new pull request version and the list
600 600 of commits processed by this update action
601 601 """
602 602 pull_request = self.__get_pull_request(pull_request)
603 603 source_ref_type = pull_request.source_ref_parts.type
604 604 source_ref_name = pull_request.source_ref_parts.name
605 605 source_ref_id = pull_request.source_ref_parts.commit_id
606 606
607 607 target_ref_type = pull_request.target_ref_parts.type
608 608 target_ref_name = pull_request.target_ref_parts.name
609 609 target_ref_id = pull_request.target_ref_parts.commit_id
610 610
611 611 if not self.has_valid_update_type(pull_request):
612 612 log.debug(
613 613 "Skipping update of pull request %s due to ref type: %s",
614 614 pull_request, source_ref_type)
615 615 return UpdateResponse(
616 616 executed=False,
617 617 reason=UpdateFailureReason.WRONG_REF_TYPE,
618 618 old=pull_request, new=None, changes=None,
619 619 source_changed=False, target_changed=False)
620 620
621 621 # source repo
622 622 source_repo = pull_request.source_repo.scm_instance()
623 623 try:
624 624 source_commit = source_repo.get_commit(commit_id=source_ref_name)
625 625 except CommitDoesNotExistError:
626 626 return UpdateResponse(
627 627 executed=False,
628 628 reason=UpdateFailureReason.MISSING_SOURCE_REF,
629 629 old=pull_request, new=None, changes=None,
630 630 source_changed=False, target_changed=False)
631 631
632 632 source_changed = source_ref_id != source_commit.raw_id
633 633
634 634 # target repo
635 635 target_repo = pull_request.target_repo.scm_instance()
636 636 try:
637 637 target_commit = target_repo.get_commit(commit_id=target_ref_name)
638 638 except CommitDoesNotExistError:
639 639 return UpdateResponse(
640 640 executed=False,
641 641 reason=UpdateFailureReason.MISSING_TARGET_REF,
642 642 old=pull_request, new=None, changes=None,
643 643 source_changed=False, target_changed=False)
644 644 target_changed = target_ref_id != target_commit.raw_id
645 645
646 646 if not (source_changed or target_changed):
647 647 log.debug("Nothing changed in pull request %s", pull_request)
648 648 return UpdateResponse(
649 649 executed=False,
650 650 reason=UpdateFailureReason.NO_CHANGE,
651 651 old=pull_request, new=None, changes=None,
652 652 source_changed=source_changed, target_changed=target_changed)
653 653
654 654 change_in_found = 'target repo' if target_changed else 'source repo'
655 655 log.debug('Updating pull request because of change in %s detected',
656 656 change_in_found)
657 657
658 658 # Finally there is a need for an update; in case of a source change
659 659 # we create a new version, otherwise just an update
660 660 if source_changed:
661 661 pull_request_version = self._create_version_from_snapshot(pull_request)
662 662 self._link_comments_to_version(pull_request_version)
663 663 else:
664 664 try:
665 665 ver = pull_request.versions[-1]
666 666 except IndexError:
667 667 ver = None
668 668
669 669 pull_request.pull_request_version_id = \
670 670 ver.pull_request_version_id if ver else None
671 671 pull_request_version = pull_request
672 672
673 673 try:
674 674 if target_ref_type in ('tag', 'branch', 'book'):
675 675 target_commit = target_repo.get_commit(target_ref_name)
676 676 else:
677 677 target_commit = target_repo.get_commit(target_ref_id)
678 678 except CommitDoesNotExistError:
679 679 return UpdateResponse(
680 680 executed=False,
681 681 reason=UpdateFailureReason.MISSING_TARGET_REF,
682 682 old=pull_request, new=None, changes=None,
683 683 source_changed=source_changed, target_changed=target_changed)
684 684
685 685 # re-compute commit ids
686 686 old_commit_ids = pull_request.revisions
687 687 pre_load = ["author", "branch", "date", "message"]
688 688 commit_ranges = target_repo.compare(
689 689 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
690 690 pre_load=pre_load)
691 691
692 692 ancestor = target_repo.get_common_ancestor(
693 693 target_commit.raw_id, source_commit.raw_id, source_repo)
694 694
695 695 pull_request.source_ref = '%s:%s:%s' % (
696 696 source_ref_type, source_ref_name, source_commit.raw_id)
697 697 pull_request.target_ref = '%s:%s:%s' % (
698 698 target_ref_type, target_ref_name, ancestor)
699 699
700 700 pull_request.revisions = [
701 701 commit.raw_id for commit in reversed(commit_ranges)]
702 702 pull_request.updated_on = datetime.datetime.now()
703 703 Session().add(pull_request)
704 704 new_commit_ids = pull_request.revisions
705 705
706 706 old_diff_data, new_diff_data = self._generate_update_diffs(
707 707 pull_request, pull_request_version)
708 708
709 709 # calculate commit and file changes
710 710 changes = self._calculate_commit_id_changes(
711 711 old_commit_ids, new_commit_ids)
712 712 file_changes = self._calculate_file_changes(
713 713 old_diff_data, new_diff_data)
714 714
715 715 # set comments as outdated if DIFFS changed
716 716 CommentsModel().outdate_comments(
717 717 pull_request, old_diff_data=old_diff_data,
718 718 new_diff_data=new_diff_data)
719 719
720 720 commit_changes = (changes.added or changes.removed)
721 721 file_node_changes = (
722 722 file_changes.added or file_changes.modified or file_changes.removed)
723 723 pr_has_changes = commit_changes or file_node_changes
724 724
725 725 # Add an automatic comment to the pull request, in case
726 726 # anything has changed
727 727 if pr_has_changes:
728 728 update_comment = CommentsModel().create(
729 729 text=self._render_update_message(changes, file_changes),
730 730 repo=pull_request.target_repo,
731 731 user=pull_request.author,
732 732 pull_request=pull_request,
733 733 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
734 734
735 735 # Update status to "Under Review" for added commits
736 736 for commit_id in changes.added:
737 737 ChangesetStatusModel().set_status(
738 738 repo=pull_request.source_repo,
739 739 status=ChangesetStatus.STATUS_UNDER_REVIEW,
740 740 comment=update_comment,
741 741 user=pull_request.author,
742 742 pull_request=pull_request,
743 743 revision=commit_id)
744 744
745 745 log.debug(
746 746 'Updated pull request %s, added_ids: %s, common_ids: %s, '
747 747 'removed_ids: %s', pull_request.pull_request_id,
748 748 changes.added, changes.common, changes.removed)
749 749 log.debug(
750 750 'Updated pull request with the following file changes: %s',
751 751 file_changes)
752 752
753 753 log.info(
754 754 "Updated pull request %s from commit %s to commit %s, "
755 755 "stored new version %s of this pull request.",
756 756 pull_request.pull_request_id, source_ref_id,
757 757 pull_request.source_ref_parts.commit_id,
758 758 pull_request_version.pull_request_version_id)
759 759 Session().commit()
760 760 self._trigger_pull_request_hook(
761 761 pull_request, pull_request.author, 'update')
762 762
763 763 return UpdateResponse(
764 764 executed=True, reason=UpdateFailureReason.NONE,
765 765 old=pull_request, new=pull_request_version, changes=changes,
766 766 source_changed=source_changed, target_changed=target_changed)
767 767
768 768 def _create_version_from_snapshot(self, pull_request):
769 769 version = PullRequestVersion()
770 770 version.title = pull_request.title
771 771 version.description = pull_request.description
772 772 version.status = pull_request.status
773 773 version.created_on = datetime.datetime.now()
774 774 version.updated_on = pull_request.updated_on
775 775 version.user_id = pull_request.user_id
776 776 version.source_repo = pull_request.source_repo
777 777 version.source_ref = pull_request.source_ref
778 778 version.target_repo = pull_request.target_repo
779 779 version.target_ref = pull_request.target_ref
780 780
781 781 version._last_merge_source_rev = pull_request._last_merge_source_rev
782 782 version._last_merge_target_rev = pull_request._last_merge_target_rev
783 783 version._last_merge_status = pull_request._last_merge_status
784 784 version.shadow_merge_ref = pull_request.shadow_merge_ref
785 785 version.merge_rev = pull_request.merge_rev
786 786 version.reviewer_data = pull_request.reviewer_data
787 787
788 788 version.revisions = pull_request.revisions
789 789 version.pull_request = pull_request
790 790 Session().add(version)
791 791 Session().flush()
792 792
793 793 return version
794 794
795 795 def _generate_update_diffs(self, pull_request, pull_request_version):
796 796
797 797 diff_context = (
798 798 self.DIFF_CONTEXT +
799 799 CommentsModel.needed_extra_diff_context())
800 800
801 801 source_repo = pull_request_version.source_repo
802 802 source_ref_id = pull_request_version.source_ref_parts.commit_id
803 803 target_ref_id = pull_request_version.target_ref_parts.commit_id
804 804 old_diff = self._get_diff_from_pr_or_version(
805 805 source_repo, source_ref_id, target_ref_id, context=diff_context)
806 806
807 807 source_repo = pull_request.source_repo
808 808 source_ref_id = pull_request.source_ref_parts.commit_id
809 809 target_ref_id = pull_request.target_ref_parts.commit_id
810 810
811 811 new_diff = self._get_diff_from_pr_or_version(
812 812 source_repo, source_ref_id, target_ref_id, context=diff_context)
813 813
814 814 old_diff_data = diffs.DiffProcessor(old_diff)
815 815 old_diff_data.prepare()
816 816 new_diff_data = diffs.DiffProcessor(new_diff)
817 817 new_diff_data.prepare()
818 818
819 819 return old_diff_data, new_diff_data
820 820
821 821 def _link_comments_to_version(self, pull_request_version):
822 822 """
823 823 Link all unlinked comments of this pull request to the given version.
824 824
825 825 :param pull_request_version: The `PullRequestVersion` to which
826 826 the comments shall be linked.
827 827
828 828 """
829 829 pull_request = pull_request_version.pull_request
830 830 comments = ChangesetComment.query()\
831 831 .filter(
832 832 # TODO: johbo: Should we query for the repo at all here?
833 833 # Pending decision on how comments of PRs are to be related
834 834 # to either the source repo, the target repo or no repo at all.
835 835 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
836 836 ChangesetComment.pull_request == pull_request,
837 837 ChangesetComment.pull_request_version == None)\
838 838 .order_by(ChangesetComment.comment_id.asc())
839 839
840 840 # TODO: johbo: Find out why this breaks if it is done in a bulk
841 841 # operation.
842 842 for comment in comments:
843 843 comment.pull_request_version_id = (
844 844 pull_request_version.pull_request_version_id)
845 845 Session().add(comment)
846 846
847 847 def _calculate_commit_id_changes(self, old_ids, new_ids):
848 848 added = [x for x in new_ids if x not in old_ids]
849 849 common = [x for x in new_ids if x in old_ids]
850 850 removed = [x for x in old_ids if x not in new_ids]
851 851 total = new_ids
852 852 return ChangeTuple(added, common, removed, total)
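# Worked example with made-up ids: for old_ids = ['a', 'b'] and
# new_ids = ['b', 'c'] this returns
# ChangeTuple(added=['c'], common=['b'], removed=['a'], total=['b', 'c']).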
853 853
854 854 def _calculate_file_changes(self, old_diff_data, new_diff_data):
855 855
856 856 old_files = OrderedDict()
857 857 for diff_data in old_diff_data.parsed_diff:
858 858 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
859 859
860 860 added_files = []
861 861 modified_files = []
862 862 removed_files = []
863 863 for diff_data in new_diff_data.parsed_diff:
864 864 new_filename = diff_data['filename']
865 865 new_hash = md5_safe(diff_data['raw_diff'])
866 866
867 867 old_hash = old_files.get(new_filename)
868 868 if not old_hash:
869 869 # file is not present in old diff, means it's added
870 870 added_files.append(new_filename)
871 871 else:
872 872 if new_hash != old_hash:
873 873 modified_files.append(new_filename)
874 874 # now remove a file from old, since we have seen it already
875 875 del old_files[new_filename]
876 876
877 877 # removed files are those present in old, but not in new;
878 878 # since we remove old files that are present in the new diff, any
879 879 # left-overs are the removed files
880 880 removed_files.extend(old_files.keys())
881 881
882 882 return FileChangeTuple(added_files, modified_files, removed_files)
883 883
884 884 def _render_update_message(self, changes, file_changes):
885 885 """
886 886 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
887 887 so it always looks the same regardless of which default
888 888 renderer the system is using.
889 889
890 890 :param changes: changes named tuple
891 891 :param file_changes: file changes named tuple
892 892
893 893 """
894 894 new_status = ChangesetStatus.get_status_lbl(
895 895 ChangesetStatus.STATUS_UNDER_REVIEW)
896 896
897 897 changed_files = (
898 898 file_changes.added + file_changes.modified + file_changes.removed)
899 899
900 900 params = {
901 901 'under_review_label': new_status,
902 902 'added_commits': changes.added,
903 903 'removed_commits': changes.removed,
904 904 'changed_files': changed_files,
905 905 'added_files': file_changes.added,
906 906 'modified_files': file_changes.modified,
907 907 'removed_files': file_changes.removed,
908 908 }
909 909 renderer = RstTemplateRenderer()
910 910 return renderer.render('pull_request_update.mako', **params)
911 911
912 912 def edit(self, pull_request, title, description, user):
913 913 pull_request = self.__get_pull_request(pull_request)
914 914 old_data = pull_request.get_api_data(with_merge_state=False)
915 915 if pull_request.is_closed():
916 916 raise ValueError('This pull request is closed')
917 917 if title:
918 918 pull_request.title = title
919 919 pull_request.description = description
920 920 pull_request.updated_on = datetime.datetime.now()
921 921 Session().add(pull_request)
922 922 self._log_audit_action(
923 923 'repo.pull_request.edit', {'old_data': old_data},
924 924 user, pull_request)
925 925
926 926 def update_reviewers(self, pull_request, reviewer_data, user):
927 927 """
928 928 Update the reviewers in the pull request
929 929
930 930 :param pull_request: the pr to update
931 931 :param reviewer_data: list of tuples
932 932 [(user, ['reason1', 'reason2'], mandatory_flag)]
933 933 """
934 934
935 935 reviewers = {}
936 936 for user_id, reasons, mandatory in reviewer_data:
937 937 if isinstance(user_id, (int, basestring)):
938 938 user_id = self._get_user(user_id).user_id
939 939 reviewers[user_id] = {
940 940 'reasons': reasons, 'mandatory': mandatory}
941 941
942 942 reviewers_ids = set(reviewers.keys())
943 943 pull_request = self.__get_pull_request(pull_request)
944 944 current_reviewers = PullRequestReviewers.query()\
945 945 .filter(PullRequestReviewers.pull_request ==
946 946 pull_request).all()
947 947 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
948 948
949 949 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
950 950 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
951 951
952 952 log.debug("Adding %s reviewers", ids_to_add)
953 953 log.debug("Removing %s reviewers", ids_to_remove)
954 954 changed = False
955 955 for uid in ids_to_add:
956 956 changed = True
957 957 _usr = self._get_user(uid)
958 958 reviewer = PullRequestReviewers()
959 959 reviewer.user = _usr
960 960 reviewer.pull_request = pull_request
961 961 reviewer.reasons = reviewers[uid]['reasons']
962 962 # NOTE(marcink): mandatory shouldn't be changed now
963 963 # reviewer.mandatory = reviewers[uid]['mandatory']
964 964 Session().add(reviewer)
965 965 self._log_audit_action(
966 966 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
967 967 user, pull_request)
968 968
969 969 for uid in ids_to_remove:
970 970 changed = True
971 971 reviewers = PullRequestReviewers.query()\
972 972 .filter(PullRequestReviewers.user_id == uid,
973 973 PullRequestReviewers.pull_request == pull_request)\
974 974 .all()
975 975 # use .all() in case we accidentally added the same person twice
976 976 # this CAN happen due to the lack of DB checks
977 977 for obj in reviewers:
978 978 old_data = obj.get_dict()
979 979 Session().delete(obj)
980 980 self._log_audit_action(
981 981 'repo.pull_request.reviewer.delete',
982 982 {'old_data': old_data}, user, pull_request)
983 983
984 984 if changed:
985 985 pull_request.updated_on = datetime.datetime.now()
986 986 Session().add(pull_request)
987 987
988 988 self.notify_reviewers(pull_request, ids_to_add)
989 989 return ids_to_add, ids_to_remove
990 990
991 991 def get_url(self, pull_request, request=None, permalink=False):
992 992 if not request:
993 993 request = get_current_request()
994 994
995 995 if permalink:
996 996 return request.route_url(
997 997 'pull_requests_global',
998 998 pull_request_id=pull_request.pull_request_id,)
999 999 else:
1000 1000 return request.route_url('pullrequest_show',
1001 1001 repo_name=safe_str(pull_request.target_repo.repo_name),
1002 1002 pull_request_id=pull_request.pull_request_id,)
1003 1003
1004 1004 def get_shadow_clone_url(self, pull_request):
1005 1005 """
1006 1006 Returns a qualified url pointing to the shadow repository. If this pull
1007 1007 request is closed there is no shadow repository and ``None`` will be
1008 1008 returned.
1009 1009 """
1010 1010 if pull_request.is_closed():
1011 1011 return None
1012 1012 else:
1013 1013 pr_url = urllib.unquote(self.get_url(pull_request))
1014 1014 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1015 1015
1016 1016 def notify_reviewers(self, pull_request, reviewers_ids):
1017 1017 # notification to reviewers
1018 1018 if not reviewers_ids:
1019 1019 return
1020 1020
1021 1021 pull_request_obj = pull_request
1022 1022 # get the current participants of this pull request
1023 1023 recipients = reviewers_ids
1024 1024 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1025 1025
1026 1026 pr_source_repo = pull_request_obj.source_repo
1027 1027 pr_target_repo = pull_request_obj.target_repo
1028 1028
1029 1029 pr_url = h.route_url('pullrequest_show',
1030 1030 repo_name=pr_target_repo.repo_name,
1031 1031 pull_request_id=pull_request_obj.pull_request_id,)
1032 1032
1033 1033 # set some variables for email notification
1034 1034 pr_target_repo_url = h.route_url(
1035 1035 'repo_summary', repo_name=pr_target_repo.repo_name)
1036 1036
1037 1037 pr_source_repo_url = h.route_url(
1038 1038 'repo_summary', repo_name=pr_source_repo.repo_name)
1039 1039
1040 1040 # pull request specifics
1041 1041 pull_request_commits = [
1042 1042 (x.raw_id, x.message)
1043 1043 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1044 1044
1045 1045 kwargs = {
1046 1046 'user': pull_request.author,
1047 1047 'pull_request': pull_request_obj,
1048 1048 'pull_request_commits': pull_request_commits,
1049 1049
1050 1050 'pull_request_target_repo': pr_target_repo,
1051 1051 'pull_request_target_repo_url': pr_target_repo_url,
1052 1052
1053 1053 'pull_request_source_repo': pr_source_repo,
1054 1054 'pull_request_source_repo_url': pr_source_repo_url,
1055 1055
1056 1056 'pull_request_url': pr_url,
1057 1057 }
1058 1058
1059 1059 # pre-generate the subject for notification itself
1060 1060 (subject,
1061 1061 _h, _e, # we don't care about those
1062 1062 body_plaintext) = EmailNotificationModel().render_email(
1063 1063 notification_type, **kwargs)
1064 1064
1065 1065 # create notification objects, and emails
1066 1066 NotificationModel().create(
1067 1067 created_by=pull_request.author,
1068 1068 notification_subject=subject,
1069 1069 notification_body=body_plaintext,
1070 1070 notification_type=notification_type,
1071 1071 recipients=recipients,
1072 1072 email_kwargs=kwargs,
1073 1073 )
1074 1074
1075 1075 def delete(self, pull_request, user):
1076 1076 pull_request = self.__get_pull_request(pull_request)
1077 1077 old_data = pull_request.get_api_data(with_merge_state=False)
1078 1078 self._cleanup_merge_workspace(pull_request)
1079 1079 self._log_audit_action(
1080 1080 'repo.pull_request.delete', {'old_data': old_data},
1081 1081 user, pull_request)
1082 1082 Session().delete(pull_request)
1083 1083
1084 1084 def close_pull_request(self, pull_request, user):
1085 1085 pull_request = self.__get_pull_request(pull_request)
1086 1086 self._cleanup_merge_workspace(pull_request)
1087 1087 pull_request.status = PullRequest.STATUS_CLOSED
1088 1088 pull_request.updated_on = datetime.datetime.now()
1089 1089 Session().add(pull_request)
1090 1090 self._trigger_pull_request_hook(
1091 1091 pull_request, pull_request.author, 'close')
1092 1092 self._log_audit_action(
1093 1093 'repo.pull_request.close', {}, user, pull_request)
1094 1094
1095 1095 def close_pull_request_with_comment(
1096 1096 self, pull_request, user, repo, message=None):
1097 1097
1098 1098 pull_request_review_status = pull_request.calculated_review_status()
1099 1099
1100 1100 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1101 1101 # approved only if we have voting consent
1102 1102 status = ChangesetStatus.STATUS_APPROVED
1103 1103 else:
1104 1104 status = ChangesetStatus.STATUS_REJECTED
1105 1105 status_lbl = ChangesetStatus.get_status_lbl(status)
1106 1106
1107 1107 default_message = (
1108 1108 _('Closing with status change {transition_icon} {status}.')
1109 1109 ).format(transition_icon='>', status=status_lbl)
1110 1110 text = message or default_message
1111 1111
1112 1112 # create a comment, and link it to new status
1113 1113 comment = CommentsModel().create(
1114 1114 text=text,
1115 1115 repo=repo.repo_id,
1116 1116 user=user.user_id,
1117 1117 pull_request=pull_request.pull_request_id,
1118 1118 status_change=status_lbl,
1119 1119 status_change_type=status,
1120 1120 closing_pr=True
1121 1121 )
1122 1122
1123 1123 # calculate old status before we change it
1124 1124 old_calculated_status = pull_request.calculated_review_status()
1125 1125 ChangesetStatusModel().set_status(
1126 1126 repo.repo_id,
1127 1127 status,
1128 1128 user.user_id,
1129 1129 comment=comment,
1130 1130 pull_request=pull_request.pull_request_id
1131 1131 )
1132 1132
1133 1133 Session().flush()
1134 1134 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1135 1135 # we now calculate the status of the pull request again, and based on
1136 1136 # that calculation trigger a status change. This might happen in cases
1137 1137 # where a non-reviewer admin closes a pr; their vote doesn't
1138 1138 # change the status, while a reviewer's vote might change it.
1139 1139 calculated_status = pull_request.calculated_review_status()
1140 1140 if old_calculated_status != calculated_status:
1141 1141 self._trigger_pull_request_hook(
1142 1142 pull_request, user, 'review_status_change')
1143 1143
1144 1144 # finally close the PR
1145 1145 PullRequestModel().close_pull_request(
1146 1146 pull_request.pull_request_id, user)
1147 1147
1148 1148 return comment, status
1149 1149
1150 1150 def merge_status(self, pull_request):
1151 1151 if not self._is_merge_enabled(pull_request):
1152 1152 return False, _('Server-side pull request merging is disabled.')
1153 1153 if pull_request.is_closed():
1154 1154 return False, _('This pull request is closed.')
1155 1155 merge_possible, msg = self._check_repo_requirements(
1156 1156 target=pull_request.target_repo, source=pull_request.source_repo)
1157 1157 if not merge_possible:
1158 1158 return merge_possible, msg
1159 1159
1160 1160 try:
1161 1161 resp = self._try_merge(pull_request)
1162 1162 log.debug("Merge response: %s", resp)
1163 1163 status = resp.possible, self.merge_status_message(
1164 1164 resp.failure_reason)
1165 1165 except NotImplementedError:
1166 1166 status = False, _('Pull request merging is not supported.')
1167 1167
1168 1168 return status
1169 1169
1170 1170 def _check_repo_requirements(self, target, source):
1171 1171 """
1172 1172 Check if `target` and `source` have compatible requirements.
1173 1173
1174 1174 Currently this is just checking for largefiles.
1175 1175 """
1176 1176 target_has_largefiles = self._has_largefiles(target)
1177 1177 source_has_largefiles = self._has_largefiles(source)
1178 1178 merge_possible = True
1179 1179 message = u''
1180 1180
1181 1181 if target_has_largefiles != source_has_largefiles:
1182 1182 merge_possible = False
1183 1183 if source_has_largefiles:
1184 1184 message = _(
1185 1185 'Target repository large files support is disabled.')
1186 1186 else:
1187 1187 message = _(
1188 1188 'Source repository large files support is disabled.')
1189 1189
1190 1190 return merge_possible, message
1191 1191
1192 1192 def _has_largefiles(self, repo):
1193 1193 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1194 1194 'extensions', 'largefiles')
1195 1195 return largefiles_ui and largefiles_ui[0].active
1196 1196
1197 1197 def _try_merge(self, pull_request):
1198 1198 """
1199 1199 Try to merge the pull request and return the merge status.
1200 1200 """
1201 1201 log.debug(
1202 1202 "Trying out if the pull request %s can be merged.",
1203 1203 pull_request.pull_request_id)
1204 1204 target_vcs = pull_request.target_repo.scm_instance()
1205 1205
1206 1206 # Refresh the target reference.
1207 1207 try:
1208 1208 target_ref = self._refresh_reference(
1209 1209 pull_request.target_ref_parts, target_vcs)
1210 1210 except CommitDoesNotExistError:
1211 1211 merge_state = MergeResponse(
1212 1212 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1213 1213 return merge_state
1214 1214
1215 1215 target_locked = pull_request.target_repo.locked
1216 1216 if target_locked and target_locked[0]:
1217 1217 log.debug("The target repository is locked.")
1218 1218 merge_state = MergeResponse(
1219 1219 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1220 1220 elif self._needs_merge_state_refresh(pull_request, target_ref):
1221 1221 log.debug("Refreshing the merge status of the repository.")
1222 1222 merge_state = self._refresh_merge_state(
1223 1223 pull_request, target_vcs, target_ref)
1224 1224 else:
1225 1225 possible = pull_request.\
1226 1226 _last_merge_status == MergeFailureReason.NONE
1227 1227 merge_state = MergeResponse(
1228 1228 possible, False, None, pull_request._last_merge_status)
1229 1229
1230 1230 return merge_state
1231 1231
1232 1232 def _refresh_reference(self, reference, vcs_repository):
1233 1233 if reference.type in ('branch', 'book'):
1234 1234 name_or_id = reference.name
1235 1235 else:
1236 1236 name_or_id = reference.commit_id
1237 1237 refreshed_commit = vcs_repository.get_commit(name_or_id)
1238 1238 refreshed_reference = Reference(
1239 1239 reference.type, reference.name, refreshed_commit.raw_id)
1240 1240 return refreshed_reference
1241 1241
1242 1242 def _needs_merge_state_refresh(self, pull_request, target_reference):
1243 1243 return not (
1244 1244 pull_request.revisions and
1245 1245 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1246 1246 target_reference.commit_id == pull_request._last_merge_target_rev)
1247 1247
1248 1248 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1249 1249 workspace_id = self._workspace_id(pull_request)
1250 1250 source_vcs = pull_request.source_repo.scm_instance()
1251 1251 use_rebase = self._use_rebase_for_merging(pull_request)
1252 1252 merge_state = target_vcs.merge(
1253 1253 target_reference, source_vcs, pull_request.source_ref_parts,
1254 1254 workspace_id, dry_run=True, use_rebase=use_rebase)
1255 1255
1256 1256 # Do not store the response if there was an unknown error.
1257 1257 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1258 1258 pull_request._last_merge_source_rev = \
1259 1259 pull_request.source_ref_parts.commit_id
1260 1260 pull_request._last_merge_target_rev = target_reference.commit_id
1261 1261 pull_request._last_merge_status = merge_state.failure_reason
1262 1262 pull_request.shadow_merge_ref = merge_state.merge_ref
1263 1263 Session().add(pull_request)
1264 1264 Session().commit()
1265 1265
1266 1266 return merge_state
1267 1267
1268 1268 def _workspace_id(self, pull_request):
1269 1269 workspace_id = 'pr-%s' % pull_request.pull_request_id
1270 1270 return workspace_id
1271 1271
1272 1272 def merge_status_message(self, status_code):
1273 1273 """
1274 1274 Return a human friendly error message for the given merge status code.
1275 1275 """
1276 1276 return self.MERGE_STATUS_MESSAGES[status_code]
1277 1277
1278 1278 def generate_repo_data(self, repo, commit_id=None, branch=None,
1279 1279 bookmark=None):
1280 1280 all_refs, selected_ref = \
1281 1281 self._get_repo_pullrequest_sources(
1282 1282 repo.scm_instance(), commit_id=commit_id,
1283 1283 branch=branch, bookmark=bookmark)
1284 1284
1285 1285 refs_select2 = []
1286 1286 for element in all_refs:
1287 1287 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1288 1288 refs_select2.append({'text': element[1], 'children': children})
1289 1289
1290 1290 return {
1291 1291 'user': {
1292 1292 'user_id': repo.user.user_id,
1293 1293 'username': repo.user.username,
1294 1294 'firstname': repo.user.first_name,
1295 1295 'lastname': repo.user.last_name,
1296 1296 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1297 1297 },
1298 'description': h.chop_at_smart(repo.description, '\n'),
1298 'description': h.chop_at_smart(repo.description_safe, '\n'),
1299 1299 'refs': {
1300 1300 'all_refs': all_refs,
1301 1301 'selected_ref': selected_ref,
1302 1302 'select2_refs': refs_select2
1303 1303 }
1304 1304 }
1305 1305
1306 1306 def generate_pullrequest_title(self, source, source_ref, target):
1307 1307 return u'{source}#{at_ref} to {target}'.format(
1308 1308 source=source,
1309 1309 at_ref=source_ref,
1310 1310 target=target,
1311 1311 )
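# Example output, with illustrative arguments:
#
#   generate_pullrequest_title('my-fork', 'feature-x', 'upstream-repo')
#   -> u'my-fork#feature-x to upstream-repo'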
1312 1312
1313 1313 def _cleanup_merge_workspace(self, pull_request):
1314 1314 # Merging related cleanup
1315 1315 target_scm = pull_request.target_repo.scm_instance()
1316 1316 workspace_id = 'pr-%s' % pull_request.pull_request_id
1317 1317
1318 1318 try:
1319 1319 target_scm.cleanup_merge_workspace(workspace_id)
1320 1320 except NotImplementedError:
1321 1321 pass
1322 1322
1323 1323 def _get_repo_pullrequest_sources(
1324 1324 self, repo, commit_id=None, branch=None, bookmark=None):
1325 1325 """
1326 1326 Return a structure with the repo's interesting commits, suitable for
1327 1327 the selectors in the pullrequest controller
1328 1328
1329 1329 :param commit_id: a commit that must be in the list somehow
1330 1330 and selected by default
1331 1331 :param branch: a branch that must be in the list and selected
1332 1332 by default - even if closed
1333 1333 :param bookmark: a bookmark that must be in the list and selected
1334 1334 """
1335 1335
1336 1336 commit_id = safe_str(commit_id) if commit_id else None
1337 1337 branch = safe_str(branch) if branch else None
1338 1338 bookmark = safe_str(bookmark) if bookmark else None
1339 1339
1340 1340 selected = None
1341 1341
1342 1342 # order matters: first source that has commit_id in it will be selected
1343 1343 sources = []
1344 1344 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1345 1345 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1346 1346
1347 1347 if commit_id:
1348 1348 ref_commit = (h.short_id(commit_id), commit_id)
1349 1349 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1350 1350
1351 1351 sources.append(
1352 1352 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1353 1353 )
1354 1354
1355 1355 groups = []
1356 1356 for group_key, ref_list, group_name, match in sources:
1357 1357 group_refs = []
1358 1358 for ref_name, ref_id in ref_list:
1359 1359 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1360 1360 group_refs.append((ref_key, ref_name))
1361 1361
1362 1362 if not selected:
1363 1363 if set([commit_id, match]) & set([ref_id, ref_name]):
1364 1364 selected = ref_key
1365 1365
1366 1366 if group_refs:
1367 1367 groups.append((group_refs, group_name))
1368 1368
1369 1369 if not selected:
1370 1370 ref = commit_id or branch or bookmark
1371 1371 if ref:
1372 1372 raise CommitDoesNotExistError(
1373 1373 'No commit refs could be found matching: %s' % ref)
1374 1374 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1375 1375 selected = 'branch:%s:%s' % (
1376 1376 repo.DEFAULT_BRANCH_NAME,
1377 1377 repo.branches[repo.DEFAULT_BRANCH_NAME]
1378 1378 )
1379 1379 elif repo.commit_ids:
1380 1380 rev = repo.commit_ids[0]
1381 1381 selected = 'rev:%s:%s' % (rev, rev)
1382 1382 else:
1383 1383 raise EmptyRepositoryError()
1384 1384 return groups, selected
1385 1385
1386 1386 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1387 1387 return self._get_diff_from_pr_or_version(
1388 1388 source_repo, source_ref_id, target_ref_id, context=context)
1389 1389
1390 1390 def _get_diff_from_pr_or_version(
1391 1391 self, source_repo, source_ref_id, target_ref_id, context):
1392 1392 target_commit = source_repo.get_commit(
1393 1393 commit_id=safe_str(target_ref_id))
1394 1394 source_commit = source_repo.get_commit(
1395 1395 commit_id=safe_str(source_ref_id))
1396 1396 if isinstance(source_repo, Repository):
1397 1397 vcs_repo = source_repo.scm_instance()
1398 1398 else:
1399 1399 vcs_repo = source_repo
1400 1400
1401 1401 # TODO: johbo: In the context of an update, we cannot reach
1402 1402 # the old commit anymore with our normal mechanisms. It needs
1403 1403 # some sort of special support in the vcs layer to avoid this
1404 1404 # workaround.
1405 1405 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1406 1406 vcs_repo.alias == 'git'):
1407 1407 source_commit.raw_id = safe_str(source_ref_id)
1408 1408
1409 1409 log.debug('calculating diff between '
1410 1410 'source_ref:%s and target_ref:%s for repo `%s`',
1411 1411 target_ref_id, source_ref_id,
1412 1412 safe_unicode(vcs_repo.path))
1413 1413
1414 1414 vcs_diff = vcs_repo.get_diff(
1415 1415 commit1=target_commit, commit2=source_commit, context=context)
1416 1416 return vcs_diff
1417 1417
1418 1418 def _is_merge_enabled(self, pull_request):
1419 1419 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1420 1420 settings = settings_model.get_general_settings()
1421 1421 return settings.get('rhodecode_pr_merge_enabled', False)
1422 1422
1423 1423 def _use_rebase_for_merging(self, pull_request):
1424 1424 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1425 1425 settings = settings_model.get_general_settings()
1426 1426 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1427 1427
1428 1428 def _log_audit_action(self, action, action_data, user, pull_request):
1429 1429 audit_logger.store(
1430 1430 action=action,
1431 1431 action_data=action_data,
1432 1432 user=user,
1433 1433 repo=pull_request.target_repo)
1434 1434
1435 1435 def get_reviewer_functions(self):
1436 1436 """
1437 1437 Fetches functions for validating and fetching default reviewers.
1438 1438 If available we use the EE package, else we fall back to the CE
1439 1439 package functions.
1440 1440 """
1441 1441 try:
1442 1442 from rc_reviewers.utils import get_default_reviewers_data
1443 1443 from rc_reviewers.utils import validate_default_reviewers
1444 1444 except ImportError:
1445 1445 from rhodecode.apps.repository.utils import \
1446 1446 get_default_reviewers_data
1447 1447 from rhodecode.apps.repository.utils import \
1448 1448 validate_default_reviewers
1449 1449
1450 1450 return get_default_reviewers_data, validate_default_reviewers
1451 1451
1452 1452
1453 1453 class MergeCheck(object):
1454 1454 """
1455 1455 Performs merge checks and returns a check object which stores
1456 1456 information about merge errors and merge conditions
1457 1457 """
1458 1458 TODO_CHECK = 'todo'
1459 1459 PERM_CHECK = 'perm'
1460 1460 REVIEW_CHECK = 'review'
1461 1461 MERGE_CHECK = 'merge'
1462 1462
1463 1463 def __init__(self):
1464 1464 self.review_status = None
1465 1465 self.merge_possible = None
1466 1466 self.merge_msg = ''
1467 1467 self.failed = None
1468 1468 self.errors = []
1469 1469 self.error_details = OrderedDict()
1470 1470
1471 1471 def push_error(self, error_type, message, error_key, details):
1472 1472 self.failed = True
1473 1473 self.errors.append([error_type, message])
1474 1474 self.error_details[error_key] = dict(
1475 1475 details=details,
1476 1476 error_type=error_type,
1477 1477 message=message
1478 1478 )
1479 1479
1480 1480 @classmethod
1481 1481 def validate(cls, pull_request, user, fail_early=False, translator=None):
1482 1482 # if migrated to pyramid...
1483 1483 # _ = lambda: translator or _ # use passed in translator if any
1484 1484
1485 1485 merge_check = cls()
1486 1486
1487 1487 # permissions to merge
1488 1488 user_allowed_to_merge = PullRequestModel().check_user_merge(
1489 1489 pull_request, user)
1490 1490 if not user_allowed_to_merge:
1491 1491 log.debug("MergeCheck: cannot merge, approval is pending.")
1492 1492
1493 1493 msg = _('User `{}` not allowed to perform merge.').format(user.username)
1494 1494 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1495 1495 if fail_early:
1496 1496 return merge_check
1497 1497
1498 1498 # review status, must be always present
1499 1499 review_status = pull_request.calculated_review_status()
1500 1500 merge_check.review_status = review_status
1501 1501
1502 1502 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1503 1503 if not status_approved:
1504 1504 log.debug("MergeCheck: cannot merge, approval is pending.")
1505 1505
1506 1506 msg = _('Pull request reviewer approval is pending.')
1507 1507
1508 1508 merge_check.push_error(
1509 1509 'warning', msg, cls.REVIEW_CHECK, review_status)
1510 1510
1511 1511 if fail_early:
1512 1512 return merge_check
1513 1513
1514 1514 # left over TODOs
1515 1515 todos = CommentsModel().get_unresolved_todos(pull_request)
1516 1516 if todos:
1517 1517 log.debug("MergeCheck: cannot merge, {} "
1518 1518 "unresolved todos left.".format(len(todos)))
1519 1519
1520 1520 if len(todos) == 1:
1521 1521 msg = _('Cannot merge, {} TODO still not resolved.').format(
1522 1522 len(todos))
1523 1523 else:
1524 1524 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1525 1525 len(todos))
1526 1526
1527 1527 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1528 1528
1529 1529 if fail_early:
1530 1530 return merge_check
1531 1531
1532 1532 # merge possible
1533 1533 merge_status, msg = PullRequestModel().merge_status(pull_request)
1534 1534 merge_check.merge_possible = merge_status
1535 1535 merge_check.merge_msg = msg
1536 1536 if not merge_status:
1537 1537 log.debug(
1538 1538 "MergeCheck: cannot merge, pull request merge not possible.")
1539 1539 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1540 1540
1541 1541 if fail_early:
1542 1542 return merge_check
1543 1543
1544 1544 return merge_check
1545 1545
1546 1546
1547 1547 ChangeTuple = namedtuple('ChangeTuple',
1548 1548 ['added', 'common', 'removed', 'total'])
1549 1549
1550 1550 FileChangeTuple = namedtuple('FileChangeTuple',
1551 1551 ['added', 'modified', 'removed'])
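For orientation, a minimal usage sketch of the validate() flow above; the module path and the `pull_request` / `user` objects are assumptions, not part of this change:

    # hypothetical illustration; import path and objects are assumptions
    from rhodecode.model.pull_request import MergeCheck

    check = MergeCheck.validate(pull_request, user, fail_early=False)
    if check.failed:
        # errors accumulate as [error_type, message] pairs
        for error_type, message in check.errors:
            print(error_type, message)
        # details are keyed by PERM_CHECK / REVIEW_CHECK / TODO_CHECK / MERGE_CHECK
        print(check.error_details)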
@@ -1,1029 +1,1029 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Repository model for rhodecode
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 import shutil
29 29 import time
30 30 import traceback
31 31 from datetime import datetime, timedelta
32 32
33 33 from pyramid.threadlocal import get_current_request
34 34 from zope.cachedescriptors.property import Lazy as LazyProperty
35 35
36 36 from rhodecode import events
37 37 from rhodecode.lib import helpers as h
38 38 from rhodecode.lib.auth import HasUserGroupPermissionAny
39 39 from rhodecode.lib.caching_query import FromCache
40 40 from rhodecode.lib.exceptions import AttachedForksError
41 41 from rhodecode.lib.hooks_base import log_delete_repository
42 42 from rhodecode.lib.utils import make_db_config
43 43 from rhodecode.lib.utils2 import (
44 44 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
45 45 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
46 46 from rhodecode.lib.vcs.backends import get_backend
47 47 from rhodecode.model import BaseModel
48 48 from rhodecode.model.db import (_hash_key,
49 49 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
50 50 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
51 51 RepoGroup, RepositoryField)
52 52
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 class RepoModel(BaseModel):
60 60
61 61 cls = Repository
62 62
63 63 def _get_user_group(self, users_group):
64 64 return self._get_instance(UserGroup, users_group,
65 65 callback=UserGroup.get_by_group_name)
66 66
67 67 def _get_repo_group(self, repo_group):
68 68 return self._get_instance(RepoGroup, repo_group,
69 69 callback=RepoGroup.get_by_group_name)
70 70
71 71 def _create_default_perms(self, repository, private):
72 72 # create default permission
73 73 default = 'repository.read'
74 74 def_user = User.get_default_user()
75 75 for p in def_user.user_perms:
76 76 if p.permission.permission_name.startswith('repository.'):
77 77 default = p.permission.permission_name
78 78 break
79 79
80 80 default_perm = 'repository.none' if private else default
81 81
82 82 repo_to_perm = UserRepoToPerm()
83 83 repo_to_perm.permission = Permission.get_by_key(default_perm)
84 84
85 85 repo_to_perm.repository = repository
86 86 repo_to_perm.user_id = def_user.user_id
87 87
88 88 return repo_to_perm
89 89
90 90 @LazyProperty
91 91 def repos_path(self):
92 92 """
93 93 Gets the repositories root path from database
94 94 """
95 95 settings_model = VcsSettingsModel(sa=self.sa)
96 96 return settings_model.get_repos_location()
97 97
98 98 def get(self, repo_id, cache=False):
99 99 repo = self.sa.query(Repository) \
100 100 .filter(Repository.repo_id == repo_id)
101 101
102 102 if cache:
103 103 repo = repo.options(
104 104 FromCache("sql_cache_short", "get_repo_%s" % repo_id))
105 105 return repo.scalar()
106 106
107 107 def get_repo(self, repository):
108 108 return self._get_repo(repository)
109 109
110 110 def get_by_repo_name(self, repo_name, cache=False):
111 111 repo = self.sa.query(Repository) \
112 112 .filter(Repository.repo_name == repo_name)
113 113
114 114 if cache:
115 115 name_key = _hash_key(repo_name)
116 116 repo = repo.options(
117 117 FromCache("sql_cache_short", "get_repo_%s" % name_key))
118 118 return repo.scalar()
119 119
120 120 def _extract_id_from_repo_name(self, repo_name):
121 121 if repo_name.startswith('/'):
122 122 repo_name = repo_name.lstrip('/')
123 123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
124 124 if by_id_match:
125 125 return by_id_match.groups()[0]
126 126
127 127 def get_repo_by_id(self, repo_name):
128 128 """
129 129 Extracts a repository by its id from special URLs.
130 130 Example URL is _11/repo_name
131 131
132 132 :param repo_name:
133 133 :return: repo object if matched else None
134 134 """
135 135
136 136 try:
137 137 _repo_id = self._extract_id_from_repo_name(repo_name)
138 138 if _repo_id:
139 139 return self.get(_repo_id)
140 140 except Exception:
141 141 log.exception('Failed to extract repo_name from URL')
142 142
143 143 return None
144 144
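A standalone sketch that mirrors the id extraction above (pure stdlib, no model call; the helper name below is made up for illustration):

    import re

    def extract_repo_id(repo_name):
        # mirrors _extract_id_from_repo_name: a leading "_<digits>" is the repo id
        repo_name = repo_name.lstrip('/')
        match = re.match(r'^_(\d{1,})', repo_name)
        return match.groups()[0] if match else None

    print(extract_repo_id('_11/repo_name'))    # '11'
    print(extract_repo_id('group/repo_name'))  # None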
145 145 def get_repos_for_root(self, root, traverse=False):
146 146 if traverse:
147 147 like_expression = u'{}%'.format(safe_unicode(root))
148 148 repos = Repository.query().filter(
149 149 Repository.repo_name.like(like_expression)).all()
150 150 else:
151 151 if root and not isinstance(root, RepoGroup):
152 152 raise ValueError(
153 153 'Root must be an instance '
154 154 'of RepoGroup, got:{} instead'.format(type(root)))
155 155 repos = Repository.query().filter(Repository.group == root).all()
156 156 return repos
157 157
158 158 def get_url(self, repo, request=None, permalink=False):
159 159 if not request:
160 160 request = get_current_request()
161 161
162 162 if not request:
163 163 return
164 164
165 165 if permalink:
166 166 return request.route_url(
167 167 'repo_summary', repo_name=safe_str(repo.repo_id))
168 168 else:
169 169 return request.route_url(
170 170 'repo_summary', repo_name=safe_str(repo.repo_name))
171 171
172 172 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
173 173 if not request:
174 174 request = get_current_request()
175 175
176 176 if not request:
177 177 return
178 178
179 179 if permalink:
180 180 return request.route_url(
181 181 'repo_commit', repo_name=safe_str(repo.repo_id),
182 182 commit_id=commit_id)
183 183
184 184 else:
185 185 return request.route_url(
186 186 'repo_commit', repo_name=safe_str(repo.repo_name),
187 187 commit_id=commit_id)
188 188
189 189 @classmethod
190 190 def update_repoinfo(cls, repositories=None):
191 191 if not repositories:
192 192 repositories = Repository.getAll()
193 193 for repo in repositories:
194 194 repo.update_commit_cache()
195 195
196 196 def get_repos_as_dict(self, repo_list=None, admin=False,
197 197 super_user_actions=False):
198 198
199 199 from rhodecode.lib.utils import PartialRenderer
200 200 _render = PartialRenderer('data_table/_dt_elements.mako')
201 201 c = _render.c
202 202
203 203 def quick_menu(repo_name):
204 204 return _render('quick_menu', repo_name)
205 205
206 206 def repo_lnk(name, rtype, rstate, private, fork_of):
207 207 return _render('repo_name', name, rtype, rstate, private, fork_of,
208 208 short_name=not admin, admin=False)
209 209
210 210 def last_change(last_change):
211 211 if admin and isinstance(last_change, datetime) and not last_change.tzinfo:
212 212 last_change = last_change + timedelta(seconds=
213 213 (datetime.now() - datetime.utcnow()).seconds)
214 214 return _render("last_change", last_change)
215 215
216 216 def rss_lnk(repo_name):
217 217 return _render("rss", repo_name)
218 218
219 219 def atom_lnk(repo_name):
220 220 return _render("atom", repo_name)
221 221
222 222 def last_rev(repo_name, cs_cache):
223 223 return _render('revision', repo_name, cs_cache.get('revision'),
224 224 cs_cache.get('raw_id'), cs_cache.get('author'),
225 225 cs_cache.get('message'))
226 226
227 227 def desc(desc):
228 228 if c.visual.stylify_metatags:
229 229 desc = h.urlify_text(h.escaped_stylize(desc))
230 230 else:
231 231 desc = h.urlify_text(h.html_escape(desc))
232 232
233 233 return _render('repo_desc', desc)
234 234
235 235 def state(repo_state):
236 236 return _render("repo_state", repo_state)
237 237
238 238 def repo_actions(repo_name):
239 239 return _render('repo_actions', repo_name, super_user_actions)
240 240
241 241 def user_profile(username):
242 242 return _render('user_profile', username)
243 243
244 244 repos_data = []
245 245 for repo in repo_list:
246 246 cs_cache = repo.changeset_cache
247 247 row = {
248 248 "menu": quick_menu(repo.repo_name),
249 249
250 250 "name": repo_lnk(repo.repo_name, repo.repo_type,
251 251 repo.repo_state, repo.private, repo.fork),
252 252 "name_raw": repo.repo_name.lower(),
253 253
254 254 "last_change": last_change(repo.last_db_change),
255 255 "last_change_raw": datetime_to_time(repo.last_db_change),
256 256
257 257 "last_changeset": last_rev(repo.repo_name, cs_cache),
258 258 "last_changeset_raw": cs_cache.get('revision'),
259 259
260 "desc": desc(repo.description),
260 "desc": desc(repo.description_safe),
261 261 "owner": user_profile(repo.user.username),
262 262
263 263 "state": state(repo.repo_state),
264 264 "rss": rss_lnk(repo.repo_name),
265 265
266 266 "atom": atom_lnk(repo.repo_name),
267 267 }
268 268 if admin:
269 269 row.update({
270 270 "action": repo_actions(repo.repo_name),
271 271 })
272 272 repos_data.append(row)
273 273
274 274 return repos_data
275 275
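The `desc()` helper above is where the switch to `description_safe` matters: both branches HTML-escape the raw description before it is urlified and rendered. A minimal sketch of the plain branch, assuming `h.html_escape` escapes the usual HTML metacharacters:

    # hypothetical illustration; the input string is made up
    from rhodecode.lib import helpers as h

    hostile_desc = u'<img src=x onerror=alert(1)> docs for my repo'
    print(h.html_escape(hostile_desc))
    # expected (assumption): &lt;img src=x onerror=alert(1)&gt; docs for my repo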
276 276 def _get_defaults(self, repo_name):
277 277 """
278 278 Gets information about repository, and returns a dict for
279 279 usage in forms
280 280
281 281 :param repo_name:
282 282 """
283 283
284 284 repo_info = Repository.get_by_repo_name(repo_name)
285 285
286 286 if repo_info is None:
287 287 return None
288 288
289 289 defaults = repo_info.get_dict()
290 290 defaults['repo_name'] = repo_info.just_name
291 291
292 292 groups = repo_info.groups_with_parents
293 293 parent_group = groups[-1] if groups else None
294 294
295 295 # we use -1 as this is how in HTML, we mark an empty group
296 296 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
297 297
298 298 keys_to_process = (
299 299 {'k': 'repo_type', 'strip': False},
300 300 {'k': 'repo_enable_downloads', 'strip': True},
301 301 {'k': 'repo_description', 'strip': True},
302 302 {'k': 'repo_enable_locking', 'strip': True},
303 303 {'k': 'repo_landing_rev', 'strip': True},
304 304 {'k': 'clone_uri', 'strip': False},
305 305 {'k': 'repo_private', 'strip': True},
306 306 {'k': 'repo_enable_statistics', 'strip': True}
307 307 )
308 308
309 309 for item in keys_to_process:
310 310 attr = item['k']
311 311 if item['strip']:
312 312 attr = remove_prefix(item['k'], 'repo_')
313 313
314 314 val = defaults[attr]
315 315 if item['k'] == 'repo_landing_rev':
316 316 val = ':'.join(defaults[attr])
317 317 defaults[item['k']] = val
318 318 if item['k'] == 'clone_uri':
319 319 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
320 320
321 321 # fill owner
322 322 if repo_info.user:
323 323 defaults.update({'user': repo_info.user.username})
324 324 else:
325 325 replacement_user = User.get_first_super_admin().username
326 326 defaults.update({'user': replacement_user})
327 327
328 328 return defaults
329 329
330 330 def update(self, repo, **kwargs):
331 331 try:
332 332 cur_repo = self._get_repo(repo)
333 333 source_repo_name = cur_repo.repo_name
334 334 if 'user' in kwargs:
335 335 cur_repo.user = User.get_by_username(kwargs['user'])
336 336
337 337 if 'repo_group' in kwargs:
338 338 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
339 339 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
340 340
341 341 update_keys = [
342 342 (1, 'repo_description'),
343 343 (1, 'repo_landing_rev'),
344 344 (1, 'repo_private'),
345 345 (1, 'repo_enable_downloads'),
346 346 (1, 'repo_enable_locking'),
347 347 (1, 'repo_enable_statistics'),
348 348 (0, 'clone_uri'),
349 349 (0, 'fork_id')
350 350 ]
351 351 for strip, k in update_keys:
352 352 if k in kwargs:
353 353 val = kwargs[k]
354 354 if strip:
355 355 k = remove_prefix(k, 'repo_')
356 356
357 357 setattr(cur_repo, k, val)
358 358
359 359 new_name = cur_repo.get_new_name(kwargs['repo_name'])
360 360 cur_repo.repo_name = new_name
361 361
362 362 # if private flag is set, reset default permission to NONE
363 363 if kwargs.get('repo_private'):
364 364 EMPTY_PERM = 'repository.none'
365 365 RepoModel().grant_user_permission(
366 366 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
367 367 )
368 368
369 369 # handle extra fields
370 370 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
371 371 kwargs):
372 372 k = RepositoryField.un_prefix_key(field)
373 373 ex_field = RepositoryField.get_by_key_name(
374 374 key=k, repo=cur_repo)
375 375 if ex_field:
376 376 ex_field.field_value = kwargs[field]
377 377 self.sa.add(ex_field)
378 378 self.sa.add(cur_repo)
379 379
380 380 if source_repo_name != new_name:
381 381 # rename repository
382 382 self._rename_filesystem_repo(
383 383 old=source_repo_name, new=new_name)
384 384
385 385 return cur_repo
386 386 except Exception:
387 387 log.error(traceback.format_exc())
388 388 raise
389 389
390 390 def _create_repo(self, repo_name, repo_type, description, owner,
391 391 private=False, clone_uri=None, repo_group=None,
392 392 landing_rev='rev:tip', fork_of=None,
393 393 copy_fork_permissions=False, enable_statistics=False,
394 394 enable_locking=False, enable_downloads=False,
395 395 copy_group_permissions=False,
396 396 state=Repository.STATE_PENDING):
397 397 """
398 398 Create repository inside database with PENDING state; this should
399 399 only be executed by create() repo, with the exception of importing
400 400 existing repos
401 401 """
402 402 from rhodecode.model.scm import ScmModel
403 403
404 404 owner = self._get_user(owner)
405 405 fork_of = self._get_repo(fork_of)
406 406 repo_group = self._get_repo_group(safe_int(repo_group))
407 407
408 408 try:
409 409 repo_name = safe_unicode(repo_name)
410 410 description = safe_unicode(description)
411 411 # repo name is just the name of the repository
412 412 # while repo_name_full is a fully qualified name that combines
413 413 # the name with the path of the group
414 414 repo_name_full = repo_name
415 415 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
416 416
417 417 new_repo = Repository()
418 418 new_repo.repo_state = state
419 419 new_repo.enable_statistics = False
420 420 new_repo.repo_name = repo_name_full
421 421 new_repo.repo_type = repo_type
422 422 new_repo.user = owner
423 423 new_repo.group = repo_group
424 424 new_repo.description = description or repo_name
425 425 new_repo.private = private
426 426 new_repo.clone_uri = clone_uri
427 427 new_repo.landing_rev = landing_rev
428 428
429 429 new_repo.enable_statistics = enable_statistics
430 430 new_repo.enable_locking = enable_locking
431 431 new_repo.enable_downloads = enable_downloads
432 432
433 433 if repo_group:
434 434 new_repo.enable_locking = repo_group.enable_locking
435 435
436 436 if fork_of:
437 437 parent_repo = fork_of
438 438 new_repo.fork = parent_repo
439 439
440 440 events.trigger(events.RepoPreCreateEvent(new_repo))
441 441
442 442 self.sa.add(new_repo)
443 443
444 444 EMPTY_PERM = 'repository.none'
445 445 if fork_of and copy_fork_permissions:
446 446 repo = fork_of
447 447 user_perms = UserRepoToPerm.query() \
448 448 .filter(UserRepoToPerm.repository == repo).all()
449 449 group_perms = UserGroupRepoToPerm.query() \
450 450 .filter(UserGroupRepoToPerm.repository == repo).all()
451 451
452 452 for perm in user_perms:
453 453 UserRepoToPerm.create(
454 454 perm.user, new_repo, perm.permission)
455 455
456 456 for perm in group_perms:
457 457 UserGroupRepoToPerm.create(
458 458 perm.users_group, new_repo, perm.permission)
459 459 # in case we copy permissions and also set this repo to private
460 460 # override the default user permission to make it a private
461 461 # repo
462 462 if private:
463 463 RepoModel(self.sa).grant_user_permission(
464 464 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
465 465
466 466 elif repo_group and copy_group_permissions:
467 467 user_perms = UserRepoGroupToPerm.query() \
468 468 .filter(UserRepoGroupToPerm.group == repo_group).all()
469 469
470 470 group_perms = UserGroupRepoGroupToPerm.query() \
471 471 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
472 472
473 473 for perm in user_perms:
474 474 perm_name = perm.permission.permission_name.replace(
475 475 'group.', 'repository.')
476 476 perm_obj = Permission.get_by_key(perm_name)
477 477 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
478 478
479 479 for perm in group_perms:
480 480 perm_name = perm.permission.permission_name.replace(
481 481 'group.', 'repository.')
482 482 perm_obj = Permission.get_by_key(perm_name)
483 483 UserGroupRepoToPerm.create(
484 484 perm.users_group, new_repo, perm_obj)
485 485
486 486 if private:
487 487 RepoModel(self.sa).grant_user_permission(
488 488 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
489 489
490 490 else:
491 491 perm_obj = self._create_default_perms(new_repo, private)
492 492 self.sa.add(perm_obj)
493 493
494 494 # now automatically start following this repository as owner
495 495 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
496 496 owner.user_id)
497 497
498 498 # we need to flush here in order to check that the database won't
499 499 # throw any exceptions; create filesystem dirs at the very end
500 500 self.sa.flush()
501 501 events.trigger(events.RepoCreateEvent(new_repo))
502 502 return new_repo
503 503
504 504 except Exception:
505 505 log.error(traceback.format_exc())
506 506 raise
507 507
508 508 def create(self, form_data, cur_user):
509 509 """
510 510 Create repository using celery tasks
511 511
512 512 :param form_data:
513 513 :param cur_user:
514 514 """
515 515 from rhodecode.lib.celerylib import tasks, run_task
516 516 return run_task(tasks.create_repo, form_data, cur_user)
517 517
518 518 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
519 519 perm_deletions=None, check_perms=True,
520 520 cur_user=None):
521 521 if not perm_additions:
522 522 perm_additions = []
523 523 if not perm_updates:
524 524 perm_updates = []
525 525 if not perm_deletions:
526 526 perm_deletions = []
527 527
528 528 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
529 529
530 530 changes = {
531 531 'added': [],
532 532 'updated': [],
533 533 'deleted': []
534 534 }
535 535 # update permissions
536 536 for member_id, perm, member_type in perm_updates:
537 537 member_id = int(member_id)
538 538 if member_type == 'user':
539 539 member_name = User.get(member_id).username
540 540 # this also updates the current one if found
541 541 self.grant_user_permission(
542 542 repo=repo, user=member_id, perm=perm)
543 543 else: # set for user group
544 544 # check if we have permissions to alter this usergroup
545 545 member_name = UserGroup.get(member_id).users_group_name
546 546 if not check_perms or HasUserGroupPermissionAny(
547 547 *req_perms)(member_name, user=cur_user):
548 548 self.grant_user_group_permission(
549 549 repo=repo, group_name=member_id, perm=perm)
550 550
551 551 changes['updated'].append({'type': member_type, 'id': member_id,
552 552 'name': member_name, 'new_perm': perm})
553 553
554 554 # set new permissions
555 555 for member_id, perm, member_type in perm_additions:
556 556 member_id = int(member_id)
557 557 if member_type == 'user':
558 558 member_name = User.get(member_id).username
559 559 self.grant_user_permission(
560 560 repo=repo, user=member_id, perm=perm)
561 561 else: # set for user group
562 562 # check if we have permissions to alter this usergroup
563 563 member_name = UserGroup.get(member_id).users_group_name
564 564 if not check_perms or HasUserGroupPermissionAny(
565 565 *req_perms)(member_name, user=cur_user):
566 566 self.grant_user_group_permission(
567 567 repo=repo, group_name=member_id, perm=perm)
568 568 changes['added'].append({'type': member_type, 'id': member_id,
569 569 'name': member_name, 'new_perm': perm})
570 570 # delete permissions
571 571 for member_id, perm, member_type in perm_deletions:
572 572 member_id = int(member_id)
573 573 if member_type == 'user':
574 574 member_name = User.get(member_id).username
575 575 self.revoke_user_permission(repo=repo, user=member_id)
576 576 else: # set for user group
577 577 # check if we have permissions to alter this usergroup
578 578 member_name = UserGroup.get(member_id).users_group_name
579 579 if not check_perms or HasUserGroupPermissionAny(
580 580 *req_perms)(member_name, user=cur_user):
581 581 self.revoke_user_group_permission(
582 582 repo=repo, group_name=member_id)
583 583
584 584 changes['deleted'].append({'type': member_type, 'id': member_id,
585 585 'name': member_name, 'new_perm': perm})
586 586 return changes
587 587
588 588 def create_fork(self, form_data, cur_user):
589 589 """
590 590 Simple wrapper into executing celery task for fork creation
591 591
592 592 :param form_data:
593 593 :param cur_user:
594 594 """
595 595 from rhodecode.lib.celerylib import tasks, run_task
596 596 return run_task(tasks.create_repo_fork, form_data, cur_user)
597 597
598 598 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
599 599 """
600 600 Delete given repository; the forks parameter defines what to do with
601 601 attached forks. Throws AttachedForksError if the deleted repo has
602 602 attached forks
603 603
604 604 :param repo:
605 605 :param forks: str 'delete' or 'detach'
606 606 :param fs_remove: remove(archive) repo from filesystem
607 607 """
608 608 if not cur_user:
609 609 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
610 610 repo = self._get_repo(repo)
611 611 if repo:
612 612 if forks == 'detach':
613 613 for r in repo.forks:
614 614 r.fork = None
615 615 self.sa.add(r)
616 616 elif forks == 'delete':
617 617 for r in repo.forks:
618 618 self.delete(r, forks='delete')
619 619 elif [f for f in repo.forks]:
620 620 raise AttachedForksError()
621 621
622 622 old_repo_dict = repo.get_dict()
623 623 events.trigger(events.RepoPreDeleteEvent(repo))
624 624 try:
625 625 self.sa.delete(repo)
626 626 if fs_remove:
627 627 self._delete_filesystem_repo(repo)
628 628 else:
629 629 log.debug('skipping removal from filesystem')
630 630 old_repo_dict.update({
631 631 'deleted_by': cur_user,
632 632 'deleted_on': time.time(),
633 633 })
634 634 log_delete_repository(**old_repo_dict)
635 635 events.trigger(events.RepoDeleteEvent(repo))
636 636 except Exception:
637 637 log.error(traceback.format_exc())
638 638 raise
639 639
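A hedged call sketch for the forks handling above; `repo` and `admin` are assumed, already-loaded objects and the import path is an assumption:

    from rhodecode.model.repo import RepoModel

    model = RepoModel()
    model.delete(repo, forks='detach', cur_user=admin)  # keep forks, just unlink them
    # model.delete(repo, forks='delete')                # delete attached forks as well
    # model.delete(repo)                                # raises AttachedForksError if forks exist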
640 640 def grant_user_permission(self, repo, user, perm):
641 641 """
642 642 Grant permission for user on given repository, or update existing one
643 643 if found
644 644
645 645 :param repo: Instance of Repository, repository_id, or repository name
646 646 :param user: Instance of User, user_id or username
647 647 :param perm: Instance of Permission, or permission_name
648 648 """
649 649 user = self._get_user(user)
650 650 repo = self._get_repo(repo)
651 651 permission = self._get_perm(perm)
652 652
653 653 # check if we have that permission already
654 654 obj = self.sa.query(UserRepoToPerm) \
655 655 .filter(UserRepoToPerm.user == user) \
656 656 .filter(UserRepoToPerm.repository == repo) \
657 657 .scalar()
658 658 if obj is None:
659 659 # create new !
660 660 obj = UserRepoToPerm()
661 661 obj.repository = repo
662 662 obj.user = user
663 663 obj.permission = permission
664 664 self.sa.add(obj)
665 665 log.debug('Granted perm %s to %s on %s', perm, user, repo)
666 666 action_logger_generic(
667 667 'granted permission: {} to user: {} on repo: {}'.format(
668 668 perm, user, repo), namespace='security.repo')
669 669 return obj
670 670
671 671 def revoke_user_permission(self, repo, user):
672 672 """
673 673 Revoke permission for user on given repository
674 674
675 675 :param repo: Instance of Repository, repository_id, or repository name
676 676 :param user: Instance of User, user_id or username
677 677 """
678 678
679 679 user = self._get_user(user)
680 680 repo = self._get_repo(repo)
681 681
682 682 obj = self.sa.query(UserRepoToPerm) \
683 683 .filter(UserRepoToPerm.repository == repo) \
684 684 .filter(UserRepoToPerm.user == user) \
685 685 .scalar()
686 686 if obj:
687 687 self.sa.delete(obj)
688 688 log.debug('Revoked perm on %s on %s', repo, user)
689 689 action_logger_generic(
690 690 'revoked permission from user: {} on repo: {}'.format(
691 691 user, repo), namespace='security.repo')
692 692
693 693 def grant_user_group_permission(self, repo, group_name, perm):
694 694 """
695 695 Grant permission for user group on given repository, or update
696 696 existing one if found
697 697
698 698 :param repo: Instance of Repository, repository_id, or repository name
699 699 :param group_name: Instance of UserGroup, users_group_id,
700 700 or user group name
701 701 :param perm: Instance of Permission, or permission_name
702 702 """
703 703 repo = self._get_repo(repo)
704 704 group_name = self._get_user_group(group_name)
705 705 permission = self._get_perm(perm)
706 706
707 707 # check if we have that permission already
708 708 obj = self.sa.query(UserGroupRepoToPerm) \
709 709 .filter(UserGroupRepoToPerm.users_group == group_name) \
710 710 .filter(UserGroupRepoToPerm.repository == repo) \
711 711 .scalar()
712 712
713 713 if obj is None:
714 714 # create new
715 715 obj = UserGroupRepoToPerm()
716 716
717 717 obj.repository = repo
718 718 obj.users_group = group_name
719 719 obj.permission = permission
720 720 self.sa.add(obj)
721 721 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
722 722 action_logger_generic(
723 723 'granted permission: {} to usergroup: {} on repo: {}'.format(
724 724 perm, group_name, repo), namespace='security.repo')
725 725
726 726 return obj
727 727
728 728 def revoke_user_group_permission(self, repo, group_name):
729 729 """
730 730 Revoke permission for user group on given repository
731 731
732 732 :param repo: Instance of Repository, repository_id, or repository name
733 733 :param group_name: Instance of UserGroup, users_group_id,
734 734 or user group name
735 735 """
736 736 repo = self._get_repo(repo)
737 737 group_name = self._get_user_group(group_name)
738 738
739 739 obj = self.sa.query(UserGroupRepoToPerm) \
740 740 .filter(UserGroupRepoToPerm.repository == repo) \
741 741 .filter(UserGroupRepoToPerm.users_group == group_name) \
742 742 .scalar()
743 743 if obj:
744 744 self.sa.delete(obj)
745 745 log.debug('Revoked perm to %s on %s', repo, group_name)
746 746 action_logger_generic(
747 747 'revoked permission from usergroup: {} on repo: {}'.format(
748 748 group_name, repo), namespace='security.repo')
749 749
750 750 def delete_stats(self, repo_name):
751 751 """
752 752 removes stats for given repo
753 753
754 754 :param repo_name:
755 755 """
756 756 repo = self._get_repo(repo_name)
757 757 try:
758 758 obj = self.sa.query(Statistics) \
759 759 .filter(Statistics.repository == repo).scalar()
760 760 if obj:
761 761 self.sa.delete(obj)
762 762 except Exception:
763 763 log.error(traceback.format_exc())
764 764 raise
765 765
766 766 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
767 767 field_type='str', field_desc=''):
768 768
769 769 repo = self._get_repo(repo_name)
770 770
771 771 new_field = RepositoryField()
772 772 new_field.repository = repo
773 773 new_field.field_key = field_key
774 774 new_field.field_type = field_type # python type
775 775 new_field.field_value = field_value
776 776 new_field.field_desc = field_desc
777 777 new_field.field_label = field_label
778 778 self.sa.add(new_field)
779 779 return new_field
780 780
781 781 def delete_repo_field(self, repo_name, field_key):
782 782 repo = self._get_repo(repo_name)
783 783 field = RepositoryField.get_by_key_name(field_key, repo)
784 784 if field:
785 785 self.sa.delete(field)
786 786
787 787 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
788 788 clone_uri=None, repo_store_location=None,
789 789 use_global_config=False):
790 790 """
791 791 makes repository on filesystem. It's group aware, meaning it'll create
792 792 a repository within a group and alter the paths according to the
793 793 group location
794 794
795 795 :param repo_name:
796 796 :param alias:
797 797 :param parent:
798 798 :param clone_uri:
799 799 :param repo_store_location:
800 800 """
801 801 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
802 802 from rhodecode.model.scm import ScmModel
803 803
804 804 if Repository.NAME_SEP in repo_name:
805 805 raise ValueError(
806 806 'repo_name must not contain groups got `%s`' % repo_name)
807 807
808 808 if isinstance(repo_group, RepoGroup):
809 809 new_parent_path = os.sep.join(repo_group.full_path_splitted)
810 810 else:
811 811 new_parent_path = repo_group or ''
812 812
813 813 if repo_store_location:
814 814 _paths = [repo_store_location]
815 815 else:
816 816 _paths = [self.repos_path, new_parent_path, repo_name]
817 817 # we need to make it str for mercurial
818 818 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
819 819
820 820 # check if this path is not a repository
821 821 if is_valid_repo(repo_path, self.repos_path):
822 822 raise Exception('This path %s is a valid repository' % repo_path)
823 823
824 824 # check if this path is a group
825 825 if is_valid_repo_group(repo_path, self.repos_path):
826 826 raise Exception('This path %s is a valid group' % repo_path)
827 827
828 828 log.info('creating repo %s in %s from url: `%s`',
829 829 repo_name, safe_unicode(repo_path),
830 830 obfuscate_url_pw(clone_uri))
831 831
832 832 backend = get_backend(repo_type)
833 833
834 834 config_repo = None if use_global_config else repo_name
835 835 if config_repo and new_parent_path:
836 836 config_repo = Repository.NAME_SEP.join(
837 837 (new_parent_path, config_repo))
838 838 config = make_db_config(clear_session=False, repo=config_repo)
839 839 config.set('extensions', 'largefiles', '')
840 840
841 841 # patch and reset hooks section of UI config to not run any
842 842 # hooks on creating remote repo
843 843 config.clear_section('hooks')
844 844
845 845 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
846 846 if repo_type == 'git':
847 847 repo = backend(
848 848 repo_path, config=config, create=True, src_url=clone_uri,
849 849 bare=True)
850 850 else:
851 851 repo = backend(
852 852 repo_path, config=config, create=True, src_url=clone_uri)
853 853
854 854 ScmModel().install_hooks(repo, repo_type=repo_type)
855 855
856 856 log.debug('Created repo %s with %s backend',
857 857 safe_unicode(repo_name), safe_unicode(repo_type))
858 858 return repo
859 859
860 860 def _rename_filesystem_repo(self, old, new):
861 861 """
862 862 renames repository on filesystem
863 863
864 864 :param old: old name
865 865 :param new: new name
866 866 """
867 867 log.info('renaming repo from %s to %s', old, new)
868 868
869 869 old_path = os.path.join(self.repos_path, old)
870 870 new_path = os.path.join(self.repos_path, new)
871 871 if os.path.isdir(new_path):
872 872 raise Exception(
873 873 'Was trying to rename to already existing dir %s' % new_path
874 874 )
875 875 shutil.move(old_path, new_path)
876 876
877 877 def _delete_filesystem_repo(self, repo):
878 878 """
879 879 removes repo from filesystem, the removal is actually made by
880 880 adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs
881 881 so this repository is no longer valid for rhodecode; it can be undeleted
882 882 later on by reverting the renames on this repository
883 883
884 884 :param repo: repo object
885 885 """
886 886 rm_path = os.path.join(self.repos_path, repo.repo_name)
887 887 repo_group = repo.group
888 888 log.info("Removing repository %s", rm_path)
889 889 # disable hg/git internals so that it doesn't get detected as a repo
890 890 alias = repo.repo_type
891 891
892 892 config = make_db_config(clear_session=False)
893 893 config.set('extensions', 'largefiles', '')
894 894 bare = getattr(repo.scm_instance(config=config), 'bare', False)
895 895
896 896 # skip this for bare git repos
897 897 if not bare:
898 898 # disable VCS repo
899 899 vcs_path = os.path.join(rm_path, '.%s' % alias)
900 900 if os.path.exists(vcs_path):
901 901 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
902 902
903 903 _now = datetime.now()
904 904 _ms = str(_now.microsecond).rjust(6, '0')
905 905 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
906 906 repo.just_name)
907 907 if repo_group:
908 908 # if repository is in group, prefix the removal path with the group
909 909 args = repo_group.full_path_splitted + [_d]
910 910 _d = os.path.join(*args)
911 911
912 912 if os.path.isdir(rm_path):
913 913 shutil.move(rm_path, os.path.join(self.repos_path, _d))
914 914
915 915
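The archive name built above can be reproduced standalone; a sketch of the naming scheme only, with a made-up repo name:

    from datetime import datetime

    _now = datetime.now()
    _ms = str(_now.microsecond).rjust(6, '0')
    archived = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), 'my-repo')
    print(archived)  # e.g. rm__20170101_120000_000042__my-repo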
916 916 class ReadmeFinder:
917 917 """
918 918 Utility which knows how to find a readme for a specific commit.
919 919
920 920 The main idea is that this is a configurable algorithm. When creating an
921 921 instance you can define parameters, currently only the `default_renderer`.
922 922 Based on this configuration the method :meth:`search` behaves slightly
923 923 differently.
924 924 """
925 925
926 926 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
927 927 path_re = re.compile(r'^docs?', re.IGNORECASE)
928 928
929 929 default_priorities = {
930 930 None: 0,
931 931 '.text': 2,
932 932 '.txt': 3,
933 933 '.rst': 1,
934 934 '.rest': 2,
935 935 '.md': 1,
936 936 '.mkdn': 2,
937 937 '.mdown': 3,
938 938 '.markdown': 4,
939 939 }
940 940
941 941 path_priority = {
942 942 'doc': 0,
943 943 'docs': 1,
944 944 }
945 945
946 946 FALLBACK_PRIORITY = 99
947 947
948 948 RENDERER_TO_EXTENSION = {
949 949 'rst': ['.rst', '.rest'],
950 950 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
951 951 }
952 952
953 953 def __init__(self, default_renderer=None):
954 954 self._default_renderer = default_renderer
955 955 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
956 956 default_renderer, [])
957 957
958 958 def search(self, commit, path='/'):
959 959 """
960 960 Find a readme in the given `commit`.
961 961 """
962 962 nodes = commit.get_nodes(path)
963 963 matches = self._match_readmes(nodes)
964 964 matches = self._sort_according_to_priority(matches)
965 965 if matches:
966 966 return matches[0].node
967 967
968 968 paths = self._match_paths(nodes)
969 969 paths = self._sort_paths_according_to_priority(paths)
970 970 for path in paths:
971 971 match = self.search(commit, path=path)
972 972 if match:
973 973 return match
974 974
975 975 return None
976 976
977 977 def _match_readmes(self, nodes):
978 978 for node in nodes:
979 979 if not node.is_file():
980 980 continue
981 981 path = node.path.rsplit('/', 1)[-1]
982 982 match = self.readme_re.match(path)
983 983 if match:
984 984 extension = match.group(1)
985 985 yield ReadmeMatch(node, match, self._priority(extension))
986 986
987 987 def _match_paths(self, nodes):
988 988 for node in nodes:
989 989 if not node.is_dir():
990 990 continue
991 991 match = self.path_re.match(node.path)
992 992 if match:
993 993 yield node.path
994 994
995 995 def _priority(self, extension):
996 996 renderer_priority = (
997 997 0 if extension in self._renderer_extensions else 1)
998 998 extension_priority = self.default_priorities.get(
999 999 extension, self.FALLBACK_PRIORITY)
1000 1000 return (renderer_priority, extension_priority)
1001 1001
1002 1002 def _sort_according_to_priority(self, matches):
1003 1003
1004 1004 def priority_and_path(match):
1005 1005 return (match.priority, match.path)
1006 1006
1007 1007 return sorted(matches, key=priority_and_path)
1008 1008
1009 1009 def _sort_paths_according_to_priority(self, paths):
1010 1010
1011 1011 def priority_and_path(path):
1012 1012 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1013 1013
1014 1014 return sorted(paths, key=priority_and_path)
1015 1015
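A small sketch of how the priorities above combine; the import path is an assumption and lower tuples win when sorted:

    from rhodecode.model.repo import ReadmeFinder

    finder = ReadmeFinder(default_renderer='markdown')
    print(finder._priority('.md'))   # (0, 1) - matches the default renderer
    print(finder._priority('.rst'))  # (1, 1) - good extension, different renderer
    print(finder._priority('.txt'))  # (1, 3)
    # sorting these tuples makes README.md win over README.rst and README.txt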
1016 1016
1017 1017 class ReadmeMatch:
1018 1018
1019 1019 def __init__(self, node, match, priority):
1020 1020 self.node = node
1021 1021 self._match = match
1022 1022 self.priority = priority
1023 1023
1024 1024 @property
1025 1025 def path(self):
1026 1026 return self.node.path
1027 1027
1028 1028 def __repr__(self):
1029 1029 return '<ReadmeMatch {} priority={}'.format(self.path, self.priority)
@@ -1,733 +1,733 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 repo group model for RhodeCode
24 24 """
25 25
26 26 import os
27 27 import datetime
28 28 import itertools
29 29 import logging
30 30 import shutil
31 31 import traceback
32 32 import string
33 33
34 34 from zope.cachedescriptors.property import Lazy as LazyProperty
35 35
36 36 from rhodecode import events
37 37 from rhodecode.model import BaseModel
38 38 from rhodecode.model.db import (_hash_key,
39 39 RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
40 40 UserGroup, Repository)
41 41 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
42 42 from rhodecode.lib.caching_query import FromCache
43 43 from rhodecode.lib.utils2 import action_logger_generic
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
48 48 class RepoGroupModel(BaseModel):
49 49
50 50 cls = RepoGroup
51 51 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
52 52 PERSONAL_GROUP_PATTERN = '${username}' # default
53 53
54 54 def _get_user_group(self, users_group):
55 55 return self._get_instance(UserGroup, users_group,
56 56 callback=UserGroup.get_by_group_name)
57 57
58 58 def _get_repo_group(self, repo_group):
59 59 return self._get_instance(RepoGroup, repo_group,
60 60 callback=RepoGroup.get_by_group_name)
61 61
62 62 @LazyProperty
63 63 def repos_path(self):
64 64 """
65 65 Gets the repositories root path from database
66 66 """
67 67
68 68 settings_model = VcsSettingsModel(sa=self.sa)
69 69 return settings_model.get_repos_location()
70 70
71 71 def get_by_group_name(self, repo_group_name, cache=None):
72 72 repo = self.sa.query(RepoGroup) \
73 73 .filter(RepoGroup.group_name == repo_group_name)
74 74
75 75 if cache:
76 76 name_key = _hash_key(repo_group_name)
77 77 repo = repo.options(
78 78 FromCache("sql_cache_short", "get_repo_group_%s" % name_key))
79 79 return repo.scalar()
80 80
81 81 def get_default_create_personal_repo_group(self):
82 82 value = SettingsModel().get_setting_by_name(
83 83 'create_personal_repo_group')
84 84 return value.app_settings_value if value else None or False
85 85
86 86 def get_personal_group_name_pattern(self):
87 87 value = SettingsModel().get_setting_by_name(
88 88 'personal_repo_group_pattern')
89 89 val = value.app_settings_value if value else None
90 90 group_template = val or self.PERSONAL_GROUP_PATTERN
91 91
92 92 group_template = group_template.lstrip('/')
93 93 return group_template
94 94
95 95 def get_personal_group_name(self, user):
96 96 template = self.get_personal_group_name_pattern()
97 97 return string.Template(template).safe_substitute(
98 98 username=user.username,
99 99 user_id=user.user_id,
100 100 )
101 101
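The pattern substitution above is plain string.Template; a standalone sketch with a made-up user:

    import string

    template = '${username}'  # the default PERSONAL_GROUP_PATTERN
    name = string.Template(template.lstrip('/')).safe_substitute(
        username='example-user', user_id=2)
    print(name)  # 'example-user'
    # a custom pattern such as 'people/${username}_${user_id}' would yield
    # 'people/example-user_2'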
102 102 def create_personal_repo_group(self, user, commit_early=True):
103 103 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
104 104 personal_repo_group_name = self.get_personal_group_name(user)
105 105
106 106 # create a new one
107 107 RepoGroupModel().create(
108 108 group_name=personal_repo_group_name,
109 109 group_description=desc,
110 110 owner=user.username,
111 111 personal=True,
112 112 commit_early=commit_early)
113 113
114 114 def _create_default_perms(self, new_group):
115 115 # create default permission
116 116 default_perm = 'group.read'
117 117 def_user = User.get_default_user()
118 118 for p in def_user.user_perms:
119 119 if p.permission.permission_name.startswith('group.'):
120 120 default_perm = p.permission.permission_name
121 121 break
122 122
123 123 repo_group_to_perm = UserRepoGroupToPerm()
124 124 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
125 125
126 126 repo_group_to_perm.group = new_group
127 127 repo_group_to_perm.user_id = def_user.user_id
128 128 return repo_group_to_perm
129 129
130 130 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
131 131 get_object=False):
132 132 """
133 133 Gets the group name and a parent group name from the given group name.
134 134 If repo_in_path is set to true, we assume the full path also includes
135 135 a repo name; in such a case we clean the last element.
136 136
137 137 :param group_name_full:
138 138 """
139 139 split_paths = 1
140 140 if repo_in_path:
141 141 split_paths = 2
142 142 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
143 143
144 144 if repo_in_path and len(_parts) > 1:
145 145 # in such a case the last element is the repo_name
146 146 _parts.pop(-1)
147 147 group_name_cleaned = _parts[-1] # just the group name
148 148 parent_repo_group_name = None
149 149
150 150 if len(_parts) > 1:
151 151 parent_repo_group_name = _parts[0]
152 152
153 153 parent_group = None
154 154 if parent_repo_group_name:
155 155 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
156 156
157 157 if get_object:
158 158 return group_name_cleaned, parent_repo_group_name, parent_group
159 159
160 160 return group_name_cleaned, parent_repo_group_name
161 161
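A standalone sketch that mirrors the splitting above without the database lookup (the function name is made up for illustration):

    def split_group_name(group_name_full, repo_in_path=False):
        # mirrors _get_group_name_and_parent, minus the RepoGroup lookup
        parts = group_name_full.rsplit('/', 2 if repo_in_path else 1)
        if repo_in_path and len(parts) > 1:
            parts.pop(-1)  # the last element is the repo name, drop it
        parent = parts[0] if len(parts) > 1 else None
        return parts[-1], parent

    print(split_group_name('parent/child'))             # ('child', 'parent')
    print(split_group_name('parent/child/repo', True))  # ('child', 'parent')
    print(split_group_name('toplevel'))                 # ('toplevel', None)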
162 162 def check_exist_filesystem(self, group_name, exc_on_failure=True):
163 163 create_path = os.path.join(self.repos_path, group_name)
164 164 log.debug('creating new group in %s', create_path)
165 165
166 166 if os.path.isdir(create_path):
167 167 if exc_on_failure:
168 168 abs_create_path = os.path.abspath(create_path)
169 169 raise Exception('Directory `{}` already exists !'.format(abs_create_path))
170 170 return False
171 171 return True
172 172
173 173 def _create_group(self, group_name):
174 174 """
175 175 makes repository group on filesystem
176 176
177 177 :param repo_name:
178 178 :param parent_id:
179 179 """
180 180
181 181 self.check_exist_filesystem(group_name)
182 182 create_path = os.path.join(self.repos_path, group_name)
183 183 log.debug('creating new group in %s', create_path)
184 184 os.makedirs(create_path, mode=0755)
185 185 log.debug('created group in %s', create_path)
186 186
187 187 def _rename_group(self, old, new):
188 188 """
189 189 Renames a group on filesystem
190 190
191 191 :param group_name:
192 192 """
193 193
194 194 if old == new:
195 195 log.debug('skipping group rename')
196 196 return
197 197
198 198 log.debug('renaming repository group from %s to %s', old, new)
199 199
200 200 old_path = os.path.join(self.repos_path, old)
201 201 new_path = os.path.join(self.repos_path, new)
202 202
203 203 log.debug('renaming repos paths from %s to %s', old_path, new_path)
204 204
205 205 if os.path.isdir(new_path):
206 206 raise Exception('Was trying to rename to already '
207 207 'existing dir %s' % new_path)
208 208 shutil.move(old_path, new_path)
209 209
210 210 def _delete_filesystem_group(self, group, force_delete=False):
211 211 """
212 212 Deletes a group from a filesystem
213 213
214 214 :param group: instance of group from database
215 215 :param force_delete: use shutil rmtree to remove all objects
216 216 """
217 217 paths = group.full_path.split(RepoGroup.url_sep())
218 218 paths = os.sep.join(paths)
219 219
220 220 rm_path = os.path.join(self.repos_path, paths)
221 221 log.info("Removing group %s", rm_path)
222 222 # delete only if that path really exists
223 223 if os.path.isdir(rm_path):
224 224 if force_delete:
225 225 shutil.rmtree(rm_path)
226 226 else:
227 227 # archive that group
228 228 _now = datetime.datetime.now()
229 229 _ms = str(_now.microsecond).rjust(6, '0')
230 230 _d = 'rm__%s_GROUP_%s' % (
231 231 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
232 232 shutil.move(rm_path, os.path.join(self.repos_path, _d))
233 233
234 234 def create(self, group_name, group_description, owner, just_db=False,
235 235 copy_permissions=False, personal=None, commit_early=True):
236 236
237 237 (group_name_cleaned,
238 238 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
239 239
240 240 parent_group = None
241 241 if parent_group_name:
242 242 parent_group = self._get_repo_group(parent_group_name)
243 243 if not parent_group:
244 244 # we tried to create a nested group, but the parent does not
245 245 # exist
246 246 raise ValueError(
247 247 'Parent group `%s` given in `%s` group name '
248 248 'is not yet existing.' % (parent_group_name, group_name))
249 249
250 250 # because we are doing a cleanup, we need to check if such a directory
251 251 # already exists. If we don't do that we can accidentally delete an
252 252 # existing directory via cleanup, which can cause data issues, since
253 253 # delete renames the folder to a special syntax that later cleanup
254 254 # functions can delete
255 255 cleanup_group = self.check_exist_filesystem(group_name,
256 256 exc_on_failure=False)
257 257 try:
258 258 user = self._get_user(owner)
259 259 new_repo_group = RepoGroup()
260 260 new_repo_group.user = user
261 261 new_repo_group.group_description = group_description or group_name
262 262 new_repo_group.parent_group = parent_group
263 263 new_repo_group.group_name = group_name
264 264 new_repo_group.personal = personal
265 265
266 266 self.sa.add(new_repo_group)
267 267
268 268 # create an ADMIN permission for owner except if we're super admin,
269 269 # later owner should go into the owner field of groups
270 270 if not user.is_admin:
271 271 self.grant_user_permission(repo_group=new_repo_group,
272 272 user=owner, perm='group.admin')
273 273
274 274 if parent_group and copy_permissions:
275 275 # copy permissions from parent
276 276 user_perms = UserRepoGroupToPerm.query() \
277 277 .filter(UserRepoGroupToPerm.group == parent_group).all()
278 278
279 279 group_perms = UserGroupRepoGroupToPerm.query() \
280 280 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
281 281
282 282 for perm in user_perms:
283 283 # don't copy over the permission for the user who is creating
284 284 # this group; if they are not a super admin they get the admin
285 285 # permission set above
286 286 if perm.user != user or user.is_admin:
287 287 UserRepoGroupToPerm.create(
288 288 perm.user, new_repo_group, perm.permission)
289 289
290 290 for perm in group_perms:
291 291 UserGroupRepoGroupToPerm.create(
292 292 perm.users_group, new_repo_group, perm.permission)
293 293 else:
294 294 perm_obj = self._create_default_perms(new_repo_group)
295 295 self.sa.add(perm_obj)
296 296
297 297 # now commit the changes, earlier so we are sure everything is in
298 298 # the database.
299 299 if commit_early:
300 300 self.sa.commit()
301 301 if not just_db:
302 302 self._create_group(new_repo_group.group_name)
303 303
304 304 # trigger the post hook
305 305 from rhodecode.lib.hooks_base import log_create_repository_group
306 306 repo_group = RepoGroup.get_by_group_name(group_name)
307 307 log_create_repository_group(
308 308 created_by=user.username, **repo_group.get_dict())
309 309
310 310 # Trigger create event.
311 311 events.trigger(events.RepoGroupCreateEvent(repo_group))
312 312
313 313 return new_repo_group
314 314 except Exception:
315 315 self.sa.rollback()
316 316 log.exception('Exception occurred when creating repository group, '
317 317 'doing cleanup...')
318 318 # rollback things manually !
319 319 repo_group = RepoGroup.get_by_group_name(group_name)
320 320 if repo_group:
321 321 RepoGroup.delete(repo_group.group_id)
322 322 self.sa.commit()
323 323 if cleanup_group:
324 324 RepoGroupModel()._delete_filesystem_group(repo_group)
325 325 raise
326 326
327 327 def update_permissions(
328 328 self, repo_group, perm_additions=None, perm_updates=None,
329 329 perm_deletions=None, recursive=None, check_perms=True,
330 330 cur_user=None):
331 331 from rhodecode.model.repo import RepoModel
332 332 from rhodecode.lib.auth import HasUserGroupPermissionAny
333 333
334 334 if not perm_additions:
335 335 perm_additions = []
336 336 if not perm_updates:
337 337 perm_updates = []
338 338 if not perm_deletions:
339 339 perm_deletions = []
340 340
341 341 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
342 342
343 343 changes = {
344 344 'added': [],
345 345 'updated': [],
346 346 'deleted': []
347 347 }
348 348
349 349 def _set_perm_user(obj, user, perm):
350 350 if isinstance(obj, RepoGroup):
351 351 self.grant_user_permission(
352 352 repo_group=obj, user=user, perm=perm)
353 353 elif isinstance(obj, Repository):
354 354 # private repos will not allow to change the default
355 355 # permissions using recursive mode
356 356 if obj.private and user == User.DEFAULT_USER:
357 357 return
358 358
359 359 # we set group permission but we have to switch to repo
360 360 # permission
361 361 perm = perm.replace('group.', 'repository.')
362 362 RepoModel().grant_user_permission(
363 363 repo=obj, user=user, perm=perm)
364 364
365 365 def _set_perm_group(obj, users_group, perm):
366 366 if isinstance(obj, RepoGroup):
367 367 self.grant_user_group_permission(
368 368 repo_group=obj, group_name=users_group, perm=perm)
369 369 elif isinstance(obj, Repository):
370 370 # we set group permission but we have to switch to repo
371 371 # permission
372 372 perm = perm.replace('group.', 'repository.')
373 373 RepoModel().grant_user_group_permission(
374 374 repo=obj, group_name=users_group, perm=perm)
375 375
376 376 def _revoke_perm_user(obj, user):
377 377 if isinstance(obj, RepoGroup):
378 378 self.revoke_user_permission(repo_group=obj, user=user)
379 379 elif isinstance(obj, Repository):
380 380 RepoModel().revoke_user_permission(repo=obj, user=user)
381 381
382 382 def _revoke_perm_group(obj, user_group):
383 383 if isinstance(obj, RepoGroup):
384 384 self.revoke_user_group_permission(
385 385 repo_group=obj, group_name=user_group)
386 386 elif isinstance(obj, Repository):
387 387 RepoModel().revoke_user_group_permission(
388 388 repo=obj, group_name=user_group)
389 389
390 390 # start updates
391 391 log.debug('Now updating permissions for %s in recursive mode:%s',
392 392 repo_group, recursive)
393 393
394 394 # initialize check function, we'll call that multiple times
395 395 has_group_perm = HasUserGroupPermissionAny(*req_perms)
396 396
397 397 for obj in repo_group.recursive_groups_and_repos():
398 398 # iterated obj is an instance of a repos group or repository in
399 399 # that group, recursive option can be: none, repos, groups, all
400 400 if recursive == 'all':
401 401 obj = obj
402 402 elif recursive == 'repos':
403 403 # skip groups, other than this one
404 404 if isinstance(obj, RepoGroup) and not obj == repo_group:
405 405 continue
406 406 elif recursive == 'groups':
407 407 # skip repos
408 408 if isinstance(obj, Repository):
409 409 continue
410 410 else: # recursive == 'none':
411 411 # DEFAULT option - don't apply to iterated objects
412 412 # also we do a break at the end of this loop if we are not
413 413 # in recursive mode
414 414 obj = repo_group
415 415
416 416 change_obj = obj.get_api_data()
417 417
418 418 # update permissions
419 419 for member_id, perm, member_type in perm_updates:
420 420 member_id = int(member_id)
421 421 if member_type == 'user':
422 422 member_name = User.get(member_id).username
423 423 # this also updates the current one if found
424 424 _set_perm_user(obj, user=member_id, perm=perm)
425 425 else: # set for user group
426 426 member_name = UserGroup.get(member_id).users_group_name
427 427 if not check_perms or has_group_perm(member_name,
428 428 user=cur_user):
429 429 _set_perm_group(obj, users_group=member_id, perm=perm)
430 430
431 431 changes['updated'].append(
432 432 {'change_obj': change_obj, 'type': member_type,
433 433 'id': member_id, 'name': member_name, 'new_perm': perm})
434 434
435 435 # set new permissions
436 436 for member_id, perm, member_type in perm_additions:
437 437 member_id = int(member_id)
438 438 if member_type == 'user':
439 439 member_name = User.get(member_id).username
440 440 _set_perm_user(obj, user=member_id, perm=perm)
441 441 else: # set for user group
442 442 # check if we have permissions to alter this usergroup
443 443 member_name = UserGroup.get(member_id).users_group_name
444 444 if not check_perms or has_group_perm(member_name,
445 445 user=cur_user):
446 446 _set_perm_group(obj, users_group=member_id, perm=perm)
447 447
448 448 changes['added'].append(
449 449 {'change_obj': change_obj, 'type': member_type,
450 450 'id': member_id, 'name': member_name, 'new_perm': perm})
451 451
452 452 # delete permissions
453 453 for member_id, perm, member_type in perm_deletions:
454 454 member_id = int(member_id)
455 455 if member_type == 'user':
456 456 member_name = User.get(member_id).username
457 457 _revoke_perm_user(obj, user=member_id)
458 458 else: # set for user group
459 459 # check if we have permissions to alter this usergroup
460 460 member_name = UserGroup.get(member_id).users_group_name
461 461 if not check_perms or has_group_perm(member_name,
462 462 user=cur_user):
463 463 _revoke_perm_group(obj, user_group=member_id)
464 464
465 465 changes['deleted'].append(
466 466 {'change_obj': change_obj, 'type': member_type,
467 467 'id': member_id, 'name': member_name, 'new_perm': perm})
468 468
469 469 # if it's not a recursive call for all, repos or groups,
470 470 # break the loop and don't proceed with other changes
471 471 if recursive not in ['all', 'repos', 'groups']:
472 472 break
473 473
474 474 return changes
475 475
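A hedged call sketch for the recursive modes above; `repo_group`, `user_id` and `admin` are placeholders and the import path is an assumption:

    from rhodecode.model.repo_group import RepoGroupModel

    RepoGroupModel().update_permissions(
        repo_group,
        perm_updates=[(user_id, 'group.write', 'user')],
        recursive='all',  # 'all', 'repos', 'groups', or anything else for non-recursive
        cur_user=admin)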
476 476 def update(self, repo_group, form_data):
477 477 try:
478 478 repo_group = self._get_repo_group(repo_group)
479 479 old_path = repo_group.full_path
480 480
481 481 # change properties
482 482 if 'group_description' in form_data:
483 483 repo_group.group_description = form_data['group_description']
484 484
485 485 if 'enable_locking' in form_data:
486 486 repo_group.enable_locking = form_data['enable_locking']
487 487
488 488 if 'group_parent_id' in form_data:
489 489 parent_group = (
490 490 self._get_repo_group(form_data['group_parent_id']))
491 491 repo_group.group_parent_id = (
492 492 parent_group.group_id if parent_group else None)
493 493 repo_group.parent_group = parent_group
494 494
495 495 # mikhail: to update the full_path, we have to explicitly
496 496 # update group_name
497 497 group_name = form_data.get('group_name', repo_group.name)
498 498 repo_group.group_name = repo_group.get_new_name(group_name)
499 499
500 500 new_path = repo_group.full_path
501 501
502 502 if 'user' in form_data:
503 503 repo_group.user = User.get_by_username(form_data['user'])
504 504
505 505 self.sa.add(repo_group)
506 506
507 507 # iterate over all members of this group and do fixes
508 508 # set locking if given
509 509 # if obj is a RepoGroup also fix the name of the group according
510 510 # to the parent
511 511 # if obj is a Repo fix its name
512 512 # this can be a potentially heavy operation
513 513 for obj in repo_group.recursive_groups_and_repos():
514 514 # set the value from its parent
515 515 obj.enable_locking = repo_group.enable_locking
516 516 if isinstance(obj, RepoGroup):
517 517 new_name = obj.get_new_name(obj.name)
518 518 log.debug('Fixing group %s to new name %s',
519 519 obj.group_name, new_name)
520 520 obj.group_name = new_name
521 521 elif isinstance(obj, Repository):
522 522 # we need to get all repositories from this new group and
523 523 # rename them accordingly to new group path
524 524 new_name = obj.get_new_name(obj.just_name)
525 525 log.debug('Fixing repo %s to new name %s',
526 526 obj.repo_name, new_name)
527 527 obj.repo_name = new_name
528 528 self.sa.add(obj)
529 529
530 530 self._rename_group(old_path, new_path)
531 531
532 532 # Trigger update event.
533 533 events.trigger(events.RepoGroupUpdateEvent(repo_group))
534 534
535 535 return repo_group
536 536 except Exception:
537 537 log.error(traceback.format_exc())
538 538 raise
539 539
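From the caller's side, update() only touches the keys present in form_data and leaves persistence to the surrounding transaction. A hedged usage sketch; the class name RepoGroupModel, its import path, and the group path 'docs/internal' are assumptions, since the class header sits outside this hunk:

from rhodecode.model.repo_group import RepoGroupModel  # assumed import path
from rhodecode.model.meta import Session

model = RepoGroupModel()
model.update('docs/internal', {                  # RepoGroup instance, id, or name
    'group_name': 'internal',                    # triggers the rename/fix-up loop above
    'group_description': 'Internal documentation',
    'enable_locking': False,                     # propagated to children by the loop
})
Session().commit()                               # committing is assumed to be the caller's job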
540 540 def delete(self, repo_group, force_delete=False, fs_remove=True):
541 541 repo_group = self._get_repo_group(repo_group)
542 542 if not repo_group:
543 543 return False
544 544 try:
545 545 self.sa.delete(repo_group)
546 546 if fs_remove:
547 547 self._delete_filesystem_group(repo_group, force_delete)
548 548 else:
549 549 log.debug('skipping removal from filesystem')
550 550
551 551 # Trigger delete event.
552 552 events.trigger(events.RepoGroupDeleteEvent(repo_group))
553 553 return True
554 554
555 555 except Exception:
556 556 log.error('Error removing repo_group %s', repo_group)
557 557 raise
558 558
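delete() separates the database removal from the on-disk one, so a caller can keep the directory around, for example while something else archives it first. A small sketch reusing the assumptions above; force_delete is simply forwarded to the filesystem helper, whose exact semantics live outside this hunk:

model = RepoGroupModel()
# drop the database record but keep the directory on disk ('old/experiments' is made up)
model.delete('old/experiments', fs_remove=False)
# remove another hypothetical group and forward force_delete to the filesystem cleanup
model.delete('old/scratch', force_delete=True)
Session().commit()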
559 559 def grant_user_permission(self, repo_group, user, perm):
560 560 """
561 561 Grant permission for user on given repository group, or update
562 562 existing one if found
563 563
564 564 :param repo_group: Instance of RepoGroup, repositories_group_id,
565 565 or repositories_group name
566 566 :param user: Instance of User, user_id or username
567 567 :param perm: Instance of Permission, or permission_name
568 568 """
569 569
570 570 repo_group = self._get_repo_group(repo_group)
571 571 user = self._get_user(user)
572 572 permission = self._get_perm(perm)
573 573
574 574 # check if we have that permission already
575 575 obj = self.sa.query(UserRepoGroupToPerm)\
576 576 .filter(UserRepoGroupToPerm.user == user)\
577 577 .filter(UserRepoGroupToPerm.group == repo_group)\
578 578 .scalar()
579 579 if obj is None:
580 580 # create new !
581 581 obj = UserRepoGroupToPerm()
582 582 obj.group = repo_group
583 583 obj.user = user
584 584 obj.permission = permission
585 585 self.sa.add(obj)
586 586 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
587 587 action_logger_generic(
588 588 'granted permission: {} to user: {} on repogroup: {}'.format(
589 589 perm, user, repo_group), namespace='security.repogroup')
590 590 return obj
591 591
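Because the docstring above accepts instances, ids, or names for every argument, a grant can stay entirely string-based. A hedged example; 'group.write' is one of the stock repo group permission names, while the group path and username are invented:

RepoGroupModel().grant_user_permission(
    repo_group='docs/internal',   # RepoGroup instance, repositories_group_id, or name
    user='jdoe',                  # User instance, user_id, or username
    perm='group.write',           # Permission instance or permission name
)
Session().commit()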
592 592 def revoke_user_permission(self, repo_group, user):
593 593 """
594 594 Revoke permission for user on given repository group
595 595
596 596 :param repo_group: Instance of RepoGroup, repositories_group_id,
597 597 or repositories_group name
598 598 :param user: Instance of User, user_id or username
599 599 """
600 600
601 601 repo_group = self._get_repo_group(repo_group)
602 602 user = self._get_user(user)
603 603
604 604 obj = self.sa.query(UserRepoGroupToPerm)\
605 605 .filter(UserRepoGroupToPerm.user == user)\
606 606 .filter(UserRepoGroupToPerm.group == repo_group)\
607 607 .scalar()
608 608 if obj:
609 609 self.sa.delete(obj)
610 610 log.debug('Revoked perm for user %s on %s', user, repo_group)
611 611 action_logger_generic(
612 612 'revoked permission from user: {} on repogroup: {}'.format(
613 613 user, repo_group), namespace='security.repogroup')
614 614
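Revocation is effectively idempotent: when no UserRepoGroupToPerm row matches, the method simply falls through without logging. A one-call sketch under the same assumptions:

RepoGroupModel().revoke_user_permission(repo_group='docs/internal', user='jdoe')
Session().commit()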
615 615 def grant_user_group_permission(self, repo_group, group_name, perm):
616 616 """
617 617 Grant permission for user group on given repository group, or update
618 618 existing one if found
619 619
620 620 :param repo_group: Instance of RepoGroup, repositories_group_id,
621 621 or repositories_group name
622 622 :param group_name: Instance of UserGroup, users_group_id,
623 623 or user group name
624 624 :param perm: Instance of Permission, or permission_name
625 625 """
626 626 repo_group = self._get_repo_group(repo_group)
627 627 group_name = self._get_user_group(group_name)
628 628 permission = self._get_perm(perm)
629 629
630 630 # check if we have that permission already
631 631 obj = self.sa.query(UserGroupRepoGroupToPerm)\
632 632 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
633 633 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
634 634 .scalar()
635 635
636 636 if obj is None:
637 637 # create new
638 638 obj = UserGroupRepoGroupToPerm()
639 639
640 640 obj.group = repo_group
641 641 obj.users_group = group_name
642 642 obj.permission = permission
643 643 self.sa.add(obj)
644 644 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
645 645 action_logger_generic(
646 646 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
647 647 perm, group_name, repo_group), namespace='security.repogroup')
648 648 return obj
649 649
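The user group variant mirrors the per-user one; only the association class (UserGroupRepoGroupToPerm) and the second argument differ. A sketch, with the user group name invented for illustration:

RepoGroupModel().grant_user_group_permission(
    repo_group='docs/internal',
    group_name='doc-writers',     # UserGroup instance, users_group_id, or name
    perm='group.read',
)
Session().commit()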
650 650 def revoke_user_group_permission(self, repo_group, group_name):
651 651 """
652 652 Revoke permission for user group on given repository group
653 653
654 654 :param repo_group: Instance of RepoGroup, repositories_group_id,
655 655 or repositories_group name
656 656 :param group_name: Instance of UserGroup, users_group_id,
657 657 or user group name
658 658 """
659 659 repo_group = self._get_repo_group(repo_group)
660 660 group_name = self._get_user_group(group_name)
661 661
662 662 obj = self.sa.query(UserGroupRepoGroupToPerm)\
663 663 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
664 664 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
665 665 .scalar()
666 666 if obj:
667 667 self.sa.delete(obj)
668 668 log.debug('Revoked perm for user group %s on %s', group_name, repo_group)
669 669 action_logger_generic(
670 670 'revoked permission from usergroup: {} on repogroup: {}'.format(
671 671 group_name, repo_group), namespace='security.repogroup')
672 672
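Both the grant and revoke paths above also write to the generic action log under the 'security.repogroup' namespace, so a revocation like the one below leaves an audit entry roughly matching the format strings in the code (the exact rendering depends on how the objects stringify):

RepoGroupModel().revoke_user_group_permission(
    repo_group='docs/internal', group_name='doc-writers')
Session().commit()
# expected audit entry, approximately:
#   revoked permission from usergroup: <doc-writers> on repogroup: <docs/internal>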
673 673 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
674 674 super_user_actions=False):
675 675
676 676 from rhodecode.lib.utils import PartialRenderer
677 677 _render = PartialRenderer('data_table/_dt_elements.mako')
678 678 c = _render.c
679 679 h = _render.h
680 680
681 681 def quick_menu(repo_group_name):
682 682 return _render('quick_repo_group_menu', repo_group_name)
683 683
684 684 def repo_group_lnk(repo_group_name):
685 685 return _render('repo_group_name', repo_group_name)
686 686
687 687 def desc(desc, personal):
688 688 prefix = h.escaped_stylize(u'[personal] ') if personal else ''
689 689
690 690 if c.visual.stylify_metatags:
691 691 desc = h.urlify_text(prefix + h.escaped_stylize(desc))
692 692 else:
693 693 desc = h.urlify_text(prefix + h.html_escape(desc))
694 694
695 695 return _render('repo_group_desc', desc)
696 696
697 697 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
698 698 return _render(
699 699 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
700 700
701 701 def repo_group_name(repo_group_name, children_groups):
702 702 return _render("repo_group_name", repo_group_name, children_groups)
703 703
704 704 def user_profile(username):
705 705 return _render('user_profile', username)
706 706
707 707 repo_group_data = []
708 708 for group in repo_group_list:
709 709
710 710 row = {
711 711 "menu": quick_menu(group.group_name),
712 712 "name": repo_group_lnk(group.group_name),
713 713 "name_raw": group.group_name,
714 "desc": desc(group.group_description, group.personal),
714 "desc": desc(group.description_safe, group.personal),
715 715 "top_level_repos": 0,
716 716 "owner": user_profile(group.user.username)
717 717 }
718 718 if admin:
719 719 repo_count = group.repositories.count()
720 720 children_groups = map(
721 721 h.safe_unicode,
722 722 itertools.chain((g.name for g in group.parents),
723 723 (x.name for x in [group])))
724 724 row.update({
725 725 "action": repo_group_actions(
726 726 group.group_id, group.group_name, repo_count),
727 727 "top_level_repos": repo_count,
728 728 "name": repo_group_name(group.group_name, children_groups),
729 729
730 730 })
731 731 repo_group_data.append(row)
732 732
733 733 return repo_group_data
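The only functional change in this hunk is the "desc" cell switching from the raw group_description to description_safe. A minimal sketch of the effect that switch relies on, assuming description_safe is essentially an HTML-escaped view of the raw description (the property itself is defined elsewhere in the changeset):

# illustration only; the real escaping lives in the model/helpers, not here
raw = '<img src=x onerror=alert(1)> internal docs'
escaped = (raw.replace('&', '&amp;')
              .replace('<', '&lt;')
              .replace('>', '&gt;'))
assert escaped == '&lt;img src=x onerror=alert(1)&gt; internal docs'
# feeding the escaped value into desc()/urlify_text keeps any markup inert in the grid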