##// END OF EJS Templates
tests: fix cache problems after empty repo check change.
marcink -
r3738:b8214661 new-ui
parent child Browse files
Show More
@@ -1,137 +1,143 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23
24 24 from rhodecode.model.meta import Session
25 25 from rhodecode.model.repo import RepoModel
26 26 from rhodecode.model.user import UserModel
27 27 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
28 28 from rhodecode.api.tests.utils import (
29 29 build_data, api_call, assert_ok, assert_error, expected_permissions)
30 30
31 31
@pytest.mark.usefixtures("testuser_api", "app")
class TestGetRepo(object):
    """API tests for the ``get_repo`` JSON-RPC call: secret exposure per
    token kind, the ``cache`` parameter, and permission-based visibility."""

    @pytest.mark.parametrize("apikey_attr, expect_secrets", [
        ('apikey', True),
        ('apikey_regular', False),
    ])
    @pytest.mark.parametrize("cache_param", [
        True,
        False,
        None,
    ])
    def test_api_get_repo(
            self, apikey_attr, expect_secrets, cache_param, backend,
            user_util):
        repo = backend.create_repo()
        # remember the primary key so the repo can be re-fetched after the
        # API call (which may expire/detach the current ORM instance)
        repo_id = repo.repo_id
        usr = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        group = user_util.create_user_group(members=[usr])
        user_util.grant_user_group_permission_to_repo(
            repo=repo, user_group=group, permission_name='repository.read')
        Session().commit()
        kwargs = {
            'repoid': repo.repo_name,
        }
        # only send `cache` when explicitly parametrized, so the None case
        # exercises the server-side default
        if cache_param is not None:
            kwargs['cache'] = cache_param

        apikey = getattr(self, apikey_attr)
        id_, params = build_data(apikey, 'get_repo', **kwargs)
        response = api_call(self.app, params)

        ret = repo.get_api_data()

        permissions = expected_permissions(repo)

        followers = []

        # re-fetch a fresh instance; the cached/expired one cannot be used
        # to iterate followers reliably after the API call above
        repo = RepoModel().get(repo_id)
        for user in repo.followers:
            followers.append(user.user.get_api_data(
                include_secrets=expect_secrets))

        ret['permissions'] = permissions
        ret['followers'] = followers

        expected = ret

        assert_ok(id_, expected, given=response.body)

    @pytest.mark.parametrize("grant_perm", [
        'repository.admin',
        'repository.write',
        'repository.read',
    ])
    def test_api_get_repo_by_non_admin(self, grant_perm, backend):
        # TODO: Depending on which tests are running before this one, we
        # start with a different number of permissions in the database.
        repo = RepoModel().get_by_repo_name(backend.repo_name)
        repo_id = repo.repo_id
        permission_count = len(repo.repo_to_perm)

        RepoModel().grant_user_permission(repo=backend.repo_name,
                                          user=self.TEST_USER_LOGIN,
                                          perm=grant_perm)
        Session().commit()
        id_, params = build_data(
            self.apikey_regular, 'get_repo', repoid=backend.repo_name)
        response = api_call(self.app, params)

        repo = RepoModel().get_by_repo_name(backend.repo_name)
        ret = repo.get_api_data()

        # BUG FIX: this used to be `assert permission_count + 1, len(...)`,
        # a bare assert on a truthy int with the length as the *message*,
        # so it could never fail. Compare the values instead.
        assert permission_count + 1 == len(repo.repo_to_perm)

        permissions = expected_permissions(repo)

        followers = []

        # re-fetch a fresh instance before iterating followers (see above)
        repo = RepoModel().get(repo_id)
        for user in repo.followers:
            followers.append(user.user.get_api_data())

        ret['permissions'] = permissions
        ret['followers'] = followers

        expected = ret
        try:
            assert_ok(id_, expected, given=response.body)
        finally:
            # always restore permissions so later tests are unaffected
            RepoModel().revoke_user_permission(
                backend.repo_name, self.TEST_USER_LOGIN)

    def test_api_get_repo_by_non_admin_no_permission_to_repo(self, backend):
        RepoModel().grant_user_permission(repo=backend.repo_name,
                                          user=self.TEST_USER_LOGIN,
                                          perm='repository.none')

        id_, params = build_data(
            self.apikey_regular, 'get_repo', repoid=backend.repo_name)
        response = api_call(self.app, params)

        # repos the user cannot read are reported as non-existing
        expected = 'repository `%s` does not exist' % (backend.repo_name)
        assert_error(id_, expected, given=response.body)

    def test_api_get_repo_not_existing(self):
        id_, params = build_data(
            self.apikey, 'get_repo', repoid='no-such-repo')
        response = api_call(self.app, params)

        ret = 'repository `%s` does not exist' % 'no-such-repo'
        expected = ret
        assert_error(id_, expected, given=response.body)
@@ -1,5155 +1,5154 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import warnings
33 33 import ipaddress
34 34 import functools
35 35 import traceback
36 36 import collections
37 37
38 38 from sqlalchemy import (
39 39 or_, and_, not_, func, TypeDecorator, event,
40 40 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
41 41 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
42 42 Text, Float, PickleType)
43 43 from sqlalchemy.sql.expression import true, false, case
44 44 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
45 45 from sqlalchemy.orm import (
46 46 relationship, joinedload, class_mapper, validates, aliased)
47 47 from sqlalchemy.ext.declarative import declared_attr
48 48 from sqlalchemy.ext.hybrid import hybrid_property
49 49 from sqlalchemy.exc import IntegrityError # pragma: no cover
50 50 from sqlalchemy.dialects.mysql import LONGTEXT
51 51 from zope.cachedescriptors.property import Lazy as LazyProperty
52 52 from pyramid import compat
53 53 from pyramid.threadlocal import get_current_request
54 54 from webhelpers.text import collapse, remove_formatting
55 55
56 56 from rhodecode.translation import _
57 57 from rhodecode.lib.vcs import get_vcs_instance
58 58 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
59 59 from rhodecode.lib.utils2 import (
60 60 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
61 61 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
62 62 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
63 63 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
64 64 JsonRaw
65 65 from rhodecode.lib.ext_json import json
66 66 from rhodecode.lib.caching_query import FromCache
67 67 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
68 68 from rhodecode.lib.encrypt2 import Encryptor
69 69 from rhodecode.model.meta import Base, Session
70 70
71 71 URL_SEP = '/'
72 72 log = logging.getLogger(__name__)
73 73
74 74 # =============================================================================
75 75 # BASE CLASSES
76 76 # =============================================================================
77 77
78 78 # this is propagated from .ini file rhodecode.encrypted_values.secret or
79 79 # beaker.session.secret if first is not set.
80 80 # and initialized at environment.py
81 81 ENCRYPTION_KEY = None
82 82
83 83 # used to sort permissions by types, '#' used here is not allowed to be in
84 84 # usernames, and it's very early in sorted string.printable table.
85 85 PERMISSION_TYPE_SORT = {
86 86 'admin': '####',
87 87 'write': '###',
88 88 'read': '##',
89 89 'none': '#',
90 90 }
91 91
92 92
def display_user_sort(obj):
    """
    Sort key used by the .permissions() helpers of Repository, RepoGroup
    and UserGroup; the default user is always forced to the front, other
    entries sort by permission level first, then username.
    """

    # the default user always wins (sorts before every '#'-prefixed key)
    if obj.username == User.DEFAULT_USER:
        return '#####'
    perm_suffix = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_suffix, '') + obj.username
104 104
105 105
def display_user_group_sort(obj):
    """
    Sort key used by the .permissions() helpers of Repository, RepoGroup
    and UserGroup; entries sort by permission level first, then by the
    user group name.
    """

    perm_suffix = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_suffix, '') + obj.users_group_name
115 115
116 116
def _hash_key(k):
    """Return a safe sha1 digest of *k*, used to build cache keys."""
    return sha1_safe(k)
119 119
120 120
def in_filter_generator(qry, items, limit=500):
    """
    Splits IN() into multiple with OR
    e.g.::
        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()
    """
    if not items:
        # empty list will cause empty query which might cause security issues
        # this can lead to hidden unpleasant results
        items = [-1]

    # chunk the input and build one IN() clause per chunk
    return [
        qry.in_(items[start: start + limit])
        for start in xrange(0, len(items), limit)
    ]
142 142
143 143
# common SQLAlchemy __table_args__ shared by every model table:
# `extend_existing` tolerates re-declaration on repeated imports,
# the mysql_* keys pin engine/charset on MySQL, and
# `sqlite_autoincrement` enables monotonic rowids on SQLite
base_table_args = {
    'extend_existing': True,
    'mysql_engine': 'InnoDB',
    'mysql_charset': 'utf8',
    'sqlite_autoincrement': True
}
150 150
151 151
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        """
        Setter for storing value: encrypts *value* with the algorithm
        configured in ``rhodecode.encrypted_values.algorithm`` (aes/fernet).

        :raises ValueError: if the value is already encrypted, or the
            configured algorithm is unknown.
        """
        import rhodecode
        if not value:
            return value

        # protect against double encrypting if values is already encrypted
        if value.startswith('enc$aes$') \
                or value.startswith('enc$aes_hmac$') \
                or value.startswith('enc2$'):
            raise ValueError('value needs to be in unencrypted format, '
                             'ie. not starting with enc$ or enc2$')

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        if algo == 'aes':
            return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).encrypt(value)
        else:
            # BUG FIX: the ValueError was previously constructed but never
            # raised, silently returning None for unknown algorithms
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))

    def process_result_value(self, value, dialect):
        """
        Getter for retrieving value: decrypts the stored value using the
        configured algorithm.

        :raises ValueError: if the configured algorithm is unknown.
        """

        import rhodecode
        if not value:
            return value

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
        if algo == 'aes':
            decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).decrypt(value)
        else:
            # BUG FIX: previously the ValueError was constructed but not
            # raised, which then crashed with NameError on decrypted_data
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
        return decrypted_data
204 204
205 205
class BaseModel(object):
    """
    Base Model for all classes: generic dict/appstruct conversion and
    session-bound query/get/delete classmethod helpers shared by every
    mapped model in this module.
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        # fresh query bound to the current thread-local Session
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        """Fetch by primary key; returns None for a falsy id without querying."""
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by primary key or raise pyramid HTTPNotFound (also for
        non-integer ids)."""
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        """Return all rows of this model."""
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        """Delete the row with the given primary key (commit is the
        caller's responsibility)."""
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """Look up an instance of this class already present in the
        session's identity map by attribute value, avoiding a DB query.
        Returns the instance only when exactly one match exists."""
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            # more than one hit is ambiguous; log loudly and return None
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
310 310
311 311
class RhodeCodeSetting(Base, BaseModel):
    """Global application settings stored as typed key/value rows; values
    are converted to/from unicode and optionally encrypted at rest."""
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # converters used to deserialize the stored string value into the
    # python type recorded in app_settings_type
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        # type must be set before value, since the value setter consults it
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # stored values must always be unicode (python 2 codebase)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        """Return the stored value converted to its declared python type,
        transparently decrypting '*.encrypted' settings first."""
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # strip an optional '.encrypted' suffix to get the base type
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # only base types from SETTINGS_TYPES are allowed (optionally with
        # a '.encrypted' suffix)
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        """Return all settings whose name starts with *prefix*."""
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
396 396
397 397
class RhodeCodeUi(Base, BaseModel):
    """Global VCS 'ui' configuration entries (hook registrations, SVN
    branch/tag patterns, etc.), keyed by section + key."""
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    # hooks shipped and managed by RhodeCode itself
    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
445 445
446 446
class RepoRhodeCodeSetting(Base, BaseModel):
    """Per-repository settings overriding the global RhodeCodeSetting
    entries; same typed key/value scheme, scoped by repository_id.
    NOTE: unlike RhodeCodeSetting, values here are not encrypted."""
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        # type must be set before value, mirroring RhodeCodeSetting
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # stored values must always be unicode (python 2 codebase)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        """Return the stored value converted to its declared python type,
        reusing the converter table from RhodeCodeSetting."""
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # only types declared in RhodeCodeSetting.SETTINGS_TYPES are allowed
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
520 520
521 521
class RepoRhodeCodeUi(Base, BaseModel):
    """Per-repository VCS 'ui' configuration entries overriding the global
    RhodeCodeUi values; unique per (repository, section, key)."""
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
552 552
553 553
554 554 class User(Base, BaseModel):
555 555 __tablename__ = 'users'
556 556 __table_args__ = (
557 557 UniqueConstraint('username'), UniqueConstraint('email'),
558 558 Index('u_username_idx', 'username'),
559 559 Index('u_email_idx', 'email'),
560 560 base_table_args
561 561 )
562 562
563 563 DEFAULT_USER = 'default'
564 564 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
565 565 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
566 566
567 567 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
568 568 username = Column("username", String(255), nullable=True, unique=None, default=None)
569 569 password = Column("password", String(255), nullable=True, unique=None, default=None)
570 570 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
571 571 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
572 572 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
573 573 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
574 574 _email = Column("email", String(255), nullable=True, unique=None, default=None)
575 575 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
576 576 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
577 577
578 578 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
579 579 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
580 580 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
581 581 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
582 582 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
583 583 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
584 584
585 585 user_log = relationship('UserLog')
586 586 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
587 587
588 588 repositories = relationship('Repository')
589 589 repository_groups = relationship('RepoGroup')
590 590 user_groups = relationship('UserGroup')
591 591
592 592 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
593 593 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
594 594
595 595 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
596 596 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
597 597 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
598 598
599 599 group_member = relationship('UserGroupMember', cascade='all')
600 600
601 601 notifications = relationship('UserNotification', cascade='all')
602 602 # notifications assigned to this user
603 603 user_created_notifications = relationship('Notification', cascade='all')
604 604 # comments created by this user
605 605 user_comments = relationship('ChangesetComment', cascade='all')
606 606 # user profile extra info
607 607 user_emails = relationship('UserEmailMap', cascade='all')
608 608 user_ip_map = relationship('UserIpMap', cascade='all')
609 609 user_auth_tokens = relationship('UserApiKeys', cascade='all')
610 610 user_ssh_keys = relationship('UserSshKeys', cascade='all')
611 611
612 612 # gists
613 613 user_gists = relationship('Gist', cascade='all')
614 614 # user pull requests
615 615 user_pull_requests = relationship('PullRequest', cascade='all')
616 616 # external identities
617 617 extenal_identities = relationship(
618 618 'ExternalIdentity',
619 619 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
620 620 cascade='all')
621 621 # review rules
622 622 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
623 623
624 624 def __unicode__(self):
625 625 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
626 626 self.user_id, self.username)
627 627
628 628 @hybrid_property
629 629 def email(self):
630 630 return self._email
631 631
632 632 @email.setter
633 633 def email(self, val):
634 634 self._email = val.lower() if val else None
635 635
636 636 @hybrid_property
637 637 def first_name(self):
638 638 from rhodecode.lib import helpers as h
639 639 if self.name:
640 640 return h.escape(self.name)
641 641 return self.name
642 642
643 643 @hybrid_property
644 644 def last_name(self):
645 645 from rhodecode.lib import helpers as h
646 646 if self.lastname:
647 647 return h.escape(self.lastname)
648 648 return self.lastname
649 649
650 650 @hybrid_property
651 651 def api_key(self):
652 652 """
653 653 Fetch if exist an auth-token with role ALL connected to this user
654 654 """
655 655 user_auth_token = UserApiKeys.query()\
656 656 .filter(UserApiKeys.user_id == self.user_id)\
657 657 .filter(or_(UserApiKeys.expires == -1,
658 658 UserApiKeys.expires >= time.time()))\
659 659 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
660 660 if user_auth_token:
661 661 user_auth_token = user_auth_token.api_key
662 662
663 663 return user_auth_token
664 664
665 665 @api_key.setter
666 666 def api_key(self, val):
667 667 # don't allow to set API key this is deprecated for now
668 668 self._api_key = None
669 669
670 670 @property
671 671 def reviewer_pull_requests(self):
672 672 return PullRequestReviewers.query() \
673 673 .options(joinedload(PullRequestReviewers.pull_request)) \
674 674 .filter(PullRequestReviewers.user_id == self.user_id) \
675 675 .all()
676 676
677 677 @property
678 678 def firstname(self):
679 679 # alias for future
680 680 return self.name
681 681
682 682 @property
683 683 def emails(self):
684 684 other = UserEmailMap.query()\
685 685 .filter(UserEmailMap.user == self) \
686 686 .order_by(UserEmailMap.email_id.asc()) \
687 687 .all()
688 688 return [self.email] + [x.email for x in other]
689 689
690 690 @property
691 691 def auth_tokens(self):
692 692 auth_tokens = self.get_auth_tokens()
693 693 return [x.api_key for x in auth_tokens]
694 694
695 695 def get_auth_tokens(self):
696 696 return UserApiKeys.query()\
697 697 .filter(UserApiKeys.user == self)\
698 698 .order_by(UserApiKeys.user_api_key_id.asc())\
699 699 .all()
700 700
701 701 @LazyProperty
702 702 def feed_token(self):
703 703 return self.get_feed_token()
704 704
705 705 def get_feed_token(self, cache=True):
706 706 feed_tokens = UserApiKeys.query()\
707 707 .filter(UserApiKeys.user == self)\
708 708 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
709 709 if cache:
710 710 feed_tokens = feed_tokens.options(
711 711 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
712 712
713 713 feed_tokens = feed_tokens.all()
714 714 if feed_tokens:
715 715 return feed_tokens[0].api_key
716 716 return 'NO_FEED_TOKEN_AVAILABLE'
717 717
718 718 @classmethod
719 719 def get(cls, user_id, cache=False):
720 720 if not user_id:
721 721 return
722 722
723 723 user = cls.query()
724 724 if cache:
725 725 user = user.options(
726 726 FromCache("sql_cache_short", "get_users_%s" % user_id))
727 727 return user.get(user_id)
728 728
729 729 @classmethod
730 730 def extra_valid_auth_tokens(cls, user, role=None):
731 731 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
732 732 .filter(or_(UserApiKeys.expires == -1,
733 733 UserApiKeys.expires >= time.time()))
734 734 if role:
735 735 tokens = tokens.filter(or_(UserApiKeys.role == role,
736 736 UserApiKeys.role == UserApiKeys.ROLE_ALL))
737 737 return tokens.all()
738 738
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check whether ``auth_token`` is a valid, non-expired token of this
        user for any of the given ``roles``.

        :param auth_token: plain token value supplied by the caller
        :param roles: list of acceptable token roles; ROLE_ALL is always
            added to the accepted set
        :param scope_repo_id: repo id of the calling scope; a repo-scoped
            token only matches when its repo_id equals this value
        :return: True on a successful match, False otherwise
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        # only consider tokens that never expire (-1) or have not expired yet
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # split stored tokens into plain values and encrypted hashes,
        # recognized by the crypto backend's prefix
        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted user tokens to check for authentication',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                # repo-scoped token: only valid for exactly that repository
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False
795 795
796 796 @property
797 797 def ip_addresses(self):
798 798 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
799 799 return [x.ip_addr for x in ret]
800 800
801 801 @property
802 802 def username_and_name(self):
803 803 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
804 804
805 805 @property
806 806 def username_or_name_or_email(self):
807 807 full_name = self.full_name if self.full_name is not ' ' else None
808 808 return self.username or full_name or self.email
809 809
810 810 @property
811 811 def full_name(self):
812 812 return '%s %s' % (self.first_name, self.last_name)
813 813
814 814 @property
815 815 def full_name_or_username(self):
816 816 return ('%s %s' % (self.first_name, self.last_name)
817 817 if (self.first_name and self.last_name) else self.username)
818 818
819 819 @property
820 820 def full_contact(self):
821 821 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
822 822
823 823 @property
824 824 def short_contact(self):
825 825 return '%s %s' % (self.first_name, self.last_name)
826 826
827 827 @property
828 828 def is_admin(self):
829 829 return self.admin
830 830
831 831 def AuthUser(self, **kwargs):
832 832 """
833 833 Returns instance of AuthUser for this user
834 834 """
835 835 from rhodecode.lib.auth import AuthUser
836 836 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
837 837
838 838 @hybrid_property
839 839 def user_data(self):
840 840 if not self._user_data:
841 841 return {}
842 842
843 843 try:
844 844 return json.loads(self._user_data)
845 845 except TypeError:
846 846 return {}
847 847
848 848 @user_data.setter
849 849 def user_data(self, val):
850 850 if not isinstance(val, dict):
851 851 raise Exception('user_data must be dict, got %s' % type(val))
852 852 try:
853 853 self._user_data = json.dumps(val)
854 854 except Exception:
855 855 log.error(traceback.format_exc())
856 856
    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Look up a user by username.

        :param case_insensitive: compare usernames lower-cased
        :param cache: enable caching of the result
        :param identity_cache: when caching, use the session identity-map
            based cache instead of the dogpile SQL cache
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                # identity cache may miss; fall through to a plain query then
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
879 879
880 880 @classmethod
881 881 def get_by_auth_token(cls, auth_token, cache=False):
882 882 q = UserApiKeys.query()\
883 883 .filter(UserApiKeys.api_key == auth_token)\
884 884 .filter(or_(UserApiKeys.expires == -1,
885 885 UserApiKeys.expires >= time.time()))
886 886 if cache:
887 887 q = q.options(
888 888 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
889 889
890 890 match = q.first()
891 891 if match:
892 892 return match.user
893 893
    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """
        Look up a user by email address.  Falls back to the alternative
        email map (UserEmailMap) when no user row matches directly.
        """
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        email_key = _hash_key(email)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_email_key_%s" % email_key))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
            # scalar() may be None; getattr preserves the None fall-through
            ret = getattr(q.scalar(), 'user', None)

        return ret
923 923
924 924 @classmethod
925 925 def get_from_cs_author(cls, author):
926 926 """
927 927 Tries to get User objects out of commit author string
928 928
929 929 :param author:
930 930 """
931 931 from rhodecode.lib.helpers import email, author_name
932 932 # Valid email in the attribute passed, see if they're in the system
933 933 _email = email(author)
934 934 if _email:
935 935 user = cls.get_by_email(_email, case_insensitive=True)
936 936 if user:
937 937 return user
938 938 # Maybe we can match by username?
939 939 _author = author_name(author)
940 940 user = cls.get_by_username(_author, case_insensitive=True)
941 941 if user:
942 942 return user
943 943
944 944 def update_userdata(self, **kwargs):
945 945 usr = self
946 946 old = usr.user_data
947 947 old.update(**kwargs)
948 948 usr.user_data = old
949 949 Session().add(usr)
950 950 log.debug('updated userdata with ', kwargs)
951 951
952 952 def update_lastlogin(self):
953 953 """Update user lastlogin"""
954 954 self.last_login = datetime.datetime.now()
955 955 Session().add(self)
956 956 log.debug('updated user %s lastlogin', self.username)
957 957
958 958 def update_password(self, new_password):
959 959 from rhodecode.lib.auth import get_crypt_password
960 960
961 961 self.password = get_crypt_password(new_password)
962 962 Session().add(self)
963 963
964 964 @classmethod
965 965 def get_first_super_admin(cls):
966 966 user = User.query()\
967 967 .filter(User.admin == true()) \
968 968 .order_by(User.user_id.asc()) \
969 969 .first()
970 970
971 971 if user is None:
972 972 raise Exception('FATAL: Missing administrative account!')
973 973 return user
974 974
975 975 @classmethod
976 976 def get_all_super_admins(cls, only_active=False):
977 977 """
978 978 Returns all admin accounts sorted by username
979 979 """
980 980 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
981 981 if only_active:
982 982 qry = qry.filter(User.active == true())
983 983 return qry.all()
984 984
985 985 @classmethod
986 986 def get_default_user(cls, cache=False, refresh=False):
987 987 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
988 988 if user is None:
989 989 raise Exception('FATAL: Missing default account!')
990 990 if refresh:
991 991 # The default user might be based on outdated state which
992 992 # has been loaded from the cache.
993 993 # A call to refresh() ensures that the
994 994 # latest state from the database is used.
995 995 Session().refresh(user)
996 996 return user
997 997
998 998 def _get_default_perms(self, user, suffix=''):
999 999 from rhodecode.model.permission import PermissionModel
1000 1000 return PermissionModel().get_default_perms(user.user_perms, suffix)
1001 1001
1002 1002 def get_default_perms(self, suffix=''):
1003 1003 return self._get_default_perms(self, suffix)
1004 1004
    def get_api_data(self, include_secrets=False, details='full'):
        """
        Common function for generating user related data for API

        :param include_secrets: By default secrets in the API data will be replaced
        by a placeholder value to prevent exposing this data by accident. In case
        this data shall be exposed, set this flag to ``True``.

        :param details: details can be 'basic|full' basic gives only a subset of
        the available user information that includes user_id, name and emails.
        """
        user = self
        user_data = self.user_data
        # basic subset, always returned
        data = {
            'user_id': user.user_id,
            'username': user.username,
            'firstname': user.name,
            'lastname': user.lastname,
            'email': user.email,
            'emails': user.emails,
        }
        if details == 'basic':
            return data

        # secrets are masked with a fixed-width placeholder unless
        # include_secrets is set
        auth_token_length = 40
        auth_token_replacement = '*' * auth_token_length

        extras = {
            'auth_tokens': [auth_token_replacement],
            'active': user.active,
            'admin': user.admin,
            'extern_type': user.extern_type,
            'extern_name': user.extern_name,
            'last_login': user.last_login,
            'last_activity': user.last_activity,
            'ip_addresses': user.ip_addresses,
            'language': user_data.get('language')
        }
        data.update(extras)

        if include_secrets:
            data['auth_tokens'] = user.auth_tokens
        return data
1048 1048
1049 1049 def __json__(self):
1050 1050 data = {
1051 1051 'full_name': self.full_name,
1052 1052 'full_name_or_username': self.full_name_or_username,
1053 1053 'short_contact': self.short_contact,
1054 1054 'full_contact': self.full_contact,
1055 1055 }
1056 1056 data.update(self.get_api_data())
1057 1057 return data
1058 1058
1059 1059
class UserApiKeys(Base, BaseModel):
    """
    Authentication tokens attached to a user.

    A token carries a role restricting what it can be used for, an
    ``expires`` timestamp (``-1`` means it never expires) and an optional
    scope limiting it to a single repository or repository group.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key', unique=True),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLE_PASSWORD_RESET = 'token_password_reset'

    # NOTE(review): ROLE_PASSWORD_RESET is absent from this list —
    # presumably internal-only and not user-assignable; confirm
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        # plain-token value included here; get_api_data masks it by default
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        """API representation; token obfuscated unless ``include_secrets``."""
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe to render."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        """True when the token's expiry timestamp has passed (-1 = never)."""
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # unknown roles fall through unchanged via the dict .get default
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
        }.get(role, role)

    @property
    def role_humanized(self):
        """Translated, human-readable role name."""
        return self._get_role_name(self.role)

    def _get_scope(self):
        # repo scope takes precedence over repo-group scope
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        """Human-readable description of the token's scope."""
        return self._get_scope()

    @property
    def token_obfuscated(self):
        """First four characters of the token followed by ``****``."""
        if self.api_key:
            return self.api_key[:4] + "****"
1160 1160
1161 1161
class UserEmailMap(Base, BaseModel):
    """
    Alternative (additional) email addresses for a user.

    Emails are stored lower-cased and must not duplicate any primary
    email from the users table.
    """
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            raise AttributeError('email %s is present is user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lower case; empty values become None
        self._email = val.lower() if val else None
1191 1191
1192 1192
class UserIpMap(Base, BaseModel):
    """
    Per-user IP whitelist entries; each row may be a single address or a
    network in CIDR notation.
    """
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe to render."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return [network_address, broadcast_address] of the entry's network."""
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
1227 1227
1228 1228
class UserSshKeys(Base, BaseModel):
    """
    SSH public keys registered for a user, identified by a unique
    fingerprint.
    """
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        # NOTE: key data itself is intentionally not serialized here
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        data = self.__json__()
        return data
1263 1263
1264 1264
class UserLog(Base, BaseModel):
    """
    Audit-log entries: who did what action, on which repository, when.

    Two entry format versions exist (``v1``/``v2``); v2 entries carry
    structured JSON payloads in ``user_data``/``action_data``.
    """
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # user/repo references use SET NULL so log rows survive deletions;
    # the plain-text username/repository_name columns keep the history readable
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        """Alias of the primary key."""
        return self.user_log_id

    @property
    def action_as_day(self):
        """The calendar day (date) the action happened on."""
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1313 1313
1314 1314
class UserGroup(Base, BaseModel):
    """
    Named group of users, with an owner, an active flag and a JSON blob of
    extra group data (e.g. external-sync metadata).
    """
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    # NOTE(review): target class name below carries a trailing space —
    # appears to work since the string is evaluated, but worth cleaning up
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")

    @classmethod
    def _load_group_data(cls, column):
        """Deserialize the raw group_data column; empty dict on failure."""
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe to render."""
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        # SQL-side expression: raw column value
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        try:
            self._group_data = json.dumps(val)
        except Exception:
            # best-effort persistence: log serialization failures, keep going
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        """Extract the external-sync type from a group_data dict, if any."""
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        """External-sync type of this group, or None."""
        return self._load_sync(self.group_data)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Look up a user group by name, optionally case-insensitive/cached."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a user group by primary key, optionally via SQL cache."""
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """User groups granted a permission on this group (as target)."""
        q = UserGroupUserGroupToPerm.query()\
            .filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.user_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.user_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        """Collect default permission entries via PermissionModel."""
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.

        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1520 1520
1521 1521
class UserGroupMember(Base, BaseModel):
    """Association row linking a user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # convenience constructor taking the two foreign keys directly
        self.users_group_id = gr_id
        self.user_id = u_id
1538 1538
1539 1539
class RepositoryField(Base, BaseModel):
    """
    Custom extra field attached to a repository (key, label, value, type).

    Field keys are unique per repository and are exposed in forms with the
    ``ex_`` prefix to avoid clashing with built-in form fields.
    """
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """The field key with the form prefix applied."""
        return 'ex_%s' % self.field_key

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form prefix from ``key`` if present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Fetch the field row for ``key`` on ``repo``, or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1576 1576
1577 1577
class Repository(Base, BaseModel):
    """ORM model of a single repository plus its scm-level helpers."""
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # templates rendered by clone_url()
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # lifecycle states stored in `repo_state`
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # origin markers for repository locks (see lock()/unlock())
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    # separator encoding repo-group nesting inside repo names
    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # exposed via the `repo_name` hybrid property below
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    _repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    # remote uris are stored encrypted at rest
    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # exposed via the `landing_rev` hybrid property below
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # 'user_id:time:reason' string, exposed via the `locked` hybrid property
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    # serialized last-commit metadata, exposed via `changeset_cache`
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True) # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    # self-referential: a fork points back at its parent repository row
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration',
                                cascade="all, delete, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")
1685 1685
1686 1686 def __unicode__(self):
1687 1687 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1688 1688 safe_unicode(self.repo_name))
1689 1689
1690 1690 @hybrid_property
1691 1691 def description_safe(self):
1692 1692 from rhodecode.lib import helpers as h
1693 1693 return h.escape(self.description)
1694 1694
1695 1695 @hybrid_property
1696 1696 def landing_rev(self):
1697 1697 # always should return [rev_type, rev]
1698 1698 if self._landing_revision:
1699 1699 _rev_info = self._landing_revision.split(':')
1700 1700 if len(_rev_info) < 2:
1701 1701 _rev_info.insert(0, 'rev')
1702 1702 return [_rev_info[0], _rev_info[1]]
1703 1703 return [None, None]
1704 1704
1705 1705 @landing_rev.setter
1706 1706 def landing_rev(self, val):
1707 1707 if ':' not in val:
1708 1708 raise ValueError('value must be delimited with `:` and consist '
1709 1709 'of <rev_type>:<rev>, got %s instead' % val)
1710 1710 self._landing_revision = val
1711 1711
1712 1712 @hybrid_property
1713 1713 def locked(self):
1714 1714 if self._locked:
1715 1715 user_id, timelocked, reason = self._locked.split(':')
1716 1716 lock_values = int(user_id), timelocked, reason
1717 1717 else:
1718 1718 lock_values = [None, None, None]
1719 1719 return lock_values
1720 1720
1721 1721 @locked.setter
1722 1722 def locked(self, val):
1723 1723 if val and isinstance(val, (list, tuple)):
1724 1724 self._locked = ':'.join(map(str, val))
1725 1725 else:
1726 1726 self._locked = None
1727 1727
    @hybrid_property
    def changeset_cache(self):
        """Cached metadata of the repository's last commit, as a dict.

        Falls back to an ``EmptyCommit`` payload when the cache column is
        unset or cannot be decoded.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not self._changeset_cache:
            dummy['source_repo_id'] = self.repo_id
            # round-trip through json so the fallback carries the exact same
            # (unicode) key/value types as a decoded cache entry
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            # NOTE(review): these fallbacks skip the source_repo_id /
            # json round-trip normalization done above -- confirm intentional
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy
1743 1743
1744 1744 @changeset_cache.setter
1745 1745 def changeset_cache(self, val):
1746 1746 try:
1747 1747 self._changeset_cache = json.dumps(val)
1748 1748 except Exception:
1749 1749 log.error(traceback.format_exc())
1750 1750
1751 1751 @hybrid_property
1752 1752 def repo_name(self):
1753 1753 return self._repo_name
1754 1754
1755 1755 @repo_name.setter
1756 1756 def repo_name(self, value):
1757 1757 self._repo_name = value
1758 1758 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1759 1759
1760 1760 @classmethod
1761 1761 def normalize_repo_name(cls, repo_name):
1762 1762 """
1763 1763 Normalizes os specific repo_name to the format internally stored inside
1764 1764 database using URL_SEP
1765 1765
1766 1766 :param cls:
1767 1767 :param repo_name:
1768 1768 """
1769 1769 return cls.NAME_SEP.join(repo_name.split(os.sep))
1770 1770
    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        """Fetch a repository by its full name, or None.

        :param cache: use the short-lived SQL query cache
        :param identity_cache: prefer the session identity-map cache; only
            consulted when `cache` is also True
        """
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
1787 1787
1788 1788 @classmethod
1789 1789 def get_by_id_or_repo_name(cls, repoid):
1790 1790 if isinstance(repoid, (int, long)):
1791 1791 try:
1792 1792 repo = cls.get(repoid)
1793 1793 except ValueError:
1794 1794 repo = None
1795 1795 else:
1796 1796 repo = cls.get_by_repo_name(repoid)
1797 1797 return repo
1798 1798
1799 1799 @classmethod
1800 1800 def get_by_full_path(cls, repo_full_path):
1801 1801 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1802 1802 repo_name = cls.normalize_repo_name(repo_name)
1803 1803 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1804 1804
1805 1805 @classmethod
1806 1806 def get_repo_forks(cls, repo_id):
1807 1807 return cls.query().filter(Repository.fork_id == repo_id)
1808 1808
    @classmethod
    def base_path(cls):
        """
        Returns base path when all repos are stored

        :param cls:
        """
        # the storage root lives in the RhodeCodeUi table under the
        # URL_SEP ('/') key; result is cached via the short SQL cache
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1820 1820
1821 1821 @classmethod
1822 1822 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1823 1823 case_insensitive=True, archived=False):
1824 1824 q = Repository.query()
1825 1825
1826 1826 if not archived:
1827 1827 q = q.filter(Repository.archived.isnot(true()))
1828 1828
1829 1829 if not isinstance(user_id, Optional):
1830 1830 q = q.filter(Repository.user_id == user_id)
1831 1831
1832 1832 if not isinstance(group_id, Optional):
1833 1833 q = q.filter(Repository.group_id == group_id)
1834 1834
1835 1835 if case_insensitive:
1836 1836 q = q.order_by(func.lower(Repository.repo_name))
1837 1837 else:
1838 1838 q = q.order_by(Repository.repo_name)
1839 1839
1840 1840 return q.all()
1841 1841
1842 1842 @property
1843 1843 def forks(self):
1844 1844 """
1845 1845 Return forks of this repo
1846 1846 """
1847 1847 return Repository.get_repo_forks(self.repo_id)
1848 1848
1849 1849 @property
1850 1850 def parent(self):
1851 1851 """
1852 1852 Returns fork parent
1853 1853 """
1854 1854 return self.fork
1855 1855
1856 1856 @property
1857 1857 def just_name(self):
1858 1858 return self.repo_name.split(self.NAME_SEP)[-1]
1859 1859
1860 1860 @property
1861 1861 def groups_with_parents(self):
1862 1862 groups = []
1863 1863 if self.group is None:
1864 1864 return groups
1865 1865
1866 1866 cur_gr = self.group
1867 1867 groups.insert(0, cur_gr)
1868 1868 while 1:
1869 1869 gr = getattr(cur_gr, 'parent_group', None)
1870 1870 cur_gr = cur_gr.parent_group
1871 1871 if gr is None:
1872 1872 break
1873 1873 groups.insert(0, gr)
1874 1874
1875 1875 return groups
1876 1876
1877 1877 @property
1878 1878 def groups_and_repo(self):
1879 1879 return self.groups_with_parents, self
1880 1880
    @LazyProperty
    def repo_path(self):
        """
        Returns base full path for that repository means where it actually
        exists on a filesystem
        """
        # same lookup (and same cache key) as base_path(); memoized per
        # instance through LazyProperty
        q = Session().query(RhodeCodeUi).filter(
            RhodeCodeUi.ui_key == self.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1891 1891
1892 1892 @property
1893 1893 def repo_full_path(self):
1894 1894 p = [self.repo_path]
1895 1895 # we need to split the name by / since this is how we store the
1896 1896 # names in the database, but that eventually needs to be converted
1897 1897 # into a valid system path
1898 1898 p += self.repo_name.split(self.NAME_SEP)
1899 1899 return os.path.join(*map(safe_unicode, p))
1900 1900
    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo
        """
        # keys are grouped under a per-repository invalidation namespace
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        return CacheKey.query()\
            .filter(CacheKey.cache_args == invalidation_namespace)\
            .order_by(CacheKey.cache_key)\
            .all()
1912 1912
1913 1913 @property
1914 1914 def cached_diffs_relative_dir(self):
1915 1915 """
1916 1916 Return a relative to the repository store path of cached diffs
1917 1917 used for safe display for users, who shouldn't know the absolute store
1918 1918 path
1919 1919 """
1920 1920 return os.path.join(
1921 1921 os.path.dirname(self.repo_name),
1922 1922 self.cached_diffs_dir.split(os.path.sep)[-1])
1923 1923
1924 1924 @property
1925 1925 def cached_diffs_dir(self):
1926 1926 path = self.repo_full_path
1927 1927 return os.path.join(
1928 1928 os.path.dirname(path),
1929 1929 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1930 1930
1931 1931 def cached_diffs(self):
1932 1932 diff_cache_dir = self.cached_diffs_dir
1933 1933 if os.path.isdir(diff_cache_dir):
1934 1934 return os.listdir(diff_cache_dir)
1935 1935 return []
1936 1936
1937 1937 def shadow_repos(self):
1938 1938 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1939 1939 return [
1940 1940 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1941 1941 if x.startswith(shadow_repos_pattern)]
1942 1942
1943 1943 def get_new_name(self, repo_name):
1944 1944 """
1945 1945 returns new full repository name based on assigned group and new new
1946 1946
1947 1947 :param group_name:
1948 1948 """
1949 1949 path_prefix = self.group.full_path_splitted if self.group else []
1950 1950 return self.NAME_SEP.join(path_prefix + [repo_name])
1951 1951
1952 1952 @property
1953 1953 def _config(self):
1954 1954 """
1955 1955 Returns db based config object.
1956 1956 """
1957 1957 from rhodecode.lib.utils import make_db_config
1958 1958 return make_db_config(clear_session=False, repo=self)
1959 1959
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repositories

        :param with_admins: include super-admin rows (marked ``admin_row``)
        :param with_owner: include the owner row (marked ``owner_row``)
        :param expand_from_user_groups: additionally expand user-group
            permissions into per-member rows
        :return: super-admin rows + owner row + sorted per-user permission
            rows (+ expanded user-group member rows)
        """
        _admin_perm = 'repository.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            usr.permission_id = None
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                # NOTE(review): owner_row[0] assumes with_owner was True --
                # confirm callers never pass with_admins without with_owner
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    usr.permission_id = None
                    super_admin_rows.append(usr)

        q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
        q = q.options(joinedload(UserRepoToPerm.repository),
                      joinedload(UserRepoToPerm.user),
                      joinedload(UserRepoToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            # also check if this permission is maybe used by branch_permissions
            if _usr.branch_perm_entry:
                usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]

            usr.permission = _usr.permission.permission_name
            usr.permission_id = _usr.repo_to_perm_id
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
2026 2026
2027 2027 def permission_user_groups(self, with_members=True):
2028 2028 q = UserGroupRepoToPerm.query()\
2029 2029 .filter(UserGroupRepoToPerm.repository == self)
2030 2030 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2031 2031 joinedload(UserGroupRepoToPerm.users_group),
2032 2032 joinedload(UserGroupRepoToPerm.permission),)
2033 2033
2034 2034 perm_rows = []
2035 2035 for _user_group in q.all():
2036 2036 entry = AttributeDict(_user_group.users_group.get_dict())
2037 2037 entry.permission = _user_group.permission.permission_name
2038 2038 if with_members:
2039 2039 entry.members = [x.user.get_dict()
2040 2040 for x in _user_group.users_group.members]
2041 2041 perm_rows.append(entry)
2042 2042
2043 2043 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2044 2044 return perm_rows
2045 2045
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # lock triple comes from the `locked` hybrid property
        _user_id, _time, _reason = self.locked

        # NOTE: keys below form the public API contract -- do not rename
        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            # expose extra fields under their 'ex_'-prefixed keys
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2094 2094
2095 2095 @classmethod
2096 2096 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2097 2097 if not lock_time:
2098 2098 lock_time = time.time()
2099 2099 if not lock_reason:
2100 2100 lock_reason = cls.LOCK_AUTOMATIC
2101 2101 repo.locked = [user_id, lock_time, lock_reason]
2102 2102 Session().add(repo)
2103 2103 Session().commit()
2104 2104
2105 2105 @classmethod
2106 2106 def unlock(cls, repo):
2107 2107 repo.locked = None
2108 2108 Session().add(repo)
2109 2109 Session().commit()
2110 2110
2111 2111 @classmethod
2112 2112 def getlock(cls, repo):
2113 2113 return repo.locked
2114 2114
2115 2115 def is_user_lock(self, user_id):
2116 2116 if self.lock[0]:
2117 2117 lock_user_id = safe_int(self.lock[0])
2118 2118 user_id = safe_int(user_id)
2119 2119 # both are ints, and they are equal
2120 2120 return all([lock_user_id, user_id]) and lock_user_id == user_id
2121 2121
2122 2122 return False
2123 2123
2124 2124 def get_locking_state(self, action, user_id, only_when_enabled=True):
2125 2125 """
2126 2126 Checks locking on this repository, if locking is enabled and lock is
2127 2127 present returns a tuple of make_lock, locked, locked_by.
2128 2128 make_lock can have 3 states None (do nothing) True, make lock
2129 2129 False release lock, This value is later propagated to hooks, which
2130 2130 do the locking. Think about this as signals passed to hooks what to do.
2131 2131
2132 2132 """
2133 2133 # TODO: johbo: This is part of the business logic and should be moved
2134 2134 # into the RepositoryModel.
2135 2135
2136 2136 if action not in ('push', 'pull'):
2137 2137 raise ValueError("Invalid action value: %s" % repr(action))
2138 2138
2139 2139 # defines if locked error should be thrown to user
2140 2140 currently_locked = False
2141 2141 # defines if new lock should be made, tri-state
2142 2142 make_lock = None
2143 2143 repo = self
2144 2144 user = User.get(user_id)
2145 2145
2146 2146 lock_info = repo.locked
2147 2147
2148 2148 if repo and (repo.enable_locking or not only_when_enabled):
2149 2149 if action == 'push':
2150 2150 # check if it's already locked !, if it is compare users
2151 2151 locked_by_user_id = lock_info[0]
2152 2152 if user.user_id == locked_by_user_id:
2153 2153 log.debug(
2154 2154 'Got `push` action from user %s, now unlocking', user)
2155 2155 # unlock if we have push from user who locked
2156 2156 make_lock = False
2157 2157 else:
2158 2158 # we're not the same user who locked, ban with
2159 2159 # code defined in settings (default is 423 HTTP Locked) !
2160 2160 log.debug('Repo %s is currently locked by %s', repo, user)
2161 2161 currently_locked = True
2162 2162 elif action == 'pull':
2163 2163 # [0] user [1] date
2164 2164 if lock_info[0] and lock_info[1]:
2165 2165 log.debug('Repo %s is currently locked by %s', repo, user)
2166 2166 currently_locked = True
2167 2167 else:
2168 2168 log.debug('Setting lock on repo %s by %s', repo, user)
2169 2169 make_lock = True
2170 2170
2171 2171 else:
2172 2172 log.debug('Repository %s do not have locking enabled', repo)
2173 2173
2174 2174 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2175 2175 make_lock, currently_locked, lock_info)
2176 2176
2177 2177 from rhodecode.lib.auth import HasRepoPermissionAny
2178 2178 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2179 2179 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2180 2180 # if we don't have at least write permission we cannot make a lock
2181 2181 log.debug('lock state reset back to FALSE due to lack '
2182 2182 'of at least read permission')
2183 2183 make_lock = False
2184 2184
2185 2185 return make_lock, currently_locked, lock_info
2186 2186
2187 2187 @property
2188 2188 def last_commit_cache_update_diff(self):
2189 2189 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2190 2190
2191 2191 @property
2192 2192 def last_commit_change(self):
2193 2193 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2194 2194 empty_date = datetime.datetime.fromtimestamp(0)
2195 2195 date_latest = self.changeset_cache.get('date', empty_date)
2196 2196 try:
2197 2197 return parse_datetime(date_latest)
2198 2198 except Exception:
2199 2199 return empty_date
2200 2200
2201 2201 @property
2202 2202 def last_db_change(self):
2203 2203 return self.updated_on
2204 2204
2205 2205 @property
2206 2206 def clone_uri_hidden(self):
2207 2207 clone_uri = self.clone_uri
2208 2208 if clone_uri:
2209 2209 import urlobject
2210 2210 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2211 2211 if url_obj.password:
2212 2212 clone_uri = url_obj.with_password('*****')
2213 2213 return clone_uri
2214 2214
2215 2215 @property
2216 2216 def push_uri_hidden(self):
2217 2217 push_uri = self.push_uri
2218 2218 if push_uri:
2219 2219 import urlobject
2220 2220 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2221 2221 if url_obj.password:
2222 2222 push_uri = url_obj.with_password('*****')
2223 2223 return push_uri
2224 2224
    def clone_url(self, **override):
        """Render a clone url for this repository.

        Recognized override keys (consumed, not passed through):
        ``with_id`` use the id-based template, ``uri_tmpl`` explicit
        template, ``ssh`` use the ssh template. Remaining overrides go to
        `get_clone_url`.
        """
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        if not uri_tmpl:
            # fall back to the configured (or built-in default) template
            rc_config = SettingsModel().get_all_settings(cache=True)
            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        request = get_current_request()
        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id, **override)
2257 2257
2258 2258 def set_state(self, state):
2259 2259 self.repo_state = state
2260 2260 Session().add(self)
2261 2261 #==========================================================================
2262 2262 # SCM PROPERTIES
2263 2263 #==========================================================================
2264 2264
2265 2265 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2266 2266 return get_commit_safe(
2267 2267 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2268 2268
2269 2269 def get_changeset(self, rev=None, pre_load=None):
2270 2270 warnings.warn("Use get_commit", DeprecationWarning)
2271 2271 commit_id = None
2272 2272 commit_idx = None
2273 2273 if isinstance(rev, compat.string_types):
2274 2274 commit_id = rev
2275 2275 else:
2276 2276 commit_idx = rev
2277 2277 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2278 2278 pre_load=pre_load)
2279 2279
2280 2280 def get_landing_commit(self):
2281 2281 """
2282 2282 Returns landing commit, or if that doesn't exist returns the tip
2283 2283 """
2284 2284 _rev_type, _rev = self.landing_rev
2285 2285 commit = self.get_commit(_rev)
2286 2286 if isinstance(commit, EmptyCommit):
2287 2287 return self.get_commit()
2288 2288 return commit
2289 2289
2290 2290 def update_commit_cache(self, cs_cache=None, config=None):
2291 2291 """
2292 2292 Update cache of last changeset for repository, keys should be::
2293 2293
2294 2294 source_repo_id
2295 2295 short_id
2296 2296 raw_id
2297 2297 revision
2298 2298 parents
2299 2299 message
2300 2300 date
2301 2301 author
2302 2302 updated_on
2303 2303
2304 2304 """
2305 2305 from rhodecode.lib.vcs.backends.base import BaseChangeset
2306 2306 if cs_cache is None:
2307 2307 # use no-cache version here
2308 2308 scm_repo = self.scm_instance(cache=False, config=config)
2309 2309
2310 2310 empty = scm_repo is None or scm_repo.is_empty()
2311 2311 if not empty:
2312 2312 cs_cache = scm_repo.get_commit(
2313 2313 pre_load=["author", "date", "message", "parents"])
2314 2314 else:
2315 2315 cs_cache = EmptyCommit()
2316 2316
2317 2317 if isinstance(cs_cache, BaseChangeset):
2318 2318 cs_cache = cs_cache.__json__()
2319 2319
2320 2320 def is_outdated(new_cs_cache):
2321 2321 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2322 2322 new_cs_cache['revision'] != self.changeset_cache['revision']):
2323 2323 return True
2324 2324 return False
2325 2325
2326 2326 # check if we have maybe already latest cached revision
2327 2327 if is_outdated(cs_cache) or not self.changeset_cache:
2328 2328 _default = datetime.datetime.utcnow()
2329 2329 last_change = cs_cache.get('date') or _default
2330 2330 # we check if last update is newer than the new value
2331 2331 # if yes, we use the current timestamp instead. Imagine you get
2332 2332 # old commit pushed 1y ago, we'd set last update 1y to ago.
2333 2333 last_change_timestamp = datetime_to_time(last_change)
2334 2334 current_timestamp = datetime_to_time(last_change)
2335 2335 if last_change_timestamp > current_timestamp:
2336 2336 cs_cache['date'] = _default
2337 2337
2338 2338 cs_cache['updated_on'] = time.time()
2339 2339 self.changeset_cache = cs_cache
2340 2340 Session().add(self)
2341 2341 Session().commit()
2342 2342
2343 2343 log.debug('updated repo %s with new commit cache %s',
2344 2344 self.repo_name, cs_cache)
2345 2345 else:
2346 2346 cs_cache = self.changeset_cache
2347 2347 cs_cache['updated_on'] = time.time()
2348 2348 self.changeset_cache = cs_cache
2349 2349 Session().add(self)
2350 2350 Session().commit()
2351 2351
2352 2352 log.debug('Skipping update_commit_cache for repo:`%s` '
2353 2353 'commit already with latest changes', self.repo_name)
2354 2354
2355 2355 @property
2356 2356 def tip(self):
2357 2357 return self.get_commit('tip')
2358 2358
2359 2359 @property
2360 2360 def author(self):
2361 2361 return self.tip.author
2362 2362
2363 2363 @property
2364 2364 def last_change(self):
2365 2365 return self.scm_instance().last_change
2366 2366
2367 2367 def get_comments(self, revisions=None):
2368 2368 """
2369 2369 Returns comments for this repository grouped by revisions
2370 2370
2371 2371 :param revisions: filter query by revisions only
2372 2372 """
2373 2373 cmts = ChangesetComment.query()\
2374 2374 .filter(ChangesetComment.repo == self)
2375 2375 if revisions:
2376 2376 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2377 2377 grouped = collections.defaultdict(list)
2378 2378 for cmt in cmts.all():
2379 2379 grouped[cmt.revision].append(cmt)
2380 2380 return grouped
2381 2381
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :return: dict of revision -> [status, status label, pr id, pr repo]
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # real statuses overwrite the default 'under review' entries above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
2423 2423
2424 2424 # ==========================================================================
2425 2425 # SCM CACHE INSTANCE
2426 2426 # ==========================================================================
2427 2427
    def scm_instance(self, **kwargs):
        """
        Return a vcs backend instance for this repository.

        :param config: optional custom vcs config; passing it always bypasses
            the cached instance
        :param cache: explicit cache toggle; when None the global
            ``vcs_full_cache`` setting decides
        """
        import rhodecode

        # Passing a config will not hit the cache currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use default global, else we have a full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            return self._get_instance_cached()
        return self._get_instance(cache=bool(cache), config=config)
2441 2441
    def _get_instance_cached(self):
        """Return a repo instance through the long-term cache region,
        recomputing it when the invalidation context signals a change."""
        from rhodecode.lib import rc_cache

        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id, context_id):
            # args are only part of the cache key; the instance is rebuilt here
            return self._get_instance()

        # we must use thread scoped cache here,
        # because each thread of gevent needs its own not shared connection and cache
        # we also alter `args` so the cache key is individual for every green thread.
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            args = (self.repo_id, inv_context_manager.cache_key)
            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

            log.debug('Repo instance fetched in %.3fs', inv_context_manager.compute_time)
            return instance
2471 2470
2472 2471 def _get_instance(self, cache=True, config=None):
2473 2472 config = config or self._config
2474 2473 custom_wire = {
2475 2474 'cache': cache # controls the vcs.remote cache
2476 2475 }
2477 2476 repo = get_vcs_instance(
2478 2477 repo_path=safe_str(self.repo_full_path),
2479 2478 config=config,
2480 2479 with_wire=custom_wire,
2481 2480 create=False,
2482 2481 _vcs_alias=self.repo_type)
2483 2482
2484 2483 return repo
2485 2484
2486 2485 def __json__(self):
2487 2486 return {'landing_rev': self.landing_rev}
2488 2487
2489 2488 def get_dict(self):
2490 2489
2491 2490 # Since we transformed `repo_name` to a hybrid property, we need to
2492 2491 # keep compatibility with the code which uses `repo_name` field.
2493 2492
2494 2493 result = super(Repository, self).get_dict()
2495 2494 result['repo_name'] = result.pop('_repo_name', None)
2496 2495 return result
2497 2496
2498 2497
class RepoGroup(Base, BaseModel):
    """Repository group model; groups nest via ``group_parent_id``."""
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # full path-like name; exposed through the `group_name` hybrid property
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    # lookup hash kept in sync by the `group_name` setter
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True) # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration', cascade="all, delete, delete-orphan")
2527 2526
2528 2527 def __init__(self, group_name='', parent_group=None):
2529 2528 self.group_name = group_name
2530 2529 self.parent_group = parent_group
2531 2530
2532 2531 def __unicode__(self):
2533 2532 return u"<%s('id:%s:%s')>" % (
2534 2533 self.__class__.__name__, self.group_id, self.group_name)
2535 2534
2536 2535 @hybrid_property
2537 2536 def group_name(self):
2538 2537 return self._group_name
2539 2538
2540 2539 @group_name.setter
2541 2540 def group_name(self, value):
2542 2541 self._group_name = value
2543 2542 self.group_name_hash = self.hash_repo_group_name(value)
2544 2543
    @hybrid_property
    def changeset_cache(self):
        """Deserialized commit-cache dict; falls back to an EmptyCommit
        payload when the stored cache is missing or unreadable."""
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not self._changeset_cache:
            dummy['source_repo_id'] = ''
            # round-trip through JSON so the fallback has the same value
            # types as a cache read from the DB
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            # stored value is not a string/bytes payload
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy
2560 2559
2561 2560 @changeset_cache.setter
2562 2561 def changeset_cache(self, val):
2563 2562 try:
2564 2563 self._changeset_cache = json.dumps(val)
2565 2564 except Exception:
2566 2565 log.error(traceback.format_exc())
2567 2566
2568 2567 @validates('group_parent_id')
2569 2568 def validate_group_parent_id(self, key, val):
2570 2569 """
2571 2570 Check cycle references for a parent group to self
2572 2571 """
2573 2572 if self.group_id and val:
2574 2573 assert val != self.group_id
2575 2574
2576 2575 return val
2577 2576
2578 2577 @hybrid_property
2579 2578 def description_safe(self):
2580 2579 from rhodecode.lib import helpers as h
2581 2580 return h.escape(self.group_description)
2582 2581
2583 2582 @classmethod
2584 2583 def hash_repo_group_name(cls, repo_group_name):
2585 2584 val = remove_formatting(repo_group_name)
2586 2585 val = safe_str(val).lower()
2587 2586 chars = []
2588 2587 for c in val:
2589 2588 if c not in string.ascii_letters:
2590 2589 c = str(ord(c))
2591 2590 chars.append(c)
2592 2591
2593 2592 return ''.join(chars)
2594 2593
    @classmethod
    def _generate_choice(cls, repo_group):
        """Build one (group_id, label) choice tuple for select2 widgets;
        the label joins the path segments and is marked as literal HTML."""
        from webhelpers.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)
2600 2599
2601 2600 @classmethod
2602 2601 def groups_choices(cls, groups=None, show_empty_group=True):
2603 2602 if not groups:
2604 2603 groups = cls.query().all()
2605 2604
2606 2605 repo_groups = []
2607 2606 if show_empty_group:
2608 2607 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2609 2608
2610 2609 repo_groups.extend([cls._generate_choice(x) for x in groups])
2611 2610
2612 2611 repo_groups = sorted(
2613 2612 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2614 2613 return repo_groups
2615 2614
    @classmethod
    def url_sep(cls):
        """Separator used in full group paths/URLs."""
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Fetch a single group by its full name.

        :param cache: use the short SQL cache for the lookup
        :param case_insensitive: compare names lower-cased
        """
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()
2632 2631
2633 2632 @classmethod
2634 2633 def get_user_personal_repo_group(cls, user_id):
2635 2634 user = User.get(user_id)
2636 2635 if user.username == User.DEFAULT_USER:
2637 2636 return None
2638 2637
2639 2638 return cls.query()\
2640 2639 .filter(cls.personal == true()) \
2641 2640 .filter(cls.user == user) \
2642 2641 .order_by(cls.group_id.asc()) \
2643 2642 .first()
2644 2643
2645 2644 @classmethod
2646 2645 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2647 2646 case_insensitive=True):
2648 2647 q = RepoGroup.query()
2649 2648
2650 2649 if not isinstance(user_id, Optional):
2651 2650 q = q.filter(RepoGroup.user_id == user_id)
2652 2651
2653 2652 if not isinstance(group_id, Optional):
2654 2653 q = q.filter(RepoGroup.group_parent_id == group_id)
2655 2654
2656 2655 if case_insensitive:
2657 2656 q = q.order_by(func.lower(RepoGroup.group_name))
2658 2657 else:
2659 2658 q = q.order_by(RepoGroup.group_name)
2660 2659 return q.all()
2661 2660
    @property
    def parents(self, parents_recursion_limit = 10):
        """List of ancestor groups, outermost first.

        NOTE(review): an extra parameter on a @property can never be supplied
        through attribute access, so the default limit is always used —
        consider turning it into a class constant; confirm no caller uses
        ``RepoGroup.parents.fget`` directly.
        """
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups
2684 2683
    @property
    def last_commit_cache_update_diff(self):
        """Seconds elapsed since the commit cache was last refreshed."""
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @property
    def last_commit_change(self):
        """Datetime of the newest cached commit; epoch start if the cached
        date is missing or unparsable."""
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = self.changeset_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date
2698 2697
    @property
    def last_db_change(self):
        """Timestamp of the last DB update of this row."""
        return self.updated_on

    @property
    def children(self):
        """Query over direct child groups."""
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        """Last path segment of the group name."""
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        """Full slash-separated group path (alias of group_name)."""
        return self.group_name

    @property
    def full_path_splitted(self):
        """Full group path split into its segments."""
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        """Query over repositories directly inside this group, by name."""
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)
2724 2723
2725 2724 @property
2726 2725 def repositories_recursive_count(self):
2727 2726 cnt = self.repositories.count()
2728 2727
2729 2728 def children_count(group):
2730 2729 cnt = 0
2731 2730 for child in group.children:
2732 2731 cnt += child.repositories.count()
2733 2732 cnt += children_count(child)
2734 2733 return cnt
2735 2734
2736 2735 return cnt + children_count(self)
2737 2736
2738 2737 def _recursive_objects(self, include_repos=True, include_groups=True):
2739 2738 all_ = []
2740 2739
2741 2740 def _get_members(root_gr):
2742 2741 if include_repos:
2743 2742 for r in root_gr.repositories:
2744 2743 all_.append(r)
2745 2744 childs = root_gr.children.all()
2746 2745 if childs:
2747 2746 for gr in childs:
2748 2747 if include_groups:
2749 2748 all_.append(gr)
2750 2749 _get_members(gr)
2751 2750
2752 2751 root_group = []
2753 2752 if include_groups:
2754 2753 root_group = [self]
2755 2754
2756 2755 _get_members(self)
2757 2756 return root_group + all_
2758 2757
    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def recursive_repos(self):
        """
        Returns all children repositories for this group, at any depth
        """
        return self._recursive_objects(include_groups=False)
2776 2775
2777 2776 def get_new_name(self, group_name):
2778 2777 """
2779 2778 returns new full group name based on parent and new name
2780 2779
2781 2780 :param group_name:
2782 2781 """
2783 2782 path_prefix = (self.parent_group.full_path_splitted if
2784 2783 self.parent_group else [])
2785 2784 return RepoGroup.url_sep().join(path_prefix + [group_name])
2786 2785
    def update_commit_cache(self, config=None):
        """
        Update cache of last changeset for newest repository inside this group, keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime

        def repo_groups_and_repos():
            # map of group -> every repo contained in it or any descendant
            all_entries = OrderedDefaultDict(list)

            def _get_members(root_gr, pos=0):

                for repo in root_gr.repositories:
                    all_entries[root_gr].append(repo)

                # fill in all parent positions
                for parent_group in root_gr.parents:
                    all_entries[parent_group].extend(all_entries[root_gr])

                children_groups = root_gr.children.all()
                if children_groups:
                    for cnt, gr in enumerate(children_groups, 1):
                        _get_members(gr, pos=pos+cnt)

            _get_members(root_gr=self)
            return all_entries

        empty_date = datetime.datetime.fromtimestamp(0)
        for repo_group, repos in repo_groups_and_repos().items():

            # pick the repo with the newest cached commit date in the subtree
            latest_repo_cs_cache = {}
            for repo in repos:
                repo_cs_cache = repo.changeset_cache
                date_latest = latest_repo_cs_cache.get('date', empty_date)
                date_current = repo_cs_cache.get('date', empty_date)
                current_timestamp = datetime_to_time(parse_datetime(date_latest))
                if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                    latest_repo_cs_cache = repo_cs_cache
                    latest_repo_cs_cache['source_repo_id'] = repo.repo_id

            latest_repo_cs_cache['updated_on'] = time.time()
            repo_group.changeset_cache = latest_repo_cs_cache
            Session().add(repo_group)
            Session().commit()

            log.debug('updated repo group %s with new commit cache %s',
                      repo_group.group_name, latest_repo_cs_cache)
2843 2842
2844 2843 def permissions(self, with_admins=True, with_owner=True,
2845 2844 expand_from_user_groups=False):
2846 2845 """
2847 2846 Permissions for repository groups
2848 2847 """
2849 2848 _admin_perm = 'group.admin'
2850 2849
2851 2850 owner_row = []
2852 2851 if with_owner:
2853 2852 usr = AttributeDict(self.user.get_dict())
2854 2853 usr.owner_row = True
2855 2854 usr.permission = _admin_perm
2856 2855 owner_row.append(usr)
2857 2856
2858 2857 super_admin_ids = []
2859 2858 super_admin_rows = []
2860 2859 if with_admins:
2861 2860 for usr in User.get_all_super_admins():
2862 2861 super_admin_ids.append(usr.user_id)
2863 2862 # if this admin is also owner, don't double the record
2864 2863 if usr.user_id == owner_row[0].user_id:
2865 2864 owner_row[0].admin_row = True
2866 2865 else:
2867 2866 usr = AttributeDict(usr.get_dict())
2868 2867 usr.admin_row = True
2869 2868 usr.permission = _admin_perm
2870 2869 super_admin_rows.append(usr)
2871 2870
2872 2871 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2873 2872 q = q.options(joinedload(UserRepoGroupToPerm.group),
2874 2873 joinedload(UserRepoGroupToPerm.user),
2875 2874 joinedload(UserRepoGroupToPerm.permission),)
2876 2875
2877 2876 # get owners and admins and permissions. We do a trick of re-writing
2878 2877 # objects from sqlalchemy to named-tuples due to sqlalchemy session
2879 2878 # has a global reference and changing one object propagates to all
2880 2879 # others. This means if admin is also an owner admin_row that change
2881 2880 # would propagate to both objects
2882 2881 perm_rows = []
2883 2882 for _usr in q.all():
2884 2883 usr = AttributeDict(_usr.user.get_dict())
2885 2884 # if this user is also owner/admin, mark as duplicate record
2886 2885 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2887 2886 usr.duplicate_perm = True
2888 2887 usr.permission = _usr.permission.permission_name
2889 2888 perm_rows.append(usr)
2890 2889
2891 2890 # filter the perm rows by 'default' first and then sort them by
2892 2891 # admin,write,read,none permissions sorted again alphabetically in
2893 2892 # each group
2894 2893 perm_rows = sorted(perm_rows, key=display_user_sort)
2895 2894
2896 2895 user_groups_rows = []
2897 2896 if expand_from_user_groups:
2898 2897 for ug in self.permission_user_groups(with_members=True):
2899 2898 for user_data in ug.members:
2900 2899 user_groups_rows.append(user_data)
2901 2900
2902 2901 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2903 2902
    def permission_user_groups(self, with_members=False):
        """User-group permission rows for this repository group.

        :param with_members: attach member user dicts to each row
        """
        q = UserGroupRepoGroupToPerm.query()\
            .filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            # copy into plain dicts so mutations don't leak into the session
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows
2922 2921
2923 2922 def get_api_data(self):
2924 2923 """
2925 2924 Common function for generating api data
2926 2925
2927 2926 """
2928 2927 group = self
2929 2928 data = {
2930 2929 'group_id': group.group_id,
2931 2930 'group_name': group.group_name,
2932 2931 'group_description': group.description_safe,
2933 2932 'parent_group': group.parent_group.group_name if group.parent_group else None,
2934 2933 'repositories': [x.repo_name for x in group.repositories],
2935 2934 'owner': group.user.username,
2936 2935 }
2937 2936 return data
2938 2937
2939 2938 def get_dict(self):
2940 2939 # Since we transformed `group_name` to a hybrid property, we need to
2941 2940 # keep compatibility with the code which uses `group_name` field.
2942 2941 result = super(RepoGroup, self).get_dict()
2943 2942 result['group_name'] = result.pop('_group_name', None)
2944 2943 return result
2945 2944
2946 2945
2947 2946 class Permission(Base, BaseModel):
2948 2947 __tablename__ = 'permissions'
2949 2948 __table_args__ = (
2950 2949 Index('p_perm_name_idx', 'permission_name'),
2951 2950 base_table_args,
2952 2951 )
2953 2952
2954 2953 PERMS = [
2955 2954 ('hg.admin', _('RhodeCode Super Administrator')),
2956 2955
2957 2956 ('repository.none', _('Repository no access')),
2958 2957 ('repository.read', _('Repository read access')),
2959 2958 ('repository.write', _('Repository write access')),
2960 2959 ('repository.admin', _('Repository admin access')),
2961 2960
2962 2961 ('group.none', _('Repository group no access')),
2963 2962 ('group.read', _('Repository group read access')),
2964 2963 ('group.write', _('Repository group write access')),
2965 2964 ('group.admin', _('Repository group admin access')),
2966 2965
2967 2966 ('usergroup.none', _('User group no access')),
2968 2967 ('usergroup.read', _('User group read access')),
2969 2968 ('usergroup.write', _('User group write access')),
2970 2969 ('usergroup.admin', _('User group admin access')),
2971 2970
2972 2971 ('branch.none', _('Branch no permissions')),
2973 2972 ('branch.merge', _('Branch access by web merge')),
2974 2973 ('branch.push', _('Branch access by push')),
2975 2974 ('branch.push_force', _('Branch access by push with force')),
2976 2975
2977 2976 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2978 2977 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2979 2978
2980 2979 ('hg.usergroup.create.false', _('User Group creation disabled')),
2981 2980 ('hg.usergroup.create.true', _('User Group creation enabled')),
2982 2981
2983 2982 ('hg.create.none', _('Repository creation disabled')),
2984 2983 ('hg.create.repository', _('Repository creation enabled')),
2985 2984 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2986 2985 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2987 2986
2988 2987 ('hg.fork.none', _('Repository forking disabled')),
2989 2988 ('hg.fork.repository', _('Repository forking enabled')),
2990 2989
2991 2990 ('hg.register.none', _('Registration disabled')),
2992 2991 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2993 2992 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2994 2993
2995 2994 ('hg.password_reset.enabled', _('Password reset enabled')),
2996 2995 ('hg.password_reset.hidden', _('Password reset hidden')),
2997 2996 ('hg.password_reset.disabled', _('Password reset disabled')),
2998 2997
2999 2998 ('hg.extern_activate.manual', _('Manual activation of external account')),
3000 2999 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3001 3000
3002 3001 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3003 3002 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3004 3003 ]
3005 3004
3006 3005 # definition of system default permissions for DEFAULT user, created on
3007 3006 # system setup
3008 3007 DEFAULT_USER_PERMISSIONS = [
3009 3008 # object perms
3010 3009 'repository.read',
3011 3010 'group.read',
3012 3011 'usergroup.read',
3013 3012 # branch, for backward compat we need same value as before so forced pushed
3014 3013 'branch.push_force',
3015 3014 # global
3016 3015 'hg.create.repository',
3017 3016 'hg.repogroup.create.false',
3018 3017 'hg.usergroup.create.false',
3019 3018 'hg.create.write_on_repogroup.true',
3020 3019 'hg.fork.repository',
3021 3020 'hg.register.manual_activate',
3022 3021 'hg.password_reset.enabled',
3023 3022 'hg.extern_activate.auto',
3024 3023 'hg.inherit_default_perms.true',
3025 3024 ]
3026 3025
3027 3026 # defines which permissions are more important higher the more important
3028 3027 # Weight defines which permissions are more important.
3029 3028 # The higher number the more important.
3030 3029 PERM_WEIGHTS = {
3031 3030 'repository.none': 0,
3032 3031 'repository.read': 1,
3033 3032 'repository.write': 3,
3034 3033 'repository.admin': 4,
3035 3034
3036 3035 'group.none': 0,
3037 3036 'group.read': 1,
3038 3037 'group.write': 3,
3039 3038 'group.admin': 4,
3040 3039
3041 3040 'usergroup.none': 0,
3042 3041 'usergroup.read': 1,
3043 3042 'usergroup.write': 3,
3044 3043 'usergroup.admin': 4,
3045 3044
3046 3045 'branch.none': 0,
3047 3046 'branch.merge': 1,
3048 3047 'branch.push': 3,
3049 3048 'branch.push_force': 4,
3050 3049
3051 3050 'hg.repogroup.create.false': 0,
3052 3051 'hg.repogroup.create.true': 1,
3053 3052
3054 3053 'hg.usergroup.create.false': 0,
3055 3054 'hg.usergroup.create.true': 1,
3056 3055
3057 3056 'hg.fork.none': 0,
3058 3057 'hg.fork.repository': 1,
3059 3058 'hg.create.none': 0,
3060 3059 'hg.create.repository': 1
3061 3060 }
3062 3061
3063 3062 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3064 3063 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3065 3064 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3066 3065
3067 3066 def __unicode__(self):
3068 3067 return u"<%s('%s:%s')>" % (
3069 3068 self.__class__.__name__, self.permission_id, self.permission_name
3070 3069 )
3071 3070
3072 3071 @classmethod
3073 3072 def get_by_key(cls, key):
3074 3073 return cls.query().filter(cls.permission_name == key).scalar()
3075 3074
3076 3075 @classmethod
3077 3076 def get_default_repo_perms(cls, user_id, repo_id=None):
3078 3077 q = Session().query(UserRepoToPerm, Repository, Permission)\
3079 3078 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3080 3079 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3081 3080 .filter(UserRepoToPerm.user_id == user_id)
3082 3081 if repo_id:
3083 3082 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3084 3083 return q.all()
3085 3084
3086 3085 @classmethod
3087 3086 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3088 3087 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3089 3088 .join(
3090 3089 Permission,
3091 3090 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3092 3091 .join(
3093 3092 UserRepoToPerm,
3094 3093 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3095 3094 .filter(UserRepoToPerm.user_id == user_id)
3096 3095
3097 3096 if repo_id:
3098 3097 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3099 3098 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3100 3099
3101 3100 @classmethod
3102 3101 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3103 3102 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3104 3103 .join(
3105 3104 Permission,
3106 3105 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3107 3106 .join(
3108 3107 Repository,
3109 3108 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3110 3109 .join(
3111 3110 UserGroup,
3112 3111 UserGroupRepoToPerm.users_group_id ==
3113 3112 UserGroup.users_group_id)\
3114 3113 .join(
3115 3114 UserGroupMember,
3116 3115 UserGroupRepoToPerm.users_group_id ==
3117 3116 UserGroupMember.users_group_id)\
3118 3117 .filter(
3119 3118 UserGroupMember.user_id == user_id,
3120 3119 UserGroup.users_group_active == true())
3121 3120 if repo_id:
3122 3121 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3123 3122 return q.all()
3124 3123
3125 3124 @classmethod
3126 3125 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3127 3126 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3128 3127 .join(
3129 3128 Permission,
3130 3129 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3131 3130 .join(
3132 3131 UserGroupRepoToPerm,
3133 3132 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3134 3133 .join(
3135 3134 UserGroup,
3136 3135 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3137 3136 .join(
3138 3137 UserGroupMember,
3139 3138 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3140 3139 .filter(
3141 3140 UserGroupMember.user_id == user_id,
3142 3141 UserGroup.users_group_active == true())
3143 3142
3144 3143 if repo_id:
3145 3144 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3146 3145 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3147 3146
3148 3147 @classmethod
3149 3148 def get_default_group_perms(cls, user_id, repo_group_id=None):
3150 3149 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3151 3150 .join(
3152 3151 Permission,
3153 3152 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3154 3153 .join(
3155 3154 RepoGroup,
3156 3155 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3157 3156 .filter(UserRepoGroupToPerm.user_id == user_id)
3158 3157 if repo_group_id:
3159 3158 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3160 3159 return q.all()
3161 3160
3162 3161 @classmethod
3163 3162 def get_default_group_perms_from_user_group(
3164 3163 cls, user_id, repo_group_id=None):
3165 3164 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3166 3165 .join(
3167 3166 Permission,
3168 3167 UserGroupRepoGroupToPerm.permission_id ==
3169 3168 Permission.permission_id)\
3170 3169 .join(
3171 3170 RepoGroup,
3172 3171 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3173 3172 .join(
3174 3173 UserGroup,
3175 3174 UserGroupRepoGroupToPerm.users_group_id ==
3176 3175 UserGroup.users_group_id)\
3177 3176 .join(
3178 3177 UserGroupMember,
3179 3178 UserGroupRepoGroupToPerm.users_group_id ==
3180 3179 UserGroupMember.users_group_id)\
3181 3180 .filter(
3182 3181 UserGroupMember.user_id == user_id,
3183 3182 UserGroup.users_group_active == true())
3184 3183 if repo_group_id:
3185 3184 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3186 3185 return q.all()
3187 3186
3188 3187 @classmethod
3189 3188 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3190 3189 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3191 3190 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3192 3191 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3193 3192 .filter(UserUserGroupToPerm.user_id == user_id)
3194 3193 if user_group_id:
3195 3194 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3196 3195 return q.all()
3197 3196
    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        """
        Return (user group -> user group) permission rows that apply to
        ``user_id`` through membership in *active* user groups.

        Yields (UserGroupUserGroupToPerm, UserGroup, Permission) tuples; the
        UserGroup in each tuple is the group through which the user holds
        the permission, not the target group.

        :param user_id: id of the user whose memberships are resolved
        :param user_group_id: optionally restrict to one membership user group
        """
        # users_groups is joined twice (target group + membership group),
        # so the target side must be aliased to avoid a name clash
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
3227 3226
3228 3227
class UserRepoToPerm(Base, BaseModel):
    """
    Direct permission grant of a single user on a single repository.
    """
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        # a user may hold a given permission on a repository only once
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    # branch-level rules hang off this grant and are deleted together with it
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete, delete-orphan", lazy='joined')

    @classmethod
    def create(cls, user, repository, permission):
        """
        Add a new (user, repository, permission) grant to the session.

        The row is only added, not committed; the caller owns the transaction.

        :return: the new, pending UserRepoToPerm instance
        """
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
3258 3257
3259 3258
class UserUserGroupToPerm(Base, BaseModel):
    """
    Permission grant of a single user on a user group.
    """
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        # a user may hold a given permission on a user group only once
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """
        Add a new (user, user group, permission) grant to the session.

        The row is only added, not committed; the caller owns the transaction.

        :return: the new, pending UserUserGroupToPerm instance
        """
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
3287 3286
3288 3287
class UserToPerm(Base, BaseModel):
    """
    Permission bound directly to a user, with no target object column.

    NOTE(review): the lack of a target FK suggests these are application
    wide (global) permissions -- confirm against the permission model code.
    """
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        # each permission can be bound to a user at most once
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly loaded: these rows are read as (user, permission) pairs
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
3305 3304
3306 3305
class UserGroupRepoToPerm(Base, BaseModel):
    """
    Permission grant of a user group on a repository.
    """
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        # a user group may hold a given permission on a repository only once
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')
    # branch-level rules attached to this grant are removed with it (cascade)
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')

    @classmethod
    def create(cls, users_group, repository, permission):
        """
        Add a new (user group, repository, permission) grant to the session.

        The row is only added, not committed; the caller owns the transaction.

        :return: the new, pending UserGroupRepoToPerm instance
        """
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3335 3334
3336 3335
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Permission grant of one user group (``user_group``) on another user
    group (``target_user_group``).
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        # a group cannot be granted a permission on itself
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # two FKs into users_groups require explicit primaryjoins
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """
        Add a new (target group, granted group, permission) row to the
        session. Not committed here; the caller owns the transaction.

        :return: the new, pending UserGroupUserGroupToPerm instance
        """
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3365 3364
3366 3365
class UserGroupToPerm(Base, BaseModel):
    """
    Permission bound directly to a user group, with no target object column.

    NOTE(review): like UserToPerm, the missing target FK suggests these are
    application wide (global) permissions -- confirm with the callers.
    """
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        # each permission can be bound to a user group at most once
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
3380 3379
3381 3380
class UserRepoGroupToPerm(Base, BaseModel):
    """
    Permission grant of a single user on a repository group.
    """
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        # a user may hold a given permission on a repo group only once
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """
        Add a new (user, repo group, permission) grant to the session.

        The row is only added, not committed; the caller owns the transaction.

        :return: the new, pending UserRepoGroupToPerm instance
        """
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
3406 3405
3407 3406
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Permission grant of a user group on a repository group.
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        # NOTE(review): unlike the sibling *_to_perm tables this constraint
        # omits permission_id, so a user group can have only ONE permission
        # row per repo group -- confirm this asymmetry is intentional
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """
        Add a new (user group, repo group, permission) grant to the session.

        The row is only added, not committed; the caller owns the transaction.

        :return: the new, pending UserGroupRepoGroupToPerm instance
        """
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3435 3434
3436 3435
class Statistics(Base, BaseModel):
    """
    Pre-computed commit statistics for a repository; at most one row per
    repository (repository_id is unique).
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # revision the stats were last computed at
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data stored as bytes
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data stored as bytes
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data stored as bytes

    repository = relationship('Repository', single_parent=True)
3451 3450
3452 3451
class UserFollowing(Base, BaseModel):
    """
    A "follow" relation from a user to a repository or to another user.

    NOTE(review): both target columns are nullable, so the schema itself
    does not force exactly one of them to be set -- that invariant, if any,
    lives in application code.
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        # a user can follow a given repository / user only once
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # two FKs into users require an explicit primaryjoin for each side
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of follow entries targeting repository ``repo_id``."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3475 3474
3476 3475
class CacheKey(Base, BaseModel):
    """
    Database-backed registry of cache keys. Invalidation is signalled by
    flipping ``cache_active`` to False (or deleting rows) so that all
    instances sharing the database observe it.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'
    CACHE_TYPE_README = 'README'
    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
    SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    # grouping key used by set_invalidate() to target related cache entries
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        self.cache_key = cache_key
        self.cache_args = cache_args
        # new entries start out inactive
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around cache_args; yields (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        :param cache_uid: value matched against ``cache_args`` to select rows
        :param delete: if True, delete the rows instead of deactivating them
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                qry.update({"cache_active": False})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            # deliberately best-effort: log, roll back and swallow so that a
            # failed invalidation never breaks the calling request
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """
        Return the CacheKey row matching ``cache_key``, or None.

        NOTE(review): despite the name this does not filter on
        ``cache_active`` -- confirm callers expect inactive rows too.
        """
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None
3567 3566
3568 3567
class ChangesetComment(Base, BaseModel):
    """
    A comment attached to a commit (``revision``) or to a pull request,
    either as a general comment or inline on a file line
    (``f_path`` + ``line_no``).
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # display_state value marking a comment as outdated by a newer PR version
    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    # self-referential FK: a TODO comment can be resolved by another comment
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """
        Return the 1-based position of ``pr_version`` within ``versions``,
        or None when it is not present.
        """
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            return num_versions.index(pr_version) +1
        # NOTE(review): list.index only raises ValueError; the IndexError
        # here looks defensive/unreachable
        except (IndexError, ValueError):
            return

    @property
    def outdated(self):
        # True when the comment was made on a superseded diff version
        return self.display_state == self.COMMENT_OUTDATED

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            # no version given: any versioned comment counts as older
            return self.pull_request_version_id is not None

        return self.pull_request_version_id < version

    @property
    def resolved(self):
        # the comment (if any) that resolved this TODO
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline comments carry both a file path and a line number
        return self.line_no and self.f_path

    def get_index_version(self, versions):
        """Return this comment's 1-based version index within ``versions``."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return a dict representation of the comment for the API layer."""
        comment = self
        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3693 3692
3694 3693
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) of a commit revision, optionally
    tied to a pull request; ``version`` distinguishes repeated status
    entries for the same (repo, revision) pair.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs; order is the display order
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    # optional link to the comment that carried the status change
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Return the human-readable label for a status ``value`` (or None)."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        # label for this row's own status value
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return a dict representation of the status for the API layer."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3757 3756
3758 3757
class _SetState(object):
    """
    Context manager that switches a pull request into a given state for the
    duration of a sensitive operation (e.g. update or merge) and restores
    the previous state on exit.

    Each transition is committed immediately so that other processes see
    the state change right away.
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        self._pull_request = pull_request
        self._enter_state = pr_state
        # state restored on exit; defaults to the PR's current state
        self._exit_state = back_state or pull_request.pull_request_state

    def _store_state(self, state):
        # persist the transition immediately in its own commit
        self._pull_request.pull_request_state = state
        Session().add(self._pull_request)
        Session().commit()

    def __enter__(self):
        log.debug('StateLock: entering set state context, setting state to: `%s`',
                  self._enter_state)
        self._store_state(self._enter_state)

    def __exit__(self, exc_type, exc_val, exc_tb):
        log.debug('StateLock: exiting set state context, setting state to: `%s`',
                  self._exit_state)
        self._store_state(self._exit_state)
3783 3782
3784 3783
3785 3784 class _PullRequestBase(BaseModel):
3786 3785 """
3787 3786 Common attributes of pull request and version entries.
3788 3787 """
3789 3788
3790 3789 # .status values
3791 3790 STATUS_NEW = u'new'
3792 3791 STATUS_OPEN = u'open'
3793 3792 STATUS_CLOSED = u'closed'
3794 3793
3795 3794 # available states
3796 3795 STATE_CREATING = u'creating'
3797 3796 STATE_UPDATING = u'updating'
3798 3797 STATE_MERGING = u'merging'
3799 3798 STATE_CREATED = u'created'
3800 3799
3801 3800 title = Column('title', Unicode(255), nullable=True)
3802 3801 description = Column(
3803 3802 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3804 3803 nullable=True)
3805 3804 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3806 3805
3807 3806 # new/open/closed status of pull request (not approve/reject/etc)
3808 3807 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3809 3808 created_on = Column(
3810 3809 'created_on', DateTime(timezone=False), nullable=False,
3811 3810 default=datetime.datetime.now)
3812 3811 updated_on = Column(
3813 3812 'updated_on', DateTime(timezone=False), nullable=False,
3814 3813 default=datetime.datetime.now)
3815 3814
3816 3815 pull_request_state = Column("pull_request_state", String(255), nullable=True)
3817 3816
3818 3817 @declared_attr
3819 3818 def user_id(cls):
3820 3819 return Column(
3821 3820 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3822 3821 unique=None)
3823 3822
3824 3823 # 500 revisions max
3825 3824 _revisions = Column(
3826 3825 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3827 3826
3828 3827 @declared_attr
3829 3828 def source_repo_id(cls):
3830 3829 # TODO: dan: rename column to source_repo_id
3831 3830 return Column(
3832 3831 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3833 3832 nullable=False)
3834 3833
3835 3834 _source_ref = Column('org_ref', Unicode(255), nullable=False)
3836 3835
3837 3836 @hybrid_property
3838 3837 def source_ref(self):
3839 3838 return self._source_ref
3840 3839
3841 3840 @source_ref.setter
3842 3841 def source_ref(self, val):
3843 3842 parts = (val or '').split(':')
3844 3843 if len(parts) != 3:
3845 3844 raise ValueError(
3846 3845 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3847 3846 self._source_ref = safe_unicode(val)
3848 3847
3849 3848 _target_ref = Column('other_ref', Unicode(255), nullable=False)
3850 3849
3851 3850 @hybrid_property
3852 3851 def target_ref(self):
3853 3852 return self._target_ref
3854 3853
3855 3854 @target_ref.setter
3856 3855 def target_ref(self, val):
3857 3856 parts = (val or '').split(':')
3858 3857 if len(parts) != 3:
3859 3858 raise ValueError(
3860 3859 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3861 3860 self._target_ref = safe_unicode(val)
3862 3861
3863 3862 @declared_attr
3864 3863 def target_repo_id(cls):
3865 3864 # TODO: dan: rename column to target_repo_id
3866 3865 return Column(
3867 3866 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3868 3867 nullable=False)
3869 3868
3870 3869 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3871 3870
3872 3871 # TODO: dan: rename column to last_merge_source_rev
3873 3872 _last_merge_source_rev = Column(
3874 3873 'last_merge_org_rev', String(40), nullable=True)
3875 3874 # TODO: dan: rename column to last_merge_target_rev
3876 3875 _last_merge_target_rev = Column(
3877 3876 'last_merge_other_rev', String(40), nullable=True)
3878 3877 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3879 3878 merge_rev = Column('merge_rev', String(40), nullable=True)
3880 3879
3881 3880 reviewer_data = Column(
3882 3881 'reviewer_data_json', MutationObj.as_mutable(
3883 3882 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3884 3883
    @property
    def reviewer_data_json(self):
        """Return ``reviewer_data`` serialized as a JSON string."""
        return json.dumps(self.reviewer_data)

    @hybrid_property
    def description_safe(self):
        """Pull request description escaped via helpers.escape."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        """List of commit ids; stored colon-joined in ``_revisions``."""
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        # persist the list as a single colon-separated string
        self._revisions = ':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        # normalize the stored value to int (None if unset/invalid)
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        # eagerly joined: the author is rendered in most PR views
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        # declared_attr so the primaryjoin is built per subclass
        # (PullRequest and PullRequestVersion have distinct class names)
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        """Source ref parsed into a Reference tuple (type, name, commit_id)."""
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        # see source_repo above: per-subclass primaryjoin
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        """Target ref parsed into a Reference tuple (type, name, commit_id)."""
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        """Shadow merge ref as a Reference tuple, or None when not set."""
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)
3941 3940
3942 3941 @staticmethod
3943 3942 def unicode_to_reference(raw):
3944 3943 """
3945 3944 Convert a unicode (or string) to a reference object.
3946 3945 If unicode evaluates to False it returns None.
3947 3946 """
3948 3947 if raw:
3949 3948 refs = raw.split(':')
3950 3949 return Reference(*refs)
3951 3950 else:
3952 3951 return None
3953 3952
3954 3953 @staticmethod
3955 3954 def reference_to_unicode(ref):
3956 3955 """
3957 3956 Convert a reference object to unicode.
3958 3957 If reference is None it returns None.
3959 3958 """
3960 3959 if ref:
3961 3960 return u':'.join(ref)
3962 3961 else:
3963 3962 return None
3964 3963
    def get_api_data(self, with_merge_state=True):
        """
        Build the JSON-serializable dict describing this pull request for
        the API layer.

        :param with_merge_state: when True, run the (potentially expensive)
            merge-status check; otherwise put a 'not_available' placeholder
            into the ``mergeable`` key.
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            # merge_status returns a (status, message, ...) sequence
            merge_status = PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status[0],
                'message': safe_unicode(merge_status[1]),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        # shadow-merge repository info: clone url plus parsed ref (if any)
        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    # first recorded status if the reviewer voted at all
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data
4032 4031
    def set_state(self, pull_request_state, final_state=None):
        """
        Return a ``_SetState`` context manager that switches the pull
        request into ``pull_request_state`` and restores the previous
        (or explicitly given ``final_state``) state on exit.

        Usage::

            # goes from initial state to updating to initial state.
            # initial state can be changed by specifying back_state=
            with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
                pull_request.merge()

        :param pull_request_state: state to hold while the block runs
        :param final_state: optional state to restore on exit instead of
            the original one
        """

        return _SetState(self, pull_request_state, back_state=final_state)
4046 4045
4047 4046
class PullRequest(Base, _PullRequestBase):
    """
    The live (editable) pull request record; snapshots of it are kept as
    PullRequestVersion rows.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            # not yet persisted: fall back to the object identity
            return '<DB:PullRequest at %#x>' % id(self)

    # child rows are fully owned by the PR and removed with it
    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    statuses = relationship('ChangesetStatus',
                            cascade="all, delete, delete-orphan")
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
    versions = relationship('PullRequestVersion',
                            cascade="all, delete, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Wrap ``pull_request_obj`` (a PR or a PR version) in a read-only
        display object whose attributes come from get_api_data(), while
        shadow-merge/reviewer data is taken from ``org_pull_request_obj``.
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # only reached when normal attribute lookup fails;
                # 'internal' names resolve back to real members, the rest
                # come from the attrs dict
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            @property
            def pull_request_version_id(self):
                # only PullRequestVersion objects carry this attribute
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        attrs = StrictAttributeDict(pull_request_obj.get_api_data())

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions

        # these always reflect the original (live) pull request
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def __json__(self):
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """Return the shadow repo instance for this PR, or None if its
        directory does not exist on disk."""
        workspace_id = self.workspace_id
        vcs_obj = self.target_repo.scm_instance()
        shadow_repository_path = vcs_obj._get_shadow_repository_path(
            self.target_repo.repo_id, workspace_id)
        if os.path.isdir(shadow_repository_path):
            return vcs_obj._get_shadow_instance(shadow_repository_path)
4164 4163
4165 4164
class PullRequestVersion(Base, _PullRequestBase):
    """
    Snapshot of a pull request; reviewer/version/closed-state queries are
    delegated to the live parent ``pull_request``.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            # not yet persisted: fall back to the object identity
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # versions carry no reviewers of their own
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
4202 4201
4203 4202
class PullRequestReviewers(Base, BaseModel):
    """Association of a User as a reviewer on a PullRequest, with the
    reasons and (optional) voting-rule data that added them."""
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )

    @hybrid_property
    def reasons(self):
        """List of textual reasons why this reviewer was added."""
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        # reject anything but a flat list of strings
        if any(not isinstance(x, compat.string_types) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON list backing the `reasons` hybrid above
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    # JSON data of the review rule that added this reviewer (if any)
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer;
        None when no vote rule is attached.
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        # Python 2 string representation
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
4261 4260
4262 4261
class Notification(Base, BaseModel):
    """A notification message, fanned out to recipients via
    UserNotification association rows."""
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    # known notification types
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        """Users this notification was delivered to, ordered by user id."""
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and one UserNotification per recipient.
        Rows are added to the session; the caller is expected to commit.

        :param created_by: User instance that authored the notification
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
4321 4320
4322 4321
class UserNotification(Base, BaseModel):
    """Per-user delivery/read-state record for a Notification."""
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        # flag as read and stage the change; caller is expected to commit
        self.read = True
        Session().add(self)
4342 4341
4343 4342
class Gist(Base, BaseModel):
    """A gist: a small snippet stored as its own VCS repository under the
    gist store, addressed by ``gist_access_id``."""
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    # visibility types
    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    # access-control levels
    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        """Gist description escaped via helpers.escape."""
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by ``gist_access_id`` (note: not the numeric pk) or raise
        HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        # cached lookup of the repository root path setting
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Return the VCS repository instance backing this gist; the backend
        is taken from GistModel.vcs_backend.

        :param kwargs: accepted for interface compatibility, unused here
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
4446 4445
4447 4446
class ExternalIdentity(Base, BaseModel):
    """Link between a local User and an identity at an external auth
    provider, including the provider tokens."""
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    # composite pk: (external_id, local_user_id, provider_name)
    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :param local_user_id: optional; narrows the match to one local user
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns query of all external identities for a local user.

        :param local_user_id:
        :return: ExternalIdentity query (not yet executed)
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        """Load the auth plugin registered under the EE egg namespace."""
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
4513 4512
4514 4513
class Integration(Base, BaseModel):
    """A configured integration; scoped globally, to one repo group, or to
    a single repository (see ``scope``)."""
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # for group/global scope: restrict to direct children / root repos
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    # per-integration settings blob; mutation-tracked JSON
    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        """Human-readable scope: repo, repo group (child-only/recursive),
        'root_repos' or 'global'."""
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4556 4555
4557 4556
class RepoReviewRuleUser(Base, BaseModel):
    """Single user attached to a RepoReviewRule."""
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')

    def rule_data(self):
        """Rule payload exposed in RepoReviewRule.review_users entries."""
        return {
            'mandatory': self.mandatory
        }
4574 4573
4575 4574
class RepoReviewRuleUserGroup(Base, BaseModel):
    """User group attached to a RepoReviewRule, with an optional minimum
    vote count (VOTE_RULE_ALL means everyone must vote)."""
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel: all group members must vote
    VOTE_RULE_ALL = -1

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        """Rule payload exposed in RepoReviewRule.review_users entries."""
        return {
            'mandatory': self.mandatory,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        """Display label for the vote rule."""
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
4603 4602
4604 4603
4605 4604 class RepoReviewRule(Base, BaseModel):
4606 4605 __tablename__ = 'repo_review_rules'
4607 4606 __table_args__ = (
4608 4607 base_table_args
4609 4608 )
4610 4609
4611 4610 repo_review_rule_id = Column(
4612 4611 'repo_review_rule_id', Integer(), primary_key=True)
4613 4612 repo_id = Column(
4614 4613 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4615 4614 repo = relationship('Repository', backref='review_rules')
4616 4615
4617 4616 review_rule_name = Column('review_rule_name', String(255))
4618 4617 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4619 4618 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4620 4619 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4621 4620
4622 4621 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4623 4622 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4624 4623 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4625 4624 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4626 4625
4627 4626 rule_users = relationship('RepoReviewRuleUser')
4628 4627 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4629 4628
4630 4629 def _validate_pattern(self, value):
4631 4630 re.compile('^' + glob2re(value) + '$')
4632 4631
4633 4632 @hybrid_property
4634 4633 def source_branch_pattern(self):
4635 4634 return self._branch_pattern or '*'
4636 4635
4637 4636 @source_branch_pattern.setter
4638 4637 def source_branch_pattern(self, value):
4639 4638 self._validate_pattern(value)
4640 4639 self._branch_pattern = value or '*'
4641 4640
4642 4641 @hybrid_property
4643 4642 def target_branch_pattern(self):
4644 4643 return self._target_branch_pattern or '*'
4645 4644
4646 4645 @target_branch_pattern.setter
4647 4646 def target_branch_pattern(self, value):
4648 4647 self._validate_pattern(value)
4649 4648 self._target_branch_pattern = value or '*'
4650 4649
4651 4650 @hybrid_property
4652 4651 def file_pattern(self):
4653 4652 return self._file_pattern or '*'
4654 4653
4655 4654 @file_pattern.setter
4656 4655 def file_pattern(self, value):
4657 4656 self._validate_pattern(value)
4658 4657 self._file_pattern = value or '*'
4659 4658
4660 4659 def matches(self, source_branch, target_branch, files_changed):
4661 4660 """
4662 4661 Check if this review rule matches a branch/files in a pull request
4663 4662
4664 4663 :param source_branch: source branch name for the commit
4665 4664 :param target_branch: target branch name for the commit
4666 4665 :param files_changed: list of file paths changed in the pull request
4667 4666 """
4668 4667
4669 4668 source_branch = source_branch or ''
4670 4669 target_branch = target_branch or ''
4671 4670 files_changed = files_changed or []
4672 4671
4673 4672 branch_matches = True
4674 4673 if source_branch or target_branch:
4675 4674 if self.source_branch_pattern == '*':
4676 4675 source_branch_match = True
4677 4676 else:
4678 4677 if self.source_branch_pattern.startswith('re:'):
4679 4678 source_pattern = self.source_branch_pattern[3:]
4680 4679 else:
4681 4680 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4682 4681 source_branch_regex = re.compile(source_pattern)
4683 4682 source_branch_match = bool(source_branch_regex.search(source_branch))
4684 4683 if self.target_branch_pattern == '*':
4685 4684 target_branch_match = True
4686 4685 else:
4687 4686 if self.target_branch_pattern.startswith('re:'):
4688 4687 target_pattern = self.target_branch_pattern[3:]
4689 4688 else:
4690 4689 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4691 4690 target_branch_regex = re.compile(target_pattern)
4692 4691 target_branch_match = bool(target_branch_regex.search(target_branch))
4693 4692
4694 4693 branch_matches = source_branch_match and target_branch_match
4695 4694
4696 4695 files_matches = True
4697 4696 if self.file_pattern != '*':
4698 4697 files_matches = False
4699 4698 if self.file_pattern.startswith('re:'):
4700 4699 file_pattern = self.file_pattern[3:]
4701 4700 else:
4702 4701 file_pattern = glob2re(self.file_pattern)
4703 4702 file_regex = re.compile(file_pattern)
4704 4703 for filename in files_changed:
4705 4704 if file_regex.search(filename):
4706 4705 files_matches = True
4707 4706 break
4708 4707
4709 4708 return branch_matches and files_matches
4710 4709
4711 4710 @property
4712 4711 def review_users(self):
4713 4712 """ Returns the users which this rule applies to """
4714 4713
4715 4714 users = collections.OrderedDict()
4716 4715
4717 4716 for rule_user in self.rule_users:
4718 4717 if rule_user.user.active:
4719 4718 if rule_user.user not in users:
4720 4719 users[rule_user.user.username] = {
4721 4720 'user': rule_user.user,
4722 4721 'source': 'user',
4723 4722 'source_data': {},
4724 4723 'data': rule_user.rule_data()
4725 4724 }
4726 4725
4727 4726 for rule_user_group in self.rule_user_groups:
4728 4727 source_data = {
4729 4728 'user_group_id': rule_user_group.users_group.users_group_id,
4730 4729 'name': rule_user_group.users_group.users_group_name,
4731 4730 'members': len(rule_user_group.users_group.members)
4732 4731 }
4733 4732 for member in rule_user_group.users_group.members:
4734 4733 if member.user.active:
4735 4734 key = member.user.username
4736 4735 if key in users:
4737 4736 # skip this member as we have him already
4738 4737 # this prevents from override the "first" matched
4739 4738 # users with duplicates in multiple groups
4740 4739 continue
4741 4740
4742 4741 users[key] = {
4743 4742 'user': member.user,
4744 4743 'source': 'user_group',
4745 4744 'source_data': source_data,
4746 4745 'data': rule_user_group.rule_data()
4747 4746 }
4748 4747
4749 4748 return users
4750 4749
4751 4750 def user_group_vote_rule(self, user_id):
4752 4751
4753 4752 rules = []
4754 4753 if not self.rule_user_groups:
4755 4754 return rules
4756 4755
4757 4756 for user_group in self.rule_user_groups:
4758 4757 user_group_members = [x.user_id for x in user_group.users_group.members]
4759 4758 if user_id in user_group_members:
4760 4759 rules.append(user_group)
4761 4760 return rules
4762 4761
    def __repr__(self):
        # debug representation: rule primary key plus the repo it targets
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)
4766 4765
4767 4766
class ScheduleEntry(Base, BaseModel):
    """
    A scheduled task definition persisted in the database: what to run
    (`task_*` columns) and when to run it (`schedule_*` columns).
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    # allowed values for the `schedule_type` hybrid property
    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        if val not in self.schedule_types:
            # BUGFIX: the message previously had the format arguments
            # swapped (it showed the rejected value as the allowed set and
            # the *current* type as the rejected value) plus an "on of" typo.
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """
        Compute a deterministic sha1 uid for a schedule entry from its
        task dot-notation, args and kwargs (backs `s_task_uid_idx`).
        """
        args = obj.task_args
        kwargs = obj.task_kwargs
        # raw JSON payloads are decoded first; unparseable JSON falls back
        # to empty args/kwargs rather than failing
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE(review): `sorted(dot_notation)` sorts the *characters* of the
        # dotted path, not its components; kept as-is because persisted
        # task_uid values depend on this exact formula.
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return hashlib.sha1(val).hexdigest()

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        # schedule_name is unique (s_schedule_name_idx), so scalar() is safe
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        return self.task_dot_notation

    @property
    def schedule(self):
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val):
        # de-coerce mutation-tracked JSON wrappers back to plain data, then
        # serialize non-empty values to a JSON string
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            val = json.dumps(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    @property
    def args_raw(self):
        return self._as_raw(self.task_args)

    @property
    def kwargs_raw(self):
        return self._as_raw(self.task_kwargs)

    def __repr__(self):
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)
4886 4885
4887 4886
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # recompute the deterministic task uid on every update so it stays in
    # sync with task dot-notation/args/kwargs
    target.task_uid = ScheduleEntry.get_uid(target)
4891 4890
4892 4891
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # populate the deterministic task uid before the row is first written
    target.task_uid = ScheduleEntry.get_uid(target)
4896 4895
4897 4896
class _BaseBranchPerms(BaseModel):
    """
    Mixin for branch-permission models: stores a glob branch pattern plus
    a hash of it, and implements matching against branch names.
    """
    @classmethod
    def compute_hash(cls, value):
        # stable digest of the raw pattern, stored alongside it for lookups
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        # default to match-everything when no pattern was ever set
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # compiling the glob-translated regex raises if `value` would not
        # produce a usable pattern; result is discarded on purpose
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # set the Hash when setting the branch pattern
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check if this the branch matches entry

        :param branch: branch name for the commit
        """

        branch = branch or ''

        # NOTE: an empty/None branch name matches any rule (stays True)
        branch_matches = True
        if branch:
            branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
            branch_matches = bool(branch_regex.search(branch))

        return branch_matches
4936 4935
4937 4936
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Branch permission rule granted to a single user on a repository,
    scoped by the branch pattern inherited from `_BaseBranchPerms`.
    """
    __tablename__ = 'user_to_repo_branch_permissions'
    # NOTE(review): __table_args__ is a parenthesized dict (not a tuple),
    # so `base_table_args` is not merged in here — confirm intentional.
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    # link back to the plain user-to-repo permission entry this rule refines
    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm')

    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        return u'<UserBranchPermission(%s => %r)>' % (
            self.user_repo_to_perm, self.branch_pattern)
4963 4962
4964 4963
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Branch permission rule granted to a *user group* on a repository,
    scoped by the branch pattern inherited from `_BaseBranchPerms`.
    """
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    # link back to the user-group-to-repo permission entry this rule refines
    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm')

    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        # BUGFIX: the label previously said `UserBranchPermission` —
        # copy-pasted from the per-user rule class; name this class instead.
        return u'<UserGroupBranchPermission(%s => %r)>' % (
            self.user_group_repo_to_perm, self.branch_pattern)
4990 4989
4991 4990
class UserBookmark(Base, BaseModel):
    """
    A per-user bookmark entry pointing at a repository, a repository
    group, or a redirect URL, ordered by `position` in the bookmark bar.
    """
    __tablename__ = 'user_bookmarks'
    __table_args__ = (
        UniqueConstraint('user_id', 'bookmark_repo_id'),
        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
        UniqueConstraint('user_id', 'bookmark_position'),
        base_table_args
    )

    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    # ordering slot in the bookmark bar; unique per user (see table args)
    position = Column("bookmark_position", Integer(), nullable=False)
    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)

    user = relationship("User")

    repository = relationship("Repository")
    repository_group = relationship("RepoGroup")

    @classmethod
    def get_by_position_for_user(cls, position, user_id):
        # position is unique per user, so scalar() yields one row or None
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .filter(UserBookmark.position == position).scalar()

    @classmethod
    def get_bookmarks_for_user(cls, user_id):
        # eager-load both possible targets to avoid per-bookmark queries
        # when rendering the bookmark bar
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .options(joinedload(UserBookmark.repository)) \
            .options(joinedload(UserBookmark.repository_group)) \
            .order_by(UserBookmark.position.asc()) \
            .all()

    def __unicode__(self):
        return u'<UserBookmark(%d @ %r)>' % (self.position, self.redirect_url)
5033 5032
5034 5033
class FileStore(Base, BaseModel):
    """
    Metadata for a file kept in the file/artifact store: original name,
    size, hash, access statistics, and optional ACL scoping to a user,
    user group, repository or repository group.
    """
    __tablename__ = 'file_store'
    __table_args__ = (
        base_table_args
    )

    file_store_id = Column('file_store_id', Integer(), primary_key=True)
    file_uid = Column('file_uid', String(1024), nullable=False)
    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)

    # sha256 hash
    file_hash = Column('file_hash', String(512), nullable=False)
    file_size = Column('file_size', Integer(), nullable=False)

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
    accessed_count = Column('accessed_count', Integer(), default=0)

    enabled = Column('enabled', Boolean(), nullable=False, default=True)

    # if repo/repo_group reference is set, check for permissions
    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')

    # scope limited to user, which requester have access to
    scope_user_id = Column(
        'scope_user_id', Integer(), ForeignKey('users.user_id'),
        nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')

    # scope limited to user group, which requester have access to
    scope_user_group_id = Column(
        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
        nullable=True, unique=None, default=None)
    user_group = relationship('UserGroup', lazy='joined')

    # scope limited to repo, which requester have access to
    scope_repo_id = Column(
        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    # scope limited to repo group, which requester have access to
    scope_repo_group_id = Column(
        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @classmethod
    def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
               file_description='', enabled=True, check_acl=True, user_id=None,
               scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
        """
        Build an (unpersisted) FileStore entry; the caller is responsible
        for adding it to the session and committing.
        """
        store_entry = FileStore()
        store_entry.file_uid = file_uid
        store_entry.file_display_name = file_display_name
        store_entry.file_org_name = filename
        store_entry.file_size = file_size
        store_entry.file_hash = file_hash
        store_entry.file_description = file_description

        store_entry.check_acl = check_acl
        store_entry.enabled = enabled

        store_entry.user_id = user_id
        store_entry.scope_user_id = scope_user_id
        store_entry.scope_repo_id = scope_repo_id
        store_entry.scope_repo_group_id = scope_repo_group_id
        return store_entry

    @classmethod
    def bump_access_counter(cls, file_uid, commit=True):
        """
        Increment the access counter and stamp the access time for the
        entry matching `file_uid` via a single server-side UPDATE.
        """
        FileStore().query()\
            .filter(FileStore.file_uid == file_uid)\
            .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
                     FileStore.accessed_on: datetime.datetime.now()})
        if commit:
            Session().commit()

    def __repr__(self):
        return '<FileStore({})>'.format(self.file_store_id)
5120 5119
5121 5120
class DbMigrateVersion(Base, BaseModel):
    """Tracks the schema migration version applied to this database."""
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        # NOTE(review): assumes at least one row exists — `ver` would be
        # None on an empty table; acceptable for a debug-only helper.
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()
5140 5139
5141 5140
class DbSession(Base, BaseModel):
    """
    Row model for the `db_session` table: pickled session payload plus
    created/accessed timestamps, keyed by namespace.
    """
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return '<DB:DbSession({})>'.format(self.id)

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    # pickled session payload — only trusted, server-written data belongs here
    data = Column('data', PickleType, nullable=False)
@@ -1,1739 +1,1745 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
# Data structure to hold the response data when updating commits during a pull
# request update.  `executed` says whether the update ran; `reason` is
# presumably an UpdateFailureReason (see UPDATE_STATUS_MESSAGES below — TODO
# confirm); the *_changed flags report which side of the PR moved.
UpdateResponse = collections.namedtuple('UpdateResponse', [
    'executed', 'reason', 'new', 'old', 'changes',
    'source_changed', 'target_changed'])
71 71
72 72
class PullRequestModel(BaseModel):
    """
    Model layer encapsulating pull request queries and permission checks.
    """

    cls = PullRequest

    # default diff context size used for PR diffs
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # user-facing messages for each possible outcome of a PR update,
    # keyed by vcs-layer UpdateFailureReason
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # reference types a PR may point at; the UPDATABLE subset excludes 'tag'
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
    def __get_pull_request(self, pull_request):
        # resolve `pull_request` to a PullRequest or PullRequestVersion
        # instance via BaseModel._get_instance
        return self._get_instance((
            PullRequest, PullRequestVersion), pull_request)
102 102
103 103 def _check_perms(self, perms, pull_request, user, api=False):
104 104 if not api:
105 105 return h.HasRepoPermissionAny(*perms)(
106 106 user=user, repo_name=pull_request.target_repo.repo_name)
107 107 else:
108 108 return h.HasRepoPermissionAnyApi(*perms)(
109 109 user=user, repo_name=pull_request.target_repo.repo_name)
110 110
111 111 def check_user_read(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_merge(self, pull_request, user, api=False):
116 116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 117 return self._check_perms(_perms, pull_request, user, api)
118 118
119 119 def check_user_update(self, pull_request, user, api=False):
120 120 owner = user.user_id == pull_request.user_id
121 121 return self.check_user_merge(pull_request, user, api) or owner
122 122
123 123 def check_user_delete(self, pull_request, user):
124 124 owner = user.user_id == pull_request.user_id
125 125 _perms = ('repository.admin',)
126 126 return self._check_perms(_perms, pull_request, user) or owner
127 127
128 128 def check_user_change_status(self, pull_request, user, api=False):
129 129 reviewer = user.user_id in [x.user_id for x in
130 130 pull_request.reviewers]
131 131 return self.check_user_update(pull_request, user, api) or reviewer
132 132
133 133 def check_user_comment(self, pull_request, user):
134 134 owner = user.user_id == pull_request.user_id
135 135 return self.check_user_read(pull_request, user) or owner
136 136
    def get(self, pull_request):
        # public accessor delegating to the private resolver
        return self.__get_pull_request(pull_request)
139 139
    def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
                               opened_by=None, order_by=None,
                               order_dir='desc', only_created=True):
        """
        Build (without executing) the base query shared by count_all/get_all.

        :param repo_name: repo to match; compared against the source repo
            when `source` is True, otherwise against the target repo
        :param statuses: restrict to these pull request statuses
        :param opened_by: restrict to these author user ids
        :param order_by: one of the keys of the order map below
        :param order_dir: 'asc' or 'desc'
        :param only_created: keep only PRs in STATE_CREATED
        """
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'id': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
181 181
182 182 def count_all(self, repo_name, source=False, statuses=None,
183 183 opened_by=None):
184 184 """
185 185 Count the number of pull requests for a specific repository.
186 186
187 187 :param repo_name: target or source repo
188 188 :param source: boolean flag to specify if repo_name refers to source
189 189 :param statuses: list of pull request statuses
190 190 :param opened_by: author user of the pull request
191 191 :returns: int number of pull requests
192 192 """
193 193 q = self._prepare_get_all_query(
194 194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
195 195
196 196 return q.count()
197 197
198 198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 199 offset=0, length=None, order_by=None, order_dir='desc'):
200 200 """
201 201 Get all pull requests for a specific repository.
202 202
203 203 :param repo_name: target or source repo
204 204 :param source: boolean flag to specify if repo_name refers to source
205 205 :param statuses: list of pull request statuses
206 206 :param opened_by: author user of the pull request
207 207 :param offset: pagination offset
208 208 :param length: length of returned list
209 209 :param order_by: order of the returned list
210 210 :param order_dir: 'asc' or 'desc' ordering direction
211 211 :returns: list of pull requests
212 212 """
213 213 q = self._prepare_get_all_query(
214 214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 215 order_by=order_by, order_dir=order_dir)
216 216
217 217 if length:
218 218 pull_requests = q.limit(length).offset(offset).all()
219 219 else:
220 220 pull_requests = q.all()
221 221
222 222 return pull_requests
223 223
224 224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
225 225 opened_by=None):
226 226 """
227 227 Count the number of pull requests for a specific repository that are
228 228 awaiting review.
229 229
230 230 :param repo_name: target or source repo
231 231 :param source: boolean flag to specify if repo_name refers to source
232 232 :param statuses: list of pull request statuses
233 233 :param opened_by: author user of the pull request
234 234 :returns: int number of pull requests
235 235 """
236 236 pull_requests = self.get_awaiting_review(
237 237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
238 238
239 239 return len(pull_requests)
240 240
241 241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
242 242 opened_by=None, offset=0, length=None,
243 243 order_by=None, order_dir='desc'):
244 244 """
245 245 Get all pull requests for a specific repository that are awaiting
246 246 review.
247 247
248 248 :param repo_name: target or source repo
249 249 :param source: boolean flag to specify if repo_name refers to source
250 250 :param statuses: list of pull request statuses
251 251 :param opened_by: author user of the pull request
252 252 :param offset: pagination offset
253 253 :param length: length of returned list
254 254 :param order_by: order of the returned list
255 255 :param order_dir: 'asc' or 'desc' ordering direction
256 256 :returns: list of pull requests
257 257 """
258 258 pull_requests = self.get_all(
259 259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 260 order_by=order_by, order_dir=order_dir)
261 261
262 262 _filtered_pull_requests = []
263 263 for pr in pull_requests:
264 264 status = pr.calculated_review_status()
265 265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 266 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 267 _filtered_pull_requests.append(pr)
268 268 if length:
269 269 return _filtered_pull_requests[offset:offset+length]
270 270 else:
271 271 return _filtered_pull_requests
272 272
273 273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
274 274 opened_by=None, user_id=None):
275 275 """
276 276 Count the number of pull requests for a specific repository that are
277 277 awaiting review from a specific user.
278 278
279 279 :param repo_name: target or source repo
280 280 :param source: boolean flag to specify if repo_name refers to source
281 281 :param statuses: list of pull request statuses
282 282 :param opened_by: author user of the pull request
283 283 :param user_id: reviewer user of the pull request
284 284 :returns: int number of pull requests
285 285 """
286 286 pull_requests = self.get_awaiting_my_review(
287 287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 288 user_id=user_id)
289 289
290 290 return len(pull_requests)
291 291
292 292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 293 opened_by=None, user_id=None, offset=0,
294 294 length=None, order_by=None, order_dir='desc'):
295 295 """
296 296 Get all pull requests for a specific repository that are awaiting
297 297 review from a specific user.
298 298
299 299 :param repo_name: target or source repo
300 300 :param source: boolean flag to specify if repo_name refers to source
301 301 :param statuses: list of pull request statuses
302 302 :param opened_by: author user of the pull request
303 303 :param user_id: reviewer user of the pull request
304 304 :param offset: pagination offset
305 305 :param length: length of returned list
306 306 :param order_by: order of the returned list
307 307 :param order_dir: 'asc' or 'desc' ordering direction
308 308 :returns: list of pull requests
309 309 """
310 310 pull_requests = self.get_all(
311 311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 312 order_by=order_by, order_dir=order_dir)
313 313
314 314 _my = PullRequestModel().get_not_reviewed(user_id)
315 315 my_participation = []
316 316 for pr in pull_requests:
317 317 if pr in _my:
318 318 my_participation.append(pr)
319 319 _filtered_pull_requests = my_participation
320 320 if length:
321 321 return _filtered_pull_requests[offset:offset+length]
322 322 else:
323 323 return _filtered_pull_requests
324 324
325 325 def get_not_reviewed(self, user_id):
326 326 return [
327 327 x.pull_request for x in PullRequestReviewers.query().filter(
328 328 PullRequestReviewers.user_id == user_id).all()
329 329 ]
330 330
    def _prepare_participating_query(self, user_id=None, statuses=None,
                                     order_by=None, order_dir='desc'):
        """
        Build (without executing) the query for PRs the user opened or
        reviews.

        :param user_id: match PRs authored by, or reviewed by, this user
        :param statuses: restrict to these pull request statuses
        :param order_by: one of the keys of the order map below
        :param order_dir: 'asc' or 'desc'
        """
        q = PullRequest.query()
        if user_id:
            reviewers_subquery = Session().query(
                PullRequestReviewers.pull_request_id).filter(
                PullRequestReviewers.user_id == user_id).subquery()
            user_filter = or_(
                PullRequest.user_id == user_id,
                PullRequest.pull_request_id.in_(reviewers_subquery)
            )
            # NOTE(review): the initial unfiltered query above is discarded
            # here and rebuilt with the author-or-reviewer filter
            q = PullRequest.query().filter(user_filter)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
361 361
def count_im_participating_in(self, user_id=None, statuses=None):
    """Count pull requests the user participates in (author or reviewer)."""
    query = self._prepare_participating_query(user_id, statuses=statuses)
    return query.count()
365 365
def get_im_participating_in(
        self, user_id=None, statuses=None, offset=0,
        length=None, order_by=None, order_dir='desc'):
    """
    Get all Pull requests that i'm participating in, or i have opened
    """

    query = self._prepare_participating_query(
        user_id, statuses=statuses, order_by=order_by,
        order_dir=order_dir)

    # paginate only when a length was explicitly requested
    if not length:
        return query.all()
    return query.limit(length).offset(offset).all()
383 383
def get_versions(self, pull_request):
    """
    Return all stored versions of the given pull request, ordered by
    version id ascending (oldest first).

    NOTE(review): the original docstring claimed "descending", but the
    query explicitly orders `pull_request_version_id` ascending — the
    docstring, not the query, appeared to be wrong.
    """
    return PullRequestVersion.query()\
        .filter(PullRequestVersion.pull_request == pull_request)\
        .order_by(PullRequestVersion.pull_request_version_id.asc())\
        .all()
392 392
def get_pr_version(self, pull_request_id, version=None):
    """
    Resolve a pull request, optionally at a specific stored version.

    :param pull_request_id: id of the pull request
    :param version: ``'latest'``, a `PullRequestVersion` id, or falsy for
        the current (unversioned) pull request
    :returns: tuple of (original PR object, version-or-PR object,
        display object, at_version marker); at_version is ``'latest'``,
        a version id, or ``None``
    """
    at_version = None

    if version and version == 'latest':
        # 'latest' maps to the live pull request itself
        pull_request_ver = PullRequest.get(pull_request_id)
        pull_request_obj = pull_request_ver
        _org_pull_request_obj = pull_request_obj
        at_version = 'latest'
    elif version:
        # a concrete stored snapshot; 404s when it does not exist
        pull_request_ver = PullRequestVersion.get_or_404(version)
        pull_request_obj = pull_request_ver
        _org_pull_request_obj = pull_request_ver.pull_request
        at_version = pull_request_ver.pull_request_version_id
    else:
        _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
            pull_request_id)

    pull_request_display_obj = PullRequest.get_pr_display_object(
        pull_request_obj, _org_pull_request_obj)

    return _org_pull_request_obj, pull_request_obj, \
        pull_request_display_obj, at_version
415 415
def create(self, created_by, source_repo, source_ref, target_repo,
           target_ref, revisions, reviewers, title, description=None,
           description_renderer=None,
           reviewer_data=None, translator=None, auth_user=None):
    """
    Create a new pull request with its reviewers, set initial changeset
    statuses, and run an initial merge simulation.

    :param created_by: user (id/username/object) creating the pull request
    :param source_repo: source repository (id/name/object)
    :param source_ref: source ref string
    :param target_repo: target repository (id/name/object)
    :param target_ref: target ref string
    :param revisions: list of commit ids included in the pull request
    :param reviewers: iterable of (user_id, reasons, mandatory, rules)
        tuples; duplicate users are skipped
    :param title: pull request title
    :param description: optional description text
    :param description_renderer: optional renderer name for the description
    :param reviewer_data: optional reviewer metadata stored on the PR
    :param translator: translation function; defaults to the current
        request's translator
    :param auth_user: acting AuthUser; defaults to the creator's AuthUser
    :returns: the created `PullRequest` (re-fetched after commit)
    """
    translator = translator or get_current_request().translate

    created_by_user = self._get_user(created_by)
    auth_user = auth_user or created_by_user.AuthUser()
    source_repo = self._get_repo(source_repo)
    target_repo = self._get_repo(target_repo)

    pull_request = PullRequest()
    pull_request.source_repo = source_repo
    pull_request.source_ref = source_ref
    pull_request.target_repo = target_repo
    pull_request.target_ref = target_ref
    pull_request.revisions = revisions
    pull_request.title = title
    pull_request.description = description
    pull_request.description_renderer = description_renderer
    pull_request.author = created_by_user
    pull_request.reviewer_data = reviewer_data
    # state is flipped to CREATED below once the merge simulation finishes
    pull_request.pull_request_state = pull_request.STATE_CREATING
    Session().add(pull_request)
    Session().flush()

    reviewer_ids = set()
    # members / reviewers
    for reviewer_object in reviewers:
        user_id, reasons, mandatory, rules = reviewer_object
        user = self._get_user(user_id)

        # skip duplicates
        if user.user_id in reviewer_ids:
            continue

        reviewer_ids.add(user.user_id)

        reviewer = PullRequestReviewers()
        reviewer.user = user
        reviewer.pull_request = pull_request
        reviewer.reasons = reasons
        reviewer.mandatory = mandatory

        # NOTE(marcink): pick only first rule for now
        rule_id = list(rules)[0] if rules else None
        rule = RepoReviewRule.get(rule_id) if rule_id else None
        if rule:
            review_group = rule.user_group_vote_rule(user_id)
            # we check if this particular reviewer is member of a voting group
            if review_group:
                # NOTE(marcink):
                # can be that user is member of more but we pick the first same,
                # same as default reviewers algo
                review_group = review_group[0]

                rule_data = {
                    'rule_name':
                        rule.review_rule_name,
                    'rule_user_group_entry_id':
                        review_group.repo_review_rule_users_group_id,
                    'rule_user_group_name':
                        review_group.users_group.users_group_name,
                    'rule_user_group_members':
                        [x.user.username for x in review_group.users_group.members],
                    'rule_user_group_members_id':
                        [x.user.user_id for x in review_group.users_group.members],
                }
                # e.g {'vote_rule': -1, 'mandatory': True}
                rule_data.update(review_group.rule_data())

                reviewer.rule_data = rule_data

        Session().add(reviewer)
    Session().flush()

    # Set approval status to "Under Review" for all commits which are
    # part of this pull request.
    ChangesetStatusModel().set_status(
        repo=target_repo,
        status=ChangesetStatus.STATUS_UNDER_REVIEW,
        user=created_by_user,
        pull_request=pull_request
    )
    # we commit early at this point. This has to do with a fact
    # that before queries do some row-locking. And because of that
    # we need to commit and finish transaction before below validate call
    # that for large repos could be long resulting in long row locks
    Session().commit()

    # prepare workspace, and run initial merge simulation. Set state during that
    # operation
    pull_request = PullRequest.get(pull_request.pull_request_id)

    # set as merging, for simulation, and if finished to created so we mark
    # simulation is working fine
    with pull_request.set_state(PullRequest.STATE_MERGING,
                                final_state=PullRequest.STATE_CREATED):
        MergeCheck.validate(
            pull_request, auth_user=auth_user, translator=translator)

    self.notify_reviewers(pull_request, reviewer_ids)
    self.trigger_pull_request_hook(
        pull_request, created_by_user, 'create')

    creation_data = pull_request.get_api_data(with_merge_state=False)
    self._log_audit_action(
        'repo.pull_request.create', {'data': creation_data},
        auth_user, pull_request)

    return pull_request
527 527
def trigger_pull_request_hook(self, pull_request, user, action, data=None):
    """
    Dispatch the log/integration hook that matches `action`.

    :param pull_request: pull request (or id) the hook refers to
    :param user: acting user; username is forwarded to the hook
    :param action: one of 'create', 'merge', 'close',
        'review_status_change', 'update', 'comment'; any other value is
        a silent no-op
    :param data: extra payload forwarded to the hook; for 'comment' it
        must contain a 'comment' key (a None `data` would raise here)
    """
    pull_request = self.__get_pull_request(pull_request)
    target_scm = pull_request.target_repo.scm_instance()
    if action == 'create':
        trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
    elif action == 'merge':
        trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
    elif action == 'close':
        trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
    elif action == 'review_status_change':
        trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
    elif action == 'update':
        trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
    elif action == 'comment':
        # dummy hook ! for comment. We want this function to handle all cases
        def trigger_hook(*args, **kwargs):
            pass
        comment = data['comment']
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
    else:
        return

    trigger_hook(
        username=user.username,
        repo_name=pull_request.target_repo.repo_name,
        repo_alias=target_scm.alias,
        pull_request=pull_request,
        data=data)
556 556
557 557 def _get_commit_ids(self, pull_request):
558 558 """
559 559 Return the commit ids of the merged pull request.
560 560
561 561 This method is not dealing correctly yet with the lack of autoupdates
562 562 nor with the implicit target updates.
563 563 For example: if a commit in the source repo is already in the target it
564 564 will be reported anyways.
565 565 """
566 566 merge_rev = pull_request.merge_rev
567 567 if merge_rev is None:
568 568 raise ValueError('This pull request was not merged yet')
569 569
570 570 commit_ids = list(pull_request.revisions)
571 571 if merge_rev not in commit_ids:
572 572 commit_ids.append(merge_rev)
573 573
574 574 return commit_ids
575 575
def merge_repo(self, pull_request, user, extras):
    """
    Merge a pull request into its target repository and, on success,
    close it with a comment and write an audit log entry.

    :param pull_request: `PullRequest` to merge
    :param user: user performing the merge
    :param extras: dict of hook extras forwarded to the merge operation
    :returns: merge state object; `executed` tells whether the merge ran
    """
    log.debug("Merging pull request %s", pull_request.pull_request_id)
    # mark this merge as internally triggered for the hooks
    extras['user_agent'] = 'internal-merge'
    merge_state = self._merge_pull_request(pull_request, user, extras)
    if merge_state.executed:
        log.debug("Merge was successful, updating the pull request comments.")
        self._comment_and_close_pr(pull_request, user, merge_state)

        self._log_audit_action(
            'repo.pull_request.merge',
            {'merge_state': merge_state.__dict__},
            user, pull_request)

    else:
        # logging.Logger.warn is a deprecated alias of warning()
        log.warning("Merge failed, not updating the pull request.")
    return merge_state
592 592
def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
    """
    Execute the actual VCS merge of a pull request into its target repo.

    :param pull_request: `PullRequest` to merge
    :param user: user performing the merge; used as commit author info
    :param extras: hook extras dict, passed to the callback daemon
    :param merge_msg: optional merge message template; falls back to
        `vcs_settings.MERGE_MESSAGE_TMPL`
    :returns: merge state object returned by the VCS backend's merge()
    """
    target_vcs = pull_request.target_repo.scm_instance()
    source_vcs = pull_request.source_repo.scm_instance()

    # render the merge commit message from the (possibly custom) template
    message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
        pr_id=pull_request.pull_request_id,
        pr_title=pull_request.title,
        source_repo=source_vcs.name,
        source_ref_name=pull_request.source_ref_parts.name,
        target_repo=target_vcs.name,
        target_ref_name=pull_request.target_ref_parts.name,
    )

    workspace_id = self._workspace_id(pull_request)
    repo_id = pull_request.target_repo.repo_id
    use_rebase = self._use_rebase_for_merging(pull_request)
    close_branch = self._close_branch_before_merging(pull_request)

    # refresh the target reference before merging (see _refresh_reference)
    target_ref = self._refresh_reference(
        pull_request.target_ref_parts, target_vcs)

    # the merge must run with RhodeCode hooks reachable, hence the daemon
    callback_daemon, extras = prepare_callback_daemon(
        extras, protocol=vcs_settings.HOOKS_PROTOCOL,
        host=vcs_settings.HOOKS_HOST,
        use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

    with callback_daemon:
        # TODO: johbo: Implement a clean way to run a config_override
        # for a single call.
        target_vcs.config.set(
            'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

        user_name = user.short_contact
        merge_state = target_vcs.merge(
            repo_id, workspace_id, target_ref, source_vcs,
            pull_request.source_ref_parts,
            user_name=user_name, user_email=user.email,
            message=message, use_rebase=use_rebase,
            close_branch=close_branch)
    return merge_state
633 633
def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
    """
    After a successful merge: record the merge commit on the pull
    request, add a closing comment and invalidate target repo caches.

    :param merge_state: merge result; `merge_ref.commit_id` is stored
        as the pull request's `merge_rev`
    :param close_msg: optional closing comment text
    """
    pull_request.merge_rev = merge_state.merge_ref.commit_id
    pull_request.updated_on = datetime.datetime.now()
    close_msg = close_msg or 'Pull request merged and closed'

    # closing_pr=True flags this as the comment that closes the PR
    CommentsModel().create(
        text=safe_unicode(close_msg),
        repo=pull_request.target_repo.repo_id,
        user=user.user_id,
        pull_request=pull_request.pull_request_id,
        f_path=None,
        line_no=None,
        closing_pr=True
    )

    Session().add(pull_request)
    Session().flush()
    # TODO: paris: replace invalidation with less radical solution
    ScmModel().mark_for_invalidation(
        pull_request.target_repo.repo_name)
    self.trigger_pull_request_hook(pull_request, user, 'merge')
655 655
def has_valid_update_type(self, pull_request):
    """Return True when the PR's source ref type supports commit updates."""
    return pull_request.source_ref_parts.type in self.REF_TYPES
659 659
def update_commits(self, pull_request):
    """
    Get the updated list of commits for the pull request
    and return the new pull request version and the list
    of commits processed by this update action

    Flow:
      1. bail out for ref types that do not support updates
      2. resolve current source/target commits (rebuilding scm caches
         first) and detect what changed
      3. on a source change snapshot a new version; otherwise reuse the
         live PR row as the "version"
      4. re-compute the revision range and diffs, outdate stale comments
         and leave an automatic update comment when anything changed

    :param pull_request: pull request (or id) to update
    :returns: `UpdateResponse` carrying the `executed` flag, a failure
        `reason`, old/new objects and the calculated commit `changes`
    """
    pull_request = self.__get_pull_request(pull_request)
    source_ref_type = pull_request.source_ref_parts.type
    source_ref_name = pull_request.source_ref_parts.name
    source_ref_id = pull_request.source_ref_parts.commit_id

    target_ref_type = pull_request.target_ref_parts.type
    target_ref_name = pull_request.target_ref_parts.name
    target_ref_id = pull_request.target_ref_parts.commit_id

    if not self.has_valid_update_type(pull_request):
        log.debug("Skipping update of pull request %s due to ref type: %s",
                  pull_request, source_ref_type)
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.WRONG_REF_TYPE,
            old=pull_request, new=None, changes=None,
            source_changed=False, target_changed=False)

    # source repo
    source_repo = pull_request.source_repo.scm_instance()
    source_repo.count()  # cache rebuild

    try:
        source_commit = source_repo.get_commit(commit_id=source_ref_name)
    except CommitDoesNotExistError:
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.MISSING_SOURCE_REF,
            old=pull_request, new=None, changes=None,
            source_changed=False, target_changed=False)

    source_changed = source_ref_id != source_commit.raw_id

    # target repo
    target_repo = pull_request.target_repo.scm_instance()
    target_repo.count()  # cache rebuild

    try:
        target_commit = target_repo.get_commit(commit_id=target_ref_name)
    except CommitDoesNotExistError:
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.MISSING_TARGET_REF,
            old=pull_request, new=None, changes=None,
            source_changed=False, target_changed=False)
    target_changed = target_ref_id != target_commit.raw_id

    if not (source_changed or target_changed):
        log.debug("Nothing changed in pull request %s", pull_request)
        # NOTE(review): source_changed/target_changed look swapped in the
        # kwargs below, but both are always False on this path so the
        # swap has no observable effect — confirm intent before "fixing".
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.NO_CHANGE,
            old=pull_request, new=None, changes=None,
            source_changed=target_changed, target_changed=source_changed)

    change_in_found = 'target repo' if target_changed else 'source repo'
    log.debug('Updating pull request because of change in %s detected',
              change_in_found)

    # Finally there is a need for an update, in case of source change
    # we create a new version, else just an update
    if source_changed:
        pull_request_version = self._create_version_from_snapshot(pull_request)
        self._link_comments_to_version(pull_request_version)
    else:
        # target-only change: no new version; point the PR at its latest
        # stored version (if any) and treat the PR itself as the version
        try:
            ver = pull_request.versions[-1]
        except IndexError:
            ver = None

        pull_request.pull_request_version_id = \
            ver.pull_request_version_id if ver else None
        pull_request_version = pull_request

    try:
        if target_ref_type in self.REF_TYPES:
            target_commit = target_repo.get_commit(target_ref_name)
        else:
            target_commit = target_repo.get_commit(target_ref_id)
    except CommitDoesNotExistError:
        return UpdateResponse(
            executed=False,
            reason=UpdateFailureReason.MISSING_TARGET_REF,
            old=pull_request, new=None, changes=None,
            source_changed=source_changed, target_changed=target_changed)

    # re-compute commit ids
    old_commit_ids = pull_request.revisions
    pre_load = ["author", "branch", "date", "message"]
    commit_ranges = target_repo.compare(
        target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
        pre_load=pre_load)

    ancestor = source_repo.get_common_ancestor(
        source_commit.raw_id, target_commit.raw_id, target_repo)

    # refs are stored as 'type:name:commit_id'
    pull_request.source_ref = '%s:%s:%s' % (
        source_ref_type, source_ref_name, source_commit.raw_id)
    pull_request.target_ref = '%s:%s:%s' % (
        target_ref_type, target_ref_name, ancestor)

    pull_request.revisions = [
        commit.raw_id for commit in reversed(commit_ranges)]
    pull_request.updated_on = datetime.datetime.now()
    Session().add(pull_request)
    new_commit_ids = pull_request.revisions

    old_diff_data, new_diff_data = self._generate_update_diffs(
        pull_request, pull_request_version)

    # calculate commit and file changes
    changes = self._calculate_commit_id_changes(
        old_commit_ids, new_commit_ids)
    file_changes = self._calculate_file_changes(
        old_diff_data, new_diff_data)

    # set comments as outdated if DIFFS changed
    CommentsModel().outdate_comments(
        pull_request, old_diff_data=old_diff_data,
        new_diff_data=new_diff_data)

    commit_changes = (changes.added or changes.removed)
    file_node_changes = (
        file_changes.added or file_changes.modified or file_changes.removed)
    pr_has_changes = commit_changes or file_node_changes

    # Add an automatic comment to the pull request, in case
    # anything has changed
    if pr_has_changes:
        update_comment = CommentsModel().create(
            text=self._render_update_message(changes, file_changes),
            repo=pull_request.target_repo,
            user=pull_request.author,
            pull_request=pull_request,
            send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

        # Update status to "Under Review" for added commits
        for commit_id in changes.added:
            ChangesetStatusModel().set_status(
                repo=pull_request.source_repo,
                status=ChangesetStatus.STATUS_UNDER_REVIEW,
                comment=update_comment,
                user=pull_request.author,
                pull_request=pull_request,
                revision=commit_id)

    log.debug(
        'Updated pull request %s, added_ids: %s, common_ids: %s, '
        'removed_ids: %s', pull_request.pull_request_id,
        changes.added, changes.common, changes.removed)
    log.debug(
        'Updated pull request with the following file changes: %s',
        file_changes)

    log.info(
        "Updated pull request %s from commit %s to commit %s, "
        "stored new version %s of this pull request.",
        pull_request.pull_request_id, source_ref_id,
        pull_request.source_ref_parts.commit_id,
        pull_request_version.pull_request_version_id)
    Session().commit()
    self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

    return UpdateResponse(
        executed=True, reason=UpdateFailureReason.NONE,
        old=pull_request, new=pull_request_version, changes=changes,
        source_changed=source_changed, target_changed=target_changed)
829 833
def _create_version_from_snapshot(self, pull_request):
    """
    Snapshot the current state of `pull_request` into a new
    `PullRequestVersion` row and return it (flushed, not committed).
    """
    version = PullRequestVersion()
    version.created_on = datetime.datetime.now()
    version.pull_request = pull_request

    # copy the current pull request state field by field
    snapshot_fields = (
        'title', 'description', 'status', 'pull_request_state',
        'updated_on', 'user_id', 'source_repo', 'source_ref',
        'target_repo', 'target_ref',
        '_last_merge_source_rev', '_last_merge_target_rev',
        'last_merge_status', 'shadow_merge_ref', 'merge_rev',
        'reviewer_data', 'revisions',
    )
    for field in snapshot_fields:
        setattr(version, field, getattr(pull_request, field))

    Session().add(version)
    Session().flush()

    return version
857 861
def _generate_update_diffs(self, pull_request, pull_request_version):
    """
    Produce the (old, new) parsed diffs used to compare a pull request
    against one of its stored versions.

    :param pull_request: current pull request (the "new" side)
    :param pull_request_version: snapshot to diff against (the "old" side)
    :returns: tuple of prepared `diffs.DiffProcessor` instances
        (old_diff_data, new_diff_data)
    """
    # widen the context by whatever extra lines the comments machinery
    # needs (see CommentsModel.needed_extra_diff_context)
    diff_context = (
        self.DIFF_CONTEXT +
        CommentsModel.needed_extra_diff_context())
    hide_whitespace_changes = False

    # "old" diff: refs captured by the version snapshot
    source_repo = pull_request_version.source_repo
    source_ref_id = pull_request_version.source_ref_parts.commit_id
    target_ref_id = pull_request_version.target_ref_parts.commit_id
    old_diff = self._get_diff_from_pr_or_version(
        source_repo, source_ref_id, target_ref_id,
        hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

    # "new" diff: the pull request's current refs
    source_repo = pull_request.source_repo
    source_ref_id = pull_request.source_ref_parts.commit_id
    target_ref_id = pull_request.target_ref_parts.commit_id

    new_diff = self._get_diff_from_pr_or_version(
        source_repo, source_ref_id, target_ref_id,
        hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

    old_diff_data = diffs.DiffProcessor(old_diff)
    old_diff_data.prepare()
    new_diff_data = diffs.DiffProcessor(new_diff)
    new_diff_data.prepare()

    return old_diff_data, new_diff_data
885 889
def _link_comments_to_version(self, pull_request_version):
    """
    Link all unlinked comments of this pull request to the given version.

    Only comments with no version yet (``pull_request_version == None``)
    are touched, so comments already tied to an older version stay put.

    :param pull_request_version: The `PullRequestVersion` to which
        the comments shall be linked.

    """
    pull_request = pull_request_version.pull_request
    comments = ChangesetComment.query()\
        .filter(
            # TODO: johbo: Should we query for the repo at all here?
            # Pending decision on how comments of PRs are to be related
            # to either the source repo, the target repo or no repo at all.
            ChangesetComment.repo_id == pull_request.target_repo.repo_id,
            ChangesetComment.pull_request == pull_request,
            ChangesetComment.pull_request_version == None)\
        .order_by(ChangesetComment.comment_id.asc())

    # TODO: johbo: Find out why this breaks if it is done in a bulk
    # operation.
    for comment in comments:
        comment.pull_request_version_id = (
            pull_request_version.pull_request_version_id)
        Session().add(comment)
911 915
def _calculate_commit_id_changes(self, old_ids, new_ids):
    """
    Classify commit ids as added/common/removed between two revision
    lists, preserving the input ordering.

    :param old_ids: list of commit ids before the update
    :param new_ids: list of commit ids after the update
    :returns: ChangeTuple(added, common, removed, total) where `total`
        is the complete new list
    """
    # use sets for O(1) membership instead of O(n) list scans per id
    old_set = set(old_ids)
    new_set = set(new_ids)
    added = [x for x in new_ids if x not in old_set]
    common = [x for x in new_ids if x in old_set]
    removed = [x for x in old_ids if x not in new_set]
    total = new_ids
    return ChangeTuple(added, common, removed, total)
918 922
def _calculate_file_changes(self, old_diff_data, new_diff_data):
    """
    Compare two prepared DiffProcessor results and classify files as
    added / modified / removed between the old and the new diff.

    Files are matched by filename and compared via the md5 of their raw
    diff text, so any change in a file's diff counts as "modified".

    :returns: FileChangeTuple(added, modified, removed) of filenames
    """
    # map filename -> md5 of its raw diff in the OLD diff
    old_files = OrderedDict()
    for diff_data in old_diff_data.parsed_diff:
        old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

    added_files = []
    modified_files = []
    removed_files = []
    for diff_data in new_diff_data.parsed_diff:
        new_filename = diff_data['filename']
        new_hash = md5_safe(diff_data['raw_diff'])

        old_hash = old_files.get(new_filename)
        if not old_hash:
            # file is not present in old diff, means it's added
            added_files.append(new_filename)
        else:
            if new_hash != old_hash:
                modified_files.append(new_filename)
            # now remove a file from old, since we have seen it already
            del old_files[new_filename]

    # removed files is when there are present in old, but not in NEW,
    # since we remove old files that are present in new diff, left-overs
    # if any should be the removed files
    removed_files.extend(old_files.keys())

    return FileChangeTuple(added_files, modified_files, removed_files)
948 952
def _render_update_message(self, changes, file_changes):
    """
    render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
    so it's always looking the same disregarding on which default
    renderer system is using.

    :param changes: changes named tuple
    :param file_changes: file changes named tuple

    """
    under_review_label = ChangesetStatus.get_status_lbl(
        ChangesetStatus.STATUS_UNDER_REVIEW)

    all_changed_files = (
        file_changes.added + file_changes.modified + file_changes.removed)

    template_params = {
        'under_review_label': under_review_label,
        'added_commits': changes.added,
        'removed_commits': changes.removed,
        'changed_files': all_changed_files,
        'added_files': file_changes.added,
        'modified_files': file_changes.modified,
        'removed_files': file_changes.removed,
    }
    return RstTemplateRenderer().render(
        'pull_request_update.mako', **template_params)
976 980
def edit(self, pull_request, title, description, description_renderer, user):
    """
    Update title/description of a pull request and audit-log the change.

    :param title: new title; only applied when non-empty (description and
        renderer are applied unconditionally)
    :param user: acting user, recorded in the audit log
    :raises ValueError: when the pull request is already closed
    """
    pull_request = self.__get_pull_request(pull_request)
    # capture pre-edit state for the audit log
    old_data = pull_request.get_api_data(with_merge_state=False)
    if pull_request.is_closed():
        raise ValueError('This pull request is closed')
    if title:
        pull_request.title = title
    pull_request.description = description
    pull_request.updated_on = datetime.datetime.now()
    pull_request.description_renderer = description_renderer
    Session().add(pull_request)
    self._log_audit_action(
        'repo.pull_request.edit', {'old_data': old_data},
        user, pull_request)
991 995
def update_reviewers(self, pull_request, reviewer_data, user):
    """
    Update the reviewers in the pull request

    :param pull_request: the pr to update
    :param reviewer_data: list of tuples
        [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
    :param user: acting user, recorded in the audit log entries
    :returns: tuple of (ids_to_add, ids_to_remove)
    :raises ValueError: when the pull request is already closed
    """
    pull_request = self.__get_pull_request(pull_request)
    if pull_request.is_closed():
        raise ValueError('This pull request is closed')

    # normalize the wanted reviewer set: user id -> metadata
    wanted_reviewers = {}
    for user_id, reasons, mandatory, rules in reviewer_data:
        if isinstance(user_id, (int, compat.string_types)):
            user_id = self._get_user(user_id).user_id
        wanted_reviewers[user_id] = {
            'reasons': reasons, 'mandatory': mandatory}

    reviewers_ids = set(wanted_reviewers.keys())
    current_reviewers = PullRequestReviewers.query()\
        .filter(PullRequestReviewers.pull_request ==
                pull_request).all()
    current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

    ids_to_add = reviewers_ids.difference(current_reviewers_ids)
    ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

    log.debug("Adding %s reviewers", ids_to_add)
    log.debug("Removing %s reviewers", ids_to_remove)
    changed = False
    added_audit_reviewers = []
    removed_audit_reviewers = []

    for uid in ids_to_add:
        changed = True
        _usr = self._get_user(uid)
        reviewer = PullRequestReviewers()
        reviewer.user = _usr
        reviewer.pull_request = pull_request
        reviewer.reasons = wanted_reviewers[uid]['reasons']
        # NOTE(marcink): mandatory shouldn't be changed now
        # reviewer.mandatory = reviewers[uid]['reasons']
        Session().add(reviewer)
        added_audit_reviewers.append(reviewer.get_dict())

    for uid in ids_to_remove:
        changed = True
        # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
        # that prevents and fixes cases that we added the same reviewer twice.
        # this CAN happen due to the lack of DB checks
        reviewers_to_delete = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.user_id == uid,
                    PullRequestReviewers.pull_request == pull_request)\
            .all()

        for obj in reviewers_to_delete:
            # BUGFIX: these rows are being deleted, so record them in
            # removed_audit_reviewers (previously they were appended to
            # added_audit_reviewers, which produced 'reviewer.add' audit
            # entries for deletions and never emitted 'reviewer.delete')
            removed_audit_reviewers.append(obj.get_dict())
            Session().delete(obj)

    if changed:
        Session().expire_all()
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)

    # finally store audit logs
    for user_data in added_audit_reviewers:
        self._log_audit_action(
            'repo.pull_request.reviewer.add', {'data': user_data},
            user, pull_request)
    for user_data in removed_audit_reviewers:
        self._log_audit_action(
            'repo.pull_request.reviewer.delete', {'old_data': user_data},
            user, pull_request)

    self.notify_reviewers(pull_request, ids_to_add)
    return ids_to_add, ids_to_remove
1069 1073
def get_url(self, pull_request, request=None, permalink=False):
    """
    Return the URL for showing the given pull request.

    :param request: pyramid request; the current request is used when
        omitted
    :param permalink: if True return the id-based global permalink route
    """
    request = request or get_current_request()

    if permalink:
        return request.route_url(
            'pull_requests_global',
            pull_request_id=pull_request.pull_request_id,)
    return request.route_url(
        'pullrequest_show',
        repo_name=safe_str(pull_request.target_repo.repo_name),
        pull_request_id=pull_request.pull_request_id,)
1082 1086
def get_shadow_clone_url(self, pull_request, request=None):
    """
    Returns qualified url pointing to the shadow repository. If this pull
    request is closed there is no shadow repository and ``None`` will be
    returned.
    """
    # closed pull requests have no shadow repository
    if pull_request.is_closed():
        return None

    pr_url = urllib.unquote(self.get_url(pull_request, request=request))
    return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1094 1098
def notify_reviewers(self, pull_request, reviewers_ids):
    """
    Create in-app notifications and emails for the given reviewers of a
    pull request. No-op when `reviewers_ids` is empty.

    :param pull_request: the pull request the notification is about
    :param reviewers_ids: iterable of reviewer user ids to notify
    """
    # notification to reviewers
    if not reviewers_ids:
        return

    pull_request_obj = pull_request
    # get the current participants of this pull request
    recipients = reviewers_ids
    notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

    pr_source_repo = pull_request_obj.source_repo
    pr_target_repo = pull_request_obj.target_repo

    pr_url = h.route_url('pullrequest_show',
                         repo_name=pr_target_repo.repo_name,
                         pull_request_id=pull_request_obj.pull_request_id,)

    # set some variables for email notification
    pr_target_repo_url = h.route_url(
        'repo_summary', repo_name=pr_target_repo.repo_name)

    pr_source_repo_url = h.route_url(
        'repo_summary', repo_name=pr_source_repo.repo_name)

    # pull request specifics
    pull_request_commits = [
        (x.raw_id, x.message)
        for x in map(pr_source_repo.get_commit, pull_request.revisions)]

    # template context shared by the notification and the email
    kwargs = {
        'user': pull_request.author,
        'pull_request': pull_request_obj,
        'pull_request_commits': pull_request_commits,

        'pull_request_target_repo': pr_target_repo,
        'pull_request_target_repo_url': pr_target_repo_url,

        'pull_request_source_repo': pr_source_repo,
        'pull_request_source_repo_url': pr_source_repo_url,

        'pull_request_url': pr_url,
    }

    # pre-generate the subject for notification itself
    (subject,
     _h, _e,  # we don't care about those
     body_plaintext) = EmailNotificationModel().render_email(
        notification_type, **kwargs)

    # create notification objects, and emails
    NotificationModel().create(
        created_by=pull_request.author,
        notification_subject=subject,
        notification_body=body_plaintext,
        notification_type=notification_type,
        recipients=recipients,
        email_kwargs=kwargs,
    )
1153 1157
1154 1158 def delete(self, pull_request, user):
1155 1159 pull_request = self.__get_pull_request(pull_request)
1156 1160 old_data = pull_request.get_api_data(with_merge_state=False)
1157 1161 self._cleanup_merge_workspace(pull_request)
1158 1162 self._log_audit_action(
1159 1163 'repo.pull_request.delete', {'old_data': old_data},
1160 1164 user, pull_request)
1161 1165 Session().delete(pull_request)
1162 1166
1163 1167 def close_pull_request(self, pull_request, user):
1164 1168 pull_request = self.__get_pull_request(pull_request)
1165 1169 self._cleanup_merge_workspace(pull_request)
1166 1170 pull_request.status = PullRequest.STATUS_CLOSED
1167 1171 pull_request.updated_on = datetime.datetime.now()
1168 1172 Session().add(pull_request)
1169 1173 self.trigger_pull_request_hook(
1170 1174 pull_request, pull_request.author, 'close')
1171 1175
1172 1176 pr_data = pull_request.get_api_data(with_merge_state=False)
1173 1177 self._log_audit_action(
1174 1178 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1175 1179
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close `pull_request` with a status-changing comment.

        The closing status is APPROVED when the calculated review status is
        approved, otherwise REJECTED. Returns ``(comment, status)``.
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        # flush so the new comment/status are visible to event subscribers
        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(
                pull_request, user, 'review_status_change',
                data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status
1232 1236
1233 1237 def merge_status(self, pull_request, translator=None,
1234 1238 force_shadow_repo_refresh=False):
1235 1239 _ = translator or get_current_request().translate
1236 1240
1237 1241 if not self._is_merge_enabled(pull_request):
1238 1242 return False, _('Server-side pull request merging is disabled.')
1239 1243 if pull_request.is_closed():
1240 1244 return False, _('This pull request is closed.')
1241 1245 merge_possible, msg = self._check_repo_requirements(
1242 1246 target=pull_request.target_repo, source=pull_request.source_repo,
1243 1247 translator=_)
1244 1248 if not merge_possible:
1245 1249 return merge_possible, msg
1246 1250
1247 1251 try:
1248 1252 resp = self._try_merge(
1249 1253 pull_request,
1250 1254 force_shadow_repo_refresh=force_shadow_repo_refresh)
1251 1255 log.debug("Merge response: %s", resp)
1252 1256 status = resp.possible, resp.merge_status_message
1253 1257 except NotImplementedError:
1254 1258 status = False, _('Pull request merging is not supported.')
1255 1259
1256 1260 return status
1257 1261
1258 1262 def _check_repo_requirements(self, target, source, translator):
1259 1263 """
1260 1264 Check if `target` and `source` have compatible requirements.
1261 1265
1262 1266 Currently this is just checking for largefiles.
1263 1267 """
1264 1268 _ = translator
1265 1269 target_has_largefiles = self._has_largefiles(target)
1266 1270 source_has_largefiles = self._has_largefiles(source)
1267 1271 merge_possible = True
1268 1272 message = u''
1269 1273
1270 1274 if target_has_largefiles != source_has_largefiles:
1271 1275 merge_possible = False
1272 1276 if source_has_largefiles:
1273 1277 message = _(
1274 1278 'Target repository large files support is disabled.')
1275 1279 else:
1276 1280 message = _(
1277 1281 'Source repository large files support is disabled.')
1278 1282
1279 1283 return merge_possible, message
1280 1284
1281 1285 def _has_largefiles(self, repo):
1282 1286 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1283 1287 'extensions', 'largefiles')
1284 1288 return largefiles_ui and largefiles_ui[0].active
1285 1289
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        This is a dry-run: refresh the target reference, check repo locking,
        then either reuse the merge state cached on the pull request or
        re-run the merge simulation. Returns a ``MergeResponse``.
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached state is still valid: rebuild the response from the
            # values stored on the pull request
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    # NOTE(review): separator '\n,' looks transposed — ',\n'
                    # was probably intended; confirm before changing.
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })
            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1334 1338
    def _refresh_reference(self, reference, vcs_repository):
        """
        Re-resolve `reference` against `vcs_repository` and return a new
        ``Reference`` carrying the current commit id.
        """
        # Refs in UPDATABLE_REF_TYPES are resolved by name (presumably
        # branch-like, movable refs — see class constant); everything else
        # is resolved by its stored, immutable commit id.
        if reference.type in self.UPDATABLE_REF_TYPES:
            name_or_id = reference.name
        else:
            name_or_id = reference.commit_id

        vcs_repository.count()  # cache rebuild
        refreshed_commit = vcs_repository.get_commit(name_or_id)
        refreshed_reference = Reference(
            reference.type, reference.name, refreshed_commit.raw_id)
        return refreshed_reference
1344 1350
1345 1351 def _needs_merge_state_refresh(self, pull_request, target_reference):
1346 1352 return not(
1347 1353 pull_request.revisions and
1348 1354 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1349 1355 target_reference.commit_id == pull_request._last_merge_target_rev)
1350 1356
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Re-run the merge simulation (dry run) in the shadow repository and
        persist the resulting state on the pull request. Returns the
        ``MergeResponse`` from the vcs backend.
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            # remember which revisions this result was computed for, so
            # _needs_merge_state_refresh can reuse it until they change
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1374 1380
1375 1381 def _workspace_id(self, pull_request):
1376 1382 workspace_id = 'pr-%s' % pull_request.pull_request_id
1377 1383 return workspace_id
1378 1384
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None, translator=None):
        """
        Return a dict describing `repo` (owner, name, link, description)
        plus its selectable refs, shaped for the pull-request source/target
        selectors (including a select2-ready ``select2_refs`` structure).
        """
        from rhodecode.model.repo import RepoModel

        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark, translator=translator)

        # reshape (refs, group-name) pairs into select2 option groups
        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.first_name,
                'lastname': repo.user.last_name,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            'name': repo.repo_name,
            'link': RepoModel().get_url(repo),
            'description': h.chop_at_smart(repo.description_safe, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }
1410 1416
1411 1417 def generate_pullrequest_title(self, source, source_ref, target):
1412 1418 return u'{source}#{at_ref} to {target}'.format(
1413 1419 source=source,
1414 1420 at_ref=source_ref,
1415 1421 target=target,
1416 1422 )
1417 1423
1418 1424 def _cleanup_merge_workspace(self, pull_request):
1419 1425 # Merging related cleanup
1420 1426 repo_id = pull_request.target_repo.repo_id
1421 1427 target_scm = pull_request.target_repo.scm_instance()
1422 1428 workspace_id = self._workspace_id(pull_request)
1423 1429
1424 1430 try:
1425 1431 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1426 1432 except NotImplementedError:
1427 1433 pass
1428 1434
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :raises CommitDoesNotExistError: when an explicitly requested ref
            cannot be found in any source
        :raises EmptyRepositoryError: when the repository has no commits
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                # ref_key format: '<type>:<name>:<commit_id>'
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # select the first ref matching the requested commit/name
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                # the caller explicitly asked for a ref we could not find
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # fall back to the default branch head
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
1494 1500
1495 1501 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1496 1502 hide_whitespace_changes, diff_context):
1497 1503
1498 1504 return self._get_diff_from_pr_or_version(
1499 1505 source_repo, source_ref_id, target_ref_id,
1500 1506 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1501 1507
1502 1508 def _get_diff_from_pr_or_version(
1503 1509 self, source_repo, source_ref_id, target_ref_id,
1504 1510 hide_whitespace_changes, diff_context):
1505 1511
1506 1512 target_commit = source_repo.get_commit(
1507 1513 commit_id=safe_str(target_ref_id))
1508 1514 source_commit = source_repo.get_commit(
1509 1515 commit_id=safe_str(source_ref_id))
1510 1516 if isinstance(source_repo, Repository):
1511 1517 vcs_repo = source_repo.scm_instance()
1512 1518 else:
1513 1519 vcs_repo = source_repo
1514 1520
1515 1521 # TODO: johbo: In the context of an update, we cannot reach
1516 1522 # the old commit anymore with our normal mechanisms. It needs
1517 1523 # some sort of special support in the vcs layer to avoid this
1518 1524 # workaround.
1519 1525 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1520 1526 vcs_repo.alias == 'git'):
1521 1527 source_commit.raw_id = safe_str(source_ref_id)
1522 1528
1523 1529 log.debug('calculating diff between '
1524 1530 'source_ref:%s and target_ref:%s for repo `%s`',
1525 1531 target_ref_id, source_ref_id,
1526 1532 safe_unicode(vcs_repo.path))
1527 1533
1528 1534 vcs_diff = vcs_repo.get_diff(
1529 1535 commit1=target_commit, commit2=source_commit,
1530 1536 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1531 1537 return vcs_diff
1532 1538
1533 1539 def _is_merge_enabled(self, pull_request):
1534 1540 return self._get_general_setting(
1535 1541 pull_request, 'rhodecode_pr_merge_enabled')
1536 1542
1537 1543 def _use_rebase_for_merging(self, pull_request):
1538 1544 repo_type = pull_request.target_repo.repo_type
1539 1545 if repo_type == 'hg':
1540 1546 return self._get_general_setting(
1541 1547 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1542 1548 elif repo_type == 'git':
1543 1549 return self._get_general_setting(
1544 1550 pull_request, 'rhodecode_git_use_rebase_for_merging')
1545 1551
1546 1552 return False
1547 1553
1548 1554 def _close_branch_before_merging(self, pull_request):
1549 1555 repo_type = pull_request.target_repo.repo_type
1550 1556 if repo_type == 'hg':
1551 1557 return self._get_general_setting(
1552 1558 pull_request, 'rhodecode_hg_close_branch_before_merging')
1553 1559 elif repo_type == 'git':
1554 1560 return self._get_general_setting(
1555 1561 pull_request, 'rhodecode_git_close_branch_before_merging')
1556 1562
1557 1563 return False
1558 1564
1559 1565 def _get_general_setting(self, pull_request, settings_key, default=False):
1560 1566 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1561 1567 settings = settings_model.get_general_settings()
1562 1568 return settings.get(settings_key, default)
1563 1569
1564 1570 def _log_audit_action(self, action, action_data, user, pull_request):
1565 1571 audit_logger.store(
1566 1572 action=action,
1567 1573 action_data=action_data,
1568 1574 user=user,
1569 1575 repo=pull_request.target_repo)
1570 1576
1571 1577 def get_reviewer_functions(self):
1572 1578 """
1573 1579 Fetches functions for validation and fetching default reviewers.
1574 1580 If available we use the EE package, else we fallback to CE
1575 1581 package functions
1576 1582 """
1577 1583 try:
1578 1584 from rc_reviewers.utils import get_default_reviewers_data
1579 1585 from rc_reviewers.utils import validate_default_reviewers
1580 1586 except ImportError:
1581 1587 from rhodecode.apps.repository.utils import get_default_reviewers_data
1582 1588 from rhodecode.apps.repository.utils import validate_default_reviewers
1583 1589
1584 1590 return get_default_reviewers_data, validate_default_reviewers
1585 1591
1586 1592
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        # overall review status of the PR (set by validate())
        self.review_status = None
        # whether the simulated merge succeeded (set by validate())
        self.merge_possible = None
        self.merge_msg = ''
        # True once any check has failed
        self.failed = None
        # list of [error_type, message] pairs, in the order checks ran
        self.errors = []
        # error_key -> {details, error_type, message}
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        """Record a failed check (`error_type` is 'error' or 'warning')."""
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks for `pull_request` as `auth_user` and return the
        populated MergeCheck. With `fail_early`, stop at the first failed
        check instead of running the later (more expensive) ones.
        """
        _ = translator
        merge_check = cls()

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, auth_user)
        if not user_allowed_to_merge:
            # BUG FIX: previously logged "approval is pending" here, which is
            # the review-check message; this branch is a permission failure.
            log.debug("MergeCheck: cannot merge, no merge permission.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Return a dict describing how the merge would be performed
        (merge strategy, optional branch close/delete message).
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
1733 1739
1734 1740
# Result records returned by calculations elsewhere in this module
# (fields per their names; see call sites for exact semantics).
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ('added', 'common', 'removed', 'total'))

FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ('added', 'modified', 'removed'))
@@ -1,1886 +1,1888 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33 import functools
34 34
35 35 import mock
36 36 import pyramid.testing
37 37 import pytest
38 38 import colander
39 39 import requests
40 40 import pyramid.paster
41 41
42 42 import rhodecode
43 43 from rhodecode.lib.utils2 import AttributeDict
44 44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 45 from rhodecode.model.comment import CommentsModel
46 46 from rhodecode.model.db import (
47 47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.pull_request import PullRequestModel
51 51 from rhodecode.model.repo import RepoModel
52 52 from rhodecode.model.repo_group import RepoGroupModel
53 53 from rhodecode.model.user import UserModel
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55 from rhodecode.model.user_group import UserGroupModel
56 56 from rhodecode.model.integration import IntegrationModel
57 57 from rhodecode.integrations import integration_type_registry
58 58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 59 from rhodecode.lib.utils import repo2db_mapper
60 60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 61 from rhodecode.lib.vcs.backends import get_backend
62 62 from rhodecode.lib.vcs.nodes import FileNode
63 63 from rhodecode.tests import (
64 64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 66 TEST_USER_REGULAR_PASS)
67 67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 68 from rhodecode.tests.fixture import Fixture
69 69 from rhodecode.config import utils as config_utils
70 70
71 71 def _split_comma(value):
72 72 return value.split(',')
73 73
74 74
75 75 def pytest_addoption(parser):
76 76 parser.addoption(
77 77 '--keep-tmp-path', action='store_true',
78 78 help="Keep the test temporary directories")
79 79 parser.addoption(
80 80 '--backends', action='store', type=_split_comma,
81 81 default=['git', 'hg', 'svn'],
82 82 help="Select which backends to test for backend specific tests.")
83 83 parser.addoption(
84 84 '--dbs', action='store', type=_split_comma,
85 85 default=['sqlite'],
86 86 help="Select which database to test for database specific tests. "
87 87 "Possible options are sqlite,postgres,mysql")
88 88 parser.addoption(
89 89 '--appenlight', '--ae', action='store_true',
90 90 help="Track statistics in appenlight.")
91 91 parser.addoption(
92 92 '--appenlight-api-key', '--ae-key',
93 93 help="API key for Appenlight.")
94 94 parser.addoption(
95 95 '--appenlight-url', '--ae-url',
96 96 default="https://ae.rhodecode.com",
97 97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
98 98 parser.addoption(
99 99 '--sqlite-connection-string', action='store',
100 100 default='', help="Connection string for the dbs tests with SQLite")
101 101 parser.addoption(
102 102 '--postgres-connection-string', action='store',
103 103 default='', help="Connection string for the dbs tests with Postgres")
104 104 parser.addoption(
105 105 '--mysql-connection-string', action='store',
106 106 default='', help="Connection string for the dbs tests with MySQL")
107 107 parser.addoption(
108 108 '--repeat', type=int, default=100,
109 109 help="Number of repetitions in performance tests.")
110 110
111 111
112 112 def pytest_configure(config):
113 113 from rhodecode.config import patches
114 114
115 115
116 116 def pytest_collection_modifyitems(session, config, items):
117 117 # nottest marked, compare nose, used for transition from nose to pytest
118 118 remaining = [
119 119 i for i in items if getattr(i.obj, '__test__', True)]
120 120 items[:] = remaining
121 121
122 122
123 123 def pytest_generate_tests(metafunc):
124 124 # Support test generation based on --backend parameter
125 125 if 'backend_alias' in metafunc.fixturenames:
126 126 backends = get_backends_from_metafunc(metafunc)
127 127 scope = None
128 128 if not backends:
129 129 pytest.skip("Not enabled for any of selected backends")
130 130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 131 elif hasattr(metafunc.function, 'backends'):
132 132 backends = get_backends_from_metafunc(metafunc)
133 133 if not backends:
134 134 pytest.skip("Not enabled for any of selected backends")
135 135
136 136
137 137 def get_backends_from_metafunc(metafunc):
138 138 requested_backends = set(metafunc.config.getoption('--backends'))
139 139 if hasattr(metafunc.function, 'backends'):
140 140 # Supported backends by this test function, created from
141 141 # pytest.mark.backends
142 142 backends = metafunc.definition.get_closest_marker('backends').args
143 143 elif hasattr(metafunc.cls, 'backend_alias'):
144 144 # Support class attribute "backend_alias", this is mainly
145 145 # for legacy reasons for tests not yet using pytest.mark.backends
146 146 backends = [metafunc.cls.backend_alias]
147 147 else:
148 148 backends = metafunc.config.getoption('--backends')
149 149 return requested_backends.intersection(backends)
150 150
151 151
152 152 @pytest.fixture(scope='session', autouse=True)
153 153 def activate_example_rcextensions(request):
154 154 """
155 155 Patch in an example rcextensions module which verifies passed in kwargs.
156 156 """
157 157 from rhodecode.config import rcextensions
158 158
159 159 old_extensions = rhodecode.EXTENSIONS
160 160 rhodecode.EXTENSIONS = rcextensions
161 161 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
162 162
163 163 @request.addfinalizer
164 164 def cleanup():
165 165 rhodecode.EXTENSIONS = old_extensions
166 166
167 167
168 168 @pytest.fixture
169 169 def capture_rcextensions():
170 170 """
171 171 Returns the recorded calls to entry points in rcextensions.
172 172 """
173 173 calls = rhodecode.EXTENSIONS.calls
174 174 calls.clear()
175 175 # Note: At this moment, it is still the empty dict, but that will
176 176 # be filled during the test run and since it is a reference this
177 177 # is enough to make it work.
178 178 return calls
179 179
180 180
181 181 @pytest.fixture(scope='session')
182 182 def http_environ_session():
183 183 """
184 184 Allow to use "http_environ" in session scope.
185 185 """
186 186 return plain_http_environ()
187 187
188 188
189 189 def plain_http_host_stub():
190 190 """
191 191 Value of HTTP_HOST in the test run.
192 192 """
193 193 return 'example.com:80'
194 194
195 195
196 196 @pytest.fixture
197 197 def http_host_stub():
198 198 """
199 199 Value of HTTP_HOST in the test run.
200 200 """
201 201 return plain_http_host_stub()
202 202
203 203
204 204 def plain_http_host_only_stub():
205 205 """
206 206 Value of HTTP_HOST in the test run.
207 207 """
208 208 return plain_http_host_stub().split(':')[0]
209 209
210 210
211 211 @pytest.fixture
212 212 def http_host_only_stub():
213 213 """
214 214 Value of HTTP_HOST in the test run.
215 215 """
216 216 return plain_http_host_only_stub()
217 217
218 218
219 219 def plain_http_environ():
220 220 """
221 221 HTTP extra environ keys.
222 222
223 223 User by the test application and as well for setting up the pylons
224 224 environment. In the case of the fixture "app" it should be possible
225 225 to override this for a specific test case.
226 226 """
227 227 return {
228 228 'SERVER_NAME': plain_http_host_only_stub(),
229 229 'SERVER_PORT': plain_http_host_stub().split(':')[1],
230 230 'HTTP_HOST': plain_http_host_stub(),
231 231 'HTTP_USER_AGENT': 'rc-test-agent',
232 232 'REQUEST_METHOD': 'GET'
233 233 }
234 234
235 235
236 236 @pytest.fixture
237 237 def http_environ():
238 238 """
239 239 HTTP extra environ keys.
240 240
241 241 User by the test application and as well for setting up the pylons
242 242 environment. In the case of the fixture "app" it should be possible
243 243 to override this for a specific test case.
244 244 """
245 245 return plain_http_environ()
246 246
247 247
248 248 @pytest.fixture(scope='session')
249 249 def baseapp(ini_config, vcsserver, http_environ_session):
250 250 from rhodecode.lib.pyramid_utils import get_app_config
251 251 from rhodecode.config.middleware import make_pyramid_app
252 252
253 253 print("Using the RhodeCode configuration:{}".format(ini_config))
254 254 pyramid.paster.setup_logging(ini_config)
255 255
256 256 settings = get_app_config(ini_config)
257 257 app = make_pyramid_app({'__file__': ini_config}, **settings)
258 258
259 259 return app
260 260
261 261
262 262 @pytest.fixture(scope='function')
263 263 def app(request, config_stub, baseapp, http_environ):
264 264 app = CustomTestApp(
265 265 baseapp,
266 266 extra_environ=http_environ)
267 267 if request.cls:
268 268 request.cls.app = app
269 269 return app
270 270
271 271
272 272 @pytest.fixture(scope='session')
273 273 def app_settings(baseapp, ini_config):
274 274 """
275 275 Settings dictionary used to create the app.
276 276
277 277 Parses the ini file and passes the result through the sanitize and apply
278 278 defaults mechanism in `rhodecode.config.middleware`.
279 279 """
280 280 return baseapp.config.get_settings()
281 281
282 282
283 283 @pytest.fixture(scope='session')
284 284 def db_connection(ini_settings):
285 285 # Initialize the database connection.
286 286 config_utils.initialize_database(ini_settings)
287 287
288 288
289 289 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
290 290
291 291
292 292 def _autologin_user(app, *args):
293 293 session = login_user_session(app, *args)
294 294 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
295 295 return LoginData(csrf_token, session['rhodecode_user'])
296 296
297 297
298 298 @pytest.fixture
299 299 def autologin_user(app):
300 300 """
301 301 Utility fixture which makes sure that the admin user is logged in
302 302 """
303 303 return _autologin_user(app)
304 304
305 305
306 306 @pytest.fixture
307 307 def autologin_regular_user(app):
308 308 """
309 309 Utility fixture which makes sure that the regular user is logged in
310 310 """
311 311 return _autologin_user(
312 312 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
313 313
314 314
315 315 @pytest.fixture(scope='function')
316 316 def csrf_token(request, autologin_user):
317 317 return autologin_user.csrf_token
318 318
319 319
320 320 @pytest.fixture(scope='function')
321 321 def xhr_header(request):
322 322 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
323 323
324 324
325 325 @pytest.fixture
326 326 def real_crypto_backend(monkeypatch):
327 327 """
328 328 Switch the production crypto backend on for this test.
329 329
330 330 During the test run the crypto backend is replaced with a faster
331 331 implementation based on the MD5 algorithm.
332 332 """
333 333 monkeypatch.setattr(rhodecode, 'is_test', False)
334 334
335 335
336 336 @pytest.fixture(scope='class')
337 337 def index_location(request, baseapp):
338 338 index_location = baseapp.config.get_settings()['search.location']
339 339 if request.cls:
340 340 request.cls.index_location = index_location
341 341 return index_location
342 342
343 343
344 344 @pytest.fixture(scope='session', autouse=True)
345 345 def tests_tmp_path(request):
346 346 """
347 347 Create temporary directory to be used during the test session.
348 348 """
349 349 if not os.path.exists(TESTS_TMP_PATH):
350 350 os.makedirs(TESTS_TMP_PATH)
351 351
352 352 if not request.config.getoption('--keep-tmp-path'):
353 353 @request.addfinalizer
354 354 def remove_tmp_path():
355 355 shutil.rmtree(TESTS_TMP_PATH)
356 356
357 357 return TESTS_TMP_PATH
358 358
359 359
360 360 @pytest.fixture
361 361 def test_repo_group(request):
362 362 """
363 363 Create a temporary repository group, and destroy it after
364 364 usage automatically
365 365 """
366 366 fixture = Fixture()
367 367 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
368 368 repo_group = fixture.create_repo_group(repogroupid)
369 369
370 370 def _cleanup():
371 371 fixture.destroy_repo_group(repogroupid)
372 372
373 373 request.addfinalizer(_cleanup)
374 374 return repo_group
375 375
376 376
377 377 @pytest.fixture
378 378 def test_user_group(request):
379 379 """
380 380 Create a temporary user group, and destroy it after
381 381 usage automatically
382 382 """
383 383 fixture = Fixture()
384 384 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
385 385 user_group = fixture.create_user_group(usergroupid)
386 386
387 387 def _cleanup():
388 388 fixture.destroy_user_group(user_group)
389 389
390 390 request.addfinalizer(_cleanup)
391 391 return user_group
392 392
393 393
394 394 @pytest.fixture(scope='session')
395 395 def test_repo(request):
396 396 container = TestRepoContainer()
397 397 request.addfinalizer(container._cleanup)
398 398 return container
399 399
400 400
401 401 class TestRepoContainer(object):
402 402 """
403 403 Container for test repositories which are used read only.
404 404
405 405 Repositories will be created on demand and re-used during the lifetime
406 406 of this object.
407 407
408 408 Usage to get the svn test repository "minimal"::
409 409
410 410 test_repo = TestContainer()
411 411 repo = test_repo('minimal', 'svn')
412 412
413 413 """
414 414
415 415 dump_extractors = {
416 416 'git': utils.extract_git_repo_from_dump,
417 417 'hg': utils.extract_hg_repo_from_dump,
418 418 'svn': utils.extract_svn_repo_from_dump,
419 419 }
420 420
421 421 def __init__(self):
422 422 self._cleanup_repos = []
423 423 self._fixture = Fixture()
424 424 self._repos = {}
425 425
426 426 def __call__(self, dump_name, backend_alias, config=None):
427 427 key = (dump_name, backend_alias)
428 428 if key not in self._repos:
429 429 repo = self._create_repo(dump_name, backend_alias, config)
430 430 self._repos[key] = repo.repo_id
431 431 return Repository.get(self._repos[key])
432 432
433 433 def _create_repo(self, dump_name, backend_alias, config):
434 434 repo_name = '%s-%s' % (backend_alias, dump_name)
435 435 backend_class = get_backend(backend_alias)
436 436 dump_extractor = self.dump_extractors[backend_alias]
437 437 repo_path = dump_extractor(dump_name, repo_name)
438 438
439 439 vcs_repo = backend_class(repo_path, config=config)
440 440 repo2db_mapper({repo_name: vcs_repo})
441 441
442 442 repo = RepoModel().get_by_repo_name(repo_name)
443 443 self._cleanup_repos.append(repo_name)
444 444 return repo
445 445
446 446 def _cleanup(self):
447 447 for repo_name in reversed(self._cleanup_repos):
448 448 self._fixture.destroy_repo(repo_name)
449 449
450 450
451 451 def backend_base(request, backend_alias, baseapp, test_repo):
452 452 if backend_alias not in request.config.getoption('--backends'):
453 453 pytest.skip("Backend %s not selected." % (backend_alias, ))
454 454
455 455 utils.check_xfail_backends(request.node, backend_alias)
456 456 utils.check_skip_backends(request.node, backend_alias)
457 457
458 458 repo_name = 'vcs_test_%s' % (backend_alias, )
459 459 backend = Backend(
460 460 alias=backend_alias,
461 461 repo_name=repo_name,
462 462 test_name=request.node.name,
463 463 test_repo_container=test_repo)
464 464 request.addfinalizer(backend.cleanup)
465 465 return backend
466 466
467 467
468 468 @pytest.fixture
469 469 def backend(request, backend_alias, baseapp, test_repo):
470 470 """
471 471 Parametrized fixture which represents a single backend implementation.
472 472
473 473 It respects the option `--backends` to focus the test run on specific
474 474 backend implementations.
475 475
476 476 It also supports `pytest.mark.xfail_backends` to mark tests as failing
477 477 for specific backends. This is intended as a utility for incremental
478 478 development of a new backend implementation.
479 479 """
480 480 return backend_base(request, backend_alias, baseapp, test_repo)
481 481
482 482
483 483 @pytest.fixture
484 484 def backend_git(request, baseapp, test_repo):
485 485 return backend_base(request, 'git', baseapp, test_repo)
486 486
487 487
488 488 @pytest.fixture
489 489 def backend_hg(request, baseapp, test_repo):
490 490 return backend_base(request, 'hg', baseapp, test_repo)
491 491
492 492
493 493 @pytest.fixture
494 494 def backend_svn(request, baseapp, test_repo):
495 495 return backend_base(request, 'svn', baseapp, test_repo)
496 496
497 497
498 498 @pytest.fixture
499 499 def backend_random(backend_git):
500 500 """
501 501 Use this to express that your tests need "a backend.
502 502
503 503 A few of our tests need a backend, so that we can run the code. This
504 504 fixture is intended to be used for such cases. It will pick one of the
505 505 backends and run the tests.
506 506
507 507 The fixture `backend` would run the test multiple times for each
508 508 available backend which is a pure waste of time if the test is
509 509 independent of the backend type.
510 510 """
511 511 # TODO: johbo: Change this to pick a random backend
512 512 return backend_git
513 513
514 514
515 515 @pytest.fixture
516 516 def backend_stub(backend_git):
517 517 """
518 518 Use this to express that your tests need a backend stub
519 519
520 520 TODO: mikhail: Implement a real stub logic instead of returning
521 521 a git backend
522 522 """
523 523 return backend_git
524 524
525 525
526 526 @pytest.fixture
527 527 def repo_stub(backend_stub):
528 528 """
529 529 Use this to express that your tests need a repository stub
530 530 """
531 531 return backend_stub.create_repo()
532 532
533 533
534 534 class Backend(object):
535 535 """
536 536 Represents the test configuration for one supported backend
537 537
538 538 Provides easy access to different test repositories based on
539 539 `__getitem__`. Such repositories will only be created once per test
540 540 session.
541 541 """
542 542
543 543 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
544 544 _master_repo = None
545 545 _commit_ids = {}
546 546
547 547 def __init__(self, alias, repo_name, test_name, test_repo_container):
548 548 self.alias = alias
549 549 self.repo_name = repo_name
550 550 self._cleanup_repos = []
551 551 self._test_name = test_name
552 552 self._test_repo_container = test_repo_container
553 553 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
554 554 # Fixture will survive in the end.
555 555 self._fixture = Fixture()
556 556
557 557 def __getitem__(self, key):
558 558 return self._test_repo_container(key, self.alias)
559 559
560 560 def create_test_repo(self, key, config=None):
561 561 return self._test_repo_container(key, self.alias, config)
562 562
563 563 @property
564 564 def repo(self):
565 565 """
566 566 Returns the "current" repository. This is the vcs_test repo or the
567 567 last repo which has been created with `create_repo`.
568 568 """
569 569 from rhodecode.model.db import Repository
570 570 return Repository.get_by_repo_name(self.repo_name)
571 571
572 572 @property
573 573 def default_branch_name(self):
574 574 VcsRepository = get_backend(self.alias)
575 575 return VcsRepository.DEFAULT_BRANCH_NAME
576 576
577 577 @property
578 578 def default_head_id(self):
579 579 """
580 580 Returns the default head id of the underlying backend.
581 581
582 582 This will be the default branch name in case the backend does have a
583 583 default branch. In the other cases it will point to a valid head
584 584 which can serve as the base to create a new commit on top of it.
585 585 """
586 586 vcsrepo = self.repo.scm_instance()
587 587 head_id = (
588 588 vcsrepo.DEFAULT_BRANCH_NAME or
589 589 vcsrepo.commit_ids[-1])
590 590 return head_id
591 591
592 592 @property
593 593 def commit_ids(self):
594 594 """
595 595 Returns the list of commits for the last created repository
596 596 """
597 597 return self._commit_ids
598 598
599 599 def create_master_repo(self, commits):
600 600 """
601 601 Create a repository and remember it as a template.
602 602
603 603 This allows to easily create derived repositories to construct
604 604 more complex scenarios for diff, compare and pull requests.
605 605
606 606 Returns a commit map which maps from commit message to raw_id.
607 607 """
608 608 self._master_repo = self.create_repo(commits=commits)
609 609 return self._commit_ids
610 610
611 611 def create_repo(
612 612 self, commits=None, number_of_commits=0, heads=None,
613 613 name_suffix=u'', bare=False, **kwargs):
614 614 """
615 615 Create a repository and record it for later cleanup.
616 616
617 617 :param commits: Optional. A sequence of dict instances.
618 618 Will add a commit per entry to the new repository.
619 619 :param number_of_commits: Optional. If set to a number, this number of
620 620 commits will be added to the new repository.
621 621 :param heads: Optional. Can be set to a sequence of of commit
622 622 names which shall be pulled in from the master repository.
623 623 :param name_suffix: adds special suffix to generated repo name
624 624 :param bare: set a repo as bare (no checkout)
625 625 """
626 626 self.repo_name = self._next_repo_name() + name_suffix
627 627 repo = self._fixture.create_repo(
628 628 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
629 629 self._cleanup_repos.append(repo.repo_name)
630 630
631 631 commits = commits or [
632 632 {'message': 'Commit %s of %s' % (x, self.repo_name)}
633 633 for x in range(number_of_commits)]
634 self._add_commits_to_repo(repo.scm_instance(), commits)
634 vcs_repo = repo.scm_instance()
635 vcs_repo.count()
636 self._add_commits_to_repo(vcs_repo, commits)
635 637 if heads:
636 638 self.pull_heads(repo, heads)
637 639
638 640 return repo
639 641
640 642 def pull_heads(self, repo, heads):
641 643 """
642 644 Make sure that repo contains all commits mentioned in `heads`
643 645 """
644 646 vcsmaster = self._master_repo.scm_instance()
645 647 vcsrepo = repo.scm_instance()
646 648 vcsrepo.config.clear_section('hooks')
647 649 commit_ids = [self._commit_ids[h] for h in heads]
648 650 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
649 651
650 652 def create_fork(self):
651 653 repo_to_fork = self.repo_name
652 654 self.repo_name = self._next_repo_name()
653 655 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
654 656 self._cleanup_repos.append(self.repo_name)
655 657 return repo
656 658
657 659 def new_repo_name(self, suffix=u''):
658 660 self.repo_name = self._next_repo_name() + suffix
659 661 self._cleanup_repos.append(self.repo_name)
660 662 return self.repo_name
661 663
662 664 def _next_repo_name(self):
663 665 return u"%s_%s" % (
664 666 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
665 667
666 668 def ensure_file(self, filename, content='Test content\n'):
667 669 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
668 670 commits = [
669 671 {'added': [
670 672 FileNode(filename, content=content),
671 673 ]},
672 674 ]
673 675 self._add_commits_to_repo(self.repo.scm_instance(), commits)
674 676
675 677 def enable_downloads(self):
676 678 repo = self.repo
677 679 repo.enable_downloads = True
678 680 Session().add(repo)
679 681 Session().commit()
680 682
681 683 def cleanup(self):
682 684 for repo_name in reversed(self._cleanup_repos):
683 685 self._fixture.destroy_repo(repo_name)
684 686
685 687 def _add_commits_to_repo(self, repo, commits):
686 688 commit_ids = _add_commits_to_repo(repo, commits)
687 689 if not commit_ids:
688 690 return
689 691 self._commit_ids = commit_ids
690 692
691 693 # Creating refs for Git to allow fetching them from remote repository
692 694 if self.alias == 'git':
693 695 refs = {}
694 696 for message in self._commit_ids:
695 697 # TODO: mikhail: do more special chars replacements
696 698 ref_name = 'refs/test-refs/{}'.format(
697 699 message.replace(' ', ''))
698 700 refs[ref_name] = self._commit_ids[message]
699 701 self._create_refs(repo, refs)
700 702
701 703 def _create_refs(self, repo, refs):
702 704 for ref_name in refs:
703 705 repo.set_refs(ref_name, refs[ref_name])
704 706
705 707
706 708 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
707 709 if backend_alias not in request.config.getoption('--backends'):
708 710 pytest.skip("Backend %s not selected." % (backend_alias, ))
709 711
710 712 utils.check_xfail_backends(request.node, backend_alias)
711 713 utils.check_skip_backends(request.node, backend_alias)
712 714
713 715 repo_name = 'vcs_test_%s' % (backend_alias, )
714 716 repo_path = os.path.join(tests_tmp_path, repo_name)
715 717 backend = VcsBackend(
716 718 alias=backend_alias,
717 719 repo_path=repo_path,
718 720 test_name=request.node.name,
719 721 test_repo_container=test_repo)
720 722 request.addfinalizer(backend.cleanup)
721 723 return backend
722 724
723 725
724 726 @pytest.fixture
725 727 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
726 728 """
727 729 Parametrized fixture which represents a single vcs backend implementation.
728 730
729 731 See the fixture `backend` for more details. This one implements the same
730 732 concept, but on vcs level. So it does not provide model instances etc.
731 733
732 734 Parameters are generated dynamically, see :func:`pytest_generate_tests`
733 735 for how this works.
734 736 """
735 737 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
736 738
737 739
738 740 @pytest.fixture
739 741 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
740 742 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
741 743
742 744
743 745 @pytest.fixture
744 746 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
745 747 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
746 748
747 749
748 750 @pytest.fixture
749 751 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
750 752 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
751 753
752 754
753 755 @pytest.fixture
754 756 def vcsbackend_stub(vcsbackend_git):
755 757 """
756 758 Use this to express that your test just needs a stub of a vcsbackend.
757 759
758 760 Plan is to eventually implement an in-memory stub to speed tests up.
759 761 """
760 762 return vcsbackend_git
761 763
762 764
763 765 class VcsBackend(object):
764 766 """
765 767 Represents the test configuration for one supported vcs backend.
766 768 """
767 769
768 770 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
769 771
770 772 def __init__(self, alias, repo_path, test_name, test_repo_container):
771 773 self.alias = alias
772 774 self._repo_path = repo_path
773 775 self._cleanup_repos = []
774 776 self._test_name = test_name
775 777 self._test_repo_container = test_repo_container
776 778
777 779 def __getitem__(self, key):
778 780 return self._test_repo_container(key, self.alias).scm_instance()
779 781
780 782 @property
781 783 def repo(self):
782 784 """
783 785 Returns the "current" repository. This is the vcs_test repo of the last
784 786 repo which has been created.
785 787 """
786 788 Repository = get_backend(self.alias)
787 789 return Repository(self._repo_path)
788 790
789 791 @property
790 792 def backend(self):
791 793 """
792 794 Returns the backend implementation class.
793 795 """
794 796 return get_backend(self.alias)
795 797
796 798 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
797 799 bare=False):
798 800 repo_name = self._next_repo_name()
799 801 self._repo_path = get_new_dir(repo_name)
800 802 repo_class = get_backend(self.alias)
801 803 src_url = None
802 804 if _clone_repo:
803 805 src_url = _clone_repo.path
804 806 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
805 807 self._cleanup_repos.append(repo)
806 808
807 809 commits = commits or [
808 810 {'message': 'Commit %s of %s' % (x, repo_name)}
809 811 for x in xrange(number_of_commits)]
810 812 _add_commits_to_repo(repo, commits)
811 813 return repo
812 814
813 815 def clone_repo(self, repo):
814 816 return self.create_repo(_clone_repo=repo)
815 817
816 818 def cleanup(self):
817 819 for repo in self._cleanup_repos:
818 820 shutil.rmtree(repo.path)
819 821
820 822 def new_repo_path(self):
821 823 repo_name = self._next_repo_name()
822 824 self._repo_path = get_new_dir(repo_name)
823 825 return self._repo_path
824 826
825 827 def _next_repo_name(self):
826 828 return "%s_%s" % (
827 829 self.invalid_repo_name.sub('_', self._test_name),
828 830 len(self._cleanup_repos))
829 831
830 832 def add_file(self, repo, filename, content='Test content\n'):
831 833 imc = repo.in_memory_commit
832 834 imc.add(FileNode(filename, content=content))
833 835 imc.commit(
834 836 message=u'Automatic commit from vcsbackend fixture',
835 837 author=u'Automatic')
836 838
837 839 def ensure_file(self, filename, content='Test content\n'):
838 840 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
839 841 self.add_file(self.repo, filename, content)
840 842
841 843
842 844 def _add_commits_to_repo(vcs_repo, commits):
843 845 commit_ids = {}
844 846 if not commits:
845 847 return commit_ids
846 848
847 849 imc = vcs_repo.in_memory_commit
848 850 commit = None
849 851
850 852 for idx, commit in enumerate(commits):
851 853 message = unicode(commit.get('message', 'Commit %s' % idx))
852 854
853 855 for node in commit.get('added', []):
854 856 imc.add(FileNode(node.path, content=node.content))
855 857 for node in commit.get('changed', []):
856 858 imc.change(FileNode(node.path, content=node.content))
857 859 for node in commit.get('removed', []):
858 860 imc.remove(FileNode(node.path))
859 861
860 862 parents = [
861 863 vcs_repo.get_commit(commit_id=commit_ids[p])
862 864 for p in commit.get('parents', [])]
863 865
864 866 operations = ('added', 'changed', 'removed')
865 867 if not any((commit.get(o) for o in operations)):
866 868 imc.add(FileNode('file_%s' % idx, content=message))
867 869
868 870 commit = imc.commit(
869 871 message=message,
870 872 author=unicode(commit.get('author', 'Automatic')),
871 873 date=commit.get('date'),
872 874 branch=commit.get('branch'),
873 875 parents=parents)
874 876
875 877 commit_ids[commit.message] = commit.raw_id
876 878
877 879 return commit_ids
878 880
879 881
880 882 @pytest.fixture
881 883 def reposerver(request):
882 884 """
883 885 Allows to serve a backend repository
884 886 """
885 887
886 888 repo_server = RepoServer()
887 889 request.addfinalizer(repo_server.cleanup)
888 890 return repo_server
889 891
890 892
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL under which the served repository is reachable; set by `serve`.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """Start an ``svnserve`` daemon exposing *vcsrepo* on localhost."""
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        arguments = [
            'svnserve', '-d', '--foreground', '--listen-host', 'localhost',
            '--root', vcsrepo.path]
        proc = subprocess32.Popen(arguments)
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate every server process started by this instance."""
        for proc in self._cleanup_servers:
            proc.terminate()
916 918
917 919
@pytest.fixture
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    Yields an instance of :class:`PRTestUtility` whose helpers all operate on
    a single pull request; its cleanup is registered automatically.

    This fixture uses `backend` and inherits its parameterization.
    """
    utility = PRTestUtility(backend)
    request.addfinalizer(utility.cleanup)
    return utility
933 935
934 936
class PRTestUtility(object):
    """
    Helper around a single pull request used by model and functional tests.

    Provided through the `pr_util` fixture. The pull request and its
    source/target repositories are created lazily on the first call to
    `create_pull_request` and torn down again in `cleanup`.
    """

    # The lazily created pull request and its id.
    pull_request = None
    pull_request_id = None
    # Patchers controlling merge availability and notification suppression.
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """
        Create (once) and return the pull request under test.

        Heads and revisions are referenced by commit message. When *commits*
        is not given, a default three-commit history is used. Repeated calls
        return the already created pull request unchanged.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not have any versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Cast an 'approved' status vote for every reviewer."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        """Close the pull request on behalf of its author."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        # Builds a 'branch:<name>:<commit_id>' reference string; the commit
        # is looked up by its message.
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        # Default reviewer tuples; presumably
        # (username, reasons, mandatory, rules) — confirm against
        # PullRequestModel.create.
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
        ]

    def update_source_repository(self, head=None):
        """Pull the given head (default 'c3') into the source repository."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        """Add exactly one commit to the PR and return its commit id."""
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """Strip the tip commit from the source repo and return its id."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general PR comment, optionally linked to a PR version."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Create an inline comment on *file_path* at *line_no*."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create the PR (if needed) and snapshot it into a new version."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set *status* on the pull request for each given reviewer."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """Force the 'PR merge enabled' setting to *value* via a mock."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the pull request (if still present) and stop all patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1117 1119
1118 1120
@pytest.fixture
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1126 1128
1127 1129
@pytest.fixture
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1135 1137
1136 1138
@pytest.fixture
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    util = UserUtility(test_name=request.node.name)
    request.addfinalizer(util.cleanup)
    return util
1145 1147
1146 1148
1147 1149 # TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """
    Factory for users, groups, repositories and permissions in tests.

    Every created entity (unless created with ``auto_cleanup=False``) is
    recorded and destroyed again by `cleanup`, which the `user_util`
    fixture registers as a finalizer.
    """

    def __init__(self, test_name="test"):
        # Entity names are prefixed with the (sanitized) test name so that
        # objects from different tests cannot collide.
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # Id lists of entities / permission pairs to destroy in `cleanup`.
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        # Square brackets appear in parameterized test node names and are
        # not wanted inside entity names.
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a uniquely named repository group owned by *owner*."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        """Create a uniquely named repository of *repo_type*."""
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a uniquely named user; *kwargs* go to the fixture."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        """Attach an additional *email* address to *user*."""
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        """Create a user plus a user group containing that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        """Create a uniquely named user group, optionally with *members*."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        # NOTE(review): this only disables permission inheritance and records
        # the pair for later revocation — the actual grant appears to happen
        # elsewhere; confirm against callers.
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        """Grant *user* a permission on *repo_group*; recorded for cleanup."""
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        """Grant *user_group* a permission on *repo_group*."""
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        """Grant *user* a permission on *repo*; recorded for cleanup."""
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        """Grant *user_group* a permission on *repo*."""
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        """Grant *user* a permission on *target_user_group*."""
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        """Grant *user_group* a permission on *target_user_group*."""
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        """Revoke a global permission and re-enable inheritance."""
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def inherit_default_user_permissions(self, user_name, value):
        """Toggle whether *user_name* inherits the default permissions."""
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        # Order matters: permissions first, then repos before repo groups,
        # and users last since other entities may still reference them.
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        # Python 2 `cmp`-style comparator: deeper paths sort first so nested
        # groups are destroyed before their parents.
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        # Same deepest-first ordering as for repo groups.
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1364 1366
1365 1367
1366 1368 # TODO: Think about moving this into a pytest-pyro package and make it a
1367 1369 # pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adding the remote traceback if the exception has this information.

    VCSServer attaches this information as the attribute
    `_vcs_server_traceback` to the exception instance.
    """
    outcome = yield
    # get_result() is called unconditionally so hook errors propagate as usual.
    report = outcome.get_result()
    excinfo = call.excinfo
    if excinfo:
        _add_vcsserver_remote_traceback(report, excinfo.value)
1380 1382
1381 1383
1382 1384 def _add_vcsserver_remote_traceback(report, exc):
1383 1385 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1384 1386
1385 1387 if vcsserver_traceback:
1386 1388 section = 'VCSServer remote traceback ' + report.when
1387 1389 report.sections.append((section, vcsserver_traceback))
1388 1390
1389 1391
@pytest.fixture(scope='session')
def testrun():
    """Session-wide identification data for this test run."""
    start_time = datetime.datetime.utcnow()
    return {
        'uuid': uuid.uuid4(),
        'start': start_time.isoformat(),
        'timestamp': int(time.time()),
    }
1397 1399
1398 1400
@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends them to Appenlight. The option
    ``--appenlight`` has to be used to enable this fixture and the API key for
    your application has to be provided in ``--appenlight-api-key``.
    """
    try:
        # cygwin cannot have yet psutil support.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the baseapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        # NOTE(review): `getfuncargvalue` is the pre-pytest-3 spelling of
        # `getfixturevalue` — confirm before upgrading pytest.
        baseapp = request.getfuncargvalue("baseapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    # Take "before" measurements of both the VCSServer process and the
    # test process itself.
    server_and_port = baseapp.config.get_settings()['vcs.server']
    protocol = baseapp.config.get_settings()['vcs.server.protocol']
    server = create_vcsserver_proxy(server_and_port, protocol)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

        test_process = psutil.Process()
        mem = test_process.memory_info()
        client.tag_before('test.rss', mem.rss)
        client.tag_before('test.vms', mem.vms)

        client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        # Take the corresponding "after" measurements and ship everything.
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

            mem = test_process.memory_info()
            client.tag_after('test.rss', mem.rss)
            client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client
1472 1474
1473 1475
class AppenlightClient():
    """
    Small client collecting test-run statistics and pushing them to an
    Appenlight server in one batch via :meth:`send_stats`.
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        """Record a measurement taken before the test ran."""
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        """Record a measurement taken after the test ran."""
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one log entry, filling in the configured default fields."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """Post all queued entries; raise if the server rejects them."""
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for tag, before_value in self.tags_before.items():
            tags.append((tag + '.before', before_value))
            try:
                # Only tags measured on both sides produce a delta.
                delta = self.tags_after[tag] - before_value
                tags.append((tag + '.delta', delta))
            except Exception:
                pass
        for tag, after_value in self.tags_after.items():
            tags.append((tag + '.after', after_value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1543 1545
1544 1546
@pytest.fixture
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    util = GistUtility()
    request.addfinalizer(util.cleanup)
    return util
1553 1555
1554 1556
class GistUtility(object):
    """Creates gists for a test and destroys them again afterwards."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist via the fixture and register it for cleanup."""
        gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(gist.gist_id)
        return gist

    def cleanup(self):
        """Destroy every gist created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1568 1570
1569 1571
@pytest.fixture
def enabled_backends(request):
    """Copy of the backend names enabled via the ``--backends`` option."""
    return list(request.config.option.backends)
1574 1576
1575 1577
@pytest.fixture
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    util = SettingsUtility()
    request.addfinalizer(util.cleanup)
    return util
1584 1586
1585 1587
class SettingsUtility(object):
    """
    Creates global and per-repository settings / ui entries for tests and
    deletes them again in `cleanup` (registered by the `settings_util`
    fixture).
    """

    def __init__(self):
        # Ids of created rows, per table, scheduled for deletion in cleanup.
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Create a per-repository ui entry; key defaults to a sha1 digest."""
        # NOTE(review): hashing a str works on Python 2 only; Python 3
        # requires bytes here.
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Create a global ui entry; key defaults to a sha1 digest."""
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Create a per-repository application setting."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Create a global application setting."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete every recorded setting/ui row in a single transaction."""
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()
1666 1668
1667 1669
@pytest.fixture
def no_notifications(request):
    """Suppress notification creation for the duration of a test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1674 1676
1675 1677
@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    repetitions = request.config.getoption('--repeat')
    return repetitions
1685 1687
1686 1688
@pytest.fixture
def rhodecode_fixtures():
    """Provides a fresh `Fixture` helper instance."""
    return Fixture()
1690 1692
1691 1693
@pytest.fixture
def context_stub():
    """
    Stub context object.
    """
    return pyramid.testing.DummyResource()
1699 1701
1700 1702
@pytest.fixture
def request_stub():
    """
    Stub request object.
    """
    # Imported locally to avoid importing the application at collection time.
    from rhodecode.lib.base import bootstrap_request
    return bootstrap_request(scheme='https')
1709 1711
1710 1712
@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    # Imported locally to avoid importing the application at collection time.
    from rhodecode.lib.base import bootstrap_config
    config = bootstrap_config(request=request_stub)

    # Tear the pyramid testing setup down again when the test is over.
    request.addfinalizer(pyramid.testing.tearDown)

    return config
1724 1726
1725 1727
@pytest.fixture
def StubIntegrationType():
    """
    Defines, registers and returns a minimal integration type used by the
    integration stub fixtures below.
    """
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            # Events are only recorded so tests can assert on them.
            self.sent_events.append(event)

        def settings_schema(self):
            # Schema matching the `stub_integration_settings` fixture.
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()


    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1762 1764
@pytest.fixture
def stub_integration_settings():
    """Valid settings matching the stub integration type's schema."""
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }
1769 1771
1770 1772
@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Repository-scoped stub integration, deleted again after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1784 1786
1785 1787
@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Integration on a repo group covering direct child repos only."""
    integration = IntegrationModel().create(
        StubIntegrationType,
        settings=stub_integration_settings,
        enabled=True,
        name='test repogroup integration',
        repo=None,
        repo_group=test_repo_group,
        child_repos_only=True)

    def _delete_integration():
        IntegrationModel().delete(integration)
    request.addfinalizer(_delete_integration)

    return integration
1799 1801
1800 1802
@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
        StubIntegrationType, stub_integration_settings):
    """Integration on a repo group applying recursively to all descendants."""
    integration = IntegrationModel().create(
        StubIntegrationType,
        settings=stub_integration_settings,
        enabled=True,
        name='test recursive repogroup integration',
        repo=None,
        repo_group=test_repo_group,
        child_repos_only=False)

    def _delete_integration():
        IntegrationModel().delete(integration)
    request.addfinalizer(_delete_integration)

    return integration
1814 1816
1815 1817
@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Global integration (no repo/group scope); removed on teardown."""
    integration = IntegrationModel().create(
        StubIntegrationType,
        settings=stub_integration_settings,
        enabled=True,
        name='test global integration',
        repo=None,
        repo_group=None,
        child_repos_only=None)

    def _delete_integration():
        IntegrationModel().delete(integration)
    request.addfinalizer(_delete_integration)

    return integration
1829 1831
1830 1832
@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """
    Integration limited to root-level repositories (no repo or group bound,
    ``child_repos_only=True``); deleted when the requesting test finishes.
    """
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        # NOTE(review): renamed from 'test global integration', which was a
        # copy-paste from global_integration_stub and made the two stub
        # integrations indistinguishable by name.
        name='test root repos integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration
1844 1846
1845 1847
@pytest.fixture
def local_dt_to_utc():
    """Return a converter mapping a naive local datetime to a naive UTC one."""
    def _convert(dt):
        # Attach the local timezone, shift to UTC, then strip tzinfo again
        # so the result compares cleanly against other naive datetimes.
        localized = dt.replace(tzinfo=dateutil.tz.tzlocal())
        return localized.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
    return _convert
1852 1854
1853 1855
@pytest.fixture
def disable_anonymous_user(request, baseapp):
    """Turn off anonymous access for the duration of the requesting test."""
    set_anonymous_access(False)

    def _restore_anonymous_access():
        set_anonymous_access(True)
    request.addfinalizer(_restore_anonymous_access)
1861 1863
1862 1864
@pytest.fixture(scope='module')
def rc_fixture(request):
    """Module-scoped helper ``Fixture`` instance shared across the tests."""
    fixture = Fixture()
    return fixture
1866 1868
1867 1869
@pytest.fixture
def repo_groups(request):
    """
    Create a small repo-group hierarchy — 'zombie', 'parent' and
    'parent/child' — and return the three groups; all are destroyed on
    teardown, child before parent.
    """
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')

    # Sanity-check the created hierarchy before handing it to the test.
    all_groups = session.query(RepoGroup).all()
    assert len(all_groups) == 3
    assert child_group.group_parent_id == parent_group.group_id

    def _destroy_groups():
        fixture.destroy_repo_group(zombie_group)
        # Child must be removed before its parent group.
        fixture.destroy_repo_group(child_group)
        fixture.destroy_repo_group(parent_group)
    request.addfinalizer(_destroy_groups)

    return zombie_group, parent_group, child_group
General Comments 0
You need to be logged in to leave comments. Login now