Show More
@@ -1,4400 +1,4402 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Database Models for RhodeCode Enterprise |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import re |
|
26 | 26 | import os |
|
27 | 27 | import time |
|
28 | 28 | import hashlib |
|
29 | 29 | import logging |
|
30 | 30 | import datetime |
|
31 | 31 | import warnings |
|
32 | 32 | import ipaddress |
|
33 | 33 | import functools |
|
34 | 34 | import traceback |
|
35 | 35 | import collections |
|
36 | 36 | |
|
37 | 37 | from sqlalchemy import ( |
|
38 | 38 | or_, and_, not_, func, TypeDecorator, event, |
|
39 | 39 | Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column, |
|
40 | 40 | Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary, |
|
41 | 41 | Text, Float, PickleType) |
|
42 | 42 | from sqlalchemy.sql.expression import true, false |
|
43 | 43 | from sqlalchemy.sql.functions import coalesce, count # noqa |
|
44 | 44 | from sqlalchemy.orm import ( |
|
45 | 45 | relationship, joinedload, class_mapper, validates, aliased) |
|
46 | 46 | from sqlalchemy.ext.declarative import declared_attr |
|
47 | 47 | from sqlalchemy.ext.hybrid import hybrid_property |
|
48 | 48 | from sqlalchemy.exc import IntegrityError # noqa |
|
49 | 49 | from sqlalchemy.dialects.mysql import LONGTEXT |
|
50 | 50 | from beaker.cache import cache_region |
|
51 | 51 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
52 | 52 | |
|
53 | 53 | from pyramid.threadlocal import get_current_request |
|
54 | 54 | |
|
55 | 55 | from rhodecode.translation import _ |
|
56 | 56 | from rhodecode.lib.vcs import get_vcs_instance |
|
57 | 57 | from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference |
|
58 | 58 | from rhodecode.lib.utils2 import ( |
|
59 | 59 | str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe, |
|
60 | 60 | time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict, |
|
61 | 61 | glob2re, StrictAttributeDict, cleaned_uri) |
|
62 | 62 | from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \ |
|
63 | 63 | JsonRaw |
|
64 | 64 | from rhodecode.lib.ext_json import json |
|
65 | 65 | from rhodecode.lib.caching_query import FromCache |
|
66 | 66 | from rhodecode.lib.encrypt import AESCipher |
|
67 | 67 | |
|
68 | 68 | from rhodecode.model.meta import Base, Session |
|
69 | 69 | |
|
URL_SEP = '/'
log = logging.getLogger(__name__)

# =============================================================================
# BASE CLASSES
# =============================================================================

# this is propagated from .ini file rhodecode.encrypted_values.secret or
# beaker.session.secret if first is not set.
# and initialized at environment.py
# NOTE: stays None until app startup; EncryptedTextValue relies on it.
ENCRYPTION_KEY = None

# used to sort permissions by types, '#' used here is not allowed to be in
# usernames, and it's very early in sorted string.printable table.
# Longer prefixes sort first, so admin < write < read < none lexicographically.
PERMISSION_TYPE_SORT = {
    'admin': '####',
    'write': '###',
    'read': '##',
    'none': '#',
}
|
90 | 90 | |
|
91 | 91 | |
|
def display_user_sort(obj):
    """
    Key function for ordering permission entries returned by the
    ``.permissions()`` methods of Repository, RepoGroup and UserGroup.
    The default user always sorts in front of every other principal.
    """
    if obj.username == User.DEFAULT_USER:
        return '#####'
    perm_level = obj.permission.split('.')[-1]
    sort_prefix = PERMISSION_TYPE_SORT.get(perm_level, '')
    return sort_prefix + obj.username
|
103 | 103 | |
|
104 | 104 | |
|
def display_user_group_sort(obj):
    """
    Key function for ordering user-group permission entries returned by the
    ``.permissions()`` methods of Repository, RepoGroup and UserGroup.
    """
    perm_level = obj.permission.split('.')[-1]
    sort_prefix = PERMISSION_TYPE_SORT.get(perm_level, '')
    return sort_prefix + obj.users_group_name
|
114 | 114 | |
|
115 | 115 | |
|
def _hash_key(k):
    # normalize arbitrary cache keys to a safe md5 hex digest
    return md5_safe(k)
|
118 | 118 | |
|
119 | 119 | |
|
def in_filter_generator(qry, items, limit=500):
    """
    Splits a large IN() clause into multiple smaller IN() clauses meant to
    be combined with OR, to stay under database placeholder limits. e.g.::

        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()

    :param qry: a column-like object exposing ``.in_()``
    :param items: values to split into chunks of ``limit``
    :param limit: maximum number of values per IN() clause
    """
    if not items:
        # empty list will cause empty query which might cause security issues
        # this can lead to hidden unpleasant results
        items = [-1]

    return [
        qry.in_(items[start:start + limit])
        for start in range(0, len(items), limit)
    ]
|
141 | 141 | |
|
142 | 142 | |
|
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        # Called on the way INTO the database: always store the
        # HMAC-verified 'enc$aes_hmac$<data>' form.
        if not value:
            return value
        if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
            # protect against double encrypting if someone manually starts
            # doing
            raise ValueError('value needs to be in unencrypted format, ie. '
                             'not starting with enc$aes')
        # ENCRYPTION_KEY is a module global set during app startup
        return 'enc$aes_hmac$%s' % AESCipher(
            ENCRYPTION_KEY, hmac=True).encrypt(value)

    def process_result_value(self, value, dialect):
        # Called on the way OUT of the database: decrypt values carrying the
        # 'enc$<algo>$<data>' header; anything else passes through unchanged.
        import rhodecode

        if not value:
            return value

        # maxsplit=3 keeps any '$' inside the payload in parts[2]
        parts = value.split('$', 3)
        if not len(parts) == 3:
            # probably not encrypted values
            return value
        else:
            if parts[0] != 'enc':
                # parts ok but without our header ?
                return value
            # NOTE(review): because of `or True`, a falsy config value falls
            # back to strict mode — strict is effectively the default.
            enc_strict_mode = str2bool(rhodecode.CONFIG.get(
                'rhodecode.encrypted_values.strict') or True)
            # at that stage we know it's our encryption
            if parts[1] == 'aes':
                # legacy format without HMAC verification
                decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
            elif parts[1] == 'aes_hmac':
                decrypted_data = AESCipher(
                    ENCRYPTION_KEY, hmac=True,
                    strict_verification=enc_strict_mode).decrypt(parts[2])
            else:
                raise ValueError(
                    'Encryption type part is wrong, must be `aes` '
                    'or `aes_hmac`, got `%s` instead' % (parts[1]))
            return decrypted_data
|
193 | 193 | |
|
194 | 194 | |
|
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        # only column names are considered; extra keys are ignored
        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        # fresh query bound to the scoped Session
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        # returns None for a falsy id_ (no query issued)
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Like ``get``, but raises HTTPNotFound for bad or missing ids."""
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        # NOTE: only marks the object as deleted in the session;
        # the caller is responsible for committing
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """
        Find an instance of ``cls`` already present in the session identity
        map whose ``attr_name`` equals ``value``, avoiding an extra SELECT.
        Returns None when not found or when the match is ambiguous.
        """
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            # ambiguous match: log loudly, fall through to returning None
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
|
299 | 299 | |
|
300 | 300 | |
|
class RhodeCodeSetting(Base, BaseModel):
    """
    Global application setting stored as a name/value/type triple.

    ``app_settings_type`` selects the converter used when reading the raw
    stored string back into a Python value (see ``SETTINGS_TYPES``); a
    ``.encrypted`` suffix means the value is stored via EncryptedTextValue.
    """
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    # converters applied when reading values back out of the database
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        # order matters: type must be set before value so the value setter
        # can consult it for encryption
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values must already be unicode when they reach the column
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        # convert the raw stored string into the declared Python type
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # strip optional '.encrypted' suffix to get the base type
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # only the base type (before optional '.encrypted' suffix) is checked
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
|
380 | 380 | |
|
381 | 381 | |
|
class RhodeCodeUi(Base, BaseModel):
    """
    Global VCS configuration entry, one row per ``[section] key = value``
    item (hg/git/svn ``.ini``-style settings and hook registrations).
    """
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        # fixed unbalanced bracket in the format string: the old
        # '<%s[%s]%s=>%s]>' rendered a stray trailing ']'
        return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.ui_section,
                                   self.ui_key, self.ui_value)
|
421 | 421 | |
|
422 | 422 | |
|
class RepoRhodeCodeSetting(Base, BaseModel):
    """
    Per-repository application setting; repository-scoped counterpart of
    ``RhodeCodeSetting``. Unlike the global variant, values here are not
    encrypted and the type may not carry an ``.encrypted`` suffix.
    """
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        # order matters: type must be set before value
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values must already be unicode when they reach the column
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        # convert the raw stored string into the declared Python type,
        # reusing the global converter table
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # NOTE: exact membership check — no '.encrypted' suffix allowed here,
        # unlike the global RhodeCodeSetting variant
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
|
497 | 497 | |
|
498 | 498 | |
|
class RepoRhodeCodeUi(Base, BaseModel):
    """
    Per-repository VCS configuration entry; repository-scoped counterpart
    of ``RhodeCodeUi``, one row per ``[section] key = value`` item.
    """
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        # fixed unbalanced bracket in the format string: the old
        # '<%s[%s:%s]%s=>%s]>' rendered a stray trailing ']'
        return '<%s[%s:%s]%s=>%s>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
|
530 | 530 | |
|
531 | 531 | |
|
532 | 532 | class User(Base, BaseModel): |
|
533 | 533 | __tablename__ = 'users' |
|
534 | 534 | __table_args__ = ( |
|
535 | 535 | UniqueConstraint('username'), UniqueConstraint('email'), |
|
536 | 536 | Index('u_username_idx', 'username'), |
|
537 | 537 | Index('u_email_idx', 'email'), |
|
538 | 538 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
539 | 539 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
540 | 540 | ) |
|
541 | 541 | DEFAULT_USER = 'default' |
|
542 | 542 | DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org' |
|
543 | 543 | DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}' |
|
544 | 544 | |
|
545 | 545 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
546 | 546 | username = Column("username", String(255), nullable=True, unique=None, default=None) |
|
547 | 547 | password = Column("password", String(255), nullable=True, unique=None, default=None) |
|
548 | 548 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) |
|
549 | 549 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) |
|
550 | 550 | name = Column("firstname", String(255), nullable=True, unique=None, default=None) |
|
551 | 551 | lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) |
|
552 | 552 | _email = Column("email", String(255), nullable=True, unique=None, default=None) |
|
553 | 553 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
554 | 554 | last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
555 | 555 | |
|
556 | 556 | extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None) |
|
557 | 557 | extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None) |
|
558 | 558 | _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) |
|
559 | 559 | inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) |
|
560 | 560 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
561 | 561 | _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data |
|
562 | 562 | |
|
563 | 563 | user_log = relationship('UserLog') |
|
564 | 564 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') |
|
565 | 565 | |
|
566 | 566 | repositories = relationship('Repository') |
|
567 | 567 | repository_groups = relationship('RepoGroup') |
|
568 | 568 | user_groups = relationship('UserGroup') |
|
569 | 569 | |
|
570 | 570 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') |
|
571 | 571 | followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') |
|
572 | 572 | |
|
573 | 573 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') |
|
574 | 574 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all') |
|
575 | 575 | user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all') |
|
576 | 576 | |
|
577 | 577 | group_member = relationship('UserGroupMember', cascade='all') |
|
578 | 578 | |
|
579 | 579 | notifications = relationship('UserNotification', cascade='all') |
|
580 | 580 | # notifications assigned to this user |
|
581 | 581 | user_created_notifications = relationship('Notification', cascade='all') |
|
582 | 582 | # comments created by this user |
|
583 | 583 | user_comments = relationship('ChangesetComment', cascade='all') |
|
584 | 584 | # user profile extra info |
|
585 | 585 | user_emails = relationship('UserEmailMap', cascade='all') |
|
586 | 586 | user_ip_map = relationship('UserIpMap', cascade='all') |
|
587 | 587 | user_auth_tokens = relationship('UserApiKeys', cascade='all') |
|
588 | 588 | user_ssh_keys = relationship('UserSshKeys', cascade='all') |
|
589 | 589 | |
|
590 | 590 | # gists |
|
591 | 591 | user_gists = relationship('Gist', cascade='all') |
|
592 | 592 | # user pull requests |
|
593 | 593 | user_pull_requests = relationship('PullRequest', cascade='all') |
|
594 | 594 | # external identities |
|
595 | 595 | extenal_identities = relationship( |
|
596 | 596 | 'ExternalIdentity', |
|
597 | 597 | primaryjoin="User.user_id==ExternalIdentity.local_user_id", |
|
598 | 598 | cascade='all') |
|
599 | 599 | # review rules |
|
600 | 600 | user_review_rules = relationship('RepoReviewRuleUser', cascade='all') |
|
601 | 601 | |
|
602 | 602 | def __unicode__(self): |
|
603 | 603 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, |
|
604 | 604 | self.user_id, self.username) |
|
605 | 605 | |
|
606 | 606 | @hybrid_property |
|
607 | 607 | def email(self): |
|
608 | 608 | return self._email |
|
609 | 609 | |
|
610 | 610 | @email.setter |
|
611 | 611 | def email(self, val): |
|
612 | 612 | self._email = val.lower() if val else None |
|
613 | 613 | |
|
    @hybrid_property
    def first_name(self):
        # HTML-escaped variant of ``name`` for safe template rendering;
        # falls through to the raw (empty/None) value when unset
        from rhodecode.lib import helpers as h
        if self.name:
            return h.escape(self.name)
        return self.name
|
620 | 620 | |
|
    @hybrid_property
    def last_name(self):
        # HTML-escaped variant of ``lastname`` for safe template rendering;
        # falls through to the raw (empty/None) value when unset
        from rhodecode.lib import helpers as h
        if self.lastname:
            return h.escape(self.lastname)
        return self.lastname
|
627 | 627 | |
|
    @hybrid_property
    def api_key(self):
        """
        Fetch if exist an auth-token with role ALL connected to this user
        """
        # only non-expired tokens qualify; expires == -1 means "never expires"
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        return user_auth_token

    @api_key.setter
    def api_key(self, val):
        # don't allow to set API key this is deprecated for now
        self._api_key = None
|
647 | 647 | |
|
    @property
    def reviewer_pull_requests(self):
        # pull-request reviewer rows for this user, with the pull_request
        # relation eagerly loaded to avoid N+1 queries
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()
|
654 | 654 | |
|
    @property
    def firstname(self):
        # alias for future
        # (maps the legacy 'firstname' column attribute ``name``)
        return self.name
|
659 | 659 | |
|
    @property
    def emails(self):
        # primary address first, then extra addresses from UserEmailMap
        # in insertion (email_id) order
        other = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc()) \
            .all()
        return [self.email] + [x.email for x in other]
|
667 | 667 | |
|
668 | 668 | @property |
|
669 | 669 | def auth_tokens(self): |
|
670 | 670 | auth_tokens = self.get_auth_tokens() |
|
671 | 671 | return [x.api_key for x in auth_tokens] |
|
672 | 672 | |
|
    def get_auth_tokens(self):
        # all UserApiKeys rows belonging to this user, oldest first
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()
|
678 | 678 | |
|
    @LazyProperty
    def feed_token(self):
        # computed once per instance (LazyProperty caches the result)
        return self.get_feed_token()
|
682 | 682 | |
|
683 | 683 | def get_feed_token(self, cache=True): |
|
684 | 684 | feed_tokens = UserApiKeys.query()\ |
|
685 | 685 | .filter(UserApiKeys.user == self)\ |
|
686 | 686 | .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED) |
|
687 | 687 | if cache: |
|
688 | 688 | feed_tokens = feed_tokens.options( |
|
689 | 689 | FromCache("long_term", "get_user_feed_token_%s" % self.user_id)) |
|
690 | 690 | |
|
691 | 691 | feed_tokens = feed_tokens.all() |
|
692 | 692 | if feed_tokens: |
|
693 | 693 | return feed_tokens[0].api_key |
|
694 | 694 | return 'NO_FEED_TOKEN_AVAILABLE' |
|
695 | 695 | |
|
    @classmethod
    def get(cls, user_id, cache=False):
        """
        Fetch a user by primary key; returns None for a falsy ``user_id``
        without issuing a query.

        :param cache: use the "sql_cache_short" cache region for the lookup
        """
        if not user_id:
            return

        user = cls.query()
        if cache:
            user = user.options(
                FromCache("sql_cache_short", "get_users_%s" % user_id))
        return user.get(user_id)
|
706 | 706 | |
|
707 | 707 | @classmethod |
|
708 | 708 | def extra_valid_auth_tokens(cls, user, role=None): |
|
709 | 709 | tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\ |
|
710 | 710 | .filter(or_(UserApiKeys.expires == -1, |
|
711 | 711 | UserApiKeys.expires >= time.time())) |
|
712 | 712 | if role: |
|
713 | 713 | tokens = tokens.filter(or_(UserApiKeys.role == role, |
|
714 | 714 | UserApiKeys.role == UserApiKeys.ROLE_ALL)) |
|
715 | 715 | return tokens.all() |
|
716 | 716 | |
|
717 | 717 | def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None): |
|
718 | 718 | from rhodecode.lib import auth |
|
719 | 719 | |
|
720 | 720 | log.debug('Trying to authenticate user: %s via auth-token, ' |
|
721 | 721 | 'and roles: %s', self, roles) |
|
722 | 722 | |
|
723 | 723 | if not auth_token: |
|
724 | 724 | return False |
|
725 | 725 | |
|
726 | 726 | crypto_backend = auth.crypto_backend() |
|
727 | 727 | |
|
728 | 728 | roles = (roles or []) + [UserApiKeys.ROLE_ALL] |
|
729 | 729 | tokens_q = UserApiKeys.query()\ |
|
730 | 730 | .filter(UserApiKeys.user_id == self.user_id)\ |
|
731 | 731 | .filter(or_(UserApiKeys.expires == -1, |
|
732 | 732 | UserApiKeys.expires >= time.time())) |
|
733 | 733 | |
|
734 | 734 | tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles)) |
|
735 | 735 | |
|
736 | 736 | plain_tokens = [] |
|
737 | 737 | hash_tokens = [] |
|
738 | 738 | |
|
739 | 739 | for token in tokens_q.all(): |
|
740 | 740 | # verify scope first |
|
741 | 741 | if token.repo_id: |
|
742 | 742 | # token has a scope, we need to verify it |
|
743 | 743 | if scope_repo_id != token.repo_id: |
|
744 | 744 | log.debug( |
|
745 | 745 | 'Scope mismatch: token has a set repo scope: %s, ' |
|
746 | 746 | 'and calling scope is:%s, skipping further checks', |
|
747 | 747 | token.repo, scope_repo_id) |
|
748 | 748 | # token has a scope, and it doesn't match, skip token |
|
749 | 749 | continue |
|
750 | 750 | |
|
751 | 751 | if token.api_key.startswith(crypto_backend.ENC_PREF): |
|
752 | 752 | hash_tokens.append(token.api_key) |
|
753 | 753 | else: |
|
754 | 754 | plain_tokens.append(token.api_key) |
|
755 | 755 | |
|
756 | 756 | is_plain_match = auth_token in plain_tokens |
|
757 | 757 | if is_plain_match: |
|
758 | 758 | return True |
|
759 | 759 | |
|
760 | 760 | for hashed in hash_tokens: |
|
761 | 761 | # TODO(marcink): this is expensive to calculate, but most secure |
|
762 | 762 | match = crypto_backend.hash_check(auth_token, hashed) |
|
763 | 763 | if match: |
|
764 | 764 | return True |
|
765 | 765 | |
|
766 | 766 | return False |
|
767 | 767 | |
|
768 | 768 | @property |
|
769 | 769 | def ip_addresses(self): |
|
770 | 770 | ret = UserIpMap.query().filter(UserIpMap.user == self).all() |
|
771 | 771 | return [x.ip_addr for x in ret] |
|
772 | 772 | |
|
773 | 773 | @property |
|
774 | 774 | def username_and_name(self): |
|
775 | 775 | return '%s (%s %s)' % (self.username, self.first_name, self.last_name) |
|
776 | 776 | |
|
777 | 777 | @property |
|
778 | 778 | def username_or_name_or_email(self): |
|
779 | 779 | full_name = self.full_name if self.full_name is not ' ' else None |
|
780 | 780 | return self.username or full_name or self.email |
|
781 | 781 | |
|
782 | 782 | @property |
|
783 | 783 | def full_name(self): |
|
784 | 784 | return '%s %s' % (self.first_name, self.last_name) |
|
785 | 785 | |
|
786 | 786 | @property |
|
787 | 787 | def full_name_or_username(self): |
|
788 | 788 | return ('%s %s' % (self.first_name, self.last_name) |
|
789 | 789 | if (self.first_name and self.last_name) else self.username) |
|
790 | 790 | |
|
791 | 791 | @property |
|
792 | 792 | def full_contact(self): |
|
793 | 793 | return '%s %s <%s>' % (self.first_name, self.last_name, self.email) |
|
794 | 794 | |
|
795 | 795 | @property |
|
796 | 796 | def short_contact(self): |
|
797 | 797 | return '%s %s' % (self.first_name, self.last_name) |
|
798 | 798 | |
|
799 | 799 | @property |
|
800 | 800 | def is_admin(self): |
|
801 | 801 | return self.admin |
|
802 | 802 | |
|
803 | 803 | def AuthUser(self, **kwargs): |
|
804 | 804 | """ |
|
805 | 805 | Returns instance of AuthUser for this user |
|
806 | 806 | """ |
|
807 | 807 | from rhodecode.lib.auth import AuthUser |
|
808 | 808 | return AuthUser(user_id=self.user_id, username=self.username, **kwargs) |
|
809 | 809 | |
|
810 | 810 | @hybrid_property |
|
811 | 811 | def user_data(self): |
|
812 | 812 | if not self._user_data: |
|
813 | 813 | return {} |
|
814 | 814 | |
|
815 | 815 | try: |
|
816 | 816 | return json.loads(self._user_data) |
|
817 | 817 | except TypeError: |
|
818 | 818 | return {} |
|
819 | 819 | |
|
820 | 820 | @user_data.setter |
|
821 | 821 | def user_data(self, val): |
|
822 | 822 | if not isinstance(val, dict): |
|
823 | 823 | raise Exception('user_data must be dict, got %s' % type(val)) |
|
824 | 824 | try: |
|
825 | 825 | self._user_data = json.dumps(val) |
|
826 | 826 | except Exception: |
|
827 | 827 | log.error(traceback.format_exc()) |
|
828 | 828 | |
|
829 | 829 | @classmethod |
|
830 | 830 | def get_by_username(cls, username, case_insensitive=False, |
|
831 | 831 | cache=False, identity_cache=False): |
|
832 | 832 | session = Session() |
|
833 | 833 | |
|
834 | 834 | if case_insensitive: |
|
835 | 835 | q = cls.query().filter( |
|
836 | 836 | func.lower(cls.username) == func.lower(username)) |
|
837 | 837 | else: |
|
838 | 838 | q = cls.query().filter(cls.username == username) |
|
839 | 839 | |
|
840 | 840 | if cache: |
|
841 | 841 | if identity_cache: |
|
842 | 842 | val = cls.identity_cache(session, 'username', username) |
|
843 | 843 | if val: |
|
844 | 844 | return val |
|
845 | 845 | else: |
|
846 | 846 | cache_key = "get_user_by_name_%s" % _hash_key(username) |
|
847 | 847 | q = q.options( |
|
848 | 848 | FromCache("sql_cache_short", cache_key)) |
|
849 | 849 | |
|
850 | 850 | return q.scalar() |
|
851 | 851 | |
|
852 | 852 | @classmethod |
|
853 | 853 | def get_by_auth_token(cls, auth_token, cache=False): |
|
854 | 854 | q = UserApiKeys.query()\ |
|
855 | 855 | .filter(UserApiKeys.api_key == auth_token)\ |
|
856 | 856 | .filter(or_(UserApiKeys.expires == -1, |
|
857 | 857 | UserApiKeys.expires >= time.time())) |
|
858 | 858 | if cache: |
|
859 | 859 | q = q.options( |
|
860 | 860 | FromCache("sql_cache_short", "get_auth_token_%s" % auth_token)) |
|
861 | 861 | |
|
862 | 862 | match = q.first() |
|
863 | 863 | if match: |
|
864 | 864 | return match.user |
|
865 | 865 | |
|
866 | 866 | @classmethod |
|
867 | 867 | def get_by_email(cls, email, case_insensitive=False, cache=False): |
|
868 | 868 | |
|
869 | 869 | if case_insensitive: |
|
870 | 870 | q = cls.query().filter(func.lower(cls.email) == func.lower(email)) |
|
871 | 871 | |
|
872 | 872 | else: |
|
873 | 873 | q = cls.query().filter(cls.email == email) |
|
874 | 874 | |
|
875 | 875 | email_key = _hash_key(email) |
|
876 | 876 | if cache: |
|
877 | 877 | q = q.options( |
|
878 | 878 | FromCache("sql_cache_short", "get_email_key_%s" % email_key)) |
|
879 | 879 | |
|
880 | 880 | ret = q.scalar() |
|
881 | 881 | if ret is None: |
|
882 | 882 | q = UserEmailMap.query() |
|
883 | 883 | # try fetching in alternate email map |
|
884 | 884 | if case_insensitive: |
|
885 | 885 | q = q.filter(func.lower(UserEmailMap.email) == func.lower(email)) |
|
886 | 886 | else: |
|
887 | 887 | q = q.filter(UserEmailMap.email == email) |
|
888 | 888 | q = q.options(joinedload(UserEmailMap.user)) |
|
889 | 889 | if cache: |
|
890 | 890 | q = q.options( |
|
891 | 891 | FromCache("sql_cache_short", "get_email_map_key_%s" % email_key)) |
|
892 | 892 | ret = getattr(q.scalar(), 'user', None) |
|
893 | 893 | |
|
894 | 894 | return ret |
|
895 | 895 | |
|
896 | 896 | @classmethod |
|
897 | 897 | def get_from_cs_author(cls, author): |
|
898 | 898 | """ |
|
899 | 899 | Tries to get User objects out of commit author string |
|
900 | 900 | |
|
901 | 901 | :param author: |
|
902 | 902 | """ |
|
903 | 903 | from rhodecode.lib.helpers import email, author_name |
|
904 | 904 | # Valid email in the attribute passed, see if they're in the system |
|
905 | 905 | _email = email(author) |
|
906 | 906 | if _email: |
|
907 | 907 | user = cls.get_by_email(_email, case_insensitive=True) |
|
908 | 908 | if user: |
|
909 | 909 | return user |
|
910 | 910 | # Maybe we can match by username? |
|
911 | 911 | _author = author_name(author) |
|
912 | 912 | user = cls.get_by_username(_author, case_insensitive=True) |
|
913 | 913 | if user: |
|
914 | 914 | return user |
|
915 | 915 | |
|
916 | 916 | def update_userdata(self, **kwargs): |
|
917 | 917 | usr = self |
|
918 | 918 | old = usr.user_data |
|
919 | 919 | old.update(**kwargs) |
|
920 | 920 | usr.user_data = old |
|
921 | 921 | Session().add(usr) |
|
922 | 922 | log.debug('updated userdata with ', kwargs) |
|
923 | 923 | |
|
924 | 924 | def update_lastlogin(self): |
|
925 | 925 | """Update user lastlogin""" |
|
926 | 926 | self.last_login = datetime.datetime.now() |
|
927 | 927 | Session().add(self) |
|
928 | 928 | log.debug('updated user %s lastlogin', self.username) |
|
929 | 929 | |
|
930 | 930 | def update_lastactivity(self): |
|
931 | 931 | """Update user lastactivity""" |
|
932 | 932 | self.last_activity = datetime.datetime.now() |
|
933 | 933 | Session().add(self) |
|
934 | 934 | log.debug('updated user `%s` last activity', self.username) |
|
935 | 935 | |
|
936 | 936 | def update_password(self, new_password): |
|
937 | 937 | from rhodecode.lib.auth import get_crypt_password |
|
938 | 938 | |
|
939 | 939 | self.password = get_crypt_password(new_password) |
|
940 | 940 | Session().add(self) |
|
941 | 941 | |
|
942 | 942 | @classmethod |
|
943 | 943 | def get_first_super_admin(cls): |
|
944 | 944 | user = User.query().filter(User.admin == true()).first() |
|
945 | 945 | if user is None: |
|
946 | 946 | raise Exception('FATAL: Missing administrative account!') |
|
947 | 947 | return user |
|
948 | 948 | |
|
949 | 949 | @classmethod |
|
950 | 950 | def get_all_super_admins(cls): |
|
951 | 951 | """ |
|
952 | 952 | Returns all admin accounts sorted by username |
|
953 | 953 | """ |
|
954 | 954 | return User.query().filter(User.admin == true())\ |
|
955 | 955 | .order_by(User.username.asc()).all() |
|
956 | 956 | |
|
957 | 957 | @classmethod |
|
958 | 958 | def get_default_user(cls, cache=False, refresh=False): |
|
959 | 959 | user = User.get_by_username(User.DEFAULT_USER, cache=cache) |
|
960 | 960 | if user is None: |
|
961 | 961 | raise Exception('FATAL: Missing default account!') |
|
962 | 962 | if refresh: |
|
963 | 963 | # The default user might be based on outdated state which |
|
964 | 964 | # has been loaded from the cache. |
|
965 | 965 | # A call to refresh() ensures that the |
|
966 | 966 | # latest state from the database is used. |
|
967 | 967 | Session().refresh(user) |
|
968 | 968 | return user |
|
969 | 969 | |
|
970 | 970 | def _get_default_perms(self, user, suffix=''): |
|
971 | 971 | from rhodecode.model.permission import PermissionModel |
|
972 | 972 | return PermissionModel().get_default_perms(user.user_perms, suffix) |
|
973 | 973 | |
|
974 | 974 | def get_default_perms(self, suffix=''): |
|
975 | 975 | return self._get_default_perms(self, suffix) |
|
976 | 976 | |
|
977 | 977 | def get_api_data(self, include_secrets=False, details='full'): |
|
978 | 978 | """ |
|
979 | 979 | Common function for generating user related data for API |
|
980 | 980 | |
|
981 | 981 | :param include_secrets: By default secrets in the API data will be replaced |
|
982 | 982 | by a placeholder value to prevent exposing this data by accident. In case |
|
983 | 983 | this data shall be exposed, set this flag to ``True``. |
|
984 | 984 | |
|
985 | 985 | :param details: details can be 'basic|full' basic gives only a subset of |
|
986 | 986 | the available user information that includes user_id, name and emails. |
|
987 | 987 | """ |
|
988 | 988 | user = self |
|
989 | 989 | user_data = self.user_data |
|
990 | 990 | data = { |
|
991 | 991 | 'user_id': user.user_id, |
|
992 | 992 | 'username': user.username, |
|
993 | 993 | 'firstname': user.name, |
|
994 | 994 | 'lastname': user.lastname, |
|
995 | 995 | 'email': user.email, |
|
996 | 996 | 'emails': user.emails, |
|
997 | 997 | } |
|
998 | 998 | if details == 'basic': |
|
999 | 999 | return data |
|
1000 | 1000 | |
|
1001 | 1001 | auth_token_length = 40 |
|
1002 | 1002 | auth_token_replacement = '*' * auth_token_length |
|
1003 | 1003 | |
|
1004 | 1004 | extras = { |
|
1005 | 1005 | 'auth_tokens': [auth_token_replacement], |
|
1006 | 1006 | 'active': user.active, |
|
1007 | 1007 | 'admin': user.admin, |
|
1008 | 1008 | 'extern_type': user.extern_type, |
|
1009 | 1009 | 'extern_name': user.extern_name, |
|
1010 | 1010 | 'last_login': user.last_login, |
|
1011 | 1011 | 'last_activity': user.last_activity, |
|
1012 | 1012 | 'ip_addresses': user.ip_addresses, |
|
1013 | 1013 | 'language': user_data.get('language') |
|
1014 | 1014 | } |
|
1015 | 1015 | data.update(extras) |
|
1016 | 1016 | |
|
1017 | 1017 | if include_secrets: |
|
1018 | 1018 | data['auth_tokens'] = user.auth_tokens |
|
1019 | 1019 | return data |
|
1020 | 1020 | |
|
1021 | 1021 | def __json__(self): |
|
1022 | 1022 | data = { |
|
1023 | 1023 | 'full_name': self.full_name, |
|
1024 | 1024 | 'full_name_or_username': self.full_name_or_username, |
|
1025 | 1025 | 'short_contact': self.short_contact, |
|
1026 | 1026 | 'full_contact': self.full_contact, |
|
1027 | 1027 | } |
|
1028 | 1028 | data.update(self.get_api_data()) |
|
1029 | 1029 | return data |
|
1030 | 1030 | |
|
1031 | 1031 | |
|
1032 | 1032 | class UserApiKeys(Base, BaseModel): |
|
1033 | 1033 | __tablename__ = 'user_api_keys' |
|
1034 | 1034 | __table_args__ = ( |
|
1035 | 1035 | Index('uak_api_key_idx', 'api_key', unique=True), |
|
1036 | 1036 | Index('uak_api_key_expires_idx', 'api_key', 'expires'), |
|
1037 | 1037 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1038 | 1038 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
1039 | 1039 | ) |
|
1040 | 1040 | __mapper_args__ = {} |
|
1041 | 1041 | |
|
1042 | 1042 | # ApiKey role |
|
1043 | 1043 | ROLE_ALL = 'token_role_all' |
|
1044 | 1044 | ROLE_HTTP = 'token_role_http' |
|
1045 | 1045 | ROLE_VCS = 'token_role_vcs' |
|
1046 | 1046 | ROLE_API = 'token_role_api' |
|
1047 | 1047 | ROLE_FEED = 'token_role_feed' |
|
1048 | 1048 | ROLE_PASSWORD_RESET = 'token_password_reset' |
|
1049 | 1049 | |
|
1050 | 1050 | ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED] |
|
1051 | 1051 | |
|
1052 | 1052 | user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1053 | 1053 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
1054 | 1054 | api_key = Column("api_key", String(255), nullable=False, unique=True) |
|
1055 | 1055 | description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) |
|
1056 | 1056 | expires = Column('expires', Float(53), nullable=False) |
|
1057 | 1057 | role = Column('role', String(255), nullable=True) |
|
1058 | 1058 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1059 | 1059 | |
|
1060 | 1060 | # scope columns |
|
1061 | 1061 | repo_id = Column( |
|
1062 | 1062 | 'repo_id', Integer(), ForeignKey('repositories.repo_id'), |
|
1063 | 1063 | nullable=True, unique=None, default=None) |
|
1064 | 1064 | repo = relationship('Repository', lazy='joined') |
|
1065 | 1065 | |
|
1066 | 1066 | repo_group_id = Column( |
|
1067 | 1067 | 'repo_group_id', Integer(), ForeignKey('groups.group_id'), |
|
1068 | 1068 | nullable=True, unique=None, default=None) |
|
1069 | 1069 | repo_group = relationship('RepoGroup', lazy='joined') |
|
1070 | 1070 | |
|
1071 | 1071 | user = relationship('User', lazy='joined') |
|
1072 | 1072 | |
|
1073 | 1073 | def __unicode__(self): |
|
1074 | 1074 | return u"<%s('%s')>" % (self.__class__.__name__, self.role) |
|
1075 | 1075 | |
|
1076 | 1076 | def __json__(self): |
|
1077 | 1077 | data = { |
|
1078 | 1078 | 'auth_token': self.api_key, |
|
1079 | 1079 | 'role': self.role, |
|
1080 | 1080 | 'scope': self.scope_humanized, |
|
1081 | 1081 | 'expired': self.expired |
|
1082 | 1082 | } |
|
1083 | 1083 | return data |
|
1084 | 1084 | |
|
1085 | 1085 | def get_api_data(self, include_secrets=False): |
|
1086 | 1086 | data = self.__json__() |
|
1087 | 1087 | if include_secrets: |
|
1088 | 1088 | return data |
|
1089 | 1089 | else: |
|
1090 | 1090 | data['auth_token'] = self.token_obfuscated |
|
1091 | 1091 | return data |
|
1092 | 1092 | |
|
1093 | 1093 | @hybrid_property |
|
1094 | 1094 | def description_safe(self): |
|
1095 | 1095 | from rhodecode.lib import helpers as h |
|
1096 | 1096 | return h.escape(self.description) |
|
1097 | 1097 | |
|
1098 | 1098 | @property |
|
1099 | 1099 | def expired(self): |
|
1100 | 1100 | if self.expires == -1: |
|
1101 | 1101 | return False |
|
1102 | 1102 | return time.time() > self.expires |
|
1103 | 1103 | |
|
1104 | 1104 | @classmethod |
|
1105 | 1105 | def _get_role_name(cls, role): |
|
1106 | 1106 | return { |
|
1107 | 1107 | cls.ROLE_ALL: _('all'), |
|
1108 | 1108 | cls.ROLE_HTTP: _('http/web interface'), |
|
1109 | 1109 | cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'), |
|
1110 | 1110 | cls.ROLE_API: _('api calls'), |
|
1111 | 1111 | cls.ROLE_FEED: _('feed access'), |
|
1112 | 1112 | }.get(role, role) |
|
1113 | 1113 | |
|
1114 | 1114 | @property |
|
1115 | 1115 | def role_humanized(self): |
|
1116 | 1116 | return self._get_role_name(self.role) |
|
1117 | 1117 | |
|
1118 | 1118 | def _get_scope(self): |
|
1119 | 1119 | if self.repo: |
|
1120 | 1120 | return repr(self.repo) |
|
1121 | 1121 | if self.repo_group: |
|
1122 | 1122 | return repr(self.repo_group) + ' (recursive)' |
|
1123 | 1123 | return 'global' |
|
1124 | 1124 | |
|
1125 | 1125 | @property |
|
1126 | 1126 | def scope_humanized(self): |
|
1127 | 1127 | return self._get_scope() |
|
1128 | 1128 | |
|
1129 | 1129 | @property |
|
1130 | 1130 | def token_obfuscated(self): |
|
1131 | 1131 | if self.api_key: |
|
1132 | 1132 | return self.api_key[:4] + "****" |
|
1133 | 1133 | |
|
1134 | 1134 | |
|
1135 | 1135 | class UserEmailMap(Base, BaseModel): |
|
1136 | 1136 | __tablename__ = 'user_email_map' |
|
1137 | 1137 | __table_args__ = ( |
|
1138 | 1138 | Index('uem_email_idx', 'email'), |
|
1139 | 1139 | UniqueConstraint('email'), |
|
1140 | 1140 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1141 | 1141 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
1142 | 1142 | ) |
|
1143 | 1143 | __mapper_args__ = {} |
|
1144 | 1144 | |
|
1145 | 1145 | email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1146 | 1146 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
1147 | 1147 | _email = Column("email", String(255), nullable=True, unique=False, default=None) |
|
1148 | 1148 | user = relationship('User', lazy='joined') |
|
1149 | 1149 | |
|
1150 | 1150 | @validates('_email') |
|
1151 | 1151 | def validate_email(self, key, email): |
|
1152 | 1152 | # check if this email is not main one |
|
1153 | 1153 | main_email = Session().query(User).filter(User.email == email).scalar() |
|
1154 | 1154 | if main_email is not None: |
|
1155 | 1155 | raise AttributeError('email %s is present is user table' % email) |
|
1156 | 1156 | return email |
|
1157 | 1157 | |
|
1158 | 1158 | @hybrid_property |
|
1159 | 1159 | def email(self): |
|
1160 | 1160 | return self._email |
|
1161 | 1161 | |
|
1162 | 1162 | @email.setter |
|
1163 | 1163 | def email(self, val): |
|
1164 | 1164 | self._email = val.lower() if val else None |
|
1165 | 1165 | |
|
1166 | 1166 | |
|
1167 | 1167 | class UserIpMap(Base, BaseModel): |
|
1168 | 1168 | __tablename__ = 'user_ip_map' |
|
1169 | 1169 | __table_args__ = ( |
|
1170 | 1170 | UniqueConstraint('user_id', 'ip_addr'), |
|
1171 | 1171 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1172 | 1172 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
1173 | 1173 | ) |
|
1174 | 1174 | __mapper_args__ = {} |
|
1175 | 1175 | |
|
1176 | 1176 | ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1177 | 1177 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
1178 | 1178 | ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None) |
|
1179 | 1179 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) |
|
1180 | 1180 | description = Column("description", String(10000), nullable=True, unique=None, default=None) |
|
1181 | 1181 | user = relationship('User', lazy='joined') |
|
1182 | 1182 | |
|
1183 | 1183 | @hybrid_property |
|
1184 | 1184 | def description_safe(self): |
|
1185 | 1185 | from rhodecode.lib import helpers as h |
|
1186 | 1186 | return h.escape(self.description) |
|
1187 | 1187 | |
|
1188 | 1188 | @classmethod |
|
1189 | 1189 | def _get_ip_range(cls, ip_addr): |
|
1190 | 1190 | net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False) |
|
1191 | 1191 | return [str(net.network_address), str(net.broadcast_address)] |
|
1192 | 1192 | |
|
1193 | 1193 | def __json__(self): |
|
1194 | 1194 | return { |
|
1195 | 1195 | 'ip_addr': self.ip_addr, |
|
1196 | 1196 | 'ip_range': self._get_ip_range(self.ip_addr), |
|
1197 | 1197 | } |
|
1198 | 1198 | |
|
1199 | 1199 | def __unicode__(self): |
|
1200 | 1200 | return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__, |
|
1201 | 1201 | self.user_id, self.ip_addr) |
|
1202 | 1202 | |
|
1203 | 1203 | |
|
1204 | 1204 | class UserSshKeys(Base, BaseModel): |
|
1205 | 1205 | __tablename__ = 'user_ssh_keys' |
|
1206 | 1206 | __table_args__ = ( |
|
1207 | 1207 | Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'), |
|
1208 | 1208 | |
|
1209 | 1209 | UniqueConstraint('ssh_key_fingerprint'), |
|
1210 | 1210 | |
|
1211 | 1211 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1212 | 1212 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
1213 | 1213 | ) |
|
1214 | 1214 | __mapper_args__ = {} |
|
1215 | 1215 | |
|
1216 | 1216 | ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1217 | 1217 | ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None) |
|
1218 | 1218 | ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None) |
|
1219 | 1219 | |
|
1220 | 1220 | description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) |
|
1221 | 1221 | |
|
1222 | 1222 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1223 | 1223 | accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None) |
|
1224 | 1224 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
1225 | 1225 | |
|
1226 | 1226 | user = relationship('User', lazy='joined') |
|
1227 | 1227 | |
|
1228 | 1228 | def __json__(self): |
|
1229 | 1229 | data = { |
|
1230 | 1230 | 'ssh_fingerprint': self.ssh_key_fingerprint, |
|
1231 | 1231 | 'description': self.description, |
|
1232 | 1232 | 'created_on': self.created_on |
|
1233 | 1233 | } |
|
1234 | 1234 | return data |
|
1235 | 1235 | |
|
1236 | 1236 | def get_api_data(self): |
|
1237 | 1237 | data = self.__json__() |
|
1238 | 1238 | return data |
|
1239 | 1239 | |
|
1240 | 1240 | |
|
1241 | 1241 | class UserLog(Base, BaseModel): |
|
1242 | 1242 | __tablename__ = 'user_logs' |
|
1243 | 1243 | __table_args__ = ( |
|
1244 | 1244 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1245 | 1245 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
1246 | 1246 | ) |
|
1247 | 1247 | VERSION_1 = 'v1' |
|
1248 | 1248 | VERSION_2 = 'v2' |
|
1249 | 1249 | VERSIONS = [VERSION_1, VERSION_2] |
|
1250 | 1250 | |
|
1251 | 1251 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1252 | 1252 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None) |
|
1253 | 1253 | username = Column("username", String(255), nullable=True, unique=None, default=None) |
|
1254 | 1254 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None) |
|
1255 | 1255 | repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None) |
|
1256 | 1256 | user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None) |
|
1257 | 1257 | action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None) |
|
1258 | 1258 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
1259 | 1259 | |
|
1260 | 1260 | version = Column("version", String(255), nullable=True, default=VERSION_1) |
|
1261 | 1261 | user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT())))) |
|
1262 | 1262 | action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT())))) |
|
1263 | 1263 | |
|
1264 | 1264 | def __unicode__(self): |
|
1265 | 1265 | return u"<%s('id:%s:%s')>" % ( |
|
1266 | 1266 | self.__class__.__name__, self.repository_name, self.action) |
|
1267 | 1267 | |
|
1268 | 1268 | def __json__(self): |
|
1269 | 1269 | return { |
|
1270 | 1270 | 'user_id': self.user_id, |
|
1271 | 1271 | 'username': self.username, |
|
1272 | 1272 | 'repository_id': self.repository_id, |
|
1273 | 1273 | 'repository_name': self.repository_name, |
|
1274 | 1274 | 'user_ip': self.user_ip, |
|
1275 | 1275 | 'action_date': self.action_date, |
|
1276 | 1276 | 'action': self.action, |
|
1277 | 1277 | } |
|
1278 | 1278 | |
|
1279 | 1279 | @hybrid_property |
|
1280 | 1280 | def entry_id(self): |
|
1281 | 1281 | return self.user_log_id |
|
1282 | 1282 | |
|
1283 | 1283 | @property |
|
1284 | 1284 | def action_as_day(self): |
|
1285 | 1285 | return datetime.date(*self.action_date.timetuple()[:3]) |
|
1286 | 1286 | |
|
1287 | 1287 | user = relationship('User') |
|
1288 | 1288 | repository = relationship('Repository', cascade='') |
|
1289 | 1289 | |
|
1290 | 1290 | |
|
1291 | 1291 | class UserGroup(Base, BaseModel): |
|
1292 | 1292 | __tablename__ = 'users_groups' |
|
1293 | 1293 | __table_args__ = ( |
|
1294 | 1294 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1295 | 1295 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
1296 | 1296 | ) |
|
1297 | 1297 | |
|
1298 | 1298 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1299 | 1299 | users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None) |
|
1300 | 1300 | user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None) |
|
1301 | 1301 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) |
|
1302 | 1302 | inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) |
|
1303 | 1303 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) |
|
1304 | 1304 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1305 | 1305 | _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data |
|
1306 | 1306 | |
|
1307 | 1307 | members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined") |
|
1308 | 1308 | users_group_to_perm = relationship('UserGroupToPerm', cascade='all') |
|
1309 | 1309 | users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
1310 | 1310 | users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') |
|
1311 | 1311 | user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all') |
|
1312 | 1312 | user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all') |
|
1313 | 1313 | |
|
1314 | 1314 | user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all') |
|
1315 | 1315 | user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id") |
|
1316 | 1316 | |
|
1317 | 1317 | @classmethod |
|
1318 | 1318 | def _load_group_data(cls, column): |
|
1319 | 1319 | if not column: |
|
1320 | 1320 | return {} |
|
1321 | 1321 | |
|
1322 | 1322 | try: |
|
1323 | 1323 | return json.loads(column) or {} |
|
1324 | 1324 | except TypeError: |
|
1325 | 1325 | return {} |
|
1326 | 1326 | |
|
1327 | 1327 | @hybrid_property |
|
1328 | 1328 | def description_safe(self): |
|
1329 | 1329 | from rhodecode.lib import helpers as h |
|
1330 | 1330 | return h.escape(self.description) |
|
1331 | 1331 | |
|
1332 | 1332 | @hybrid_property |
|
1333 | 1333 | def group_data(self): |
|
1334 | 1334 | return self._load_group_data(self._group_data) |
|
1335 | 1335 | |
|
1336 | 1336 | @group_data.expression |
|
1337 | 1337 | def group_data(self, **kwargs): |
|
1338 | 1338 | return self._group_data |
|
1339 | 1339 | |
|
1340 | 1340 | @group_data.setter |
|
1341 | 1341 | def group_data(self, val): |
|
1342 | 1342 | try: |
|
1343 | 1343 | self._group_data = json.dumps(val) |
|
1344 | 1344 | except Exception: |
|
1345 | 1345 | log.error(traceback.format_exc()) |
|
1346 | 1346 | |
|
1347 | 1347 | def __unicode__(self): |
|
1348 | 1348 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, |
|
1349 | 1349 | self.users_group_id, |
|
1350 | 1350 | self.users_group_name) |
|
1351 | 1351 | |
|
1352 | 1352 | @classmethod |
|
1353 | 1353 | def get_by_group_name(cls, group_name, cache=False, |
|
1354 | 1354 | case_insensitive=False): |
|
1355 | 1355 | if case_insensitive: |
|
1356 | 1356 | q = cls.query().filter(func.lower(cls.users_group_name) == |
|
1357 | 1357 | func.lower(group_name)) |
|
1358 | 1358 | |
|
1359 | 1359 | else: |
|
1360 | 1360 | q = cls.query().filter(cls.users_group_name == group_name) |
|
1361 | 1361 | if cache: |
|
1362 | 1362 | q = q.options( |
|
1363 | 1363 | FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name))) |
|
1364 | 1364 | return q.scalar() |
|
1365 | 1365 | |
|
1366 | 1366 | @classmethod |
|
1367 | 1367 | def get(cls, user_group_id, cache=False): |
|
1368 | 1368 | if not user_group_id: |
|
1369 | 1369 | return |
|
1370 | 1370 | |
|
1371 | 1371 | user_group = cls.query() |
|
1372 | 1372 | if cache: |
|
1373 | 1373 | user_group = user_group.options( |
|
1374 | 1374 | FromCache("sql_cache_short", "get_users_group_%s" % user_group_id)) |
|
1375 | 1375 | return user_group.get(user_group_id) |
|
1376 | 1376 | |
|
1377 | 1377 | def permissions(self, with_admins=True, with_owner=True): |
|
1378 | 1378 | q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self) |
|
1379 | 1379 | q = q.options(joinedload(UserUserGroupToPerm.user_group), |
|
1380 | 1380 | joinedload(UserUserGroupToPerm.user), |
|
1381 | 1381 | joinedload(UserUserGroupToPerm.permission),) |
|
1382 | 1382 | |
|
1383 | 1383 | # get owners and admins and permissions. We do a trick of re-writing |
|
1384 | 1384 | # objects from sqlalchemy to named-tuples due to sqlalchemy session |
|
1385 | 1385 | # has a global reference and changing one object propagates to all |
|
1386 | 1386 | # others. This means if admin is also an owner admin_row that change |
|
1387 | 1387 | # would propagate to both objects |
|
1388 | 1388 | perm_rows = [] |
|
1389 | 1389 | for _usr in q.all(): |
|
1390 | 1390 | usr = AttributeDict(_usr.user.get_dict()) |
|
1391 | 1391 | usr.permission = _usr.permission.permission_name |
|
1392 | 1392 | perm_rows.append(usr) |
|
1393 | 1393 | |
|
1394 | 1394 | # filter the perm rows by 'default' first and then sort them by |
|
1395 | 1395 | # admin,write,read,none permissions sorted again alphabetically in |
|
1396 | 1396 | # each group |
|
1397 | 1397 | perm_rows = sorted(perm_rows, key=display_user_sort) |
|
1398 | 1398 | |
|
1399 | 1399 | _admin_perm = 'usergroup.admin' |
|
1400 | 1400 | owner_row = [] |
|
1401 | 1401 | if with_owner: |
|
1402 | 1402 | usr = AttributeDict(self.user.get_dict()) |
|
1403 | 1403 | usr.owner_row = True |
|
1404 | 1404 | usr.permission = _admin_perm |
|
1405 | 1405 | owner_row.append(usr) |
|
1406 | 1406 | |
|
1407 | 1407 | super_admin_rows = [] |
|
1408 | 1408 | if with_admins: |
|
1409 | 1409 | for usr in User.get_all_super_admins(): |
|
1410 | 1410 | # if this admin is also owner, don't double the record |
|
1411 | 1411 | if usr.user_id == owner_row[0].user_id: |
|
1412 | 1412 | owner_row[0].admin_row = True |
|
1413 | 1413 | else: |
|
1414 | 1414 | usr = AttributeDict(usr.get_dict()) |
|
1415 | 1415 | usr.admin_row = True |
|
1416 | 1416 | usr.permission = _admin_perm |
|
1417 | 1417 | super_admin_rows.append(usr) |
|
1418 | 1418 | |
|
1419 | 1419 | return super_admin_rows + owner_row + perm_rows |
|
1420 | 1420 | |
|
1421 | 1421 | def permission_user_groups(self): |
|
1422 | 1422 | q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self) |
|
1423 | 1423 | q = q.options(joinedload(UserGroupUserGroupToPerm.user_group), |
|
1424 | 1424 | joinedload(UserGroupUserGroupToPerm.target_user_group), |
|
1425 | 1425 | joinedload(UserGroupUserGroupToPerm.permission),) |
|
1426 | 1426 | |
|
1427 | 1427 | perm_rows = [] |
|
1428 | 1428 | for _user_group in q.all(): |
|
1429 | 1429 | usr = AttributeDict(_user_group.user_group.get_dict()) |
|
1430 | 1430 | usr.permission = _user_group.permission.permission_name |
|
1431 | 1431 | perm_rows.append(usr) |
|
1432 | 1432 | |
|
1433 | 1433 | perm_rows = sorted(perm_rows, key=display_user_group_sort) |
|
1434 | 1434 | return perm_rows |
|
1435 | 1435 | |
|
1436 | 1436 | def _get_default_perms(self, user_group, suffix=''): |
|
1437 | 1437 | from rhodecode.model.permission import PermissionModel |
|
1438 | 1438 | return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix) |
|
1439 | 1439 | |
|
1440 | 1440 | def get_default_perms(self, suffix=''): |
|
1441 | 1441 | return self._get_default_perms(self, suffix) |
|
1442 | 1442 | |
|
1443 | 1443 | def get_api_data(self, with_group_members=True, include_secrets=False): |
|
1444 | 1444 | """ |
|
1445 | 1445 | :param include_secrets: See :meth:`User.get_api_data`, this parameter is |
|
1446 | 1446 | basically forwarded. |
|
1447 | 1447 | |
|
1448 | 1448 | """ |
|
1449 | 1449 | user_group = self |
|
1450 | 1450 | data = { |
|
1451 | 1451 | 'users_group_id': user_group.users_group_id, |
|
1452 | 1452 | 'group_name': user_group.users_group_name, |
|
1453 | 1453 | 'group_description': user_group.user_group_description, |
|
1454 | 1454 | 'active': user_group.users_group_active, |
|
1455 | 1455 | 'owner': user_group.user.username, |
|
1456 | 1456 | 'owner_email': user_group.user.email, |
|
1457 | 1457 | } |
|
1458 | 1458 | |
|
1459 | 1459 | if with_group_members: |
|
1460 | 1460 | users = [] |
|
1461 | 1461 | for user in user_group.members: |
|
1462 | 1462 | user = user.user |
|
1463 | 1463 | users.append(user.get_api_data(include_secrets=include_secrets)) |
|
1464 | 1464 | data['users'] = users |
|
1465 | 1465 | |
|
1466 | 1466 | return data |
|
1467 | 1467 | |
|
1468 | 1468 | |
|
class UserGroupMember(Base, BaseModel):
    """Association row linking one user to one user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # surrogate primary key of the membership row
    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # group side of the association
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    # user side of the association
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # NOTE(review): defaults are empty strings, not None - callers appear
        # to always pass both ids; confirm before relying on the defaults
        self.users_group_id = gr_id
        self.user_id = u_id
|
1486 | 1486 | |
|
1487 | 1487 | |
|
class RepositoryField(Base, BaseModel):
    """Custom extra field (key/label/value/description) attached to a repository."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """Form-facing key with the PREFIX applied."""
        # CONSISTENCY FIX: use the PREFIX constant instead of a hard-coded
        # 'ex_' so this stays in sync with un_prefix_key()
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form PREFIX from ``key`` if present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for ``key`` on ``repo``, or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
|
1524 | 1524 | |
|
1525 | 1525 | |
|
1526 | 1526 | class Repository(Base, BaseModel): |
|
1527 | 1527 | __tablename__ = 'repositories' |
|
1528 | 1528 | __table_args__ = ( |
|
1529 | 1529 | Index('r_repo_name_idx', 'repo_name', mysql_length=255), |
|
1530 | 1530 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1531 | 1531 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
1532 | 1532 | ) |
|
1533 | 1533 | DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}' |
|
1534 | 1534 | DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}' |
|
1535 | 1535 | |
|
1536 | 1536 | STATE_CREATED = 'repo_state_created' |
|
1537 | 1537 | STATE_PENDING = 'repo_state_pending' |
|
1538 | 1538 | STATE_ERROR = 'repo_state_error' |
|
1539 | 1539 | |
|
1540 | 1540 | LOCK_AUTOMATIC = 'lock_auto' |
|
1541 | 1541 | LOCK_API = 'lock_api' |
|
1542 | 1542 | LOCK_WEB = 'lock_web' |
|
1543 | 1543 | LOCK_PULL = 'lock_pull' |
|
1544 | 1544 | |
|
1545 | 1545 | NAME_SEP = URL_SEP |
|
1546 | 1546 | |
|
1547 | 1547 | repo_id = Column( |
|
1548 | 1548 | "repo_id", Integer(), nullable=False, unique=True, default=None, |
|
1549 | 1549 | primary_key=True) |
|
1550 | 1550 | _repo_name = Column( |
|
1551 | 1551 | "repo_name", Text(), nullable=False, default=None) |
|
1552 | 1552 | _repo_name_hash = Column( |
|
1553 | 1553 | "repo_name_hash", String(255), nullable=False, unique=True) |
|
1554 | 1554 | repo_state = Column("repo_state", String(255), nullable=True) |
|
1555 | 1555 | |
|
1556 | 1556 | clone_uri = Column( |
|
1557 | 1557 | "clone_uri", EncryptedTextValue(), nullable=True, unique=False, |
|
1558 | 1558 | default=None) |
|
1559 | 1559 | repo_type = Column( |
|
1560 | 1560 | "repo_type", String(255), nullable=False, unique=False, default=None) |
|
1561 | 1561 | user_id = Column( |
|
1562 | 1562 | "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, |
|
1563 | 1563 | unique=False, default=None) |
|
1564 | 1564 | private = Column( |
|
1565 | 1565 | "private", Boolean(), nullable=True, unique=None, default=None) |
|
1566 | 1566 | enable_statistics = Column( |
|
1567 | 1567 | "statistics", Boolean(), nullable=True, unique=None, default=True) |
|
1568 | 1568 | enable_downloads = Column( |
|
1569 | 1569 | "downloads", Boolean(), nullable=True, unique=None, default=True) |
|
1570 | 1570 | description = Column( |
|
1571 | 1571 | "description", String(10000), nullable=True, unique=None, default=None) |
|
1572 | 1572 | created_on = Column( |
|
1573 | 1573 | 'created_on', DateTime(timezone=False), nullable=True, unique=None, |
|
1574 | 1574 | default=datetime.datetime.now) |
|
1575 | 1575 | updated_on = Column( |
|
1576 | 1576 | 'updated_on', DateTime(timezone=False), nullable=True, unique=None, |
|
1577 | 1577 | default=datetime.datetime.now) |
|
1578 | 1578 | _landing_revision = Column( |
|
1579 | 1579 | "landing_revision", String(255), nullable=False, unique=False, |
|
1580 | 1580 | default=None) |
|
1581 | 1581 | enable_locking = Column( |
|
1582 | 1582 | "enable_locking", Boolean(), nullable=False, unique=None, |
|
1583 | 1583 | default=False) |
|
1584 | 1584 | _locked = Column( |
|
1585 | 1585 | "locked", String(255), nullable=True, unique=False, default=None) |
|
1586 | 1586 | _changeset_cache = Column( |
|
1587 | 1587 | "changeset_cache", LargeBinary(), nullable=True) # JSON data |
|
1588 | 1588 | |
|
1589 | 1589 | fork_id = Column( |
|
1590 | 1590 | "fork_id", Integer(), ForeignKey('repositories.repo_id'), |
|
1591 | 1591 | nullable=True, unique=False, default=None) |
|
1592 | 1592 | group_id = Column( |
|
1593 | 1593 | "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, |
|
1594 | 1594 | unique=False, default=None) |
|
1595 | 1595 | |
|
1596 | 1596 | user = relationship('User', lazy='joined') |
|
1597 | 1597 | fork = relationship('Repository', remote_side=repo_id, lazy='joined') |
|
1598 | 1598 | group = relationship('RepoGroup', lazy='joined') |
|
1599 | 1599 | repo_to_perm = relationship( |
|
1600 | 1600 | 'UserRepoToPerm', cascade='all', |
|
1601 | 1601 | order_by='UserRepoToPerm.repo_to_perm_id') |
|
1602 | 1602 | users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
1603 | 1603 | stats = relationship('Statistics', cascade='all', uselist=False) |
|
1604 | 1604 | |
|
1605 | 1605 | followers = relationship( |
|
1606 | 1606 | 'UserFollowing', |
|
1607 | 1607 | primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', |
|
1608 | 1608 | cascade='all') |
|
1609 | 1609 | extra_fields = relationship( |
|
1610 | 1610 | 'RepositoryField', cascade="all, delete, delete-orphan") |
|
1611 | 1611 | logs = relationship('UserLog') |
|
1612 | 1612 | comments = relationship( |
|
1613 | 1613 | 'ChangesetComment', cascade="all, delete, delete-orphan") |
|
1614 | 1614 | pull_requests_source = relationship( |
|
1615 | 1615 | 'PullRequest', |
|
1616 | 1616 | primaryjoin='PullRequest.source_repo_id==Repository.repo_id', |
|
1617 | 1617 | cascade="all, delete, delete-orphan") |
|
1618 | 1618 | pull_requests_target = relationship( |
|
1619 | 1619 | 'PullRequest', |
|
1620 | 1620 | primaryjoin='PullRequest.target_repo_id==Repository.repo_id', |
|
1621 | 1621 | cascade="all, delete, delete-orphan") |
|
1622 | 1622 | ui = relationship('RepoRhodeCodeUi', cascade="all") |
|
1623 | 1623 | settings = relationship('RepoRhodeCodeSetting', cascade="all") |
|
1624 | 1624 | integrations = relationship('Integration', |
|
1625 | 1625 | cascade="all, delete, delete-orphan") |
|
1626 | 1626 | |
|
1627 | scoped_tokens = relationship('UserApiKeys', cascade="all") | |
|
1628 | ||
|
1627 | 1629 | def __unicode__(self): |
|
1628 | 1630 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, |
|
1629 | 1631 | safe_unicode(self.repo_name)) |
|
1630 | 1632 | |
|
1631 | 1633 | @hybrid_property |
|
1632 | 1634 | def description_safe(self): |
|
1633 | 1635 | from rhodecode.lib import helpers as h |
|
1634 | 1636 | return h.escape(self.description) |
|
1635 | 1637 | |
|
1636 | 1638 | @hybrid_property |
|
1637 | 1639 | def landing_rev(self): |
|
1638 | 1640 | # always should return [rev_type, rev] |
|
1639 | 1641 | if self._landing_revision: |
|
1640 | 1642 | _rev_info = self._landing_revision.split(':') |
|
1641 | 1643 | if len(_rev_info) < 2: |
|
1642 | 1644 | _rev_info.insert(0, 'rev') |
|
1643 | 1645 | return [_rev_info[0], _rev_info[1]] |
|
1644 | 1646 | return [None, None] |
|
1645 | 1647 | |
|
    @landing_rev.setter
    def landing_rev(self, val):
        # store the landing revision; the value must carry an explicit
        # rev type, i.e. look like '<rev_type>:<rev>'
        if ':' not in val:
            raise ValueError('value must be delimited with `:` and consist '
                             'of <rev_type>:<rev>, got %s instead' % val)
        self._landing_revision = val
|
1652 | 1654 | |
|
1653 | 1655 | @hybrid_property |
|
1654 | 1656 | def locked(self): |
|
1655 | 1657 | if self._locked: |
|
1656 | 1658 | user_id, timelocked, reason = self._locked.split(':') |
|
1657 | 1659 | lock_values = int(user_id), timelocked, reason |
|
1658 | 1660 | else: |
|
1659 | 1661 | lock_values = [None, None, None] |
|
1660 | 1662 | return lock_values |
|
1661 | 1663 | |
|
1662 | 1664 | @locked.setter |
|
1663 | 1665 | def locked(self, val): |
|
1664 | 1666 | if val and isinstance(val, (list, tuple)): |
|
1665 | 1667 | self._locked = ':'.join(map(str, val)) |
|
1666 | 1668 | else: |
|
1667 | 1669 | self._locked = None |
|
1668 | 1670 | |
|
1669 | 1671 | @hybrid_property |
|
1670 | 1672 | def changeset_cache(self): |
|
1671 | 1673 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
1672 | 1674 | dummy = EmptyCommit().__json__() |
|
1673 | 1675 | if not self._changeset_cache: |
|
1674 | 1676 | return dummy |
|
1675 | 1677 | try: |
|
1676 | 1678 | return json.loads(self._changeset_cache) |
|
1677 | 1679 | except TypeError: |
|
1678 | 1680 | return dummy |
|
1679 | 1681 | except Exception: |
|
1680 | 1682 | log.error(traceback.format_exc()) |
|
1681 | 1683 | return dummy |
|
1682 | 1684 | |
|
    @changeset_cache.setter
    def changeset_cache(self, val):
        # serialize and store the changeset cache; serialization failures
        # are logged rather than raised (best-effort cache update)
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
|
1689 | 1691 | |
|
    @hybrid_property
    def repo_name(self):
        # public accessor for the backing `_repo_name` column
        return self._repo_name
|
1693 | 1695 | |
|
    @repo_name.setter
    def repo_name(self, value):
        # keep the unique sha1 name-hash column in sync with the name
        self._repo_name = value
        self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
|
1698 | 1700 | |
|
1699 | 1701 | @classmethod |
|
1700 | 1702 | def normalize_repo_name(cls, repo_name): |
|
1701 | 1703 | """ |
|
1702 | 1704 | Normalizes os specific repo_name to the format internally stored inside |
|
1703 | 1705 | database using URL_SEP |
|
1704 | 1706 | |
|
1705 | 1707 | :param cls: |
|
1706 | 1708 | :param repo_name: |
|
1707 | 1709 | """ |
|
1708 | 1710 | return cls.NAME_SEP.join(repo_name.split(os.sep)) |
|
1709 | 1711 | |
|
1710 | 1712 | @classmethod |
|
1711 | 1713 | def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False): |
|
1712 | 1714 | session = Session() |
|
1713 | 1715 | q = session.query(cls).filter(cls.repo_name == repo_name) |
|
1714 | 1716 | |
|
1715 | 1717 | if cache: |
|
1716 | 1718 | if identity_cache: |
|
1717 | 1719 | val = cls.identity_cache(session, 'repo_name', repo_name) |
|
1718 | 1720 | if val: |
|
1719 | 1721 | return val |
|
1720 | 1722 | else: |
|
1721 | 1723 | cache_key = "get_repo_by_name_%s" % _hash_key(repo_name) |
|
1722 | 1724 | q = q.options( |
|
1723 | 1725 | FromCache("sql_cache_short", cache_key)) |
|
1724 | 1726 | |
|
1725 | 1727 | return q.scalar() |
|
1726 | 1728 | |
|
1727 | 1729 | @classmethod |
|
1728 | 1730 | def get_by_id_or_repo_name(cls, repoid): |
|
1729 | 1731 | if isinstance(repoid, (int, long)): |
|
1730 | 1732 | try: |
|
1731 | 1733 | repo = cls.get(repoid) |
|
1732 | 1734 | except ValueError: |
|
1733 | 1735 | repo = None |
|
1734 | 1736 | else: |
|
1735 | 1737 | repo = cls.get_by_repo_name(repoid) |
|
1736 | 1738 | return repo |
|
1737 | 1739 | |
|
    @classmethod
    def get_by_full_path(cls, repo_full_path):
        # resolve a repository from its absolute filesystem path:
        # strip the storage base path, then normalize separators
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
        repo_name = cls.normalize_repo_name(repo_name)
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
|
1743 | 1745 | |
|
    @classmethod
    def get_repo_forks(cls, repo_id):
        # query (not a list) of all repositories forked from repo_id
        return cls.query().filter(Repository.fork_id == repo_id)
|
1747 | 1749 | |
|
1748 | 1750 | @classmethod |
|
1749 | 1751 | def base_path(cls): |
|
1750 | 1752 | """ |
|
1751 | 1753 | Returns base path when all repos are stored |
|
1752 | 1754 | |
|
1753 | 1755 | :param cls: |
|
1754 | 1756 | """ |
|
1755 | 1757 | q = Session().query(RhodeCodeUi)\ |
|
1756 | 1758 | .filter(RhodeCodeUi.ui_key == cls.NAME_SEP) |
|
1757 | 1759 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
1758 | 1760 | return q.one().ui_value |
|
1759 | 1761 | |
|
1760 | 1762 | @classmethod |
|
1761 | 1763 | def is_valid(cls, repo_name): |
|
1762 | 1764 | """ |
|
1763 | 1765 | returns True if given repo name is a valid filesystem repository |
|
1764 | 1766 | |
|
1765 | 1767 | :param cls: |
|
1766 | 1768 | :param repo_name: |
|
1767 | 1769 | """ |
|
1768 | 1770 | from rhodecode.lib.utils import is_valid_repo |
|
1769 | 1771 | |
|
1770 | 1772 | return is_valid_repo(repo_name, cls.base_path()) |
|
1771 | 1773 | |
|
1772 | 1774 | @classmethod |
|
1773 | 1775 | def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None), |
|
1774 | 1776 | case_insensitive=True): |
|
1775 | 1777 | q = Repository.query() |
|
1776 | 1778 | |
|
1777 | 1779 | if not isinstance(user_id, Optional): |
|
1778 | 1780 | q = q.filter(Repository.user_id == user_id) |
|
1779 | 1781 | |
|
1780 | 1782 | if not isinstance(group_id, Optional): |
|
1781 | 1783 | q = q.filter(Repository.group_id == group_id) |
|
1782 | 1784 | |
|
1783 | 1785 | if case_insensitive: |
|
1784 | 1786 | q = q.order_by(func.lower(Repository.repo_name)) |
|
1785 | 1787 | else: |
|
1786 | 1788 | q = q.order_by(Repository.repo_name) |
|
1787 | 1789 | return q.all() |
|
1788 | 1790 | |
|
    @property
    def forks(self):
        """
        Return forks of this repo
        """
        # delegates to the classmethod query; returns a Query, not a list
        return Repository.get_repo_forks(self.repo_id)
|
1795 | 1797 | |
|
    @property
    def parent(self):
        """
        Returns fork parent
        """
        # alias for the `fork` relationship; None when not a fork
        return self.fork
|
1802 | 1804 | |
|
    @property
    def just_name(self):
        # last path segment of the full (group-qualified) repo name
        return self.repo_name.split(self.NAME_SEP)[-1]
|
1806 | 1808 | |
|
1807 | 1809 | @property |
|
1808 | 1810 | def groups_with_parents(self): |
|
1809 | 1811 | groups = [] |
|
1810 | 1812 | if self.group is None: |
|
1811 | 1813 | return groups |
|
1812 | 1814 | |
|
1813 | 1815 | cur_gr = self.group |
|
1814 | 1816 | groups.insert(0, cur_gr) |
|
1815 | 1817 | while 1: |
|
1816 | 1818 | gr = getattr(cur_gr, 'parent_group', None) |
|
1817 | 1819 | cur_gr = cur_gr.parent_group |
|
1818 | 1820 | if gr is None: |
|
1819 | 1821 | break |
|
1820 | 1822 | groups.insert(0, gr) |
|
1821 | 1823 | |
|
1822 | 1824 | return groups |
|
1823 | 1825 | |
|
    @property
    def groups_and_repo(self):
        # convenience (parent-groups, repo) pair, e.g. for breadcrumbs
        return self.groups_with_parents, self
|
1827 | 1829 | |
|
1828 | 1830 | @LazyProperty |
|
1829 | 1831 | def repo_path(self): |
|
1830 | 1832 | """ |
|
1831 | 1833 | Returns base full path for that repository means where it actually |
|
1832 | 1834 | exists on a filesystem |
|
1833 | 1835 | """ |
|
1834 | 1836 | q = Session().query(RhodeCodeUi).filter( |
|
1835 | 1837 | RhodeCodeUi.ui_key == self.NAME_SEP) |
|
1836 | 1838 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
1837 | 1839 | return q.one().ui_value |
|
1838 | 1840 | |
|
1839 | 1841 | @property |
|
1840 | 1842 | def repo_full_path(self): |
|
1841 | 1843 | p = [self.repo_path] |
|
1842 | 1844 | # we need to split the name by / since this is how we store the |
|
1843 | 1845 | # names in the database, but that eventually needs to be converted |
|
1844 | 1846 | # into a valid system path |
|
1845 | 1847 | p += self.repo_name.split(self.NAME_SEP) |
|
1846 | 1848 | return os.path.join(*map(safe_unicode, p)) |
|
1847 | 1849 | |
|
1848 | 1850 | @property |
|
1849 | 1851 | def cache_keys(self): |
|
1850 | 1852 | """ |
|
1851 | 1853 | Returns associated cache keys for that repo |
|
1852 | 1854 | """ |
|
1853 | 1855 | return CacheKey.query()\ |
|
1854 | 1856 | .filter(CacheKey.cache_args == self.repo_name)\ |
|
1855 | 1857 | .order_by(CacheKey.cache_key)\ |
|
1856 | 1858 | .all() |
|
1857 | 1859 | |
|
1858 | 1860 | def get_new_name(self, repo_name): |
|
1859 | 1861 | """ |
|
1860 | 1862 | returns new full repository name based on assigned group and new new |
|
1861 | 1863 | |
|
1862 | 1864 | :param group_name: |
|
1863 | 1865 | """ |
|
1864 | 1866 | path_prefix = self.group.full_path_splitted if self.group else [] |
|
1865 | 1867 | return self.NAME_SEP.join(path_prefix + [repo_name]) |
|
1866 | 1868 | |
|
    @property
    def _config(self):
        """
        Returns db based config object.
        """
        # imported lazily to avoid a circular import at module load time
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False, repo=self)
|
1874 | 1876 | |
|
1875 | 1877 | def permissions(self, with_admins=True, with_owner=True): |
|
1876 | 1878 | q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self) |
|
1877 | 1879 | q = q.options(joinedload(UserRepoToPerm.repository), |
|
1878 | 1880 | joinedload(UserRepoToPerm.user), |
|
1879 | 1881 | joinedload(UserRepoToPerm.permission),) |
|
1880 | 1882 | |
|
1881 | 1883 | # get owners and admins and permissions. We do a trick of re-writing |
|
1882 | 1884 | # objects from sqlalchemy to named-tuples due to sqlalchemy session |
|
1883 | 1885 | # has a global reference and changing one object propagates to all |
|
1884 | 1886 | # others. This means if admin is also an owner admin_row that change |
|
1885 | 1887 | # would propagate to both objects |
|
1886 | 1888 | perm_rows = [] |
|
1887 | 1889 | for _usr in q.all(): |
|
1888 | 1890 | usr = AttributeDict(_usr.user.get_dict()) |
|
1889 | 1891 | usr.permission = _usr.permission.permission_name |
|
1890 | 1892 | perm_rows.append(usr) |
|
1891 | 1893 | |
|
1892 | 1894 | # filter the perm rows by 'default' first and then sort them by |
|
1893 | 1895 | # admin,write,read,none permissions sorted again alphabetically in |
|
1894 | 1896 | # each group |
|
1895 | 1897 | perm_rows = sorted(perm_rows, key=display_user_sort) |
|
1896 | 1898 | |
|
1897 | 1899 | _admin_perm = 'repository.admin' |
|
1898 | 1900 | owner_row = [] |
|
1899 | 1901 | if with_owner: |
|
1900 | 1902 | usr = AttributeDict(self.user.get_dict()) |
|
1901 | 1903 | usr.owner_row = True |
|
1902 | 1904 | usr.permission = _admin_perm |
|
1903 | 1905 | owner_row.append(usr) |
|
1904 | 1906 | |
|
1905 | 1907 | super_admin_rows = [] |
|
1906 | 1908 | if with_admins: |
|
1907 | 1909 | for usr in User.get_all_super_admins(): |
|
1908 | 1910 | # if this admin is also owner, don't double the record |
|
1909 | 1911 | if usr.user_id == owner_row[0].user_id: |
|
1910 | 1912 | owner_row[0].admin_row = True |
|
1911 | 1913 | else: |
|
1912 | 1914 | usr = AttributeDict(usr.get_dict()) |
|
1913 | 1915 | usr.admin_row = True |
|
1914 | 1916 | usr.permission = _admin_perm |
|
1915 | 1917 | super_admin_rows.append(usr) |
|
1916 | 1918 | |
|
1917 | 1919 | return super_admin_rows + owner_row + perm_rows |
|
1918 | 1920 | |
|
1919 | 1921 | def permission_user_groups(self): |
|
1920 | 1922 | q = UserGroupRepoToPerm.query().filter( |
|
1921 | 1923 | UserGroupRepoToPerm.repository == self) |
|
1922 | 1924 | q = q.options(joinedload(UserGroupRepoToPerm.repository), |
|
1923 | 1925 | joinedload(UserGroupRepoToPerm.users_group), |
|
1924 | 1926 | joinedload(UserGroupRepoToPerm.permission),) |
|
1925 | 1927 | |
|
1926 | 1928 | perm_rows = [] |
|
1927 | 1929 | for _user_group in q.all(): |
|
1928 | 1930 | usr = AttributeDict(_user_group.users_group.get_dict()) |
|
1929 | 1931 | usr.permission = _user_group.permission.permission_name |
|
1930 | 1932 | perm_rows.append(usr) |
|
1931 | 1933 | |
|
1932 | 1934 | perm_rows = sorted(perm_rows, key=display_user_group_sort) |
|
1933 | 1935 | return perm_rows |
|
1934 | 1936 | |
|
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # lock info is [user_id, timestamp, reason]; all None when unlocked
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            # expose custom extra fields under their 'ex_'-prefixed keys
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
|
1982 | 1984 | |
|
1983 | 1985 | @classmethod |
|
1984 | 1986 | def lock(cls, repo, user_id, lock_time=None, lock_reason=None): |
|
1985 | 1987 | if not lock_time: |
|
1986 | 1988 | lock_time = time.time() |
|
1987 | 1989 | if not lock_reason: |
|
1988 | 1990 | lock_reason = cls.LOCK_AUTOMATIC |
|
1989 | 1991 | repo.locked = [user_id, lock_time, lock_reason] |
|
1990 | 1992 | Session().add(repo) |
|
1991 | 1993 | Session().commit() |
|
1992 | 1994 | |
|
1993 | 1995 | @classmethod |
|
1994 | 1996 | def unlock(cls, repo): |
|
1995 | 1997 | repo.locked = None |
|
1996 | 1998 | Session().add(repo) |
|
1997 | 1999 | Session().commit() |
|
1998 | 2000 | |
|
    @classmethod
    def getlock(cls, repo):
        # thin accessor kept for API symmetry with lock()/unlock()
        return repo.locked
|
2002 | 2004 | |
|
2003 | 2005 | def is_user_lock(self, user_id): |
|
2004 | 2006 | if self.lock[0]: |
|
2005 | 2007 | lock_user_id = safe_int(self.lock[0]) |
|
2006 | 2008 | user_id = safe_int(user_id) |
|
2007 | 2009 | # both are ints, and they are equal |
|
2008 | 2010 | return all([lock_user_id, user_id]) and lock_user_id == user_id |
|
2009 | 2011 | |
|
2010 | 2012 | return False |
|
2011 | 2013 | |
|
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: either 'push' or 'pull'; anything else raises ValueError
        :param user_id: id of the user performing the action
        :param only_when_enabled: when False, evaluate the lock even if
            repository locking is disabled
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        # lock_info is [user_id, timestamp, reason]; all None when unlocked
        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
|
2074 | 2076 | |
|
2075 | 2077 | @property |
|
2076 | 2078 | def last_db_change(self): |
|
2077 | 2079 | return self.updated_on |
|
2078 | 2080 | |
|
2079 | 2081 | @property |
|
2080 | 2082 | def clone_uri_hidden(self): |
|
2081 | 2083 | clone_uri = self.clone_uri |
|
2082 | 2084 | if clone_uri: |
|
2083 | 2085 | import urlobject |
|
2084 | 2086 | url_obj = urlobject.URLObject(cleaned_uri(clone_uri)) |
|
2085 | 2087 | if url_obj.password: |
|
2086 | 2088 | clone_uri = url_obj.with_password('*****') |
|
2087 | 2089 | return clone_uri |
|
2088 | 2090 | |
|
2089 | 2091 | def clone_url(self, **override): |
|
2090 | 2092 | from rhodecode.model.settings import SettingsModel |
|
2091 | 2093 | |
|
2092 | 2094 | uri_tmpl = None |
|
2093 | 2095 | if 'with_id' in override: |
|
2094 | 2096 | uri_tmpl = self.DEFAULT_CLONE_URI_ID |
|
2095 | 2097 | del override['with_id'] |
|
2096 | 2098 | |
|
2097 | 2099 | if 'uri_tmpl' in override: |
|
2098 | 2100 | uri_tmpl = override['uri_tmpl'] |
|
2099 | 2101 | del override['uri_tmpl'] |
|
2100 | 2102 | |
|
2101 | 2103 | # we didn't override our tmpl from **overrides |
|
2102 | 2104 | if not uri_tmpl: |
|
2103 | 2105 | rc_config = SettingsModel().get_all_settings(cache=True) |
|
2104 | 2106 | uri_tmpl = rc_config.get( |
|
2105 | 2107 | 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI |
|
2106 | 2108 | |
|
2107 | 2109 | request = get_current_request() |
|
2108 | 2110 | return get_clone_url(request=request, |
|
2109 | 2111 | uri_tmpl=uri_tmpl, |
|
2110 | 2112 | repo_name=self.repo_name, |
|
2111 | 2113 | repo_id=self.repo_id, **override) |
|
2112 | 2114 | |
|
2113 | 2115 | def set_state(self, state): |
|
2114 | 2116 | self.repo_state = state |
|
2115 | 2117 | Session().add(self) |
|
2116 | 2118 | #========================================================================== |
|
2117 | 2119 | # SCM PROPERTIES |
|
2118 | 2120 | #========================================================================== |
|
2119 | 2121 | |
|
2120 | 2122 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None): |
|
2121 | 2123 | return get_commit_safe( |
|
2122 | 2124 | self.scm_instance(), commit_id, commit_idx, pre_load=pre_load) |
|
2123 | 2125 | |
|
2124 | 2126 | def get_changeset(self, rev=None, pre_load=None): |
|
2125 | 2127 | warnings.warn("Use get_commit", DeprecationWarning) |
|
2126 | 2128 | commit_id = None |
|
2127 | 2129 | commit_idx = None |
|
2128 | 2130 | if isinstance(rev, basestring): |
|
2129 | 2131 | commit_id = rev |
|
2130 | 2132 | else: |
|
2131 | 2133 | commit_idx = rev |
|
2132 | 2134 | return self.get_commit(commit_id=commit_id, commit_idx=commit_idx, |
|
2133 | 2135 | pre_load=pre_load) |
|
2134 | 2136 | |
|
2135 | 2137 | def get_landing_commit(self): |
|
2136 | 2138 | """ |
|
2137 | 2139 | Returns landing commit, or if that doesn't exist returns the tip |
|
2138 | 2140 | """ |
|
2139 | 2141 | _rev_type, _rev = self.landing_rev |
|
2140 | 2142 | commit = self.get_commit(_rev) |
|
2141 | 2143 | if isinstance(commit, EmptyCommit): |
|
2142 | 2144 | return self.get_commit() |
|
2143 | 2145 | return commit |
|
2144 | 2146 | |
|
    def update_commit_cache(self, cs_cache=None, config=None):
        """
        Update cache of last changeset for repository, keys should be::

            short_id
            raw_id
            revision
            parents
            message
            date
            author

        :param cs_cache: pre-computed cache dict (or a ``BaseChangeset``);
            when ``None`` the latest commit is loaded from the backend.
        :param config: optional vcs config passed to ``scm_instance`` when
            the cache has to be computed here.
        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        if cs_cache is None:
            # use no-cache version here so we read the true backend state
            scm_repo = self.scm_instance(cache=False, config=config)
            if scm_repo:
                cs_cache = scm_repo.get_commit(
                    pre_load=["author", "date", "message", "parents"])
            else:
                # backend unavailable/missing - fall back to an empty commit
                cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseChangeset):
            # normalize commit object into the plain-dict cache format
            cs_cache = cs_cache.__json__()

        def is_outdated(new_cs_cache):
            # cache is stale when either the hash or numeric revision moved
            # NOTE(review): this indexes into self.changeset_cache before the
            # `not self.changeset_cache` fallback below runs; presumably the
            # changeset_cache property always yields 'raw_id'/'revision'
            # defaults - confirm against the property's definition.
            if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
                new_cs_cache['revision'] != self.changeset_cache['revision']):
                return True
            return False

        # check if we have maybe already latest cached revision
        if is_outdated(cs_cache) or not self.changeset_cache:
            # epoch is used as "no date available" marker
            _default = datetime.datetime.fromtimestamp(0)
            last_change = cs_cache.get('date') or _default
            log.debug('updated repo %s with new cs cache %s',
                      self.repo_name, cs_cache)
            # persist immediately so other workers see the fresh cache
            self.updated_on = last_change
            self.changeset_cache = cs_cache
            Session().add(self)
            Session().commit()
        else:
            log.debug('Skipping update_commit_cache for repo:`%s` '
                      'commit already with latest changes', self.repo_name)
|
2191 | 2193 | |
|
2192 | 2194 | @property |
|
2193 | 2195 | def tip(self): |
|
2194 | 2196 | return self.get_commit('tip') |
|
2195 | 2197 | |
|
2196 | 2198 | @property |
|
2197 | 2199 | def author(self): |
|
2198 | 2200 | return self.tip.author |
|
2199 | 2201 | |
|
2200 | 2202 | @property |
|
2201 | 2203 | def last_change(self): |
|
2202 | 2204 | return self.scm_instance().last_change |
|
2203 | 2205 | |
|
2204 | 2206 | def get_comments(self, revisions=None): |
|
2205 | 2207 | """ |
|
2206 | 2208 | Returns comments for this repository grouped by revisions |
|
2207 | 2209 | |
|
2208 | 2210 | :param revisions: filter query by revisions only |
|
2209 | 2211 | """ |
|
2210 | 2212 | cmts = ChangesetComment.query()\ |
|
2211 | 2213 | .filter(ChangesetComment.repo == self) |
|
2212 | 2214 | if revisions: |
|
2213 | 2215 | cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) |
|
2214 | 2216 | grouped = collections.defaultdict(list) |
|
2215 | 2217 | for cmt in cmts.all(): |
|
2216 | 2218 | grouped[cmt.revision].append(cmt) |
|
2217 | 2219 | return grouped |
|
2218 | 2220 | |
|
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :return: dict mapping revision -> [status, status label,
            pull request id or None, target repo name or None]
        """
        # only latest (version == 0) status entries count
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            # (databases cap the number of parameters in an IN clause)
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        # pre-fill every revision of an open PR with "under review" ...
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # ... then overwrite with any explicit status records
        # (note: `stat` is reused here as the loop variable)
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
|
2260 | 2262 | |
|
2261 | 2263 | # ========================================================================== |
|
2262 | 2264 | # SCM CACHE INSTANCE |
|
2263 | 2265 | # ========================================================================== |
|
2264 | 2266 | |
|
    def scm_instance(self, **kwargs):
        """
        Return a vcs backend instance for this repository.

        Keyword arguments:
          * ``config`` - custom vcs config; bypasses the instance cache
          * ``cache`` - tri-state cache control; ``None`` defers to the
            global ``vcs_full_cache`` setting
        """
        import rhodecode

        # Passing a config will not hit the cache currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use default global, else we have a full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            return self._get_instance_cached()
        return self._get_instance(cache=bool(cache), config=config)
|
2278 | 2280 | |
|
    def _get_instance_cached(self):
        """
        Return a vcs instance through the long-term cache region, using the
        per-repo invalidation context to decide when to recompute.
        """
        @cache_region('long_term')
        def _get_repo(cache_key):
            # cache_key is supplied by the invalidation context; the actual
            # instance creation ignores it
            return self._get_instance()

        invalidator_context = CacheKey.repo_context_cache(
            _get_repo, self.repo_name, None, thread_scoped=True)

        with invalidator_context as context:
            # mark-and-compute: invalidate() flags stale entries so
            # compute() either reuses the cached instance or rebuilds it
            context.invalidate()
            repo = context.compute()

        return repo
|
2292 | 2294 | |
|
2293 | 2295 | def _get_instance(self, cache=True, config=None): |
|
2294 | 2296 | config = config or self._config |
|
2295 | 2297 | custom_wire = { |
|
2296 | 2298 | 'cache': cache # controls the vcs.remote cache |
|
2297 | 2299 | } |
|
2298 | 2300 | repo = get_vcs_instance( |
|
2299 | 2301 | repo_path=safe_str(self.repo_full_path), |
|
2300 | 2302 | config=config, |
|
2301 | 2303 | with_wire=custom_wire, |
|
2302 | 2304 | create=False, |
|
2303 | 2305 | _vcs_alias=self.repo_type) |
|
2304 | 2306 | |
|
2305 | 2307 | return repo |
|
2306 | 2308 | |
|
2307 | 2309 | def __json__(self): |
|
2308 | 2310 | return {'landing_rev': self.landing_rev} |
|
2309 | 2311 | |
|
2310 | 2312 | def get_dict(self): |
|
2311 | 2313 | |
|
2312 | 2314 | # Since we transformed `repo_name` to a hybrid property, we need to |
|
2313 | 2315 | # keep compatibility with the code which uses `repo_name` field. |
|
2314 | 2316 | |
|
2315 | 2317 | result = super(Repository, self).get_dict() |
|
2316 | 2318 | result['repo_name'] = result.pop('_repo_name', None) |
|
2317 | 2319 | return result |
|
2318 | 2320 | |
|
2319 | 2321 | |
|
class RepoGroup(Base, BaseModel):
    """Nested group (folder) of repositories, forming a tree via parent links."""
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        CheckConstraint('group_id != group_parent_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration',
                                cascade="all, delete, delete-orphan")

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)

    @classmethod
    def _generate_choice(cls, repo_group):
        # build one (id, label) select2 choice from the group's full path
        from webhelpers.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        """
        Return sorted (id, label) select2 choices for the given groups
        (all groups when ``groups`` is falsy).
        """
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            # sentinel entry for "no parent group"
            repo_groups = [(-1, u'-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        # sort by the top-level path segment of the label
        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        """Separator used in group paths/URLs."""
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Fetch a single group by name, optionally case-insensitive/cached."""
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()

    @classmethod
    def get_user_personal_repo_group(cls, user_id):
        """
        Return the personal repository group of ``user_id``, or ``None``
        for the anonymous default user (which has no personal group).
        """
        user = User.get(user_id)
        if user.username == User.DEFAULT_USER:
            return None

        return cls.query()\
            .filter(cls.personal == true()) \
            .filter(cls.user == user).scalar()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        """
        Return all repo groups, optionally filtered by owner and/or parent
        group, sorted by name.
        """
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self):
        """List of ancestor groups, outermost first (bounded depth)."""
        parents_recursion_limit = 10
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error(('more than %s parents found for group %s, stopping '
                           'recursive parent fetching' % (parents_recursion_limit, self)))
                break

            groups.insert(0, gr)
        return groups

    @property
    def last_db_change(self):
        """Timestamp of the most recent database update of this group."""
        return self.updated_on

    @property
    def children(self):
        """Query over the direct child groups."""
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        """Last path segment of the group name."""
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        """Group name split into its path segments."""
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        """Query over repositories directly inside this group."""
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        """Number of repositories in this group and all of its descendants."""
        cnt = self.repositories.count()

        def children_count(group):
            # sum repository counts over the child subtree
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True):
        # depth-first collection of this group's subtree; the group itself
        # is always the first element of the result
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    all_.append(gr)
                    _get_members(gr)

        _get_members(self)
        return [self] + all_

    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name: new last path segment for this group
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

    def permissions(self, with_admins=True, with_owner=True):
        """
        Return per-user permission rows for this group, optionally including
        super-admin and owner rows.
        """
        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        _admin_perm = 'group.admin'
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                # if this admin is also owner, don't double the record
                # (guard on owner_row: with_owner=False leaves it empty,
                # which previously raised IndexError here)
                if owner_row and usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        return super_admin_rows + owner_row + perm_rows

    def permission_user_groups(self):
        """Return per-user-group permission rows for this group."""
        q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.users_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.description_safe,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data
|
2602 | 2604 | |
|
2603 | 2605 | |
|
2604 | 2606 | class Permission(Base, BaseModel): |
|
2605 | 2607 | __tablename__ = 'permissions' |
|
2606 | 2608 | __table_args__ = ( |
|
2607 | 2609 | Index('p_perm_name_idx', 'permission_name'), |
|
2608 | 2610 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2609 | 2611 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
2610 | 2612 | ) |
|
2611 | 2613 | PERMS = [ |
|
2612 | 2614 | ('hg.admin', _('RhodeCode Super Administrator')), |
|
2613 | 2615 | |
|
2614 | 2616 | ('repository.none', _('Repository no access')), |
|
2615 | 2617 | ('repository.read', _('Repository read access')), |
|
2616 | 2618 | ('repository.write', _('Repository write access')), |
|
2617 | 2619 | ('repository.admin', _('Repository admin access')), |
|
2618 | 2620 | |
|
2619 | 2621 | ('group.none', _('Repository group no access')), |
|
2620 | 2622 | ('group.read', _('Repository group read access')), |
|
2621 | 2623 | ('group.write', _('Repository group write access')), |
|
2622 | 2624 | ('group.admin', _('Repository group admin access')), |
|
2623 | 2625 | |
|
2624 | 2626 | ('usergroup.none', _('User group no access')), |
|
2625 | 2627 | ('usergroup.read', _('User group read access')), |
|
2626 | 2628 | ('usergroup.write', _('User group write access')), |
|
2627 | 2629 | ('usergroup.admin', _('User group admin access')), |
|
2628 | 2630 | |
|
2629 | 2631 | ('hg.repogroup.create.false', _('Repository Group creation disabled')), |
|
2630 | 2632 | ('hg.repogroup.create.true', _('Repository Group creation enabled')), |
|
2631 | 2633 | |
|
2632 | 2634 | ('hg.usergroup.create.false', _('User Group creation disabled')), |
|
2633 | 2635 | ('hg.usergroup.create.true', _('User Group creation enabled')), |
|
2634 | 2636 | |
|
2635 | 2637 | ('hg.create.none', _('Repository creation disabled')), |
|
2636 | 2638 | ('hg.create.repository', _('Repository creation enabled')), |
|
2637 | 2639 | ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')), |
|
2638 | 2640 | ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')), |
|
2639 | 2641 | |
|
2640 | 2642 | ('hg.fork.none', _('Repository forking disabled')), |
|
2641 | 2643 | ('hg.fork.repository', _('Repository forking enabled')), |
|
2642 | 2644 | |
|
2643 | 2645 | ('hg.register.none', _('Registration disabled')), |
|
2644 | 2646 | ('hg.register.manual_activate', _('User Registration with manual account activation')), |
|
2645 | 2647 | ('hg.register.auto_activate', _('User Registration with automatic account activation')), |
|
2646 | 2648 | |
|
2647 | 2649 | ('hg.password_reset.enabled', _('Password reset enabled')), |
|
2648 | 2650 | ('hg.password_reset.hidden', _('Password reset hidden')), |
|
2649 | 2651 | ('hg.password_reset.disabled', _('Password reset disabled')), |
|
2650 | 2652 | |
|
2651 | 2653 | ('hg.extern_activate.manual', _('Manual activation of external account')), |
|
2652 | 2654 | ('hg.extern_activate.auto', _('Automatic activation of external account')), |
|
2653 | 2655 | |
|
2654 | 2656 | ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')), |
|
2655 | 2657 | ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')), |
|
2656 | 2658 | ] |
|
2657 | 2659 | |
|
2658 | 2660 | # definition of system default permissions for DEFAULT user |
|
2659 | 2661 | DEFAULT_USER_PERMISSIONS = [ |
|
2660 | 2662 | 'repository.read', |
|
2661 | 2663 | 'group.read', |
|
2662 | 2664 | 'usergroup.read', |
|
2663 | 2665 | 'hg.create.repository', |
|
2664 | 2666 | 'hg.repogroup.create.false', |
|
2665 | 2667 | 'hg.usergroup.create.false', |
|
2666 | 2668 | 'hg.create.write_on_repogroup.true', |
|
2667 | 2669 | 'hg.fork.repository', |
|
2668 | 2670 | 'hg.register.manual_activate', |
|
2669 | 2671 | 'hg.password_reset.enabled', |
|
2670 | 2672 | 'hg.extern_activate.auto', |
|
2671 | 2673 | 'hg.inherit_default_perms.true', |
|
2672 | 2674 | ] |
|
2673 | 2675 | |
|
2674 | 2676 | # defines which permissions are more important higher the more important |
|
2675 | 2677 | # Weight defines which permissions are more important. |
|
2676 | 2678 | # The higher number the more important. |
|
2677 | 2679 | PERM_WEIGHTS = { |
|
2678 | 2680 | 'repository.none': 0, |
|
2679 | 2681 | 'repository.read': 1, |
|
2680 | 2682 | 'repository.write': 3, |
|
2681 | 2683 | 'repository.admin': 4, |
|
2682 | 2684 | |
|
2683 | 2685 | 'group.none': 0, |
|
2684 | 2686 | 'group.read': 1, |
|
2685 | 2687 | 'group.write': 3, |
|
2686 | 2688 | 'group.admin': 4, |
|
2687 | 2689 | |
|
2688 | 2690 | 'usergroup.none': 0, |
|
2689 | 2691 | 'usergroup.read': 1, |
|
2690 | 2692 | 'usergroup.write': 3, |
|
2691 | 2693 | 'usergroup.admin': 4, |
|
2692 | 2694 | |
|
2693 | 2695 | 'hg.repogroup.create.false': 0, |
|
2694 | 2696 | 'hg.repogroup.create.true': 1, |
|
2695 | 2697 | |
|
2696 | 2698 | 'hg.usergroup.create.false': 0, |
|
2697 | 2699 | 'hg.usergroup.create.true': 1, |
|
2698 | 2700 | |
|
2699 | 2701 | 'hg.fork.none': 0, |
|
2700 | 2702 | 'hg.fork.repository': 1, |
|
2701 | 2703 | 'hg.create.none': 0, |
|
2702 | 2704 | 'hg.create.repository': 1 |
|
2703 | 2705 | } |
|
2704 | 2706 | |
|
2705 | 2707 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2706 | 2708 | permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) |
|
2707 | 2709 | permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) |
|
2708 | 2710 | |
|
2709 | 2711 | def __unicode__(self): |
|
2710 | 2712 | return u"<%s('%s:%s')>" % ( |
|
2711 | 2713 | self.__class__.__name__, self.permission_id, self.permission_name |
|
2712 | 2714 | ) |
|
2713 | 2715 | |
|
2714 | 2716 | @classmethod |
|
2715 | 2717 | def get_by_key(cls, key): |
|
2716 | 2718 | return cls.query().filter(cls.permission_name == key).scalar() |
|
2717 | 2719 | |
|
2718 | 2720 | @classmethod |
|
2719 | 2721 | def get_default_repo_perms(cls, user_id, repo_id=None): |
|
2720 | 2722 | q = Session().query(UserRepoToPerm, Repository, Permission)\ |
|
2721 | 2723 | .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\ |
|
2722 | 2724 | .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ |
|
2723 | 2725 | .filter(UserRepoToPerm.user_id == user_id) |
|
2724 | 2726 | if repo_id: |
|
2725 | 2727 | q = q.filter(UserRepoToPerm.repository_id == repo_id) |
|
2726 | 2728 | return q.all() |
|
2727 | 2729 | |
|
    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Repository permissions *user_id* inherits through membership in
        active user groups.

        Returns a list of ``(UserGroupRepoToPerm, Repository, Permission)``
        tuples; pass *repo_id* to narrow to one repository.
        """
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                # only grants via groups the user belongs to, and only
                # while the group itself is active
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()
|
2751 | 2753 | |
|
2752 | 2754 | @classmethod |
|
2753 | 2755 | def get_default_group_perms(cls, user_id, repo_group_id=None): |
|
2754 | 2756 | q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\ |
|
2755 | 2757 | .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\ |
|
2756 | 2758 | .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\ |
|
2757 | 2759 | .filter(UserRepoGroupToPerm.user_id == user_id) |
|
2758 | 2760 | if repo_group_id: |
|
2759 | 2761 | q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id) |
|
2760 | 2762 | return q.all() |
|
2761 | 2763 | |
|
    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        """
        Repository-group permissions *user_id* inherits through membership
        in active user groups.

        Returns a list of ``(UserGroupRepoGroupToPerm, RepoGroup,
        Permission)`` tuples; pass *repo_group_id* to narrow to one group.
        """
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                # membership of the requesting user + group must be active
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()
|
2787 | 2789 | |
|
2788 | 2790 | @classmethod |
|
2789 | 2791 | def get_default_user_group_perms(cls, user_id, user_group_id=None): |
|
2790 | 2792 | q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\ |
|
2791 | 2793 | .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\ |
|
2792 | 2794 | .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\ |
|
2793 | 2795 | .filter(UserUserGroupToPerm.user_id == user_id) |
|
2794 | 2796 | if user_group_id: |
|
2795 | 2797 | q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id) |
|
2796 | 2798 | return q.all() |
|
2797 | 2799 | |
|
    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        """
        User-group permissions *user_id* inherits through membership in
        active user groups (i.e. group-on-group grants).

        Returns a list of ``(UserGroupUserGroupToPerm, UserGroup,
        Permission)`` tuples; pass *user_group_id* to narrow to grants
        held by one granting group.
        """
        # alias needed because UserGroup appears twice: once as the target
        # of the grant and once as the group conferring it
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                # the user must be a member, and the conferring group active
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
|
2827 | 2829 | |
|
2828 | 2830 | |
|
class UserRepoToPerm(Base, BaseModel):
    """Permission granted directly to a single user on a single repository."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        # at most one row per (user, repository, permission) triple
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository, permission):
        """Create a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
|
2856 | 2858 | |
|
2857 | 2859 | |
|
class UserUserGroupToPerm(Base, BaseModel):
    """Permission granted to a single user on a user group (e.g. who may
    manage that group)."""
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        # at most one row per (user, user group, permission) triple
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Create a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
|
2885 | 2887 | |
|
2886 | 2888 | |
|
class UserToPerm(Base, BaseModel):
    """Global (system-wide) permission assigned to a single user."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        # a user can hold each global permission at most once
        UniqueConstraint('user_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eager-loaded: the permission is essentially always needed with the row
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
|
2903 | 2905 | |
|
2904 | 2906 | |
|
class UserGroupRepoToPerm(Base, BaseModel):
    """Permission granted to a user group on a repository."""
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        # at most one row per (repository, user group, permission) triple
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Create a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
|
2932 | 2934 | |
|
2933 | 2935 | |
|
class UserGroupUserGroupToPerm(Base, BaseModel):
    """Permission granted by one user group on another (target) user group.

    Self-referential: both FKs point at ``users_groups``, so the two
    relationships below need explicit ``primaryjoin`` expressions.
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        # a group cannot hold a permission on itself
        CheckConstraint('target_user_group_id != user_group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # the group the permission applies to
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    # the group whose members receive the permission
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Create a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
|
2962 | 2964 | |
|
2963 | 2965 | |
|
class UserGroupToPerm(Base, BaseModel):
    """Global (system-wide) permission assigned to a user group."""
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        # a group can hold each global permission at most once
        UniqueConstraint('users_group_id', 'permission_id',),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
|
2977 | 2979 | |
|
2978 | 2980 | |
|
class UserRepoGroupToPerm(Base, BaseModel):
    """Permission granted directly to a user on a repository group."""
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        # at most one row per (user, repo group, permission) triple
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Create a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
|
3004 | 3006 | |
|
3005 | 3007 | |
|
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """Permission granted to a user group on a repository group."""
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        # NOTE: unlike the sibling tables, permission_id is NOT part of the
        # constraint, so each (user group, repo group) pair can hold only a
        # single permission row.
        UniqueConstraint('users_group_id', 'group_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Create a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
|
3034 | 3036 | |
|
3035 | 3037 | |
|
class Statistics(Base, BaseModel):
    """Pre-computed commit/code statistics for a repository (one row per
    repository, refreshed up to ``stat_on_revision``)."""
    __tablename__ = 'statistics'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # unique=True: exactly one statistics row per repository
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # last revision these statistics were computed for
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # serialized JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # serialized JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # serialized JSON data

    repository = relationship('Repository', single_parent=True)
|
3050 | 3052 | |
|
3051 | 3053 | |
|
class UserFollowing(Base, BaseModel):
    """A user following either a repository or another user.

    Both ``follows_*`` columns are nullable; each unique constraint covers
    one flavor of following, so a row is expected to set one of the two.
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # explicit primaryjoins: User appears on both sides of the relation
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return an (unexecuted) query for all followings of *repo_id*."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
|
3075 | 3077 | |
|
3076 | 3078 | |
|
class CacheKey(Base, BaseModel):
    """
    Cache-invalidation bookkeeping.

    Each row couples a concrete cache key (optionally prefixed by the
    instance id) with the repository name it was generated for
    (``cache_args``) and an active flag consumers check before trusting
    cached data.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    # known cache-type suffixes appended to repo-name based keys
    CACHE_TYPE_ATOM = 'ATOM'
    CACHE_TYPE_RSS = 'RSS'
    CACHE_TYPE_README = 'README'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        # new keys always start inactive; callers flip the flag once the
        # cached value has been (re)computed
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around cache_args (the repo name):
        # '<prefix><repo_name><suffix>' -> (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def get_cache_key(cls, repo_name, cache_type):
        """
        Generate a cache key for this process of RhodeCode instance.
        Prefix most likely will be process id or maybe explicitly set
        instance_id from .ini file.
        """
        import rhodecode
        prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')

        repo_as_unicode = safe_unicode(repo_name)
        # append the cache type suffix only when one is given
        key = u'{}_{}'.format(repo_as_unicode, cache_type) \
            if cache_type else repo_as_unicode

        return u'{}{}'.format(prefix, key)

    @classmethod
    def set_invalidate(cls, repo_name, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        :param repo_name: matched against ``cache_args``
        :param delete: remove the rows entirely instead of flagging them
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == repo_name)
            if delete:
                log.debug('cache objects deleted for repo %s',
                          safe_str(repo_name))
                qry.delete()
            else:
                log.debug('cache objects marked as invalid for repo %s',
                          safe_str(repo_name))
                qry.update({"cache_active": False})

            Session().commit()
        except Exception:
            # best-effort: log and roll back rather than propagate
            log.exception(
                'Cache key invalidation failed for repository %s',
                safe_str(repo_name))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the row for *cache_key*, or ``None`` when absent."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def repo_context_cache(cls, compute_func, repo_name, cache_type,
                           thread_scoped=False):
        """
        Build an InvalidationContext for cached, invalidatable computation.

        Usage::

            @cache_region('long_term')
            def _heavy_calculation(cache_key):
                return 'result'

            cache_context = CacheKey.repo_context_cache(
                _heavy_calculation, repo_name, cache_type)

            with cache_context as context:
                context.invalidate()
                computed = context.compute()

            assert computed == 'result'
        """
        from rhodecode.lib import caches
        return caches.InvalidationContext(
            compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
|
3203 | 3205 | |
|
3204 | 3206 | |
|
class ChangesetComment(Base, BaseModel):
    """
    A comment on a commit or a pull request.

    Inline comments carry ``line_no`` + ``f_path``; general comments leave
    both unset. Pull-request comments additionally reference the PR and,
    when versioned, the PR version they were made against.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # display_state marker for comments left behind by PR updates
    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    # commit hash for commit comments; NULL for pure PR comments
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    # self-referential link: a TODO comment may be resolved by another comment
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """Return the 1-based index of *pr_version* within *versions*,
        or ``None`` when it is not present."""
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            return num_versions.index(pr_version) +1
        except (IndexError, ValueError):
            return

    @property
    def outdated(self):
        # flagged as outdated via display_state when PR contents changed
        return self.display_state == self.COMMENT_OUTDATED

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            return self.pull_request_version_id is not None

        return self.pull_request_version_id < version

    @property
    def resolved(self):
        # first resolving comment, if any (via the resolved_by backref)
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline comments always carry both a line number and a file path
        return self.line_no and self.f_path

    def get_index_version(self, versions):
        """1-based index of this comment's PR version within *versions*."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return a dict of comment fields for the JSON-RPC API."""
        comment = self
        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
|
3326 | 3328 | |
|
3327 | 3329 | |
|
class ChangesetStatus(Base, BaseModel):
    """
    A review status (approved/rejected/...) left by one reviewer for a
    single revision, optionally linked to a comment and a pull request.
    Statuses are versioned, hence the repo/revision/version uniqueness.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        # one status row per repo/revision/version triple
        UniqueConstraint('repo_id', 'revision', 'version'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )

    # allowed .status values; STATUS_NOT_REVIEWED doubles as the default
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs used for display
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        # Translate a raw status value into its human-readable label;
        # returns None for unknown values.
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        # Label for this row's own status.
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return the API representation of this status."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        # JSON serialization hook — mirrors get_api_data().
        data = dict()
        data.update(self.get_api_data())
        return data
|
3390 | 3392 | |
|
3391 | 3393 | |
|
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    Shared by :class:`PullRequest` and :class:`PullRequestVersion`;
    ``@declared_attr`` is used for columns/relationships that must be
    re-created per concrete subclass.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    @declared_attr
    def user_id(cls):
        # author of the pull request
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    source_ref = Column('org_ref', Unicode(255), nullable=False)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    target_ref = Column('other_ref', Unicode(255), nullable=False)
    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        # JSON-encoded form of the mutable reviewer_data column.
        return json.dumps(self.reviewer_data)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering.
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        # Stored as a single colon-separated string; exposed as a list.
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        # primaryjoin is built per subclass since the FK column is shared.
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    def unicode_to_reference(self, raw):
        """
        Convert a unicode (or string) to a reference object.
        If unicode evaluates to False it returns None.
        """
        if raw:
            refs = raw.split(':')
            return Reference(*refs)
        else:
            return None

    def reference_to_unicode(self, ref):
        """
        Convert a reference object to unicode.
        If reference is None it returns None.
        """
        if ref:
            return u':'.join(ref)
        else:
            return None

    def get_api_data(self, with_merge_state=True):
        """
        Build the full API representation of this pull request.

        :param with_merge_state: when True, query the (potentially
            expensive) live merge status; otherwise report
            'not_available' for the mergeability fields.
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_status = PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status[0],
                'message': safe_unicode(merge_status[1]),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    # st is a list of (commit_id, status) style entries;
                    # fall back to 'not_reviewed' when no vote was cast
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data
|
3601 | 3603 | |
|
3602 | 3604 | |
|
class PullRequest(Base, _PullRequestBase):
    """
    A pull request; shared column definitions live on _PullRequestBase,
    historical snapshots live in :class:`PullRequestVersion`.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    # child rows are removed together with the pull request
    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    statuses = relationship('ChangesetStatus',
                            cascade="all, delete, delete-orphan")
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
    versions = relationship('PullRequestVersion',
                            cascade="all, delete, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Wrap ``pull_request_obj`` (a PullRequest or PullRequestVersion)
        in a read-only display object whose attribute lookups are served
        from a pre-computed API-data dict, except for names listed in
        ``internal_methods`` which stay live methods.
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # Only reached when normal attribute lookup fails, so
                # real methods defined below (versions, is_closed, ...)
                # shadow the attrs dict automatically.
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                # live query against the wrapped object, oldest first
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            @property
            def pull_request_version_id(self):
                # only PullRequestVersion instances carry this attribute
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        attrs = StrictAttributeDict(pull_request_obj.get_api_data())

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions

        # these always come from the original (non-version) pull request
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def __json__(self):
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        # aggregated review status across all reviewers
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        # identifier of the merge workspace used for shadow repositories
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        # VCS instance of the shadow (pre-merge) repository for this PR
        workspace_id = self.workspace_id
        vcs_obj = self.target_repo.scm_instance()
        shadow_repository_path = vcs_obj._get_shadow_repository_path(
            workspace_id)
        return vcs_obj._get_shadow_instance(shadow_repository_path)
|
3719 | 3721 | |
|
3720 | 3722 | |
|
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request taken when it is updated.
    Reviewers, versions and open/closed state are delegated to the
    parent pull request; only the PR content fields are versioned.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # reviewers are not versioned — always the live PR's set
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
|
3758 | 3760 | |
|
3759 | 3761 | |
|
class PullRequestReviewers(Base, BaseModel):
    """
    Association of a reviewer (user) with a pull request, together with
    the reasons they were added and whether their vote is mandatory.
    """
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    @hybrid_property
    def reasons(self):
        # normalize a NULL/empty column to an empty list for callers
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        # NOTE(review): ``basestring`` is Python-2-only; consistent with
        # the u'' literals used throughout this file.
        if any(not isinstance(x, basestring) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # stored as a JSON list in the 'reason' column
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')
|
3794 | 3796 | |
|
3795 | 3797 | |
|
class Notification(Base, BaseModel):
    """
    A notification (comment, mention, registration, ...) sent to one or
    more users via :class:`UserNotification` association rows.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    # notification type values stored in the 'type' column
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        # users this notification was delivered to, ordered by user id
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and fan it out to ``recipients``.

        :param created_by: User instance that authored the notification
        :param recipients: iterable of User instances to notify
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        for u in recipients:
            assoc = UserNotification()
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True

            u.notifications.append(assoc)
        Session().add(notification)

        return notification
|
3853 | 3855 | |
|
3854 | 3856 | |
|
class UserNotification(Base, BaseModel):
    """
    Association row linking a user to a notification, carrying per-user
    delivery state (read flag, sent timestamp).
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        # flag as read and stage the change in the current session
        self.read = True
        Session().add(self)
|
3874 | 3876 | |
|
3875 | 3877 | |
|
class Gist(Base, BaseModel):
    """
    A code snippet (gist) backed by its own small VCS repository stored
    under the gist store location.
    """
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    # visibility of the gist
    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    # access-control levels
    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        # fetch by access id or raise a 404 for the web layer
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        # root repo path comes from the RhodeCodeUi '/' entry (cached)
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,  # content is loaded separately, never inline
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        # VCS instance of the backing gist repository
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False)
|
3971 | 3973 | |
|
3972 | 3974 | |
|
class ExternalIdentity(Base, BaseModel):
    """
    Mapping of an external (OAuth/social) identity onto a local user,
    keyed by (external_id, local_user_id, provider_name).
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8'})

    external_id = Column('external_id', Unicode(255), default=u'',
                         primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(),
                           ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'',
                           primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name,
                                    local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        # optional narrowing to a specific local user
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        # implicit join: User row matching this identity mapping
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query
|
4035 | 4037 | |
|
4036 | 4038 | |
|
4037 | 4039 | class Integration(Base, BaseModel): |
|
4038 | 4040 | __tablename__ = 'integrations' |
|
4039 | 4041 | __table_args__ = ( |
|
4040 | 4042 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
4041 | 4043 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
4042 | 4044 | ) |
|
4043 | 4045 | |
|
4044 | 4046 | integration_id = Column('integration_id', Integer(), primary_key=True) |
|
4045 | 4047 | integration_type = Column('integration_type', String(255)) |
|
4046 | 4048 | enabled = Column('enabled', Boolean(), nullable=False) |
|
4047 | 4049 | name = Column('name', String(255), nullable=False) |
|
4048 | 4050 | child_repos_only = Column('child_repos_only', Boolean(), nullable=False, |
|
4049 | 4051 | default=False) |
|
4050 | 4052 | |
|
4051 | 4053 | settings = Column( |
|
4052 | 4054 | 'settings_json', MutationObj.as_mutable( |
|
4053 | 4055 | JsonType(dialect_map=dict(mysql=UnicodeText(16384))))) |
|
4054 | 4056 | repo_id = Column( |
|
4055 | 4057 | 'repo_id', Integer(), ForeignKey('repositories.repo_id'), |
|
4056 | 4058 | nullable=True, unique=None, default=None) |
|
4057 | 4059 | repo = relationship('Repository', lazy='joined') |
|
4058 | 4060 | |
|
4059 | 4061 | repo_group_id = Column( |
|
4060 | 4062 | 'repo_group_id', Integer(), ForeignKey('groups.group_id'), |
|
4061 | 4063 | nullable=True, unique=None, default=None) |
|
4062 | 4064 | repo_group = relationship('RepoGroup', lazy='joined') |
|
4063 | 4065 | |
|
4064 | 4066 | @property |
|
4065 | 4067 | def scope(self): |
|
4066 | 4068 | if self.repo: |
|
4067 | 4069 | return repr(self.repo) |
|
4068 | 4070 | if self.repo_group: |
|
4069 | 4071 | if self.child_repos_only: |
|
4070 | 4072 | return repr(self.repo_group) + ' (child repos only)' |
|
4071 | 4073 | else: |
|
4072 | 4074 | return repr(self.repo_group) + ' (recursive)' |
|
4073 | 4075 | if self.child_repos_only: |
|
4074 | 4076 | return 'root_repos' |
|
4075 | 4077 | return 'global' |
|
4076 | 4078 | |
|
4077 | 4079 | def __repr__(self): |
|
4078 | 4080 | return '<Integration(%r, %r)>' % (self.integration_type, self.scope) |
|
4079 | 4081 | |
|
4080 | 4082 | |
|
4081 | 4083 | class RepoReviewRuleUser(Base, BaseModel): |
|
4082 | 4084 | __tablename__ = 'repo_review_rules_users' |
|
4083 | 4085 | __table_args__ = ( |
|
4084 | 4086 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
4085 | 4087 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,} |
|
4086 | 4088 | ) |
|
4087 | 4089 | repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True) |
|
4088 | 4090 | repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id')) |
|
4089 | 4091 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False) |
|
4090 | 4092 | mandatory = Column("mandatory", Boolean(), nullable=False, default=False) |
|
4091 | 4093 | user = relationship('User') |
|
4092 | 4094 | |
|
4093 | 4095 | def rule_data(self): |
|
4094 | 4096 | return { |
|
4095 | 4097 | 'mandatory': self.mandatory |
|
4096 | 4098 | } |
|
4097 | 4099 | |
|
4098 | 4100 | |
|
4099 | 4101 | class RepoReviewRuleUserGroup(Base, BaseModel): |
|
4100 | 4102 | __tablename__ = 'repo_review_rules_users_groups' |
|
4101 | 4103 | __table_args__ = ( |
|
4102 | 4104 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
4103 | 4105 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,} |
|
4104 | 4106 | ) |
|
4105 | 4107 | repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True) |
|
4106 | 4108 | repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id')) |
|
4107 | 4109 | users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False) |
|
4108 | 4110 | mandatory = Column("mandatory", Boolean(), nullable=False, default=False) |
|
4109 | 4111 | users_group = relationship('UserGroup') |
|
4110 | 4112 | |
|
4111 | 4113 | def rule_data(self): |
|
4112 | 4114 | return { |
|
4113 | 4115 | 'mandatory': self.mandatory |
|
4114 | 4116 | } |
|
4115 | 4117 | |
|
4116 | 4118 | |
|
4117 | 4119 | class RepoReviewRule(Base, BaseModel): |
|
4118 | 4120 | __tablename__ = 'repo_review_rules' |
|
4119 | 4121 | __table_args__ = ( |
|
4120 | 4122 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
4121 | 4123 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,} |
|
4122 | 4124 | ) |
|
4123 | 4125 | |
|
4124 | 4126 | repo_review_rule_id = Column( |
|
4125 | 4127 | 'repo_review_rule_id', Integer(), primary_key=True) |
|
4126 | 4128 | repo_id = Column( |
|
4127 | 4129 | "repo_id", Integer(), ForeignKey('repositories.repo_id')) |
|
4128 | 4130 | repo = relationship('Repository', backref='review_rules') |
|
4129 | 4131 | |
|
4130 | 4132 | review_rule_name = Column('review_rule_name', String(255)) |
|
4131 | 4133 | _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob |
|
4132 | 4134 | _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob |
|
4133 | 4135 | _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob |
|
4134 | 4136 | |
|
4135 | 4137 | use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False) |
|
4136 | 4138 | forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False) |
|
4137 | 4139 | forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False) |
|
4138 | 4140 | forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False) |
|
4139 | 4141 | |
|
4140 | 4142 | rule_users = relationship('RepoReviewRuleUser') |
|
4141 | 4143 | rule_user_groups = relationship('RepoReviewRuleUserGroup') |
|
4142 | 4144 | |
|
4143 | 4145 | def _validate_glob(self, value): |
|
4144 | 4146 | re.compile('^' + glob2re(value) + '$') |
|
4145 | 4147 | |
|
4146 | 4148 | @hybrid_property |
|
4147 | 4149 | def source_branch_pattern(self): |
|
4148 | 4150 | return self._branch_pattern or '*' |
|
4149 | 4151 | |
|
4150 | 4152 | @source_branch_pattern.setter |
|
4151 | 4153 | def source_branch_pattern(self, value): |
|
4152 | 4154 | self._validate_glob(value) |
|
4153 | 4155 | self._branch_pattern = value or '*' |
|
4154 | 4156 | |
|
4155 | 4157 | @hybrid_property |
|
4156 | 4158 | def target_branch_pattern(self): |
|
4157 | 4159 | return self._target_branch_pattern or '*' |
|
4158 | 4160 | |
|
4159 | 4161 | @target_branch_pattern.setter |
|
4160 | 4162 | def target_branch_pattern(self, value): |
|
4161 | 4163 | self._validate_glob(value) |
|
4162 | 4164 | self._target_branch_pattern = value or '*' |
|
4163 | 4165 | |
|
4164 | 4166 | @hybrid_property |
|
4165 | 4167 | def file_pattern(self): |
|
4166 | 4168 | return self._file_pattern or '*' |
|
4167 | 4169 | |
|
4168 | 4170 | @file_pattern.setter |
|
4169 | 4171 | def file_pattern(self, value): |
|
4170 | 4172 | self._validate_glob(value) |
|
4171 | 4173 | self._file_pattern = value or '*' |
|
4172 | 4174 | |
|
4173 | 4175 | def matches(self, source_branch, target_branch, files_changed): |
|
4174 | 4176 | """ |
|
4175 | 4177 | Check if this review rule matches a branch/files in a pull request |
|
4176 | 4178 | |
|
4177 | 4179 | :param branch: branch name for the commit |
|
4178 | 4180 | :param files_changed: list of file paths changed in the pull request |
|
4179 | 4181 | """ |
|
4180 | 4182 | |
|
4181 | 4183 | source_branch = source_branch or '' |
|
4182 | 4184 | target_branch = target_branch or '' |
|
4183 | 4185 | files_changed = files_changed or [] |
|
4184 | 4186 | |
|
4185 | 4187 | branch_matches = True |
|
4186 | 4188 | if source_branch or target_branch: |
|
4187 | 4189 | source_branch_regex = re.compile( |
|
4188 | 4190 | '^' + glob2re(self.source_branch_pattern) + '$') |
|
4189 | 4191 | target_branch_regex = re.compile( |
|
4190 | 4192 | '^' + glob2re(self.target_branch_pattern) + '$') |
|
4191 | 4193 | |
|
4192 | 4194 | branch_matches = ( |
|
4193 | 4195 | bool(source_branch_regex.search(source_branch)) and |
|
4194 | 4196 | bool(target_branch_regex.search(target_branch)) |
|
4195 | 4197 | ) |
|
4196 | 4198 | |
|
4197 | 4199 | files_matches = True |
|
4198 | 4200 | if self.file_pattern != '*': |
|
4199 | 4201 | files_matches = False |
|
4200 | 4202 | file_regex = re.compile(glob2re(self.file_pattern)) |
|
4201 | 4203 | for filename in files_changed: |
|
4202 | 4204 | if file_regex.search(filename): |
|
4203 | 4205 | files_matches = True |
|
4204 | 4206 | break |
|
4205 | 4207 | |
|
4206 | 4208 | return branch_matches and files_matches |
|
4207 | 4209 | |
|
4208 | 4210 | @property |
|
4209 | 4211 | def review_users(self): |
|
4210 | 4212 | """ Returns the users which this rule applies to """ |
|
4211 | 4213 | |
|
4212 | 4214 | users = collections.OrderedDict() |
|
4213 | 4215 | |
|
4214 | 4216 | for rule_user in self.rule_users: |
|
4215 | 4217 | if rule_user.user.active: |
|
4216 | 4218 | if rule_user.user not in users: |
|
4217 | 4219 | users[rule_user.user.username] = { |
|
4218 | 4220 | 'user': rule_user.user, |
|
4219 | 4221 | 'source': 'user', |
|
4220 | 4222 | 'source_data': {}, |
|
4221 | 4223 | 'data': rule_user.rule_data() |
|
4222 | 4224 | } |
|
4223 | 4225 | |
|
4224 | 4226 | for rule_user_group in self.rule_user_groups: |
|
4225 | 4227 | source_data = { |
|
4226 | 4228 | 'name': rule_user_group.users_group.users_group_name, |
|
4227 | 4229 | 'members': len(rule_user_group.users_group.members) |
|
4228 | 4230 | } |
|
4229 | 4231 | for member in rule_user_group.users_group.members: |
|
4230 | 4232 | if member.user.active: |
|
4231 | 4233 | users[member.user.username] = { |
|
4232 | 4234 | 'user': member.user, |
|
4233 | 4235 | 'source': 'user_group', |
|
4234 | 4236 | 'source_data': source_data, |
|
4235 | 4237 | 'data': rule_user_group.rule_data() |
|
4236 | 4238 | } |
|
4237 | 4239 | |
|
4238 | 4240 | return users |
|
4239 | 4241 | |
|
4240 | 4242 | def __repr__(self): |
|
4241 | 4243 | return '<RepoReviewerRule(id=%r, repo=%r)>' % ( |
|
4242 | 4244 | self.repo_review_rule_id, self.repo) |
|
4243 | 4245 | |
|
4244 | 4246 | |
|
4245 | 4247 | class ScheduleEntry(Base, BaseModel): |
|
4246 | 4248 | __tablename__ = 'schedule_entries' |
|
4247 | 4249 | __table_args__ = ( |
|
4248 | 4250 | UniqueConstraint('schedule_name', name='s_schedule_name_idx'), |
|
4249 | 4251 | UniqueConstraint('task_uid', name='s_task_uid_idx'), |
|
4250 | 4252 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
4251 | 4253 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
4252 | 4254 | ) |
|
4253 | 4255 | schedule_types = ['crontab', 'timedelta', 'integer'] |
|
4254 | 4256 | schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True) |
|
4255 | 4257 | |
|
4256 | 4258 | schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None) |
|
4257 | 4259 | schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None) |
|
4258 | 4260 | schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True) |
|
4259 | 4261 | |
|
4260 | 4262 | _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None) |
|
4261 | 4263 | schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT())))) |
|
4262 | 4264 | |
|
4263 | 4265 | schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
4264 | 4266 | schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0) |
|
4265 | 4267 | |
|
4266 | 4268 | # task |
|
4267 | 4269 | task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None) |
|
4268 | 4270 | task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None) |
|
4269 | 4271 | task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT())))) |
|
4270 | 4272 | task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT())))) |
|
4271 | 4273 | |
|
4272 | 4274 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
4273 | 4275 | updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
4274 | 4276 | |
|
4275 | 4277 | @hybrid_property |
|
4276 | 4278 | def schedule_type(self): |
|
4277 | 4279 | return self._schedule_type |
|
4278 | 4280 | |
|
4279 | 4281 | @schedule_type.setter |
|
4280 | 4282 | def schedule_type(self, val): |
|
4281 | 4283 | if val not in self.schedule_types: |
|
4282 | 4284 | raise ValueError('Value must be on of `{}` and got `{}`'.format( |
|
4283 | 4285 | val, self.schedule_type)) |
|
4284 | 4286 | |
|
4285 | 4287 | self._schedule_type = val |
|
4286 | 4288 | |
|
4287 | 4289 | @classmethod |
|
4288 | 4290 | def get_uid(cls, obj): |
|
4289 | 4291 | args = obj.task_args |
|
4290 | 4292 | kwargs = obj.task_kwargs |
|
4291 | 4293 | if isinstance(args, JsonRaw): |
|
4292 | 4294 | try: |
|
4293 | 4295 | args = json.loads(args) |
|
4294 | 4296 | except ValueError: |
|
4295 | 4297 | args = tuple() |
|
4296 | 4298 | |
|
4297 | 4299 | if isinstance(kwargs, JsonRaw): |
|
4298 | 4300 | try: |
|
4299 | 4301 | kwargs = json.loads(kwargs) |
|
4300 | 4302 | except ValueError: |
|
4301 | 4303 | kwargs = dict() |
|
4302 | 4304 | |
|
4303 | 4305 | dot_notation = obj.task_dot_notation |
|
4304 | 4306 | val = '.'.join(map(safe_str, [ |
|
4305 | 4307 | sorted(dot_notation), args, sorted(kwargs.items())])) |
|
4306 | 4308 | return hashlib.sha1(val).hexdigest() |
|
4307 | 4309 | |
|
4308 | 4310 | @classmethod |
|
4309 | 4311 | def get_by_schedule_name(cls, schedule_name): |
|
4310 | 4312 | return cls.query().filter(cls.schedule_name == schedule_name).scalar() |
|
4311 | 4313 | |
|
4312 | 4314 | @classmethod |
|
4313 | 4315 | def get_by_schedule_id(cls, schedule_id): |
|
4314 | 4316 | return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar() |
|
4315 | 4317 | |
|
4316 | 4318 | @property |
|
4317 | 4319 | def task(self): |
|
4318 | 4320 | return self.task_dot_notation |
|
4319 | 4321 | |
|
4320 | 4322 | @property |
|
4321 | 4323 | def schedule(self): |
|
4322 | 4324 | from rhodecode.lib.celerylib.utils import raw_2_schedule |
|
4323 | 4325 | schedule = raw_2_schedule(self.schedule_definition, self.schedule_type) |
|
4324 | 4326 | return schedule |
|
4325 | 4327 | |
|
4326 | 4328 | @property |
|
4327 | 4329 | def args(self): |
|
4328 | 4330 | try: |
|
4329 | 4331 | return list(self.task_args or []) |
|
4330 | 4332 | except ValueError: |
|
4331 | 4333 | return list() |
|
4332 | 4334 | |
|
4333 | 4335 | @property |
|
4334 | 4336 | def kwargs(self): |
|
4335 | 4337 | try: |
|
4336 | 4338 | return dict(self.task_kwargs or {}) |
|
4337 | 4339 | except ValueError: |
|
4338 | 4340 | return dict() |
|
4339 | 4341 | |
|
4340 | 4342 | def _as_raw(self, val): |
|
4341 | 4343 | if hasattr(val, 'de_coerce'): |
|
4342 | 4344 | val = val.de_coerce() |
|
4343 | 4345 | if val: |
|
4344 | 4346 | val = json.dumps(val) |
|
4345 | 4347 | |
|
4346 | 4348 | return val |
|
4347 | 4349 | |
|
4348 | 4350 | @property |
|
4349 | 4351 | def schedule_definition_raw(self): |
|
4350 | 4352 | return self._as_raw(self.schedule_definition) |
|
4351 | 4353 | |
|
4352 | 4354 | @property |
|
4353 | 4355 | def args_raw(self): |
|
4354 | 4356 | return self._as_raw(self.task_args) |
|
4355 | 4357 | |
|
4356 | 4358 | @property |
|
4357 | 4359 | def kwargs_raw(self): |
|
4358 | 4360 | return self._as_raw(self.task_kwargs) |
|
4359 | 4361 | |
|
4360 | 4362 | def __repr__(self): |
|
4361 | 4363 | return '<DB:ScheduleEntry({}:{})>'.format( |
|
4362 | 4364 | self.schedule_entry_id, self.schedule_name) |
|
4363 | 4365 | |
|
4364 | 4366 | |
|
4365 | 4367 | @event.listens_for(ScheduleEntry, 'before_update') |
|
4366 | 4368 | def update_task_uid(mapper, connection, target): |
|
4367 | 4369 | target.task_uid = ScheduleEntry.get_uid(target) |
|
4368 | 4370 | |
|
4369 | 4371 | |
|
4370 | 4372 | @event.listens_for(ScheduleEntry, 'before_insert') |
|
4371 | 4373 | def set_task_uid(mapper, connection, target): |
|
4372 | 4374 | target.task_uid = ScheduleEntry.get_uid(target) |
|
4373 | 4375 | |
|
4374 | 4376 | |
|
4375 | 4377 | class DbMigrateVersion(Base, BaseModel): |
|
4376 | 4378 | __tablename__ = 'db_migrate_version' |
|
4377 | 4379 | __table_args__ = ( |
|
4378 | 4380 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
4379 | 4381 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
4380 | 4382 | ) |
|
4381 | 4383 | repository_id = Column('repository_id', String(250), primary_key=True) |
|
4382 | 4384 | repository_path = Column('repository_path', Text) |
|
4383 | 4385 | version = Column('version', Integer) |
|
4384 | 4386 | |
|
4385 | 4387 | |
|
4386 | 4388 | class DbSession(Base, BaseModel): |
|
4387 | 4389 | __tablename__ = 'db_session' |
|
4388 | 4390 | __table_args__ = ( |
|
4389 | 4391 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
4390 | 4392 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
4391 | 4393 | ) |
|
4392 | 4394 | |
|
4393 | 4395 | def __repr__(self): |
|
4394 | 4396 | return '<DB:DbSession({})>'.format(self.id) |
|
4395 | 4397 | |
|
4396 | 4398 | id = Column('id', Integer()) |
|
4397 | 4399 | namespace = Column('namespace', String(255), primary_key=True) |
|
4398 | 4400 | accessed = Column('accessed', DateTime, nullable=False) |
|
4399 | 4401 | created = Column('created', DateTime, nullable=False) |
|
4400 | 4402 | data = Column('data', PickleType, nullable=False) |
@@ -1,210 +1,211 b'' | |||
|
1 | 1 | <%namespace name="base" file="/base/base.mako"/> |
|
2 | 2 | |
|
3 | 3 | <% |
|
4 | 4 | elems = [ |
|
5 | 5 | (_('Owner'), lambda:base.gravatar_with_user(c.rhodecode_db_repo.user.email), '', ''), |
|
6 | 6 | (_('Created on'), h.format_date(c.rhodecode_db_repo.created_on), '', ''), |
|
7 | 7 | (_('Updated on'), h.format_date(c.rhodecode_db_repo.updated_on), '', ''), |
|
8 | 8 | (_('Cached Commit id'), lambda: h.link_to(c.rhodecode_db_repo.changeset_cache.get('short_id'), h.route_path('repo_commit',repo_name=c.repo_name,commit_id=c.rhodecode_db_repo.changeset_cache.get('raw_id'))), '', ''), |
|
9 | (_('Attached scoped tokens'), len(c.rhodecode_db_repo.scoped_tokens), '', [x.user for x in c.rhodecode_db_repo.scoped_tokens]), | |
|
9 | 10 | ] |
|
10 | 11 | %> |
|
11 | 12 | |
|
12 | 13 | <div class="panel panel-default"> |
|
13 | 14 | <div class="panel-heading" id="advanced-info" > |
|
14 | 15 | <h3 class="panel-title">${_('Repository: %s') % c.rhodecode_db_repo.repo_name} <a class="permalink" href="#advanced-info"> ΒΆ</a></h3> |
|
15 | 16 | </div> |
|
16 | 17 | <div class="panel-body"> |
|
17 | 18 | ${base.dt_info_panel(elems)} |
|
18 | 19 | </div> |
|
19 | 20 | </div> |
|
20 | 21 | |
|
21 | 22 | |
|
22 | 23 | <div class="panel panel-default"> |
|
23 | 24 | <div class="panel-heading" id="advanced-fork"> |
|
24 | 25 | <h3 class="panel-title">${_('Fork Reference')} <a class="permalink" href="#advanced-fork"> ΒΆ</a></h3> |
|
25 | 26 | </div> |
|
26 | 27 | <div class="panel-body"> |
|
27 | 28 | ${h.secure_form(h.route_path('edit_repo_advanced_fork', repo_name=c.rhodecode_db_repo.repo_name), request=request)} |
|
28 | 29 | |
|
29 | 30 | % if c.rhodecode_db_repo.fork: |
|
30 | 31 | <div class="panel-body-title-text">${h.literal(_('This repository is a fork of %(repo_link)s') % {'repo_link': h.link_to_if(c.has_origin_repo_read_perm,c.rhodecode_db_repo.fork.repo_name, h.route_path('repo_summary', repo_name=c.rhodecode_db_repo.fork.repo_name))})} |
|
31 | 32 | | <button class="btn btn-link btn-danger" type="submit">Remove fork reference</button></div> |
|
32 | 33 | % endif |
|
33 | 34 | |
|
34 | 35 | <div class="field"> |
|
35 | 36 | ${h.hidden('id_fork_of')} |
|
36 | 37 | ${h.submit('set_as_fork_%s' % c.rhodecode_db_repo.repo_name,_('Set'),class_="btn btn-small",)} |
|
37 | 38 | </div> |
|
38 | 39 | <div class="field"> |
|
39 | 40 | <span class="help-block">${_('Manually set this repository as a fork of another from the list')}</span> |
|
40 | 41 | </div> |
|
41 | 42 | ${h.end_form()} |
|
42 | 43 | </div> |
|
43 | 44 | </div> |
|
44 | 45 | |
|
45 | 46 | |
|
46 | 47 | <div class="panel panel-default"> |
|
47 | 48 | <div class="panel-heading" id="advanced-journal"> |
|
48 | 49 | <h3 class="panel-title">${_('Public Journal Visibility')} <a class="permalink" href="#advanced-journal"> ΒΆ</a></h3> |
|
49 | 50 | </div> |
|
50 | 51 | <div class="panel-body"> |
|
51 | 52 | ${h.secure_form(h.route_path('edit_repo_advanced_journal', repo_name=c.rhodecode_db_repo.repo_name), request=request)} |
|
52 | 53 | <div class="field"> |
|
53 | 54 | %if c.in_public_journal: |
|
54 | 55 | <button class="btn btn-small" type="submit"> |
|
55 | 56 | ${_('Remove from Public Journal')} |
|
56 | 57 | </button> |
|
57 | 58 | %else: |
|
58 | 59 | <button class="btn btn-small" type="submit"> |
|
59 | 60 | ${_('Add to Public Journal')} |
|
60 | 61 | </button> |
|
61 | 62 | %endif |
|
62 | 63 | </div> |
|
63 | 64 | <div class="field" > |
|
64 | 65 | <span class="help-block">${_('All actions made on this repository will be visible to everyone following the public journal.')}</span> |
|
65 | 66 | </div> |
|
66 | 67 | ${h.end_form()} |
|
67 | 68 | </div> |
|
68 | 69 | </div> |
|
69 | 70 | |
|
70 | 71 | |
|
71 | 72 | <div class="panel panel-default"> |
|
72 | 73 | <div class="panel-heading" id="advanced-locking"> |
|
73 | 74 | <h3 class="panel-title">${_('Locking state')} <a class="permalink" href="#advanced-locking"> ΒΆ</a></h3> |
|
74 | 75 | </div> |
|
75 | 76 | <div class="panel-body"> |
|
76 | 77 | ${h.secure_form(h.route_path('edit_repo_advanced_locking', repo_name=c.rhodecode_db_repo.repo_name), request=request)} |
|
77 | 78 | |
|
78 | 79 | %if c.rhodecode_db_repo.locked[0]: |
|
79 | 80 | <div class="panel-body-title-text">${'Locked by %s on %s. Lock reason: %s' % (h.person_by_id(c.rhodecode_db_repo.locked[0]), |
|
80 | 81 | h.format_date(h. time_to_datetime(c.rhodecode_db_repo.locked[1])), c.rhodecode_db_repo.locked[2])}</div> |
|
81 | 82 | %else: |
|
82 | 83 | <div class="panel-body-title-text">${_('This Repository is not currently locked.')}</div> |
|
83 | 84 | %endif |
|
84 | 85 | |
|
85 | 86 | <div class="field" > |
|
86 | 87 | %if c.rhodecode_db_repo.locked[0]: |
|
87 | 88 | ${h.hidden('set_unlock', '1')} |
|
88 | 89 | <button class="btn btn-small" type="submit" |
|
89 | 90 | onclick="return confirm('${_('Confirm to unlock repository.')}');"> |
|
90 | 91 | <i class="icon-unlock"></i> |
|
91 | 92 | ${_('Unlock repository')} |
|
92 | 93 | </button> |
|
93 | 94 | %else: |
|
94 | 95 | ${h.hidden('set_lock', '1')} |
|
95 | 96 | <button class="btn btn-small" type="submit" |
|
96 | 97 | onclick="return confirm('${_('Confirm to lock repository.')}');"> |
|
97 | 98 | <i class="icon-lock"></i> |
|
98 | 99 | ${_('Lock Repository')} |
|
99 | 100 | </button> |
|
100 | 101 | %endif |
|
101 | 102 | </div> |
|
102 | 103 | <div class="field" > |
|
103 | 104 | <span class="help-block"> |
|
104 | 105 | ${_('Force repository locking. This only works when anonymous access is disabled. Pulling from the repository locks the repository to that user until the same user pushes to that repository again.')} |
|
105 | 106 | </span> |
|
106 | 107 | </div> |
|
107 | 108 | ${h.end_form()} |
|
108 | 109 | </div> |
|
109 | 110 | </div> |
|
110 | 111 | |
|
111 | 112 | <div class="panel panel-danger"> |
|
112 | 113 | <div class="panel-heading" id="advanced-delete"> |
|
113 | 114 | <h3 class="panel-title">${_('Delete repository')} <a class="permalink" href="#advanced-delete"> ΒΆ</a></h3> |
|
114 | 115 | </div> |
|
115 | 116 | <div class="panel-body"> |
|
116 | 117 | ${h.secure_form(h.route_path('edit_repo_advanced_delete', repo_name=c.repo_name), request=request)} |
|
117 | 118 | <table class="display"> |
|
118 | 119 | <tr> |
|
119 | 120 | <td> |
|
120 | 121 | ${_ungettext('This repository has %s fork.', 'This repository has %s forks.', c.rhodecode_db_repo.forks.count()) % c.rhodecode_db_repo.forks.count()} |
|
121 | 122 | </td> |
|
122 | 123 | <td> |
|
123 | 124 | %if c.rhodecode_db_repo.forks.count(): |
|
124 | 125 | <input type="radio" name="forks" value="detach_forks" checked="checked"/> <label for="forks">${_('Detach forks')}</label> |
|
125 | 126 | %endif |
|
126 | 127 | </td> |
|
127 | 128 | <td> |
|
128 | 129 | %if c.rhodecode_db_repo.forks.count(): |
|
129 | 130 | <input type="radio" name="forks" value="delete_forks"/> <label for="forks">${_('Delete forks')}</label> |
|
130 | 131 | %endif |
|
131 | 132 | </td> |
|
132 | 133 | </tr> |
|
133 | 134 | </table> |
|
134 | 135 | <div style="margin: 0 0 20px 0" class="fake-space"></div> |
|
135 | 136 | |
|
136 | 137 | <div class="field"> |
|
137 | 138 | <button class="btn btn-small btn-danger" type="submit" |
|
138 | 139 | onclick="return confirm('${_('Confirm to delete this repository: %s') % c.repo_name}');"> |
|
139 | 140 | <i class="icon-remove-sign"></i> |
|
140 | 141 | ${_('Delete This Repository')} |
|
141 | 142 | </button> |
|
142 | 143 | </div> |
|
143 | 144 | <div class="field"> |
|
144 | 145 | <span class="help-block"> |
|
145 | 146 | ${_('This repository will be renamed in a special way in order to make it inaccessible to RhodeCode Enterprise and its VCS systems. If you need to fully delete it from the file system, please do it manually, or with rhodecode-cleanup-repos command available in rhodecode-tools.')} |
|
146 | 147 | </span> |
|
147 | 148 | </div> |
|
148 | 149 | |
|
149 | 150 | ${h.end_form()} |
|
150 | 151 | </div> |
|
151 | 152 | </div> |
|
152 | 153 | |
|
153 | 154 | |
|
154 | 155 | <script> |
|
155 | 156 | |
|
156 | 157 | var currentRepoId = ${c.rhodecode_db_repo.repo_id}; |
|
157 | 158 | |
|
158 | 159 | var repoTypeFilter = function(data) { |
|
159 | 160 | var results = []; |
|
160 | 161 | |
|
161 | 162 | if (!data.results[0]) { |
|
162 | 163 | return data |
|
163 | 164 | } |
|
164 | 165 | |
|
165 | 166 | $.each(data.results[0].children, function() { |
|
166 | 167 | // filter out the SAME repo, it cannot be used as fork of itself |
|
167 | 168 | if (this.obj.repo_id != currentRepoId) { |
|
168 | 169 | this.id = this.obj.repo_id; |
|
169 | 170 | results.push(this) |
|
170 | 171 | } |
|
171 | 172 | }); |
|
172 | 173 | data.results[0].children = results; |
|
173 | 174 | return data; |
|
174 | 175 | }; |
|
175 | 176 | |
|
176 | 177 | $("#id_fork_of").select2({ |
|
177 | 178 | cachedDataSource: {}, |
|
178 | 179 | minimumInputLength: 2, |
|
179 | 180 | placeholder: "${_('Change repository') if c.rhodecode_db_repo.fork else _('Pick repository')}", |
|
180 | 181 | dropdownAutoWidth: true, |
|
181 | 182 | containerCssClass: "drop-menu", |
|
182 | 183 | dropdownCssClass: "drop-menu-dropdown", |
|
183 | 184 | formatResult: formatResult, |
|
184 | 185 | query: $.debounce(250, function(query){ |
|
185 | 186 | self = this; |
|
186 | 187 | var cacheKey = query.term; |
|
187 | 188 | var cachedData = self.cachedDataSource[cacheKey]; |
|
188 | 189 | |
|
189 | 190 | if (cachedData) { |
|
190 | 191 | query.callback({results: cachedData.results}); |
|
191 | 192 | } else { |
|
192 | 193 | $.ajax({ |
|
193 | 194 | url: pyroutes.url('repo_list_data'), |
|
194 | 195 | data: {'query': query.term, repo_type: '${c.rhodecode_db_repo.repo_type}'}, |
|
195 | 196 | dataType: 'json', |
|
196 | 197 | type: 'GET', |
|
197 | 198 | success: function(data) { |
|
198 | 199 | data = repoTypeFilter(data); |
|
199 | 200 | self.cachedDataSource[cacheKey] = data; |
|
200 | 201 | query.callback({results: data.results}); |
|
201 | 202 | }, |
|
202 | 203 | error: function(data, textStatus, errorThrown) { |
|
203 | 204 | alert("Error while fetching entries.\nError code {0} ({1}).".format(data.status, data.statusText)); |
|
204 | 205 | } |
|
205 | 206 | }) |
|
206 | 207 | } |
|
207 | 208 | }) |
|
208 | 209 | }); |
|
209 | 210 | </script> |
|
210 | 211 |
General Comments 0
You need to be logged in to leave comments.
Login now