This diff has been collapsed as it changes many lines (3529 lines changed); the requested changes are too big and the content below was truncated.
@@ -0,0 +1,3529 @@
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | """ | |||
|
22 | Database Models for RhodeCode Enterprise | |||
|
23 | """ | |||
|
24 | ||||
|
25 | import os | |||
|
26 | import sys | |||
|
27 | import time | |||
|
28 | import hashlib | |||
|
29 | import logging | |||
|
30 | import datetime | |||
|
31 | import warnings | |||
|
32 | import ipaddress | |||
|
33 | import functools | |||
|
34 | import traceback | |||
|
35 | import collections | |||
|
36 | ||||
|
37 | ||||
|
38 | from sqlalchemy import * | |||
|
39 | from sqlalchemy.exc import IntegrityError | |||
|
40 | from sqlalchemy.ext.declarative import declared_attr | |||
|
41 | from sqlalchemy.ext.hybrid import hybrid_property | |||
|
42 | from sqlalchemy.orm import ( | |||
|
43 | relationship, joinedload, class_mapper, validates, aliased) | |||
|
44 | from sqlalchemy.sql.expression import true | |||
|
45 | from beaker.cache import cache_region, region_invalidate | |||
|
46 | from webob.exc import HTTPNotFound | |||
|
47 | from zope.cachedescriptors.property import Lazy as LazyProperty | |||
|
48 | ||||
|
49 | from pylons import url | |||
|
50 | from pylons.i18n.translation import lazy_ugettext as _ | |||
|
51 | ||||
|
52 | from rhodecode.lib.vcs import get_backend, get_vcs_instance | |||
|
53 | from rhodecode.lib.vcs.utils.helpers import get_scm | |||
|
54 | from rhodecode.lib.vcs.exceptions import VCSError | |||
|
55 | from rhodecode.lib.vcs.backends.base import ( | |||
|
56 | EmptyCommit, Reference, MergeFailureReason) | |||
|
57 | from rhodecode.lib.utils2 import ( | |||
|
58 | str2bool, safe_str, get_commit_safe, safe_unicode, remove_prefix, md5_safe, | |||
|
59 | time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict) | |||
|
60 | from rhodecode.lib.jsonalchemy import MutationObj, JsonType, JSONDict | |||
|
61 | from rhodecode.lib.ext_json import json | |||
|
62 | from rhodecode.lib.caching_query import FromCache | |||
|
63 | from rhodecode.lib.encrypt import AESCipher | |||
|
64 | ||||
|
65 | from rhodecode.model.meta import Base, Session | |||
|
66 | ||||
|
67 | URL_SEP = '/' | |||
|
68 | log = logging.getLogger(__name__) | |||
|
69 | ||||
|
70 | # ============================================================================= | |||
|
71 | # BASE CLASSES | |||
|
72 | # ============================================================================= | |||
|
73 | ||||
|
74 | # this is propagated from the .ini file setting rhodecode.encrypted_values.secret, | |||
|
75 | # or beaker.session.secret if the first is not set, | |||
|
76 | # and is initialized in environment.py | |||
|
77 | ENCRYPTION_KEY = None | |||
|
78 | ||||
|
79 | # used to sort permissions by type; '#' is used because it is not allowed in | |||
|
80 | # usernames and it sorts very early in the string.printable table. | |||
|
81 | PERMISSION_TYPE_SORT = { | |||
|
82 | 'admin': '####', | |||
|
83 | 'write': '###', | |||
|
84 | 'read': '##', | |||
|
85 | 'none': '#', | |||
|
86 | } | |||
|
87 | ||||
|
88 | ||||
|
89 | def display_sort(obj): | |||
|
90 | """ | |||
|
91 | Sort function used to sort permissions in .permissions() function of | |||
|
92 | Repository, RepoGroup, UserGroup. Also it puts the default user in front | |||
|
93 | of all other resources | |||
|
94 | """ | |||
|
95 | ||||
|
96 | if obj.username == User.DEFAULT_USER: | |||
|
97 | return '#####' | |||
|
98 | prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '') | |||
|
99 | return prefix + obj.username | |||
|
100 | ||||
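The '#' prefix above makes rows sort by permission level first, since '#' compares lower than any username character, and the default user is forced to the very front. A small sketch of the resulting order, using made-up AttributeDict rows (run once this module is fully imported, as display_sort references the User class defined further below):

rows = [
    AttributeDict(username='zoe', permission='repository.read'),
    AttributeDict(username='adam', permission='repository.admin'),
    AttributeDict(username='default', permission='repository.read'),
    AttributeDict(username='bob', permission='repository.write'),
]
ordered = [r.username for r in sorted(rows, key=display_sort)]
# -> ['default', 'adam', 'bob', 'zoe']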
|
101 | ||||
|
102 | def _hash_key(k): | |||
|
103 | return md5_safe(k) | |||
|
104 | ||||
|
105 | ||||
|
106 | class EncryptedTextValue(TypeDecorator): | |||
|
107 | """ | |||
|
108 | Special column for encrypted long text data, use like:: | |||
|
109 | ||||
|
110 | value = Column("encrypted_value", EncryptedTextValue(), nullable=False) | |||
|
111 | ||||
|
112 | This column is intelligent: if the value is stored in unencrypted form it | |||
|
113 | returns the unencrypted form, but on save it always encrypts | |||
|
114 | """ | |||
|
115 | impl = Text | |||
|
116 | ||||
|
117 | def process_bind_param(self, value, dialect): | |||
|
118 | if not value: | |||
|
119 | return value | |||
|
120 | if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'): | |||
|
121 | # protect against double encryption if someone manually starts | |||
|
122 | # doing it | |||
|
123 | raise ValueError('value needs to be in unencrypted format, ie. ' | |||
|
124 | 'not starting with enc$aes') | |||
|
125 | return 'enc$aes_hmac$%s' % AESCipher( | |||
|
126 | ENCRYPTION_KEY, hmac=True).encrypt(value) | |||
|
127 | ||||
|
128 | def process_result_value(self, value, dialect): | |||
|
129 | import rhodecode | |||
|
130 | ||||
|
131 | if not value: | |||
|
132 | return value | |||
|
133 | ||||
|
134 | parts = value.split('$', 3) | |||
|
135 | if not len(parts) == 3: | |||
|
136 | # probably not encrypted values | |||
|
137 | return value | |||
|
138 | else: | |||
|
139 | if parts[0] != 'enc': | |||
|
140 | # parts ok but without our header ? | |||
|
141 | return value | |||
|
142 | enc_strict_mode = str2bool(rhodecode.CONFIG.get( | |||
|
143 | 'rhodecode.encrypted_values.strict') or True) | |||
|
144 | # at that stage we know it's our encryption | |||
|
145 | if parts[1] == 'aes': | |||
|
146 | decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2]) | |||
|
147 | elif parts[1] == 'aes_hmac': | |||
|
148 | decrypted_data = AESCipher( | |||
|
149 | ENCRYPTION_KEY, hmac=True, | |||
|
150 | strict_verification=enc_strict_mode).decrypt(parts[2]) | |||
|
151 | else: | |||
|
152 | raise ValueError( | |||
|
153 | 'Encryption type part is wrong, must be `aes` ' | |||
|
154 | 'or `aes_hmac`, got `%s` instead' % (parts[1])) | |||
|
155 | return decrypted_data | |||
|
156 | ||||
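A minimal round-trip sketch of this column type; it assumes ENCRYPTION_KEY has already been initialized from the .ini secret as described above. Repository.clone_uri further below is a real column declared with EncryptedTextValue():

# SQLAlchemy normally calls these hooks itself on flush/load for columns
# declared as EncryptedTextValue(); calling them directly shows the format.
col = EncryptedTextValue()
stored = col.process_bind_param(u'plain api token', None)
assert stored.startswith('enc$aes_hmac$')
assert col.process_result_value(stored, None) == u'plain api token'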
|
157 | ||||
|
158 | class BaseModel(object): | |||
|
159 | """ | |||
|
160 | Base Model for all classes | |||
|
161 | """ | |||
|
162 | ||||
|
163 | @classmethod | |||
|
164 | def _get_keys(cls): | |||
|
165 | """return column names for this model """ | |||
|
166 | return class_mapper(cls).c.keys() | |||
|
167 | ||||
|
168 | def get_dict(self): | |||
|
169 | """ | |||
|
170 | return dict with keys and values corresponding | |||
|
171 | to this model data """ | |||
|
172 | ||||
|
173 | d = {} | |||
|
174 | for k in self._get_keys(): | |||
|
175 | d[k] = getattr(self, k) | |||
|
176 | ||||
|
177 | # also use __json__() if present to get additional fields | |||
|
178 | _json_attr = getattr(self, '__json__', None) | |||
|
179 | if _json_attr: | |||
|
180 | # update with attributes from __json__ | |||
|
181 | if callable(_json_attr): | |||
|
182 | _json_attr = _json_attr() | |||
|
183 | for k, val in _json_attr.iteritems(): | |||
|
184 | d[k] = val | |||
|
185 | return d | |||
|
186 | ||||
|
187 | def get_appstruct(self): | |||
|
188 | """return list with keys and values tuples corresponding | |||
|
189 | to this model data """ | |||
|
190 | ||||
|
191 | l = [] | |||
|
192 | for k in self._get_keys(): | |||
|
193 | l.append((k, getattr(self, k),)) | |||
|
194 | return l | |||
|
195 | ||||
|
196 | def populate_obj(self, populate_dict): | |||
|
197 | """populate model with data from given populate_dict""" | |||
|
198 | ||||
|
199 | for k in self._get_keys(): | |||
|
200 | if k in populate_dict: | |||
|
201 | setattr(self, k, populate_dict[k]) | |||
|
202 | ||||
|
203 | @classmethod | |||
|
204 | def query(cls): | |||
|
205 | return Session().query(cls) | |||
|
206 | ||||
|
207 | @classmethod | |||
|
208 | def get(cls, id_): | |||
|
209 | if id_: | |||
|
210 | return cls.query().get(id_) | |||
|
211 | ||||
|
212 | @classmethod | |||
|
213 | def get_or_404(cls, id_): | |||
|
214 | try: | |||
|
215 | id_ = int(id_) | |||
|
216 | except (TypeError, ValueError): | |||
|
217 | raise HTTPNotFound | |||
|
218 | ||||
|
219 | res = cls.query().get(id_) | |||
|
220 | if not res: | |||
|
221 | raise HTTPNotFound | |||
|
222 | return res | |||
|
223 | ||||
|
224 | @classmethod | |||
|
225 | def getAll(cls): | |||
|
226 | # deprecated and left for backward compatibility | |||
|
227 | return cls.get_all() | |||
|
228 | ||||
|
229 | @classmethod | |||
|
230 | def get_all(cls): | |||
|
231 | return cls.query().all() | |||
|
232 | ||||
|
233 | @classmethod | |||
|
234 | def delete(cls, id_): | |||
|
235 | obj = cls.query().get(id_) | |||
|
236 | Session().delete(obj) | |||
|
237 | ||||
|
238 | @classmethod | |||
|
239 | def identity_cache(cls, session, attr_name, value): | |||
|
240 | exist_in_session = [] | |||
|
241 | for (item_cls, pkey), instance in session.identity_map.items(): | |||
|
242 | if cls == item_cls and getattr(instance, attr_name) == value: | |||
|
243 | exist_in_session.append(instance) | |||
|
244 | if exist_in_session: | |||
|
245 | if len(exist_in_session) == 1: | |||
|
246 | return exist_in_session[0] | |||
|
247 | log.exception( | |||
|
248 | 'multiple objects with attr %s and ' | |||
|
249 | 'value %s found with same name: %r', | |||
|
250 | attr_name, value, exist_in_session) | |||
|
251 | ||||
|
252 | def __repr__(self): | |||
|
253 | if hasattr(self, '__unicode__'): | |||
|
254 | # python repr needs to return str | |||
|
255 | try: | |||
|
256 | return safe_str(self.__unicode__()) | |||
|
257 | except UnicodeDecodeError: | |||
|
258 | pass | |||
|
259 | return '<DB:%s>' % (self.__class__.__name__) | |||
|
260 | ||||
|
261 | ||||
|
262 | class RhodeCodeSetting(Base, BaseModel): | |||
|
263 | __tablename__ = 'rhodecode_settings' | |||
|
264 | __table_args__ = ( | |||
|
265 | UniqueConstraint('app_settings_name'), | |||
|
266 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
267 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
268 | ) | |||
|
269 | ||||
|
270 | SETTINGS_TYPES = { | |||
|
271 | 'str': safe_str, | |||
|
272 | 'int': safe_int, | |||
|
273 | 'unicode': safe_unicode, | |||
|
274 | 'bool': str2bool, | |||
|
275 | 'list': functools.partial(aslist, sep=',') | |||
|
276 | } | |||
|
277 | DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions' | |||
|
278 | GLOBAL_CONF_KEY = 'app_settings' | |||
|
279 | ||||
|
280 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
281 | app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None) | |||
|
282 | _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None) | |||
|
283 | _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None) | |||
|
284 | ||||
|
285 | def __init__(self, key='', val='', type='unicode'): | |||
|
286 | self.app_settings_name = key | |||
|
287 | self.app_settings_type = type | |||
|
288 | self.app_settings_value = val | |||
|
289 | ||||
|
290 | @validates('_app_settings_value') | |||
|
291 | def validate_settings_value(self, key, val): | |||
|
292 | assert type(val) == unicode | |||
|
293 | return val | |||
|
294 | ||||
|
295 | @hybrid_property | |||
|
296 | def app_settings_value(self): | |||
|
297 | v = self._app_settings_value | |||
|
298 | _type = self.app_settings_type | |||
|
299 | if _type: | |||
|
300 | _type = self.app_settings_type.split('.')[0] | |||
|
301 | # decode the encrypted value | |||
|
302 | if 'encrypted' in self.app_settings_type: | |||
|
303 | cipher = EncryptedTextValue() | |||
|
304 | v = safe_unicode(cipher.process_result_value(v, None)) | |||
|
305 | ||||
|
306 | converter = self.SETTINGS_TYPES.get(_type) or \ | |||
|
307 | self.SETTINGS_TYPES['unicode'] | |||
|
308 | return converter(v) | |||
|
309 | ||||
|
310 | @app_settings_value.setter | |||
|
311 | def app_settings_value(self, val): | |||
|
312 | """ | |||
|
313 | Setter that will always make sure we use unicode in app_settings_value | |||
|
314 | ||||
|
315 | :param val: | |||
|
316 | """ | |||
|
317 | val = safe_unicode(val) | |||
|
318 | # encode the encrypted value | |||
|
319 | if 'encrypted' in self.app_settings_type: | |||
|
320 | cipher = EncryptedTextValue() | |||
|
321 | val = safe_unicode(cipher.process_bind_param(val, None)) | |||
|
322 | self._app_settings_value = val | |||
|
323 | ||||
|
324 | @hybrid_property | |||
|
325 | def app_settings_type(self): | |||
|
326 | return self._app_settings_type | |||
|
327 | ||||
|
328 | @app_settings_type.setter | |||
|
329 | def app_settings_type(self, val): | |||
|
330 | if val.split('.')[0] not in self.SETTINGS_TYPES: | |||
|
331 | raise Exception('type must be one of %s got %s' | |||
|
332 | % (self.SETTINGS_TYPES.keys(), val)) | |||
|
333 | self._app_settings_type = val | |||
|
334 | ||||
|
335 | def __unicode__(self): | |||
|
336 | return u"<%s('%s:%s[%s]')>" % ( | |||
|
337 | self.__class__.__name__, | |||
|
338 | self.app_settings_name, self.app_settings_value, | |||
|
339 | self.app_settings_type | |||
|
340 | ) | |||
|
341 | ||||
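A short sketch of the type mechanism: app_settings_type selects the converter from SETTINGS_TYPES, and an '.encrypted' suffix additionally routes the stored value through EncryptedTextValue. The setting names here are made up, and the encrypted case assumes ENCRYPTION_KEY is configured:

flag = RhodeCodeSetting('show_public_icon', True, 'bool')
assert flag.app_settings_value is True  # stored as u'True', read back through str2bool

token = RhodeCodeSetting('some_service_token', 'xyz-token', 'unicode.encrypted')
assert token.app_settings_value == u'xyz-token'  # stored as 'enc$aes_hmac$...'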
|
342 | ||||
|
343 | class RhodeCodeUi(Base, BaseModel): | |||
|
344 | __tablename__ = 'rhodecode_ui' | |||
|
345 | __table_args__ = ( | |||
|
346 | UniqueConstraint('ui_key'), | |||
|
347 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
348 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
349 | ) | |||
|
350 | ||||
|
351 | HOOK_REPO_SIZE = 'changegroup.repo_size' | |||
|
352 | # HG | |||
|
353 | HOOK_PRE_PULL = 'preoutgoing.pre_pull' | |||
|
354 | HOOK_PULL = 'outgoing.pull_logger' | |||
|
355 | HOOK_PRE_PUSH = 'prechangegroup.pre_push' | |||
|
356 | HOOK_PUSH = 'changegroup.push_logger' | |||
|
357 | ||||
|
358 | # TODO: johbo: Unify way how hooks are configured for git and hg, | |||
|
359 | # git part is currently hardcoded. | |||
|
360 | ||||
|
361 | # SVN PATTERNS | |||
|
362 | SVN_BRANCH_ID = 'vcs_svn_branch' | |||
|
363 | SVN_TAG_ID = 'vcs_svn_tag' | |||
|
364 | ||||
|
365 | ui_id = Column( | |||
|
366 | "ui_id", Integer(), nullable=False, unique=True, default=None, | |||
|
367 | primary_key=True) | |||
|
368 | ui_section = Column( | |||
|
369 | "ui_section", String(255), nullable=True, unique=None, default=None) | |||
|
370 | ui_key = Column( | |||
|
371 | "ui_key", String(255), nullable=True, unique=None, default=None) | |||
|
372 | ui_value = Column( | |||
|
373 | "ui_value", String(255), nullable=True, unique=None, default=None) | |||
|
374 | ui_active = Column( | |||
|
375 | "ui_active", Boolean(), nullable=True, unique=None, default=True) | |||
|
376 | ||||
|
377 | def __repr__(self): | |||
|
378 | return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.ui_section, | |||
|
379 | self.ui_key, self.ui_value) | |||
|
380 | ||||
|
381 | ||||
|
382 | class RepoRhodeCodeSetting(Base, BaseModel): | |||
|
383 | __tablename__ = 'repo_rhodecode_settings' | |||
|
384 | __table_args__ = ( | |||
|
385 | UniqueConstraint( | |||
|
386 | 'app_settings_name', 'repository_id', | |||
|
387 | name='uq_repo_rhodecode_setting_name_repo_id'), | |||
|
388 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
389 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
390 | ) | |||
|
391 | ||||
|
392 | repository_id = Column( | |||
|
393 | "repository_id", Integer(), ForeignKey('repositories.repo_id'), | |||
|
394 | nullable=False) | |||
|
395 | app_settings_id = Column( | |||
|
396 | "app_settings_id", Integer(), nullable=False, unique=True, | |||
|
397 | default=None, primary_key=True) | |||
|
398 | app_settings_name = Column( | |||
|
399 | "app_settings_name", String(255), nullable=True, unique=None, | |||
|
400 | default=None) | |||
|
401 | _app_settings_value = Column( | |||
|
402 | "app_settings_value", String(4096), nullable=True, unique=None, | |||
|
403 | default=None) | |||
|
404 | _app_settings_type = Column( | |||
|
405 | "app_settings_type", String(255), nullable=True, unique=None, | |||
|
406 | default=None) | |||
|
407 | ||||
|
408 | repository = relationship('Repository') | |||
|
409 | ||||
|
410 | def __init__(self, repository_id, key='', val='', type='unicode'): | |||
|
411 | self.repository_id = repository_id | |||
|
412 | self.app_settings_name = key | |||
|
413 | self.app_settings_type = type | |||
|
414 | self.app_settings_value = val | |||
|
415 | ||||
|
416 | @validates('_app_settings_value') | |||
|
417 | def validate_settings_value(self, key, val): | |||
|
418 | assert type(val) == unicode | |||
|
419 | return val | |||
|
420 | ||||
|
421 | @hybrid_property | |||
|
422 | def app_settings_value(self): | |||
|
423 | v = self._app_settings_value | |||
|
424 | type_ = self.app_settings_type | |||
|
425 | SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES | |||
|
426 | converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode'] | |||
|
427 | return converter(v) | |||
|
428 | ||||
|
429 | @app_settings_value.setter | |||
|
430 | def app_settings_value(self, val): | |||
|
431 | """ | |||
|
432 | Setter that will always make sure we use unicode in app_settings_value | |||
|
433 | ||||
|
434 | :param val: | |||
|
435 | """ | |||
|
436 | self._app_settings_value = safe_unicode(val) | |||
|
437 | ||||
|
438 | @hybrid_property | |||
|
439 | def app_settings_type(self): | |||
|
440 | return self._app_settings_type | |||
|
441 | ||||
|
442 | @app_settings_type.setter | |||
|
443 | def app_settings_type(self, val): | |||
|
444 | SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES | |||
|
445 | if val not in SETTINGS_TYPES: | |||
|
446 | raise Exception('type must be one of %s got %s' | |||
|
447 | % (SETTINGS_TYPES.keys(), val)) | |||
|
448 | self._app_settings_type = val | |||
|
449 | ||||
|
450 | def __unicode__(self): | |||
|
451 | return u"<%s('%s:%s:%s[%s]')>" % ( | |||
|
452 | self.__class__.__name__, self.repository.repo_name, | |||
|
453 | self.app_settings_name, self.app_settings_value, | |||
|
454 | self.app_settings_type | |||
|
455 | ) | |||
|
456 | ||||
|
457 | ||||
|
458 | class RepoRhodeCodeUi(Base, BaseModel): | |||
|
459 | __tablename__ = 'repo_rhodecode_ui' | |||
|
460 | __table_args__ = ( | |||
|
461 | UniqueConstraint( | |||
|
462 | 'repository_id', 'ui_section', 'ui_key', | |||
|
463 | name='uq_repo_rhodecode_ui_repository_id_section_key'), | |||
|
464 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
465 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
466 | ) | |||
|
467 | ||||
|
468 | repository_id = Column( | |||
|
469 | "repository_id", Integer(), ForeignKey('repositories.repo_id'), | |||
|
470 | nullable=False) | |||
|
471 | ui_id = Column( | |||
|
472 | "ui_id", Integer(), nullable=False, unique=True, default=None, | |||
|
473 | primary_key=True) | |||
|
474 | ui_section = Column( | |||
|
475 | "ui_section", String(255), nullable=True, unique=None, default=None) | |||
|
476 | ui_key = Column( | |||
|
477 | "ui_key", String(255), nullable=True, unique=None, default=None) | |||
|
478 | ui_value = Column( | |||
|
479 | "ui_value", String(255), nullable=True, unique=None, default=None) | |||
|
480 | ui_active = Column( | |||
|
481 | "ui_active", Boolean(), nullable=True, unique=None, default=True) | |||
|
482 | ||||
|
483 | repository = relationship('Repository') | |||
|
484 | ||||
|
485 | def __repr__(self): | |||
|
486 | return '<%s[%s:%s]%s=>%s>' % ( | |||
|
487 | self.__class__.__name__, self.repository.repo_name, | |||
|
488 | self.ui_section, self.ui_key, self.ui_value) | |||
|
489 | ||||
|
490 | ||||
|
491 | class User(Base, BaseModel): | |||
|
492 | __tablename__ = 'users' | |||
|
493 | __table_args__ = ( | |||
|
494 | UniqueConstraint('username'), UniqueConstraint('email'), | |||
|
495 | Index('u_username_idx', 'username'), | |||
|
496 | Index('u_email_idx', 'email'), | |||
|
497 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
498 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
499 | ) | |||
|
500 | DEFAULT_USER = 'default' | |||
|
501 | DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org' | |||
|
502 | DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}' | |||
|
503 | ||||
|
504 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
505 | username = Column("username", String(255), nullable=True, unique=None, default=None) | |||
|
506 | password = Column("password", String(255), nullable=True, unique=None, default=None) | |||
|
507 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) | |||
|
508 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) | |||
|
509 | name = Column("firstname", String(255), nullable=True, unique=None, default=None) | |||
|
510 | lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) | |||
|
511 | _email = Column("email", String(255), nullable=True, unique=None, default=None) | |||
|
512 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) | |||
|
513 | extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None) | |||
|
514 | extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None) | |||
|
515 | api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) | |||
|
516 | inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) | |||
|
517 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
518 | _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data | |||
|
519 | ||||
|
520 | user_log = relationship('UserLog') | |||
|
521 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') | |||
|
522 | ||||
|
523 | repositories = relationship('Repository') | |||
|
524 | repository_groups = relationship('RepoGroup') | |||
|
525 | user_groups = relationship('UserGroup') | |||
|
526 | ||||
|
527 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') | |||
|
528 | followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') | |||
|
529 | ||||
|
530 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') | |||
|
531 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all') | |||
|
532 | user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all') | |||
|
533 | ||||
|
534 | group_member = relationship('UserGroupMember', cascade='all') | |||
|
535 | ||||
|
536 | notifications = relationship('UserNotification', cascade='all') | |||
|
537 | # notifications assigned to this user | |||
|
538 | user_created_notifications = relationship('Notification', cascade='all') | |||
|
539 | # comments created by this user | |||
|
540 | user_comments = relationship('ChangesetComment', cascade='all') | |||
|
541 | # user profile extra info | |||
|
542 | user_emails = relationship('UserEmailMap', cascade='all') | |||
|
543 | user_ip_map = relationship('UserIpMap', cascade='all') | |||
|
544 | user_auth_tokens = relationship('UserApiKeys', cascade='all') | |||
|
545 | # gists | |||
|
546 | user_gists = relationship('Gist', cascade='all') | |||
|
547 | # user pull requests | |||
|
548 | user_pull_requests = relationship('PullRequest', cascade='all') | |||
|
549 | # external identities | |||
|
550 | extenal_identities = relationship( | |||
|
551 | 'ExternalIdentity', | |||
|
552 | primaryjoin="User.user_id==ExternalIdentity.local_user_id", | |||
|
553 | cascade='all') | |||
|
554 | ||||
|
555 | def __unicode__(self): | |||
|
556 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, | |||
|
557 | self.user_id, self.username) | |||
|
558 | ||||
|
559 | @hybrid_property | |||
|
560 | def email(self): | |||
|
561 | return self._email | |||
|
562 | ||||
|
563 | @email.setter | |||
|
564 | def email(self, val): | |||
|
565 | self._email = val.lower() if val else None | |||
|
566 | ||||
|
567 | @property | |||
|
568 | def firstname(self): | |||
|
569 | # alias for future | |||
|
570 | return self.name | |||
|
571 | ||||
|
572 | @property | |||
|
573 | def emails(self): | |||
|
574 | other = UserEmailMap.query().filter(UserEmailMap.user==self).all() | |||
|
575 | return [self.email] + [x.email for x in other] | |||
|
576 | ||||
|
577 | @property | |||
|
578 | def auth_tokens(self): | |||
|
579 | return [self.api_key] + [x.api_key for x in self.extra_auth_tokens] | |||
|
580 | ||||
|
581 | @property | |||
|
582 | def extra_auth_tokens(self): | |||
|
583 | return UserApiKeys.query().filter(UserApiKeys.user == self).all() | |||
|
584 | ||||
|
585 | @property | |||
|
586 | def feed_token(self): | |||
|
587 | feed_tokens = UserApiKeys.query()\ | |||
|
588 | .filter(UserApiKeys.user == self)\ | |||
|
589 | .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\ | |||
|
590 | .all() | |||
|
591 | if feed_tokens: | |||
|
592 | return feed_tokens[0].api_key | |||
|
593 | else: | |||
|
594 | # use the main token so we don't end up with nothing... | |||
|
595 | return self.api_key | |||
|
596 | ||||
|
597 | @classmethod | |||
|
598 | def extra_valid_auth_tokens(cls, user, role=None): | |||
|
599 | tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\ | |||
|
600 | .filter(or_(UserApiKeys.expires == -1, | |||
|
601 | UserApiKeys.expires >= time.time())) | |||
|
602 | if role: | |||
|
603 | tokens = tokens.filter(or_(UserApiKeys.role == role, | |||
|
604 | UserApiKeys.role == UserApiKeys.ROLE_ALL)) | |||
|
605 | return tokens.all() | |||
|
606 | ||||
|
607 | @property | |||
|
608 | def ip_addresses(self): | |||
|
609 | ret = UserIpMap.query().filter(UserIpMap.user == self).all() | |||
|
610 | return [x.ip_addr for x in ret] | |||
|
611 | ||||
|
612 | @property | |||
|
613 | def username_and_name(self): | |||
|
614 | return '%s (%s %s)' % (self.username, self.firstname, self.lastname) | |||
|
615 | ||||
|
616 | @property | |||
|
617 | def username_or_name_or_email(self): | |||
|
618 | full_name = self.full_name if self.full_name != ' ' else None | |||
|
619 | return self.username or full_name or self.email | |||
|
620 | ||||
|
621 | @property | |||
|
622 | def full_name(self): | |||
|
623 | return '%s %s' % (self.firstname, self.lastname) | |||
|
624 | ||||
|
625 | @property | |||
|
626 | def full_name_or_username(self): | |||
|
627 | return ('%s %s' % (self.firstname, self.lastname) | |||
|
628 | if (self.firstname and self.lastname) else self.username) | |||
|
629 | ||||
|
630 | @property | |||
|
631 | def full_contact(self): | |||
|
632 | return '%s %s <%s>' % (self.firstname, self.lastname, self.email) | |||
|
633 | ||||
|
634 | @property | |||
|
635 | def short_contact(self): | |||
|
636 | return '%s %s' % (self.firstname, self.lastname) | |||
|
637 | ||||
|
638 | @property | |||
|
639 | def is_admin(self): | |||
|
640 | return self.admin | |||
|
641 | ||||
|
642 | @property | |||
|
643 | def AuthUser(self): | |||
|
644 | """ | |||
|
645 | Returns instance of AuthUser for this user | |||
|
646 | """ | |||
|
647 | from rhodecode.lib.auth import AuthUser | |||
|
648 | return AuthUser(user_id=self.user_id, api_key=self.api_key, | |||
|
649 | username=self.username) | |||
|
650 | ||||
|
651 | @hybrid_property | |||
|
652 | def user_data(self): | |||
|
653 | if not self._user_data: | |||
|
654 | return {} | |||
|
655 | ||||
|
656 | try: | |||
|
657 | return json.loads(self._user_data) | |||
|
658 | except TypeError: | |||
|
659 | return {} | |||
|
660 | ||||
|
661 | @user_data.setter | |||
|
662 | def user_data(self, val): | |||
|
663 | if not isinstance(val, dict): | |||
|
664 | raise Exception('user_data must be dict, got %s' % type(val)) | |||
|
665 | try: | |||
|
666 | self._user_data = json.dumps(val) | |||
|
667 | except Exception: | |||
|
668 | log.error(traceback.format_exc()) | |||
|
669 | ||||
|
670 | @classmethod | |||
|
671 | def get_by_username(cls, username, case_insensitive=False, | |||
|
672 | cache=False, identity_cache=False): | |||
|
673 | session = Session() | |||
|
674 | ||||
|
675 | if case_insensitive: | |||
|
676 | q = cls.query().filter( | |||
|
677 | func.lower(cls.username) == func.lower(username)) | |||
|
678 | else: | |||
|
679 | q = cls.query().filter(cls.username == username) | |||
|
680 | ||||
|
681 | if cache: | |||
|
682 | if identity_cache: | |||
|
683 | val = cls.identity_cache(session, 'username', username) | |||
|
684 | if val: | |||
|
685 | return val | |||
|
686 | else: | |||
|
687 | q = q.options( | |||
|
688 | FromCache("sql_cache_short", | |||
|
689 | "get_user_by_name_%s" % _hash_key(username))) | |||
|
690 | ||||
|
691 | return q.scalar() | |||
|
692 | ||||
|
693 | @classmethod | |||
|
694 | def get_by_auth_token(cls, auth_token, cache=False, fallback=True): | |||
|
695 | q = cls.query().filter(cls.api_key == auth_token) | |||
|
696 | ||||
|
697 | if cache: | |||
|
698 | q = q.options(FromCache("sql_cache_short", | |||
|
699 | "get_auth_token_%s" % auth_token)) | |||
|
700 | res = q.scalar() | |||
|
701 | ||||
|
702 | if fallback and not res: | |||
|
703 | # fallback to additional keys | |||
|
704 | _res = UserApiKeys.query()\ | |||
|
705 | .filter(UserApiKeys.api_key == auth_token)\ | |||
|
706 | .filter(or_(UserApiKeys.expires == -1, | |||
|
707 | UserApiKeys.expires >= time.time()))\ | |||
|
708 | .first() | |||
|
709 | if _res: | |||
|
710 | res = _res.user | |||
|
711 | return res | |||
|
712 | ||||
|
713 | @classmethod | |||
|
714 | def get_by_email(cls, email, case_insensitive=False, cache=False): | |||
|
715 | ||||
|
716 | if case_insensitive: | |||
|
717 | q = cls.query().filter(func.lower(cls.email) == func.lower(email)) | |||
|
718 | ||||
|
719 | else: | |||
|
720 | q = cls.query().filter(cls.email == email) | |||
|
721 | ||||
|
722 | if cache: | |||
|
723 | q = q.options(FromCache("sql_cache_short", | |||
|
724 | "get_email_key_%s" % _hash_key(email))) | |||
|
725 | ||||
|
726 | ret = q.scalar() | |||
|
727 | if ret is None: | |||
|
728 | q = UserEmailMap.query() | |||
|
729 | # try fetching in alternate email map | |||
|
730 | if case_insensitive: | |||
|
731 | q = q.filter(func.lower(UserEmailMap.email) == func.lower(email)) | |||
|
732 | else: | |||
|
733 | q = q.filter(UserEmailMap.email == email) | |||
|
734 | q = q.options(joinedload(UserEmailMap.user)) | |||
|
735 | if cache: | |||
|
736 | q = q.options(FromCache("sql_cache_short", | |||
|
737 | "get_email_map_key_%s" % email)) | |||
|
738 | ret = getattr(q.scalar(), 'user', None) | |||
|
739 | ||||
|
740 | return ret | |||
|
741 | ||||
|
742 | @classmethod | |||
|
743 | def get_from_cs_author(cls, author): | |||
|
744 | """ | |||
|
745 | Tries to get User objects out of commit author string | |||
|
746 | ||||
|
747 | :param author: | |||
|
748 | """ | |||
|
749 | from rhodecode.lib.helpers import email, author_name | |||
|
750 | # Valid email in the attribute passed, see if they're in the system | |||
|
751 | _email = email(author) | |||
|
752 | if _email: | |||
|
753 | user = cls.get_by_email(_email, case_insensitive=True) | |||
|
754 | if user: | |||
|
755 | return user | |||
|
756 | # Maybe we can match by username? | |||
|
757 | _author = author_name(author) | |||
|
758 | user = cls.get_by_username(_author, case_insensitive=True) | |||
|
759 | if user: | |||
|
760 | return user | |||
|
761 | ||||
|
762 | def update_userdata(self, **kwargs): | |||
|
763 | usr = self | |||
|
764 | old = usr.user_data | |||
|
765 | old.update(**kwargs) | |||
|
766 | usr.user_data = old | |||
|
767 | Session().add(usr) | |||
|
768 | log.debug('updated userdata with %s', kwargs) | |||
|
769 | ||||
|
770 | def update_lastlogin(self): | |||
|
771 | """Update user lastlogin""" | |||
|
772 | self.last_login = datetime.datetime.now() | |||
|
773 | Session().add(self) | |||
|
774 | log.debug('updated user %s lastlogin', self.username) | |||
|
775 | ||||
|
776 | def update_lastactivity(self): | |||
|
777 | """Update user lastactivity""" | |||
|
778 | usr = self | |||
|
779 | old = usr.user_data | |||
|
780 | old.update({'last_activity': time.time()}) | |||
|
781 | usr.user_data = old | |||
|
782 | Session().add(usr) | |||
|
783 | log.debug('updated user %s lastactivity', usr.username) | |||
|
784 | ||||
|
785 | def update_password(self, new_password, change_api_key=False): | |||
|
786 | from rhodecode.lib.auth import get_crypt_password, generate_auth_token | |||
|
787 | ||||
|
788 | self.password = get_crypt_password(new_password) | |||
|
789 | if change_api_key: | |||
|
790 | self.api_key = generate_auth_token(self.username) | |||
|
791 | Session().add(self) | |||
|
792 | ||||
|
793 | @classmethod | |||
|
794 | def get_first_super_admin(cls): | |||
|
795 | user = User.query().filter(User.admin == true()).first() | |||
|
796 | if user is None: | |||
|
797 | raise Exception('FATAL: Missing administrative account!') | |||
|
798 | return user | |||
|
799 | ||||
|
800 | @classmethod | |||
|
801 | def get_all_super_admins(cls): | |||
|
802 | """ | |||
|
803 | Returns all admin accounts sorted by username | |||
|
804 | """ | |||
|
805 | return User.query().filter(User.admin == true())\ | |||
|
806 | .order_by(User.username.asc()).all() | |||
|
807 | ||||
|
808 | @classmethod | |||
|
809 | def get_default_user(cls, cache=False): | |||
|
810 | user = User.get_by_username(User.DEFAULT_USER, cache=cache) | |||
|
811 | if user is None: | |||
|
812 | raise Exception('FATAL: Missing default account!') | |||
|
813 | return user | |||
|
814 | ||||
|
815 | def _get_default_perms(self, user, suffix=''): | |||
|
816 | from rhodecode.model.permission import PermissionModel | |||
|
817 | return PermissionModel().get_default_perms(user.user_perms, suffix) | |||
|
818 | ||||
|
819 | def get_default_perms(self, suffix=''): | |||
|
820 | return self._get_default_perms(self, suffix) | |||
|
821 | ||||
|
822 | def get_api_data(self, include_secrets=False, details='full'): | |||
|
823 | """ | |||
|
824 | Common function for generating user related data for API | |||
|
825 | ||||
|
826 | :param include_secrets: By default secrets in the API data will be replaced | |||
|
827 | by a placeholder value to prevent exposing this data by accident. In case | |||
|
828 | this data shall be exposed, set this flag to ``True``. | |||
|
829 | ||||
|
830 | :param details: details can be 'basic' or 'full'; 'basic' gives only a subset of | |||
|
831 | the available user information: user_id, name and emails. | |||
|
832 | """ | |||
|
833 | user = self | |||
|
834 | user_data = self.user_data | |||
|
835 | data = { | |||
|
836 | 'user_id': user.user_id, | |||
|
837 | 'username': user.username, | |||
|
838 | 'firstname': user.name, | |||
|
839 | 'lastname': user.lastname, | |||
|
840 | 'email': user.email, | |||
|
841 | 'emails': user.emails, | |||
|
842 | } | |||
|
843 | if details == 'basic': | |||
|
844 | return data | |||
|
845 | ||||
|
846 | api_key_length = 40 | |||
|
847 | api_key_replacement = '*' * api_key_length | |||
|
848 | ||||
|
849 | extras = { | |||
|
850 | 'api_key': api_key_replacement, | |||
|
851 | 'api_keys': [api_key_replacement], | |||
|
852 | 'active': user.active, | |||
|
853 | 'admin': user.admin, | |||
|
854 | 'extern_type': user.extern_type, | |||
|
855 | 'extern_name': user.extern_name, | |||
|
856 | 'last_login': user.last_login, | |||
|
857 | 'ip_addresses': user.ip_addresses, | |||
|
858 | 'language': user_data.get('language') | |||
|
859 | } | |||
|
860 | data.update(extras) | |||
|
861 | ||||
|
862 | if include_secrets: | |||
|
863 | data['api_key'] = user.api_key | |||
|
864 | data['api_keys'] = user.auth_tokens | |||
|
865 | return data | |||
|
866 | ||||
|
867 | def __json__(self): | |||
|
868 | data = { | |||
|
869 | 'full_name': self.full_name, | |||
|
870 | 'full_name_or_username': self.full_name_or_username, | |||
|
871 | 'short_contact': self.short_contact, | |||
|
872 | 'full_contact': self.full_contact, | |||
|
873 | } | |||
|
874 | data.update(self.get_api_data()) | |||
|
875 | return data | |||
|
876 | ||||
|
877 | ||||
|
878 | class UserApiKeys(Base, BaseModel): | |||
|
879 | __tablename__ = 'user_api_keys' | |||
|
880 | __table_args__ = ( | |||
|
881 | Index('uak_api_key_idx', 'api_key'), | |||
|
882 | Index('uak_api_key_expires_idx', 'api_key', 'expires'), | |||
|
883 | UniqueConstraint('api_key'), | |||
|
884 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
885 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
886 | ) | |||
|
887 | __mapper_args__ = {} | |||
|
888 | ||||
|
889 | # ApiKey role | |||
|
890 | ROLE_ALL = 'token_role_all' | |||
|
891 | ROLE_HTTP = 'token_role_http' | |||
|
892 | ROLE_VCS = 'token_role_vcs' | |||
|
893 | ROLE_API = 'token_role_api' | |||
|
894 | ROLE_FEED = 'token_role_feed' | |||
|
895 | ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED] | |||
|
896 | ||||
|
897 | user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
898 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |||
|
899 | api_key = Column("api_key", String(255), nullable=False, unique=True) | |||
|
900 | description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) | |||
|
901 | expires = Column('expires', Float(53), nullable=False) | |||
|
902 | role = Column('role', String(255), nullable=True) | |||
|
903 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
904 | ||||
|
905 | user = relationship('User', lazy='joined') | |||
|
906 | ||||
|
907 | @classmethod | |||
|
908 | def _get_role_name(cls, role): | |||
|
909 | return { | |||
|
910 | cls.ROLE_ALL: _('all'), | |||
|
911 | cls.ROLE_HTTP: _('http/web interface'), | |||
|
912 | cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'), | |||
|
913 | cls.ROLE_API: _('api calls'), | |||
|
914 | cls.ROLE_FEED: _('feed access'), | |||
|
915 | }.get(role, role) | |||
|
916 | ||||
|
917 | @property | |||
|
918 | def expired(self): | |||
|
919 | if self.expires == -1: | |||
|
920 | return False | |||
|
921 | return time.time() > self.expires | |||
|
922 | ||||
|
923 | @property | |||
|
924 | def role_humanized(self): | |||
|
925 | return self._get_role_name(self.role) | |||
|
926 | ||||
|
927 | ||||
|
928 | class UserEmailMap(Base, BaseModel): | |||
|
929 | __tablename__ = 'user_email_map' | |||
|
930 | __table_args__ = ( | |||
|
931 | Index('uem_email_idx', 'email'), | |||
|
932 | UniqueConstraint('email'), | |||
|
933 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
934 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
935 | ) | |||
|
936 | __mapper_args__ = {} | |||
|
937 | ||||
|
938 | email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
939 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |||
|
940 | _email = Column("email", String(255), nullable=True, unique=False, default=None) | |||
|
941 | user = relationship('User', lazy='joined') | |||
|
942 | ||||
|
943 | @validates('_email') | |||
|
944 | def validate_email(self, key, email): | |||
|
945 | # check if this email is not the main one | |||
|
946 | main_email = Session().query(User).filter(User.email == email).scalar() | |||
|
947 | if main_email is not None: | |||
|
948 | raise AttributeError('email %s is present in the user table' % email) | |||
|
949 | return email | |||
|
950 | ||||
|
951 | @hybrid_property | |||
|
952 | def email(self): | |||
|
953 | return self._email | |||
|
954 | ||||
|
955 | @email.setter | |||
|
956 | def email(self, val): | |||
|
957 | self._email = val.lower() if val else None | |||
|
958 | ||||
|
959 | ||||
|
960 | class UserIpMap(Base, BaseModel): | |||
|
961 | __tablename__ = 'user_ip_map' | |||
|
962 | __table_args__ = ( | |||
|
963 | UniqueConstraint('user_id', 'ip_addr'), | |||
|
964 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
965 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
966 | ) | |||
|
967 | __mapper_args__ = {} | |||
|
968 | ||||
|
969 | ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
970 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |||
|
971 | ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None) | |||
|
972 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) | |||
|
973 | description = Column("description", String(10000), nullable=True, unique=None, default=None) | |||
|
974 | user = relationship('User', lazy='joined') | |||
|
975 | ||||
|
976 | @classmethod | |||
|
977 | def _get_ip_range(cls, ip_addr): | |||
|
978 | net = ipaddress.ip_network(ip_addr, strict=False) | |||
|
979 | return [str(net.network_address), str(net.broadcast_address)] | |||
|
980 | ||||
|
981 | def __json__(self): | |||
|
982 | return { | |||
|
983 | 'ip_addr': self.ip_addr, | |||
|
984 | 'ip_range': self._get_ip_range(self.ip_addr), | |||
|
985 | } | |||
|
986 | ||||
|
987 | def __unicode__(self): | |||
|
988 | return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__, | |||
|
989 | self.user_id, self.ip_addr) | |||
|
990 | ||||
|
991 | class UserLog(Base, BaseModel): | |||
|
992 | __tablename__ = 'user_logs' | |||
|
993 | __table_args__ = ( | |||
|
994 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
995 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
996 | ) | |||
|
997 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
998 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |||
|
999 | username = Column("username", String(255), nullable=True, unique=None, default=None) | |||
|
1000 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True) | |||
|
1001 | repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None) | |||
|
1002 | user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None) | |||
|
1003 | action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None) | |||
|
1004 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) | |||
|
1005 | ||||
|
1006 | def __unicode__(self): | |||
|
1007 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, | |||
|
1008 | self.repository_name, | |||
|
1009 | self.action) | |||
|
1010 | ||||
|
1011 | @property | |||
|
1012 | def action_as_day(self): | |||
|
1013 | return datetime.date(*self.action_date.timetuple()[:3]) | |||
|
1014 | ||||
|
1015 | user = relationship('User') | |||
|
1016 | repository = relationship('Repository', cascade='') | |||
|
1017 | ||||
|
1018 | ||||
|
1019 | class UserGroup(Base, BaseModel): | |||
|
1020 | __tablename__ = 'users_groups' | |||
|
1021 | __table_args__ = ( | |||
|
1022 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
1023 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
1024 | ) | |||
|
1025 | ||||
|
1026 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
1027 | users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None) | |||
|
1028 | user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None) | |||
|
1029 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) | |||
|
1030 | inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) | |||
|
1031 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) | |||
|
1032 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
1033 | _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data | |||
|
1034 | ||||
|
1035 | members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined") | |||
|
1036 | users_group_to_perm = relationship('UserGroupToPerm', cascade='all') | |||
|
1037 | users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all') | |||
|
1038 | users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') | |||
|
1039 | user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all') | |||
|
1040 | user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all') | |||
|
1041 | ||||
|
1042 | user = relationship('User') | |||
|
1043 | ||||
|
1044 | @hybrid_property | |||
|
1045 | def group_data(self): | |||
|
1046 | if not self._group_data: | |||
|
1047 | return {} | |||
|
1048 | ||||
|
1049 | try: | |||
|
1050 | return json.loads(self._group_data) | |||
|
1051 | except TypeError: | |||
|
1052 | return {} | |||
|
1053 | ||||
|
1054 | @group_data.setter | |||
|
1055 | def group_data(self, val): | |||
|
1056 | try: | |||
|
1057 | self._group_data = json.dumps(val) | |||
|
1058 | except Exception: | |||
|
1059 | log.error(traceback.format_exc()) | |||
|
1060 | ||||
|
1061 | def __unicode__(self): | |||
|
1062 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, | |||
|
1063 | self.users_group_id, | |||
|
1064 | self.users_group_name) | |||
|
1065 | ||||
|
1066 | @classmethod | |||
|
1067 | def get_by_group_name(cls, group_name, cache=False, | |||
|
1068 | case_insensitive=False): | |||
|
1069 | if case_insensitive: | |||
|
1070 | q = cls.query().filter(func.lower(cls.users_group_name) == | |||
|
1071 | func.lower(group_name)) | |||
|
1072 | ||||
|
1073 | else: | |||
|
1074 | q = cls.query().filter(cls.users_group_name == group_name) | |||
|
1075 | if cache: | |||
|
1076 | q = q.options(FromCache( | |||
|
1077 | "sql_cache_short", | |||
|
1078 | "get_group_%s" % _hash_key(group_name))) | |||
|
1079 | return q.scalar() | |||
|
1080 | ||||
|
1081 | @classmethod | |||
|
1082 | def get(cls, user_group_id, cache=False): | |||
|
1083 | user_group = cls.query() | |||
|
1084 | if cache: | |||
|
1085 | user_group = user_group.options(FromCache("sql_cache_short", | |||
|
1086 | "get_users_group_%s" % user_group_id)) | |||
|
1087 | return user_group.get(user_group_id) | |||
|
1088 | ||||
|
1089 | def permissions(self, with_admins=True, with_owner=True): | |||
|
1090 | q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self) | |||
|
1091 | q = q.options(joinedload(UserUserGroupToPerm.user_group), | |||
|
1092 | joinedload(UserUserGroupToPerm.user), | |||
|
1093 | joinedload(UserUserGroupToPerm.permission),) | |||
|
1094 | ||||
|
1095 | # get owners, admins and their permissions. We re-write the sqlalchemy | |||
|
1096 | # objects as AttributeDicts because the sqlalchemy session holds a | |||
|
1097 | # global reference, and changing one object propagates to all | |||
|
1098 | # others. This means that if an admin is also the owner, a change to | |||
|
1099 | # admin_row would otherwise propagate to both objects | |||
|
1100 | perm_rows = [] | |||
|
1101 | for _usr in q.all(): | |||
|
1102 | usr = AttributeDict(_usr.user.get_dict()) | |||
|
1103 | usr.permission = _usr.permission.permission_name | |||
|
1104 | perm_rows.append(usr) | |||
|
1105 | ||||
|
1106 | # sort the perm rows so the default user comes first, then by | |||
|
1107 | # admin, write, read, none permission level, and alphabetically | |||
|
1108 | # within each group | |||
|
1109 | perm_rows = sorted(perm_rows, key=display_sort) | |||
|
1110 | ||||
|
1111 | _admin_perm = 'usergroup.admin' | |||
|
1112 | owner_row = [] | |||
|
1113 | if with_owner: | |||
|
1114 | usr = AttributeDict(self.user.get_dict()) | |||
|
1115 | usr.owner_row = True | |||
|
1116 | usr.permission = _admin_perm | |||
|
1117 | owner_row.append(usr) | |||
|
1118 | ||||
|
1119 | super_admin_rows = [] | |||
|
1120 | if with_admins: | |||
|
1121 | for usr in User.get_all_super_admins(): | |||
|
1122 | # if this admin is also owner, don't double the record | |||
|
1123 | if usr.user_id == owner_row[0].user_id: | |||
|
1124 | owner_row[0].admin_row = True | |||
|
1125 | else: | |||
|
1126 | usr = AttributeDict(usr.get_dict()) | |||
|
1127 | usr.admin_row = True | |||
|
1128 | usr.permission = _admin_perm | |||
|
1129 | super_admin_rows.append(usr) | |||
|
1130 | ||||
|
1131 | return super_admin_rows + owner_row + perm_rows | |||
|
1132 | ||||
|
1133 | def permission_user_groups(self): | |||
|
1134 | q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self) | |||
|
1135 | q = q.options(joinedload(UserGroupUserGroupToPerm.user_group), | |||
|
1136 | joinedload(UserGroupUserGroupToPerm.target_user_group), | |||
|
1137 | joinedload(UserGroupUserGroupToPerm.permission),) | |||
|
1138 | ||||
|
1139 | perm_rows = [] | |||
|
1140 | for _user_group in q.all(): | |||
|
1141 | usr = AttributeDict(_user_group.user_group.get_dict()) | |||
|
1142 | usr.permission = _user_group.permission.permission_name | |||
|
1143 | perm_rows.append(usr) | |||
|
1144 | ||||
|
1145 | return perm_rows | |||
|
1146 | ||||
|
1147 | def _get_default_perms(self, user_group, suffix=''): | |||
|
1148 | from rhodecode.model.permission import PermissionModel | |||
|
1149 | return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix) | |||
|
1150 | ||||
|
1151 | def get_default_perms(self, suffix=''): | |||
|
1152 | return self._get_default_perms(self, suffix) | |||
|
1153 | ||||
|
1154 | def get_api_data(self, with_group_members=True, include_secrets=False): | |||
|
1155 | """ | |||
|
1156 | :param include_secrets: See :meth:`User.get_api_data`, this parameter is | |||
|
1157 | basically forwarded. | |||
|
1158 | ||||
|
1159 | """ | |||
|
1160 | user_group = self | |||
|
1161 | ||||
|
1162 | data = { | |||
|
1163 | 'users_group_id': user_group.users_group_id, | |||
|
1164 | 'group_name': user_group.users_group_name, | |||
|
1165 | 'group_description': user_group.user_group_description, | |||
|
1166 | 'active': user_group.users_group_active, | |||
|
1167 | 'owner': user_group.user.username, | |||
|
1168 | } | |||
|
1169 | if with_group_members: | |||
|
1170 | users = [] | |||
|
1171 | for user in user_group.members: | |||
|
1172 | user = user.user | |||
|
1173 | users.append(user.get_api_data(include_secrets=include_secrets)) | |||
|
1174 | data['users'] = users | |||
|
1175 | ||||
|
1176 | return data | |||
|
1177 | ||||
|
1178 | ||||
|
1179 | class UserGroupMember(Base, BaseModel): | |||
|
1180 | __tablename__ = 'users_groups_members' | |||
|
1181 | __table_args__ = ( | |||
|
1182 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
1183 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
1184 | ) | |||
|
1185 | ||||
|
1186 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
1187 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |||
|
1188 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |||
|
1189 | ||||
|
1190 | user = relationship('User', lazy='joined') | |||
|
1191 | users_group = relationship('UserGroup') | |||
|
1192 | ||||
|
1193 | def __init__(self, gr_id='', u_id=''): | |||
|
1194 | self.users_group_id = gr_id | |||
|
1195 | self.user_id = u_id | |||
|
1196 | ||||
|
1197 | ||||
|
1198 | class RepositoryField(Base, BaseModel): | |||
|
1199 | __tablename__ = 'repositories_fields' | |||
|
1200 | __table_args__ = ( | |||
|
1201 | UniqueConstraint('repository_id', 'field_key'), # no-multi field | |||
|
1202 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
1203 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
1204 | ) | |||
|
1205 | PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields | |||
|
1206 | ||||
|
1207 | repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
1208 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |||
|
1209 | field_key = Column("field_key", String(250)) | |||
|
1210 | field_label = Column("field_label", String(1024), nullable=False) | |||
|
1211 | field_value = Column("field_value", String(10000), nullable=False) | |||
|
1212 | field_desc = Column("field_desc", String(1024), nullable=False) | |||
|
1213 | field_type = Column("field_type", String(255), nullable=False, unique=None) | |||
|
1214 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
1215 | ||||
|
1216 | repository = relationship('Repository') | |||
|
1217 | ||||
|
1218 | @property | |||
|
1219 | def field_key_prefixed(self): | |||
|
1220 | return '%s%s' % (self.PREFIX, self.field_key) | |||
|
1221 | ||||
|
1222 | @classmethod | |||
|
1223 | def un_prefix_key(cls, key): | |||
|
1224 | if key.startswith(cls.PREFIX): | |||
|
1225 | return key[len(cls.PREFIX):] | |||
|
1226 | return key | |||
|
1227 | ||||
|
1228 | @classmethod | |||
|
1229 | def get_by_key_name(cls, key, repo): | |||
|
1230 | row = cls.query()\ | |||
|
1231 | .filter(cls.repository == repo)\ | |||
|
1232 | .filter(cls.field_key == key).scalar() | |||
|
1233 | return row | |||
|
1234 | ||||
|
1235 | ||||
|
1236 | class Repository(Base, BaseModel): | |||
|
1237 | __tablename__ = 'repositories' | |||
|
1238 | __table_args__ = ( | |||
|
1239 | Index('r_repo_name_idx', 'repo_name', mysql_length=255), | |||
|
1240 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
1241 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
1242 | ) | |||
|
1243 | DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}' | |||
|
1244 | DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}' | |||
|
1245 | ||||
|
1246 | STATE_CREATED = 'repo_state_created' | |||
|
1247 | STATE_PENDING = 'repo_state_pending' | |||
|
1248 | STATE_ERROR = 'repo_state_error' | |||
|
1249 | ||||
|
1250 | LOCK_AUTOMATIC = 'lock_auto' | |||
|
1251 | LOCK_API = 'lock_api' | |||
|
1252 | LOCK_WEB = 'lock_web' | |||
|
1253 | LOCK_PULL = 'lock_pull' | |||
|
1254 | ||||
|
1255 | NAME_SEP = URL_SEP | |||
|
1256 | ||||
|
1257 | repo_id = Column( | |||
|
1258 | "repo_id", Integer(), nullable=False, unique=True, default=None, | |||
|
1259 | primary_key=True) | |||
|
1260 | _repo_name = Column( | |||
|
1261 | "repo_name", Text(), nullable=False, default=None) | |||
|
1262 | _repo_name_hash = Column( | |||
|
1263 | "repo_name_hash", String(255), nullable=False, unique=True) | |||
|
1264 | repo_state = Column("repo_state", String(255), nullable=True) | |||
|
1265 | ||||
|
1266 | clone_uri = Column( | |||
|
1267 | "clone_uri", EncryptedTextValue(), nullable=True, unique=False, | |||
|
1268 | default=None) | |||
|
1269 | repo_type = Column( | |||
|
1270 | "repo_type", String(255), nullable=False, unique=False, default=None) | |||
|
1271 | user_id = Column( | |||
|
1272 | "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, | |||
|
1273 | unique=False, default=None) | |||
|
1274 | private = Column( | |||
|
1275 | "private", Boolean(), nullable=True, unique=None, default=None) | |||
|
1276 | enable_statistics = Column( | |||
|
1277 | "statistics", Boolean(), nullable=True, unique=None, default=True) | |||
|
1278 | enable_downloads = Column( | |||
|
1279 | "downloads", Boolean(), nullable=True, unique=None, default=True) | |||
|
1280 | description = Column( | |||
|
1281 | "description", String(10000), nullable=True, unique=None, default=None) | |||
|
1282 | created_on = Column( | |||
|
1283 | 'created_on', DateTime(timezone=False), nullable=True, unique=None, | |||
|
1284 | default=datetime.datetime.now) | |||
|
1285 | updated_on = Column( | |||
|
1286 | 'updated_on', DateTime(timezone=False), nullable=True, unique=None, | |||
|
1287 | default=datetime.datetime.now) | |||
|
1288 | _landing_revision = Column( | |||
|
1289 | "landing_revision", String(255), nullable=False, unique=False, | |||
|
1290 | default=None) | |||
|
1291 | enable_locking = Column( | |||
|
1292 | "enable_locking", Boolean(), nullable=False, unique=None, | |||
|
1293 | default=False) | |||
|
1294 | _locked = Column( | |||
|
1295 | "locked", String(255), nullable=True, unique=False, default=None) | |||
|
1296 | _changeset_cache = Column( | |||
|
1297 | "changeset_cache", LargeBinary(), nullable=True) # JSON data | |||
|
1298 | ||||
|
1299 | fork_id = Column( | |||
|
1300 | "fork_id", Integer(), ForeignKey('repositories.repo_id'), | |||
|
1301 | nullable=True, unique=False, default=None) | |||
|
1302 | group_id = Column( | |||
|
1303 | "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, | |||
|
1304 | unique=False, default=None) | |||
|
1305 | ||||
|
1306 | user = relationship('User', lazy='joined') | |||
|
1307 | fork = relationship('Repository', remote_side=repo_id, lazy='joined') | |||
|
1308 | group = relationship('RepoGroup', lazy='joined') | |||
|
1309 | repo_to_perm = relationship( | |||
|
1310 | 'UserRepoToPerm', cascade='all', | |||
|
1311 | order_by='UserRepoToPerm.repo_to_perm_id') | |||
|
1312 | users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') | |||
|
1313 | stats = relationship('Statistics', cascade='all', uselist=False) | |||
|
1314 | ||||
|
1315 | followers = relationship( | |||
|
1316 | 'UserFollowing', | |||
|
1317 | primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', | |||
|
1318 | cascade='all') | |||
|
1319 | extra_fields = relationship( | |||
|
1320 | 'RepositoryField', cascade="all, delete, delete-orphan") | |||
|
1321 | logs = relationship('UserLog') | |||
|
1322 | comments = relationship( | |||
|
1323 | 'ChangesetComment', cascade="all, delete, delete-orphan") | |||
|
1324 | pull_requests_source = relationship( | |||
|
1325 | 'PullRequest', | |||
|
1326 | primaryjoin='PullRequest.source_repo_id==Repository.repo_id', | |||
|
1327 | cascade="all, delete, delete-orphan") | |||
|
1328 | pull_requests_target = relationship( | |||
|
1329 | 'PullRequest', | |||
|
1330 | primaryjoin='PullRequest.target_repo_id==Repository.repo_id', | |||
|
1331 | cascade="all, delete, delete-orphan") | |||
|
1332 | ui = relationship('RepoRhodeCodeUi', cascade="all") | |||
|
1333 | settings = relationship('RepoRhodeCodeSetting', cascade="all") | |||
|
1334 | integrations = relationship('Integration', | |||
|
1335 | cascade="all, delete, delete-orphan") | |||
|
1336 | ||||
|
1337 | def __unicode__(self): | |||
|
1338 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, | |||
|
1339 | safe_unicode(self.repo_name)) | |||
|
1340 | ||||
|
1341 | @hybrid_property | |||
|
1342 | def landing_rev(self): | |||
|
1343 | # always should return [rev_type, rev] | |||
|
1344 | if self._landing_revision: | |||
|
1345 | _rev_info = self._landing_revision.split(':') | |||
|
1346 | if len(_rev_info) < 2: | |||
|
1347 | _rev_info.insert(0, 'rev') | |||
|
1348 | return [_rev_info[0], _rev_info[1]] | |||
|
1349 | return [None, None] | |||
|
1350 | ||||
|
1351 | @landing_rev.setter | |||
|
1352 | def landing_rev(self, val): | |||
|
1353 | if ':' not in val: | |||
|
1354 | raise ValueError('value must be delimited with `:` and consist ' | |||
|
1355 | 'of <rev_type>:<rev>, got %s instead' % val) | |||
|
1356 | self._landing_revision = val | |||
|
1357 | ||||
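    # Illustrative sketch (not part of the original code): ``landing_rev`` is
    # stored as a single '<rev_type>:<rev>' string; the values used below are
    # hypothetical examples.
    #
    #     repo.landing_rev = 'branch:default'
    #     rev_type, rev = repo.landing_rev    # -> ['branch', 'default']
    #     repo.landing_rev = 'default'        # raises ValueError, no ':' present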
|
1358 | @hybrid_property | |||
|
1359 | def locked(self): | |||
|
1360 | if self._locked: | |||
|
1361 | user_id, timelocked, reason = self._locked.split(':') | |||
|
1362 | lock_values = int(user_id), timelocked, reason | |||
|
1363 | else: | |||
|
1364 | lock_values = [None, None, None] | |||
|
1365 | return lock_values | |||
|
1366 | ||||
|
1367 | @locked.setter | |||
|
1368 | def locked(self, val): | |||
|
1369 | if val and isinstance(val, (list, tuple)): | |||
|
1370 | self._locked = ':'.join(map(str, val)) | |||
|
1371 | else: | |||
|
1372 | self._locked = None | |||
|
1373 | ||||
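    # Illustrative sketch (not part of the original code): the lock is stored
    # as a single 'user_id:timestamp:reason' string; the values used below are
    # hypothetical examples.
    #
    #     repo.locked = [2, time.time(), Repository.LOCK_API]
    #     user_id, lock_time, reason = repo.locked  # (2, '1467...', 'lock_api')
    #     repo.locked = None                        # clears the lock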
|
1374 | @hybrid_property | |||
|
1375 | def changeset_cache(self): | |||
|
1376 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |||
|
1377 | dummy = EmptyCommit().__json__() | |||
|
1378 | if not self._changeset_cache: | |||
|
1379 | return dummy | |||
|
1380 | try: | |||
|
1381 | return json.loads(self._changeset_cache) | |||
|
1382 | except TypeError: | |||
|
1383 | return dummy | |||
|
1384 | except Exception: | |||
|
1385 | log.error(traceback.format_exc()) | |||
|
1386 | return dummy | |||
|
1387 | ||||
|
1388 | @changeset_cache.setter | |||
|
1389 | def changeset_cache(self, val): | |||
|
1390 | try: | |||
|
1391 | self._changeset_cache = json.dumps(val) | |||
|
1392 | except Exception: | |||
|
1393 | log.error(traceback.format_exc()) | |||
|
1394 | ||||
|
1395 | @hybrid_property | |||
|
1396 | def repo_name(self): | |||
|
1397 | return self._repo_name | |||
|
1398 | ||||
|
1399 | @repo_name.setter | |||
|
1400 | def repo_name(self, value): | |||
|
1401 | self._repo_name = value | |||
|
1402 | self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest() | |||
|
1403 | ||||
|
1404 | @classmethod | |||
|
1405 | def normalize_repo_name(cls, repo_name): | |||
|
1406 | """ | |||
|
1407 | Normalizes an OS-specific repo_name to the format stored internally in | |||

1408 | the database, using URL_SEP as the separator | |||
|
1409 | ||||
|
1410 | :param cls: | |||
|
1411 | :param repo_name: | |||
|
1412 | """ | |||
|
1413 | return cls.NAME_SEP.join(repo_name.split(os.sep)) | |||
|
1414 | ||||
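    # Illustrative sketch (not part of the original code): on Windows, where
    # ``os.sep`` is a backslash, a filesystem-style name is normalized to the
    # URL-style form stored in the database (hypothetical repo name):
    #
    #     Repository.normalize_repo_name('group\\my-repo')  # -> 'group/my-repo'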
|
1415 | @classmethod | |||
|
1416 | def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False): | |||
|
1417 | session = Session() | |||
|
1418 | q = session.query(cls).filter(cls.repo_name == repo_name) | |||
|
1419 | ||||
|
1420 | if cache: | |||
|
1421 | if identity_cache: | |||
|
1422 | val = cls.identity_cache(session, 'repo_name', repo_name) | |||
|
1423 | if val: | |||
|
1424 | return val | |||
|
1425 | else: | |||
|
1426 | q = q.options( | |||
|
1427 | FromCache("sql_cache_short", | |||
|
1428 | "get_repo_by_name_%s" % _hash_key(repo_name))) | |||
|
1429 | ||||
|
1430 | return q.scalar() | |||
|
1431 | ||||
|
1432 | @classmethod | |||
|
1433 | def get_by_full_path(cls, repo_full_path): | |||
|
1434 | repo_name = repo_full_path.split(cls.base_path(), 1)[-1] | |||
|
1435 | repo_name = cls.normalize_repo_name(repo_name) | |||
|
1436 | return cls.get_by_repo_name(repo_name.strip(URL_SEP)) | |||
|
1437 | ||||
|
1438 | @classmethod | |||
|
1439 | def get_repo_forks(cls, repo_id): | |||
|
1440 | return cls.query().filter(Repository.fork_id == repo_id) | |||
|
1441 | ||||
|
1442 | @classmethod | |||
|
1443 | def base_path(cls): | |||
|
1444 | """ | |||
|
1445 | Returns the base path where all repos are stored | |||
|
1446 | ||||
|
1447 | :param cls: | |||
|
1448 | """ | |||
|
1449 | q = Session().query(RhodeCodeUi)\ | |||
|
1450 | .filter(RhodeCodeUi.ui_key == cls.NAME_SEP) | |||
|
1451 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) | |||
|
1452 | return q.one().ui_value | |||
|
1453 | ||||
|
1454 | @classmethod | |||
|
1455 | def is_valid(cls, repo_name): | |||
|
1456 | """ | |||
|
1457 | returns True if given repo name is a valid filesystem repository | |||
|
1458 | ||||
|
1459 | :param cls: | |||
|
1460 | :param repo_name: | |||
|
1461 | """ | |||
|
1462 | from rhodecode.lib.utils import is_valid_repo | |||
|
1463 | ||||
|
1464 | return is_valid_repo(repo_name, cls.base_path()) | |||
|
1465 | ||||
|
1466 | @classmethod | |||
|
1467 | def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None), | |||
|
1468 | case_insensitive=True): | |||
|
1469 | q = Repository.query() | |||
|
1470 | ||||
|
1471 | if not isinstance(user_id, Optional): | |||
|
1472 | q = q.filter(Repository.user_id == user_id) | |||
|
1473 | ||||
|
1474 | if not isinstance(group_id, Optional): | |||
|
1475 | q = q.filter(Repository.group_id == group_id) | |||
|
1476 | ||||
|
1477 | if case_insensitive: | |||
|
1478 | q = q.order_by(func.lower(Repository.repo_name)) | |||
|
1479 | else: | |||
|
1480 | q = q.order_by(Repository.repo_name) | |||
|
1481 | return q.all() | |||
|
1482 | ||||
|
1483 | @property | |||
|
1484 | def forks(self): | |||
|
1485 | """ | |||
|
1486 | Return forks of this repo | |||
|
1487 | """ | |||
|
1488 | return Repository.get_repo_forks(self.repo_id) | |||
|
1489 | ||||
|
1490 | @property | |||
|
1491 | def parent(self): | |||
|
1492 | """ | |||
|
1493 | Returns fork parent | |||
|
1494 | """ | |||
|
1495 | return self.fork | |||
|
1496 | ||||
|
1497 | @property | |||
|
1498 | def just_name(self): | |||
|
1499 | return self.repo_name.split(self.NAME_SEP)[-1] | |||
|
1500 | ||||
|
1501 | @property | |||
|
1502 | def groups_with_parents(self): | |||
|
1503 | groups = [] | |||
|
1504 | if self.group is None: | |||
|
1505 | return groups | |||
|
1506 | ||||
|
1507 | cur_gr = self.group | |||
|
1508 | groups.insert(0, cur_gr) | |||
|
1509 | while 1: | |||
|
1510 | gr = getattr(cur_gr, 'parent_group', None) | |||
|
1511 | cur_gr = cur_gr.parent_group | |||
|
1512 | if gr is None: | |||
|
1513 | break | |||
|
1514 | groups.insert(0, gr) | |||
|
1515 | ||||
|
1516 | return groups | |||
|
1517 | ||||
|
1518 | @property | |||
|
1519 | def groups_and_repo(self): | |||
|
1520 | return self.groups_with_parents, self | |||
|
1521 | ||||
|
1522 | @LazyProperty | |||
|
1523 | def repo_path(self): | |||
|
1524 | """ | |||
|
1525 | Returns base full path for that repository means where it actually | |||
|
1526 | exists on a filesystem | |||
|
1527 | """ | |||
|
1528 | q = Session().query(RhodeCodeUi).filter( | |||
|
1529 | RhodeCodeUi.ui_key == self.NAME_SEP) | |||
|
1530 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) | |||
|
1531 | return q.one().ui_value | |||
|
1532 | ||||
|
1533 | @property | |||
|
1534 | def repo_full_path(self): | |||
|
1535 | p = [self.repo_path] | |||
|
1536 | # we need to split the name by / since this is how we store the | |||
|
1537 | # names in the database, but that eventually needs to be converted | |||
|
1538 | # into a valid system path | |||
|
1539 | p += self.repo_name.split(self.NAME_SEP) | |||
|
1540 | return os.path.join(*map(safe_unicode, p)) | |||
|
1541 | ||||
|
1542 | @property | |||
|
1543 | def cache_keys(self): | |||
|
1544 | """ | |||
|
1545 | Returns associated cache keys for that repo | |||
|
1546 | """ | |||
|
1547 | return CacheKey.query()\ | |||
|
1548 | .filter(CacheKey.cache_args == self.repo_name)\ | |||
|
1549 | .order_by(CacheKey.cache_key)\ | |||
|
1550 | .all() | |||
|
1551 | ||||
|
1552 | def get_new_name(self, repo_name): | |||
|
1553 | """ | |||
|
1554 | returns the new full repository name based on the assigned group and the new name | |||
|
1555 | ||||
|
1556 | :param repo_name: | |||
|
1557 | """ | |||
|
1558 | path_prefix = self.group.full_path_splitted if self.group else [] | |||
|
1559 | return self.NAME_SEP.join(path_prefix + [repo_name]) | |||
|
1560 | ||||
|
1561 | @property | |||
|
1562 | def _config(self): | |||
|
1563 | """ | |||
|
1564 | Returns db based config object. | |||
|
1565 | """ | |||
|
1566 | from rhodecode.lib.utils import make_db_config | |||
|
1567 | return make_db_config(clear_session=False, repo=self) | |||
|
1568 | ||||
|
1569 | def permissions(self, with_admins=True, with_owner=True): | |||
|
1570 | q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self) | |||
|
1571 | q = q.options(joinedload(UserRepoToPerm.repository), | |||
|
1572 | joinedload(UserRepoToPerm.user), | |||
|
1573 | joinedload(UserRepoToPerm.permission),) | |||
|
1574 | ||||
|
1575 | # get owners, admins and their permissions. We rewrite the sqlalchemy | |||

1576 | # objects into plain AttributeDicts because the sqlalchemy session | |||

1577 | # keeps a global reference to each object, so changing one instance | |||

1578 | # propagates to all others. E.g. if an admin is also the owner, setting | |||

1579 | # admin_row on one row would otherwise change both objects | |||
|
1580 | perm_rows = [] | |||
|
1581 | for _usr in q.all(): | |||
|
1582 | usr = AttributeDict(_usr.user.get_dict()) | |||
|
1583 | usr.permission = _usr.permission.permission_name | |||
|
1584 | perm_rows.append(usr) | |||
|
1585 | ||||
|
1586 | # filter the perm rows by 'default' first and then sort them by | |||
|
1587 | # admin,write,read,none permissions sorted again alphabetically in | |||
|
1588 | # each group | |||
|
1589 | perm_rows = sorted(perm_rows, key=display_sort) | |||
|
1590 | ||||
|
1591 | _admin_perm = 'repository.admin' | |||
|
1592 | owner_row = [] | |||
|
1593 | if with_owner: | |||
|
1594 | usr = AttributeDict(self.user.get_dict()) | |||
|
1595 | usr.owner_row = True | |||
|
1596 | usr.permission = _admin_perm | |||
|
1597 | owner_row.append(usr) | |||
|
1598 | ||||
|
1599 | super_admin_rows = [] | |||
|
1600 | if with_admins: | |||
|
1601 | for usr in User.get_all_super_admins(): | |||
|
1602 | # if this admin is also owner, don't double the record | |||
|
1603 | if usr.user_id == owner_row[0].user_id: | |||
|
1604 | owner_row[0].admin_row = True | |||
|
1605 | else: | |||
|
1606 | usr = AttributeDict(usr.get_dict()) | |||
|
1607 | usr.admin_row = True | |||
|
1608 | usr.permission = _admin_perm | |||
|
1609 | super_admin_rows.append(usr) | |||
|
1610 | ||||
|
1611 | return super_admin_rows + owner_row + perm_rows | |||
|
1612 | ||||
|
1613 | def permission_user_groups(self): | |||
|
1614 | q = UserGroupRepoToPerm.query().filter( | |||
|
1615 | UserGroupRepoToPerm.repository == self) | |||
|
1616 | q = q.options(joinedload(UserGroupRepoToPerm.repository), | |||
|
1617 | joinedload(UserGroupRepoToPerm.users_group), | |||
|
1618 | joinedload(UserGroupRepoToPerm.permission),) | |||
|
1619 | ||||
|
1620 | perm_rows = [] | |||
|
1621 | for _user_group in q.all(): | |||
|
1622 | usr = AttributeDict(_user_group.users_group.get_dict()) | |||
|
1623 | usr.permission = _user_group.permission.permission_name | |||
|
1624 | perm_rows.append(usr) | |||
|
1625 | ||||
|
1626 | return perm_rows | |||
|
1627 | ||||
|
1628 | def get_api_data(self, include_secrets=False): | |||
|
1629 | """ | |||
|
1630 | Common function for generating repo api data | |||
|
1631 | ||||
|
1632 | :param include_secrets: See :meth:`User.get_api_data`. | |||
|
1633 | ||||
|
1634 | """ | |||
|
1635 | # TODO: mikhail: there is an anti-pattern here; we probably need to | |||

1636 | # move these methods to the model level. | |||
|
1637 | from rhodecode.model.settings import SettingsModel | |||
|
1638 | ||||
|
1639 | repo = self | |||
|
1640 | _user_id, _time, _reason = self.locked | |||
|
1641 | ||||
|
1642 | data = { | |||
|
1643 | 'repo_id': repo.repo_id, | |||
|
1644 | 'repo_name': repo.repo_name, | |||
|
1645 | 'repo_type': repo.repo_type, | |||
|
1646 | 'clone_uri': repo.clone_uri or '', | |||
|
1647 | 'url': url('summary_home', repo_name=self.repo_name, qualified=True), | |||
|
1648 | 'private': repo.private, | |||
|
1649 | 'created_on': repo.created_on, | |||
|
1650 | 'description': repo.description, | |||
|
1651 | 'landing_rev': repo.landing_rev, | |||
|
1652 | 'owner': repo.user.username, | |||
|
1653 | 'fork_of': repo.fork.repo_name if repo.fork else None, | |||
|
1654 | 'enable_statistics': repo.enable_statistics, | |||
|
1655 | 'enable_locking': repo.enable_locking, | |||
|
1656 | 'enable_downloads': repo.enable_downloads, | |||
|
1657 | 'last_changeset': repo.changeset_cache, | |||
|
1658 | 'locked_by': User.get(_user_id).get_api_data( | |||
|
1659 | include_secrets=include_secrets) if _user_id else None, | |||
|
1660 | 'locked_date': time_to_datetime(_time) if _time else None, | |||
|
1661 | 'lock_reason': _reason if _reason else None, | |||
|
1662 | } | |||
|
1663 | ||||
|
1664 | # TODO: mikhail: should be per-repo settings here | |||
|
1665 | rc_config = SettingsModel().get_all_settings() | |||
|
1666 | repository_fields = str2bool( | |||
|
1667 | rc_config.get('rhodecode_repository_fields')) | |||
|
1668 | if repository_fields: | |||
|
1669 | for f in self.extra_fields: | |||
|
1670 | data[f.field_key_prefixed] = f.field_value | |||
|
1671 | ||||
|
1672 | return data | |||
|
1673 | ||||
|
1674 | @classmethod | |||
|
1675 | def lock(cls, repo, user_id, lock_time=None, lock_reason=None): | |||
|
1676 | if not lock_time: | |||
|
1677 | lock_time = time.time() | |||
|
1678 | if not lock_reason: | |||
|
1679 | lock_reason = cls.LOCK_AUTOMATIC | |||
|
1680 | repo.locked = [user_id, lock_time, lock_reason] | |||
|
1681 | Session().add(repo) | |||
|
1682 | Session().commit() | |||
|
1683 | ||||
|
1684 | @classmethod | |||
|
1685 | def unlock(cls, repo): | |||
|
1686 | repo.locked = None | |||
|
1687 | Session().add(repo) | |||
|
1688 | Session().commit() | |||
|
1689 | ||||
|
1690 | @classmethod | |||
|
1691 | def getlock(cls, repo): | |||
|
1692 | return repo.locked | |||
|
1693 | ||||
|
1694 | def is_user_lock(self, user_id): | |||
|
1695 | if self.locked[0]: | |||

1696 | lock_user_id = safe_int(self.locked[0]) | |||
|
1697 | user_id = safe_int(user_id) | |||
|
1698 | # both are ints, and they are equal | |||
|
1699 | return all([lock_user_id, user_id]) and lock_user_id == user_id | |||
|
1700 | ||||
|
1701 | return False | |||
|
1702 | ||||
|
1703 | def get_locking_state(self, action, user_id, only_when_enabled=True): | |||
|
1704 | """ | |||
|
1705 | Checks locking on this repository, if locking is enabled and lock is | |||
|
1706 | present returns a tuple of make_lock, locked, locked_by. | |||
|
1707 | make_lock can have 3 states None (do nothing) True, make lock | |||
|
1708 | False release lock, This value is later propagated to hooks, which | |||
|
1709 | do the locking. Think about this as signals passed to hooks what to do. | |||
|
1710 | ||||
|
1711 | """ | |||
|
1712 | # TODO: johbo: This is part of the business logic and should be moved | |||
|
1713 | # into the RepositoryModel. | |||
|
1714 | ||||
|
1715 | if action not in ('push', 'pull'): | |||
|
1716 | raise ValueError("Invalid action value: %s" % repr(action)) | |||
|
1717 | ||||
|
1718 | # defines if locked error should be thrown to user | |||
|
1719 | currently_locked = False | |||
|
1720 | # defines if new lock should be made, tri-state | |||
|
1721 | make_lock = None | |||
|
1722 | repo = self | |||
|
1723 | user = User.get(user_id) | |||
|
1724 | ||||
|
1725 | lock_info = repo.locked | |||
|
1726 | ||||
|
1727 | if repo and (repo.enable_locking or not only_when_enabled): | |||
|
1728 | if action == 'push': | |||
|
1729 | # check if it's already locked; if it is, compare users | |||
|
1730 | locked_by_user_id = lock_info[0] | |||
|
1731 | if user.user_id == locked_by_user_id: | |||
|
1732 | log.debug( | |||
|
1733 | 'Got `push` action from user %s, now unlocking', user) | |||
|
1734 | # unlock if we have push from user who locked | |||
|
1735 | make_lock = False | |||
|
1736 | else: | |||
|
1737 | # we're not the same user who locked, ban with | |||
|
1738 | # code defined in settings (default is 423 HTTP Locked) ! | |||
|
1739 | log.debug('Repo %s is currently locked by %s', repo, user) | |||
|
1740 | currently_locked = True | |||
|
1741 | elif action == 'pull': | |||
|
1742 | # [0] user [1] date | |||
|
1743 | if lock_info[0] and lock_info[1]: | |||
|
1744 | log.debug('Repo %s is currently locked by %s', repo, user) | |||
|
1745 | currently_locked = True | |||
|
1746 | else: | |||
|
1747 | log.debug('Setting lock on repo %s by %s', repo, user) | |||
|
1748 | make_lock = True | |||
|
1749 | ||||
|
1750 | else: | |||
|
1751 | log.debug('Repository %s does not have locking enabled', repo) | |||
|
1752 | ||||
|
1753 | log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s', | |||
|
1754 | make_lock, currently_locked, lock_info) | |||
|
1755 | ||||
|
1756 | from rhodecode.lib.auth import HasRepoPermissionAny | |||
|
1757 | perm_check = HasRepoPermissionAny('repository.write', 'repository.admin') | |||
|
1758 | if make_lock and not perm_check(repo_name=repo.repo_name, user=user): | |||
|
1759 | # if we don't have at least write permission we cannot make a lock | |||
|
1760 | log.debug('lock state reset back to FALSE due to lack ' | |||
|
1761 | 'of at least write permission') | |||
|
1762 | make_lock = False | |||
|
1763 | ||||
|
1764 | return make_lock, currently_locked, lock_info | |||
|
1765 | ||||
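    # Illustrative sketch (not part of the original code): how a hook could
    # act on the returned triple; ``repo`` and ``user_id`` are hypothetical.
    #
    #     make_lock, locked, locked_by = repo.get_locking_state('push', user_id)
    #     if locked:
    #         pass  # reject the operation (default is HTTP 423 Locked)
    #     elif make_lock:
    #         Repository.lock(repo, user_id)
    #     elif make_lock is False:
    #         Repository.unlock(repo)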
|
1766 | @property | |||
|
1767 | def last_db_change(self): | |||
|
1768 | return self.updated_on | |||
|
1769 | ||||
|
1770 | @property | |||
|
1771 | def clone_uri_hidden(self): | |||
|
1772 | clone_uri = self.clone_uri | |||
|
1773 | if clone_uri: | |||
|
1774 | import urlobject | |||
|
1775 | url_obj = urlobject.URLObject(clone_uri) | |||
|
1776 | if url_obj.password: | |||
|
1777 | clone_uri = url_obj.with_password('*****') | |||
|
1778 | return clone_uri | |||
|
1779 | ||||
|
1780 | def clone_url(self, **override): | |||
|
1781 | qualified_home_url = url('home', qualified=True) | |||
|
1782 | ||||
|
1783 | uri_tmpl = None | |||
|
1784 | if 'with_id' in override: | |||
|
1785 | uri_tmpl = self.DEFAULT_CLONE_URI_ID | |||
|
1786 | del override['with_id'] | |||
|
1787 | ||||
|
1788 | if 'uri_tmpl' in override: | |||
|
1789 | uri_tmpl = override['uri_tmpl'] | |||
|
1790 | del override['uri_tmpl'] | |||
|
1791 | ||||
|
1792 | # we didn't override our tmpl from **overrides | |||
|
1793 | if not uri_tmpl: | |||
|
1794 | uri_tmpl = self.DEFAULT_CLONE_URI | |||
|
1795 | try: | |||
|
1796 | from pylons import tmpl_context as c | |||
|
1797 | uri_tmpl = c.clone_uri_tmpl | |||
|
1798 | except Exception: | |||
|
1799 | # in any case, if we call this outside of a request context, | |||

1800 | # i.e. without tmpl_context set up | |||
|
1801 | pass | |||
|
1802 | ||||
|
1803 | return get_clone_url(uri_tmpl=uri_tmpl, | |||
|
1804 | qualifed_home_url=qualified_home_url, | |||
|
1805 | repo_name=self.repo_name, | |||
|
1806 | repo_id=self.repo_id, **override) | |||
|
1807 | ||||
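    # Illustrative sketch (not part of the original code): the default clone
    # URI templates expand roughly as follows (hypothetical host and names):
    #
    #     repo.clone_url()              # 'https://user@example.com/group/my-repo'
    #     repo.clone_url(with_id=True)  # 'https://user@example.com/_42'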
|
1808 | def set_state(self, state): | |||
|
1809 | self.repo_state = state | |||
|
1810 | Session().add(self) | |||
|
1811 | #========================================================================== | |||
|
1812 | # SCM PROPERTIES | |||
|
1813 | #========================================================================== | |||
|
1814 | ||||
|
1815 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None): | |||
|
1816 | return get_commit_safe( | |||
|
1817 | self.scm_instance(), commit_id, commit_idx, pre_load=pre_load) | |||
|
1818 | ||||
|
1819 | def get_changeset(self, rev=None, pre_load=None): | |||
|
1820 | warnings.warn("Use get_commit", DeprecationWarning) | |||
|
1821 | commit_id = None | |||
|
1822 | commit_idx = None | |||
|
1823 | if isinstance(rev, basestring): | |||
|
1824 | commit_id = rev | |||
|
1825 | else: | |||
|
1826 | commit_idx = rev | |||
|
1827 | return self.get_commit(commit_id=commit_id, commit_idx=commit_idx, | |||
|
1828 | pre_load=pre_load) | |||
|
1829 | ||||
|
1830 | def get_landing_commit(self): | |||
|
1831 | """ | |||
|
1832 | Returns landing commit, or if that doesn't exist returns the tip | |||
|
1833 | """ | |||
|
1834 | _rev_type, _rev = self.landing_rev | |||
|
1835 | commit = self.get_commit(_rev) | |||
|
1836 | if isinstance(commit, EmptyCommit): | |||
|
1837 | return self.get_commit() | |||
|
1838 | return commit | |||
|
1839 | ||||
|
1840 | def update_commit_cache(self, cs_cache=None, config=None): | |||
|
1841 | """ | |||
|
1842 | Update cache of last changeset for repository, keys should be:: | |||
|
1843 | ||||
|
1844 | short_id | |||
|
1845 | raw_id | |||
|
1846 | revision | |||
|
1847 | parents | |||
|
1848 | message | |||
|
1849 | date | |||
|
1850 | author | |||
|
1851 | ||||
|
1852 | :param cs_cache: | |||
|
1853 | """ | |||
|
1854 | from rhodecode.lib.vcs.backends.base import BaseChangeset | |||
|
1855 | if cs_cache is None: | |||
|
1856 | # use no-cache version here | |||
|
1857 | scm_repo = self.scm_instance(cache=False, config=config) | |||
|
1858 | if scm_repo: | |||
|
1859 | cs_cache = scm_repo.get_commit( | |||
|
1860 | pre_load=["author", "date", "message", "parents"]) | |||
|
1861 | else: | |||
|
1862 | cs_cache = EmptyCommit() | |||
|
1863 | ||||
|
1864 | if isinstance(cs_cache, BaseChangeset): | |||
|
1865 | cs_cache = cs_cache.__json__() | |||
|
1866 | ||||
|
1867 | def is_outdated(new_cs_cache): | |||
|
1868 | if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or | |||
|
1869 | new_cs_cache['revision'] != self.changeset_cache['revision']): | |||
|
1870 | return True | |||
|
1871 | return False | |||
|
1872 | ||||
|
1873 | # check if we have maybe already latest cached revision | |||
|
1874 | if is_outdated(cs_cache) or not self.changeset_cache: | |||
|
1875 | _default = datetime.datetime.fromtimestamp(0) | |||
|
1876 | last_change = cs_cache.get('date') or _default | |||
|
1877 | log.debug('updated repo %s with new cs cache %s', | |||
|
1878 | self.repo_name, cs_cache) | |||
|
1879 | self.updated_on = last_change | |||
|
1880 | self.changeset_cache = cs_cache | |||
|
1881 | Session().add(self) | |||
|
1882 | Session().commit() | |||
|
1883 | else: | |||
|
1884 | log.debug('Skipping update_commit_cache for repo:`%s` ' | |||
|
1885 | 'commit cache already contains the latest changes', self.repo_name) | |||
|
1886 | ||||
|
1887 | @property | |||
|
1888 | def tip(self): | |||
|
1889 | return self.get_commit('tip') | |||
|
1890 | ||||
|
1891 | @property | |||
|
1892 | def author(self): | |||
|
1893 | return self.tip.author | |||
|
1894 | ||||
|
1895 | @property | |||
|
1896 | def last_change(self): | |||
|
1897 | return self.scm_instance().last_change | |||
|
1898 | ||||
|
1899 | def get_comments(self, revisions=None): | |||
|
1900 | """ | |||
|
1901 | Returns comments for this repository grouped by revisions | |||
|
1902 | ||||
|
1903 | :param revisions: filter query by revisions only | |||
|
1904 | """ | |||
|
1905 | cmts = ChangesetComment.query()\ | |||
|
1906 | .filter(ChangesetComment.repo == self) | |||
|
1907 | if revisions: | |||
|
1908 | cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) | |||
|
1909 | grouped = collections.defaultdict(list) | |||
|
1910 | for cmt in cmts.all(): | |||
|
1911 | grouped[cmt.revision].append(cmt) | |||
|
1912 | return grouped | |||
|
1913 | ||||
|
1914 | def statuses(self, revisions=None): | |||
|
1915 | """ | |||
|
1916 | Returns statuses for this repository | |||
|
1917 | ||||
|
1918 | :param revisions: list of revisions to get statuses for | |||
|
1919 | """ | |||
|
1920 | statuses = ChangesetStatus.query()\ | |||
|
1921 | .filter(ChangesetStatus.repo == self)\ | |||
|
1922 | .filter(ChangesetStatus.version == 0) | |||
|
1923 | ||||
|
1924 | if revisions: | |||
|
1925 | # Try doing the filtering in chunks to avoid hitting limits | |||
|
1926 | size = 500 | |||
|
1927 | status_results = [] | |||
|
1928 | for chunk in xrange(0, len(revisions), size): | |||
|
1929 | status_results += statuses.filter( | |||
|
1930 | ChangesetStatus.revision.in_( | |||
|
1931 | revisions[chunk: chunk+size]) | |||
|
1932 | ).all() | |||
|
1933 | else: | |||
|
1934 | status_results = statuses.all() | |||
|
1935 | ||||
|
1936 | grouped = {} | |||
|
1937 | ||||
|
1938 | # maybe we have an open pull request without a status yet? | |||
|
1939 | stat = ChangesetStatus.STATUS_UNDER_REVIEW | |||
|
1940 | status_lbl = ChangesetStatus.get_status_lbl(stat) | |||
|
1941 | for pr in PullRequest.query().filter(PullRequest.source_repo == self).all(): | |||
|
1942 | for rev in pr.revisions: | |||
|
1943 | pr_id = pr.pull_request_id | |||
|
1944 | pr_repo = pr.target_repo.repo_name | |||
|
1945 | grouped[rev] = [stat, status_lbl, pr_id, pr_repo] | |||
|
1946 | ||||
|
1947 | for stat in status_results: | |||
|
1948 | pr_id = pr_repo = None | |||
|
1949 | if stat.pull_request: | |||
|
1950 | pr_id = stat.pull_request.pull_request_id | |||
|
1951 | pr_repo = stat.pull_request.target_repo.repo_name | |||
|
1952 | grouped[stat.revision] = [str(stat.status), stat.status_lbl, | |||
|
1953 | pr_id, pr_repo] | |||
|
1954 | return grouped | |||
|
1955 | ||||
|
1956 | # ========================================================================== | |||
|
1957 | # SCM CACHE INSTANCE | |||
|
1958 | # ========================================================================== | |||
|
1959 | ||||
|
1960 | def scm_instance(self, **kwargs): | |||
|
1961 | import rhodecode | |||
|
1962 | ||||
|
1963 | # Passing a config bypasses the cache; currently this is only used | |||

1964 | # for repo2dbmapper | |||
|
1965 | config = kwargs.pop('config', None) | |||
|
1966 | cache = kwargs.pop('cache', None) | |||
|
1967 | full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache')) | |||
|
1968 | # if cache is NOT defined use the global default, else we have full | |||

1969 | # control over the cache behaviour | |||
|
1970 | if cache is None and full_cache and not config: | |||
|
1971 | return self._get_instance_cached() | |||
|
1972 | return self._get_instance(cache=bool(cache), config=config) | |||
|
1973 | ||||
|
1974 | def _get_instance_cached(self): | |||
|
1975 | @cache_region('long_term') | |||
|
1976 | def _get_repo(cache_key): | |||
|
1977 | return self._get_instance() | |||
|
1978 | ||||
|
1979 | invalidator_context = CacheKey.repo_context_cache( | |||
|
1980 | _get_repo, self.repo_name, None, thread_scoped=True) | |||
|
1981 | ||||
|
1982 | with invalidator_context as context: | |||
|
1983 | context.invalidate() | |||
|
1984 | repo = context.compute() | |||
|
1985 | ||||
|
1986 | return repo | |||
|
1987 | ||||
|
1988 | def _get_instance(self, cache=True, config=None): | |||
|
1989 | config = config or self._config | |||
|
1990 | custom_wire = { | |||
|
1991 | 'cache': cache # controls the vcs.remote cache | |||
|
1992 | } | |||
|
1993 | ||||
|
1994 | repo = get_vcs_instance( | |||
|
1995 | repo_path=safe_str(self.repo_full_path), | |||
|
1996 | config=config, | |||
|
1997 | with_wire=custom_wire, | |||
|
1998 | create=False) | |||
|
1999 | ||||
|
2000 | return repo | |||
|
2001 | ||||
|
2002 | def __json__(self): | |||
|
2003 | return {'landing_rev': self.landing_rev} | |||
|
2004 | ||||
|
2005 | def get_dict(self): | |||
|
2006 | ||||
|
2007 | # Since we transformed `repo_name` to a hybrid property, we need to | |||
|
2008 | # keep compatibility with the code which uses `repo_name` field. | |||
|
2009 | ||||
|
2010 | result = super(Repository, self).get_dict() | |||
|
2011 | result['repo_name'] = result.pop('_repo_name', None) | |||
|
2012 | return result | |||
|
2013 | ||||
|
2014 | ||||
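# Illustrative usage sketch, not part of the original module: ties together a
# few of the Repository helpers defined above. The repository name and user id
# are hypothetical examples and a configured database session is assumed.
def _example_repository_usage():
    repo = Repository.get_by_repo_name('group/my-repo')
    commit = repo.get_commit()          # lookup through the cached scm instance
    Repository.lock(repo, user_id=2, lock_reason=Repository.LOCK_WEB)
    locked_by_user_id = repo.locked[0]  # -> 2
    Repository.unlock(repo)
    return commit, locked_by_user_id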
|
2015 | class RepoGroup(Base, BaseModel): | |||
|
2016 | __tablename__ = 'groups' | |||
|
2017 | __table_args__ = ( | |||
|
2018 | UniqueConstraint('group_name', 'group_parent_id'), | |||
|
2019 | CheckConstraint('group_id != group_parent_id'), | |||
|
2020 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2021 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
2022 | ) | |||
|
2023 | __mapper_args__ = {'order_by': 'group_name'} | |||
|
2024 | ||||
|
2025 | CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups | |||
|
2026 | ||||
|
2027 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2028 | group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) | |||
|
2029 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) | |||
|
2030 | group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) | |||
|
2031 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) | |||
|
2032 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) | |||
|
2033 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
2034 | ||||
|
2035 | repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') | |||
|
2036 | users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') | |||
|
2037 | parent_group = relationship('RepoGroup', remote_side=group_id) | |||
|
2038 | user = relationship('User') | |||
|
2039 | ||||
|
2040 | def __init__(self, group_name='', parent_group=None): | |||
|
2041 | self.group_name = group_name | |||
|
2042 | self.parent_group = parent_group | |||
|
2043 | ||||
|
2044 | def __unicode__(self): | |||
|
2045 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id, | |||
|
2046 | self.group_name) | |||
|
2047 | ||||
|
2048 | @classmethod | |||
|
2049 | def _generate_choice(cls, repo_group): | |||
|
2050 | from webhelpers.html import literal as _literal | |||
|
2051 | _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) | |||
|
2052 | return repo_group.group_id, _name(repo_group.full_path_splitted) | |||
|
2053 | ||||
|
2054 | @classmethod | |||
|
2055 | def groups_choices(cls, groups=None, show_empty_group=True): | |||
|
2056 | if not groups: | |||
|
2057 | groups = cls.query().all() | |||
|
2058 | ||||
|
2059 | repo_groups = [] | |||
|
2060 | if show_empty_group: | |||
|
2061 | repo_groups = [('-1', u'-- %s --' % _('No parent'))] | |||
|
2062 | ||||
|
2063 | repo_groups.extend([cls._generate_choice(x) for x in groups]) | |||
|
2064 | ||||
|
2065 | repo_groups = sorted( | |||
|
2066 | repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0]) | |||
|
2067 | return repo_groups | |||
|
2068 | ||||
|
2069 | @classmethod | |||
|
2070 | def url_sep(cls): | |||
|
2071 | return URL_SEP | |||
|
2072 | ||||
|
2073 | @classmethod | |||
|
2074 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): | |||
|
2075 | if case_insensitive: | |||
|
2076 | gr = cls.query().filter(func.lower(cls.group_name) | |||
|
2077 | == func.lower(group_name)) | |||
|
2078 | else: | |||
|
2079 | gr = cls.query().filter(cls.group_name == group_name) | |||
|
2080 | if cache: | |||
|
2081 | gr = gr.options(FromCache( | |||
|
2082 | "sql_cache_short", | |||
|
2083 | "get_group_%s" % _hash_key(group_name))) | |||
|
2084 | return gr.scalar() | |||
|
2085 | ||||
|
2086 | @classmethod | |||
|
2087 | def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None), | |||
|
2088 | case_insensitive=True): | |||
|
2089 | q = RepoGroup.query() | |||
|
2090 | ||||
|
2091 | if not isinstance(user_id, Optional): | |||
|
2092 | q = q.filter(RepoGroup.user_id == user_id) | |||
|
2093 | ||||
|
2094 | if not isinstance(group_id, Optional): | |||
|
2095 | q = q.filter(RepoGroup.group_parent_id == group_id) | |||
|
2096 | ||||
|
2097 | if case_insensitive: | |||
|
2098 | q = q.order_by(func.lower(RepoGroup.group_name)) | |||
|
2099 | else: | |||
|
2100 | q = q.order_by(RepoGroup.group_name) | |||
|
2101 | return q.all() | |||
|
2102 | ||||
|
2103 | @property | |||
|
2104 | def parents(self): | |||
|
2105 | parents_recursion_limit = 10 | |||
|
2106 | groups = [] | |||
|
2107 | if self.parent_group is None: | |||
|
2108 | return groups | |||
|
2109 | cur_gr = self.parent_group | |||
|
2110 | groups.insert(0, cur_gr) | |||
|
2111 | cnt = 0 | |||
|
2112 | while 1: | |||
|
2113 | cnt += 1 | |||
|
2114 | gr = getattr(cur_gr, 'parent_group', None) | |||
|
2115 | cur_gr = cur_gr.parent_group | |||
|
2116 | if gr is None: | |||
|
2117 | break | |||
|
2118 | if cnt == parents_recursion_limit: | |||
|
2119 | # this will prevent accidental infinite loops | |||
|
2120 | log.error(('more than %s parents found for group %s, stopping ' | |||
|
2121 | 'recursive parent fetching' % (parents_recursion_limit, self))) | |||
|
2122 | break | |||
|
2123 | ||||
|
2124 | groups.insert(0, gr) | |||
|
2125 | return groups | |||
|
2126 | ||||
|
2127 | @property | |||
|
2128 | def children(self): | |||
|
2129 | return RepoGroup.query().filter(RepoGroup.parent_group == self) | |||
|
2130 | ||||
|
2131 | @property | |||
|
2132 | def name(self): | |||
|
2133 | return self.group_name.split(RepoGroup.url_sep())[-1] | |||
|
2134 | ||||
|
2135 | @property | |||
|
2136 | def full_path(self): | |||
|
2137 | return self.group_name | |||
|
2138 | ||||
|
2139 | @property | |||
|
2140 | def full_path_splitted(self): | |||
|
2141 | return self.group_name.split(RepoGroup.url_sep()) | |||
|
2142 | ||||
|
2143 | @property | |||
|
2144 | def repositories(self): | |||
|
2145 | return Repository.query()\ | |||
|
2146 | .filter(Repository.group == self)\ | |||
|
2147 | .order_by(Repository.repo_name) | |||
|
2148 | ||||
|
2149 | @property | |||
|
2150 | def repositories_recursive_count(self): | |||
|
2151 | cnt = self.repositories.count() | |||
|
2152 | ||||
|
2153 | def children_count(group): | |||
|
2154 | cnt = 0 | |||
|
2155 | for child in group.children: | |||
|
2156 | cnt += child.repositories.count() | |||
|
2157 | cnt += children_count(child) | |||
|
2158 | return cnt | |||
|
2159 | ||||
|
2160 | return cnt + children_count(self) | |||
|
2161 | ||||
|
2162 | def _recursive_objects(self, include_repos=True): | |||
|
2163 | all_ = [] | |||
|
2164 | ||||
|
2165 | def _get_members(root_gr): | |||
|
2166 | if include_repos: | |||
|
2167 | for r in root_gr.repositories: | |||
|
2168 | all_.append(r) | |||
|
2169 | children = root_gr.children.all() | |||

2170 | if children: | |||

2171 | for gr in children: | |||
|
2172 | all_.append(gr) | |||
|
2173 | _get_members(gr) | |||
|
2174 | ||||
|
2175 | _get_members(self) | |||
|
2176 | return [self] + all_ | |||
|
2177 | ||||
|
2178 | def recursive_groups_and_repos(self): | |||
|
2179 | """ | |||
|
2180 | Recursively returns all groups, with the repositories in those groups | |||
|
2181 | """ | |||
|
2182 | return self._recursive_objects() | |||
|
2183 | ||||
|
2184 | def recursive_groups(self): | |||
|
2185 | """ | |||
|
2186 | Returns all children groups for this group including children of children | |||
|
2187 | """ | |||
|
2188 | return self._recursive_objects(include_repos=False) | |||
|
2189 | ||||
|
2190 | def get_new_name(self, group_name): | |||
|
2191 | """ | |||
|
2192 | returns new full group name based on parent and new name | |||
|
2193 | ||||
|
2194 | :param group_name: | |||
|
2195 | """ | |||
|
2196 | path_prefix = (self.parent_group.full_path_splitted if | |||
|
2197 | self.parent_group else []) | |||
|
2198 | return RepoGroup.url_sep().join(path_prefix + [group_name]) | |||
|
2199 | ||||
|
2200 | def permissions(self, with_admins=True, with_owner=True): | |||
|
2201 | q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self) | |||
|
2202 | q = q.options(joinedload(UserRepoGroupToPerm.group), | |||
|
2203 | joinedload(UserRepoGroupToPerm.user), | |||
|
2204 | joinedload(UserRepoGroupToPerm.permission),) | |||
|
2205 | ||||
|
2206 | # get owners, admins and their permissions. We rewrite the sqlalchemy | |||

2207 | # objects into plain AttributeDicts because the sqlalchemy session | |||

2208 | # keeps a global reference to each object, so changing one instance | |||

2209 | # propagates to all others. E.g. if an admin is also the owner, setting | |||

2210 | # admin_row on one row would otherwise change both objects | |||
|
2211 | perm_rows = [] | |||
|
2212 | for _usr in q.all(): | |||
|
2213 | usr = AttributeDict(_usr.user.get_dict()) | |||
|
2214 | usr.permission = _usr.permission.permission_name | |||
|
2215 | perm_rows.append(usr) | |||
|
2216 | ||||
|
2217 | # filter the perm rows by 'default' first and then sort them by | |||
|
2218 | # admin,write,read,none permissions sorted again alphabetically in | |||
|
2219 | # each group | |||
|
2220 | perm_rows = sorted(perm_rows, key=display_sort) | |||
|
2221 | ||||
|
2222 | _admin_perm = 'group.admin' | |||
|
2223 | owner_row = [] | |||
|
2224 | if with_owner: | |||
|
2225 | usr = AttributeDict(self.user.get_dict()) | |||
|
2226 | usr.owner_row = True | |||
|
2227 | usr.permission = _admin_perm | |||
|
2228 | owner_row.append(usr) | |||
|
2229 | ||||
|
2230 | super_admin_rows = [] | |||
|
2231 | if with_admins: | |||
|
2232 | for usr in User.get_all_super_admins(): | |||
|
2233 | # if this admin is also owner, don't double the record | |||
|
2234 | if usr.user_id == owner_row[0].user_id: | |||
|
2235 | owner_row[0].admin_row = True | |||
|
2236 | else: | |||
|
2237 | usr = AttributeDict(usr.get_dict()) | |||
|
2238 | usr.admin_row = True | |||
|
2239 | usr.permission = _admin_perm | |||
|
2240 | super_admin_rows.append(usr) | |||
|
2241 | ||||
|
2242 | return super_admin_rows + owner_row + perm_rows | |||
|
2243 | ||||
|
2244 | def permission_user_groups(self): | |||
|
2245 | q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self) | |||
|
2246 | q = q.options(joinedload(UserGroupRepoGroupToPerm.group), | |||
|
2247 | joinedload(UserGroupRepoGroupToPerm.users_group), | |||
|
2248 | joinedload(UserGroupRepoGroupToPerm.permission),) | |||
|
2249 | ||||
|
2250 | perm_rows = [] | |||
|
2251 | for _user_group in q.all(): | |||
|
2252 | usr = AttributeDict(_user_group.users_group.get_dict()) | |||
|
2253 | usr.permission = _user_group.permission.permission_name | |||
|
2254 | perm_rows.append(usr) | |||
|
2255 | ||||
|
2256 | return perm_rows | |||
|
2257 | ||||
|
2258 | def get_api_data(self): | |||
|
2259 | """ | |||
|
2260 | Common function for generating api data | |||
|
2261 | ||||
|
2262 | """ | |||
|
2263 | group = self | |||
|
2264 | data = { | |||
|
2265 | 'group_id': group.group_id, | |||
|
2266 | 'group_name': group.group_name, | |||
|
2267 | 'group_description': group.group_description, | |||
|
2268 | 'parent_group': group.parent_group.group_name if group.parent_group else None, | |||
|
2269 | 'repositories': [x.repo_name for x in group.repositories], | |||
|
2270 | 'owner': group.user.username, | |||
|
2271 | } | |||
|
2272 | return data | |||
|
2273 | ||||
|
2274 | ||||
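# Illustrative usage sketch, not part of the original module: walking a
# repository group tree with the RepoGroup helpers above. The group name is a
# hypothetical example and a configured database session is assumed.
def _example_repo_group_walk():
    group = RepoGroup.get_by_group_name('company/projects')
    total = group.repositories_recursive_count    # repos in this group and its children
    members = group.recursive_groups_and_repos()  # the group itself, child groups, repos
    new_name = group.get_new_name('archive')      # -> 'company/archive'
    return total, members, new_name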
|
2275 | class Permission(Base, BaseModel): | |||
|
2276 | __tablename__ = 'permissions' | |||
|
2277 | __table_args__ = ( | |||
|
2278 | Index('p_perm_name_idx', 'permission_name'), | |||
|
2279 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2280 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
2281 | ) | |||
|
2282 | PERMS = [ | |||
|
2283 | ('hg.admin', _('RhodeCode Super Administrator')), | |||
|
2284 | ||||
|
2285 | ('repository.none', _('Repository no access')), | |||
|
2286 | ('repository.read', _('Repository read access')), | |||
|
2287 | ('repository.write', _('Repository write access')), | |||
|
2288 | ('repository.admin', _('Repository admin access')), | |||
|
2289 | ||||
|
2290 | ('group.none', _('Repository group no access')), | |||
|
2291 | ('group.read', _('Repository group read access')), | |||
|
2292 | ('group.write', _('Repository group write access')), | |||
|
2293 | ('group.admin', _('Repository group admin access')), | |||
|
2294 | ||||
|
2295 | ('usergroup.none', _('User group no access')), | |||
|
2296 | ('usergroup.read', _('User group read access')), | |||
|
2297 | ('usergroup.write', _('User group write access')), | |||
|
2298 | ('usergroup.admin', _('User group admin access')), | |||
|
2299 | ||||
|
2300 | ('hg.repogroup.create.false', _('Repository Group creation disabled')), | |||
|
2301 | ('hg.repogroup.create.true', _('Repository Group creation enabled')), | |||
|
2302 | ||||
|
2303 | ('hg.usergroup.create.false', _('User Group creation disabled')), | |||
|
2304 | ('hg.usergroup.create.true', _('User Group creation enabled')), | |||
|
2305 | ||||
|
2306 | ('hg.create.none', _('Repository creation disabled')), | |||
|
2307 | ('hg.create.repository', _('Repository creation enabled')), | |||
|
2308 | ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')), | |||
|
2309 | ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')), | |||
|
2310 | ||||
|
2311 | ('hg.fork.none', _('Repository forking disabled')), | |||
|
2312 | ('hg.fork.repository', _('Repository forking enabled')), | |||
|
2313 | ||||
|
2314 | ('hg.register.none', _('Registration disabled')), | |||
|
2315 | ('hg.register.manual_activate', _('User Registration with manual account activation')), | |||
|
2316 | ('hg.register.auto_activate', _('User Registration with automatic account activation')), | |||
|
2317 | ||||
|
2318 | ('hg.extern_activate.manual', _('Manual activation of external account')), | |||
|
2319 | ('hg.extern_activate.auto', _('Automatic activation of external account')), | |||
|
2320 | ||||
|
2321 | ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')), | |||
|
2322 | ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')), | |||
|
2323 | ] | |||
|
2324 | ||||
|
2325 | # definition of system default permissions for DEFAULT user | |||
|
2326 | DEFAULT_USER_PERMISSIONS = [ | |||
|
2327 | 'repository.read', | |||
|
2328 | 'group.read', | |||
|
2329 | 'usergroup.read', | |||
|
2330 | 'hg.create.repository', | |||
|
2331 | 'hg.repogroup.create.false', | |||
|
2332 | 'hg.usergroup.create.false', | |||
|
2333 | 'hg.create.write_on_repogroup.true', | |||
|
2334 | 'hg.fork.repository', | |||
|
2335 | 'hg.register.manual_activate', | |||
|
2336 | 'hg.extern_activate.auto', | |||
|
2337 | 'hg.inherit_default_perms.true', | |||
|
2338 | ] | |||
|
2339 | ||||
|
2340 | # Weight defines which permissions are more important. | |||

2341 | # The higher the number, the more important the permission. | |||

2342 | # Used to pick the strongest of several candidate permissions. | |||
|
2343 | PERM_WEIGHTS = { | |||
|
2344 | 'repository.none': 0, | |||
|
2345 | 'repository.read': 1, | |||
|
2346 | 'repository.write': 3, | |||
|
2347 | 'repository.admin': 4, | |||
|
2348 | ||||
|
2349 | 'group.none': 0, | |||
|
2350 | 'group.read': 1, | |||
|
2351 | 'group.write': 3, | |||
|
2352 | 'group.admin': 4, | |||
|
2353 | ||||
|
2354 | 'usergroup.none': 0, | |||
|
2355 | 'usergroup.read': 1, | |||
|
2356 | 'usergroup.write': 3, | |||
|
2357 | 'usergroup.admin': 4, | |||
|
2358 | ||||
|
2359 | 'hg.repogroup.create.false': 0, | |||
|
2360 | 'hg.repogroup.create.true': 1, | |||
|
2361 | ||||
|
2362 | 'hg.usergroup.create.false': 0, | |||
|
2363 | 'hg.usergroup.create.true': 1, | |||
|
2364 | ||||
|
2365 | 'hg.fork.none': 0, | |||
|
2366 | 'hg.fork.repository': 1, | |||
|
2367 | 'hg.create.none': 0, | |||
|
2368 | 'hg.create.repository': 1 | |||
|
2369 | } | |||
|
2370 | ||||
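    # Illustrative sketch (not part of the original code): the weights allow
    # calling code to pick the strongest of several candidate permissions,
    # e.g. (hypothetical values)
    #
    #     candidates = ['repository.read', 'repository.write']
    #     max(candidates, key=Permission.PERM_WEIGHTS.get)  # 'repository.write'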
|
2371 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2372 | permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) | |||
|
2373 | permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) | |||
|
2374 | ||||
|
2375 | def __unicode__(self): | |||
|
2376 | return u"<%s('%s:%s')>" % ( | |||
|
2377 | self.__class__.__name__, self.permission_id, self.permission_name | |||
|
2378 | ) | |||
|
2379 | ||||
|
2380 | @classmethod | |||
|
2381 | def get_by_key(cls, key): | |||
|
2382 | return cls.query().filter(cls.permission_name == key).scalar() | |||
|
2383 | ||||
|
2384 | @classmethod | |||
|
2385 | def get_default_repo_perms(cls, user_id, repo_id=None): | |||
|
2386 | q = Session().query(UserRepoToPerm, Repository, Permission)\ | |||
|
2387 | .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\ | |||
|
2388 | .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ | |||
|
2389 | .filter(UserRepoToPerm.user_id == user_id) | |||
|
2390 | if repo_id: | |||
|
2391 | q = q.filter(UserRepoToPerm.repository_id == repo_id) | |||
|
2392 | return q.all() | |||
|
2393 | ||||
|
2394 | @classmethod | |||
|
2395 | def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None): | |||
|
2396 | q = Session().query(UserGroupRepoToPerm, Repository, Permission)\ | |||
|
2397 | .join( | |||
|
2398 | Permission, | |||
|
2399 | UserGroupRepoToPerm.permission_id == Permission.permission_id)\ | |||
|
2400 | .join( | |||
|
2401 | Repository, | |||
|
2402 | UserGroupRepoToPerm.repository_id == Repository.repo_id)\ | |||
|
2403 | .join( | |||
|
2404 | UserGroup, | |||
|
2405 | UserGroupRepoToPerm.users_group_id == | |||
|
2406 | UserGroup.users_group_id)\ | |||
|
2407 | .join( | |||
|
2408 | UserGroupMember, | |||
|
2409 | UserGroupRepoToPerm.users_group_id == | |||
|
2410 | UserGroupMember.users_group_id)\ | |||
|
2411 | .filter( | |||
|
2412 | UserGroupMember.user_id == user_id, | |||
|
2413 | UserGroup.users_group_active == true()) | |||
|
2414 | if repo_id: | |||
|
2415 | q = q.filter(UserGroupRepoToPerm.repository_id == repo_id) | |||
|
2416 | return q.all() | |||
|
2417 | ||||
|
2418 | @classmethod | |||
|
2419 | def get_default_group_perms(cls, user_id, repo_group_id=None): | |||
|
2420 | q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\ | |||
|
2421 | .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\ | |||
|
2422 | .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\ | |||
|
2423 | .filter(UserRepoGroupToPerm.user_id == user_id) | |||
|
2424 | if repo_group_id: | |||
|
2425 | q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id) | |||
|
2426 | return q.all() | |||
|
2427 | ||||
|
2428 | @classmethod | |||
|
2429 | def get_default_group_perms_from_user_group( | |||
|
2430 | cls, user_id, repo_group_id=None): | |||
|
2431 | q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\ | |||
|
2432 | .join( | |||
|
2433 | Permission, | |||
|
2434 | UserGroupRepoGroupToPerm.permission_id == | |||
|
2435 | Permission.permission_id)\ | |||
|
2436 | .join( | |||
|
2437 | RepoGroup, | |||
|
2438 | UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\ | |||
|
2439 | .join( | |||
|
2440 | UserGroup, | |||
|
2441 | UserGroupRepoGroupToPerm.users_group_id == | |||
|
2442 | UserGroup.users_group_id)\ | |||
|
2443 | .join( | |||
|
2444 | UserGroupMember, | |||
|
2445 | UserGroupRepoGroupToPerm.users_group_id == | |||
|
2446 | UserGroupMember.users_group_id)\ | |||
|
2447 | .filter( | |||
|
2448 | UserGroupMember.user_id == user_id, | |||
|
2449 | UserGroup.users_group_active == true()) | |||
|
2450 | if repo_group_id: | |||
|
2451 | q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id) | |||
|
2452 | return q.all() | |||
|
2453 | ||||
|
2454 | @classmethod | |||
|
2455 | def get_default_user_group_perms(cls, user_id, user_group_id=None): | |||
|
2456 | q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\ | |||
|
2457 | .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\ | |||
|
2458 | .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\ | |||
|
2459 | .filter(UserUserGroupToPerm.user_id == user_id) | |||
|
2460 | if user_group_id: | |||
|
2461 | q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id) | |||
|
2462 | return q.all() | |||
|
2463 | ||||
|
2464 | @classmethod | |||
|
2465 | def get_default_user_group_perms_from_user_group( | |||
|
2466 | cls, user_id, user_group_id=None): | |||
|
2467 | TargetUserGroup = aliased(UserGroup, name='target_user_group') | |||
|
2468 | q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\ | |||
|
2469 | .join( | |||
|
2470 | Permission, | |||
|
2471 | UserGroupUserGroupToPerm.permission_id == | |||
|
2472 | Permission.permission_id)\ | |||
|
2473 | .join( | |||
|
2474 | TargetUserGroup, | |||
|
2475 | UserGroupUserGroupToPerm.target_user_group_id == | |||
|
2476 | TargetUserGroup.users_group_id)\ | |||
|
2477 | .join( | |||
|
2478 | UserGroup, | |||
|
2479 | UserGroupUserGroupToPerm.user_group_id == | |||
|
2480 | UserGroup.users_group_id)\ | |||
|
2481 | .join( | |||
|
2482 | UserGroupMember, | |||
|
2483 | UserGroupUserGroupToPerm.user_group_id == | |||
|
2484 | UserGroupMember.users_group_id)\ | |||
|
2485 | .filter( | |||
|
2486 | UserGroupMember.user_id == user_id, | |||
|
2487 | UserGroup.users_group_active == true()) | |||
|
2488 | if user_group_id: | |||
|
2489 | q = q.filter( | |||
|
2490 | UserGroupUserGroupToPerm.user_group_id == user_group_id) | |||
|
2491 | ||||
|
2492 | return q.all() | |||
|
2493 | ||||
|
2494 | ||||
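# Illustrative usage sketch, not part of the original module: looking up
# permission rows through the Permission helpers above. The user id and repo
# id are hypothetical examples and a configured database session is assumed.
def _example_permission_lookup(user_id, repo_id):
    write_perm = Permission.get_by_key('repository.write')
    direct = Permission.get_default_repo_perms(user_id, repo_id)
    via_groups = Permission.get_default_repo_perms_from_user_group(user_id, repo_id)
    # each entry in the two lists is a (<*ToPerm row>, Repository, Permission) tuple
    return write_perm, direct, via_groups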
|
2495 | class UserRepoToPerm(Base, BaseModel): | |||
|
2496 | __tablename__ = 'repo_to_perm' | |||
|
2497 | __table_args__ = ( | |||
|
2498 | UniqueConstraint('user_id', 'repository_id', 'permission_id'), | |||
|
2499 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2500 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
2501 | ) | |||
|
2502 | repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2503 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |||
|
2504 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |||
|
2505 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |||
|
2506 | ||||
|
2507 | user = relationship('User') | |||
|
2508 | repository = relationship('Repository') | |||
|
2509 | permission = relationship('Permission') | |||
|
2510 | ||||
|
2511 | @classmethod | |||
|
2512 | def create(cls, user, repository, permission): | |||
|
2513 | n = cls() | |||
|
2514 | n.user = user | |||
|
2515 | n.repository = repository | |||
|
2516 | n.permission = permission | |||
|
2517 | Session().add(n) | |||
|
2518 | return n | |||
|
2519 | ||||
|
2520 | def __unicode__(self): | |||
|
2521 | return u'<%s => %s >' % (self.user, self.repository) | |||
|
2522 | ||||
|
2523 | ||||
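# Illustrative usage sketch, not part of the original module: granting a user
# write access to a repository via UserRepoToPerm.create(). The objects passed
# in are hypothetical and the caller is assumed to commit the session.
def _example_grant_repo_permission(user, repo):
    perm = Permission.get_by_key('repository.write')
    return UserRepoToPerm.create(user, repo, perm)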
|
2524 | class UserUserGroupToPerm(Base, BaseModel): | |||
|
2525 | __tablename__ = 'user_user_group_to_perm' | |||
|
2526 | __table_args__ = ( | |||
|
2527 | UniqueConstraint('user_id', 'user_group_id', 'permission_id'), | |||
|
2528 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2529 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
2530 | ) | |||
|
2531 | user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2532 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |||
|
2533 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |||
|
2534 | user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |||
|
2535 | ||||
|
2536 | user = relationship('User') | |||
|
2537 | user_group = relationship('UserGroup') | |||
|
2538 | permission = relationship('Permission') | |||
|
2539 | ||||
|
2540 | @classmethod | |||
|
2541 | def create(cls, user, user_group, permission): | |||
|
2542 | n = cls() | |||
|
2543 | n.user = user | |||
|
2544 | n.user_group = user_group | |||
|
2545 | n.permission = permission | |||
|
2546 | Session().add(n) | |||
|
2547 | return n | |||
|
2548 | ||||
|
2549 | def __unicode__(self): | |||
|
2550 | return u'<%s => %s >' % (self.user, self.user_group) | |||
|
2551 | ||||
|
2552 | ||||
|
2553 | class UserToPerm(Base, BaseModel): | |||
|
2554 | __tablename__ = 'user_to_perm' | |||
|
2555 | __table_args__ = ( | |||
|
2556 | UniqueConstraint('user_id', 'permission_id'), | |||
|
2557 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2558 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
2559 | ) | |||
|
2560 | user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2561 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |||
|
2562 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |||
|
2563 | ||||
|
2564 | user = relationship('User') | |||
|
2565 | permission = relationship('Permission', lazy='joined') | |||
|
2566 | ||||
|
2567 | def __unicode__(self): | |||
|
2568 | return u'<%s => %s >' % (self.user, self.permission) | |||
|
2569 | ||||
|
2570 | ||||
|
2571 | class UserGroupRepoToPerm(Base, BaseModel): | |||
|
2572 | __tablename__ = 'users_group_repo_to_perm' | |||
|
2573 | __table_args__ = ( | |||
|
2574 | UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), | |||
|
2575 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2576 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
2577 | ) | |||
|
2578 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2579 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |||
|
2580 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |||
|
2581 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |||
|
2582 | ||||
|
2583 | users_group = relationship('UserGroup') | |||
|
2584 | permission = relationship('Permission') | |||
|
2585 | repository = relationship('Repository') | |||
|
2586 | ||||
|
2587 | @classmethod | |||
|
2588 | def create(cls, users_group, repository, permission): | |||
|
2589 | n = cls() | |||
|
2590 | n.users_group = users_group | |||
|
2591 | n.repository = repository | |||
|
2592 | n.permission = permission | |||
|
2593 | Session().add(n) | |||
|
2594 | return n | |||
|
2595 | ||||
|
2596 | def __unicode__(self): | |||
|
2597 | return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository) | |||
|
2598 | ||||
|
2599 | ||||
|
2600 | class UserGroupUserGroupToPerm(Base, BaseModel): | |||
|
2601 | __tablename__ = 'user_group_user_group_to_perm' | |||
|
2602 | __table_args__ = ( | |||
|
2603 | UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'), | |||
|
2604 | CheckConstraint('target_user_group_id != user_group_id'), | |||
|
2605 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2606 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
2607 | ) | |||
|
2608 | user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2609 | target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |||
|
2610 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |||
|
2611 | user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |||
|
2612 | ||||
|
2613 | target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id') | |||
|
2614 | user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id') | |||
|
2615 | permission = relationship('Permission') | |||
|
2616 | ||||
|
2617 | @classmethod | |||
|
2618 | def create(cls, target_user_group, user_group, permission): | |||
|
2619 | n = cls() | |||
|
2620 | n.target_user_group = target_user_group | |||
|
2621 | n.user_group = user_group | |||
|
2622 | n.permission = permission | |||
|
2623 | Session().add(n) | |||
|
2624 | return n | |||
|
2625 | ||||
|
2626 | def __unicode__(self): | |||
|
2627 | return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group) | |||
|
2628 | ||||
|
2629 | ||||
|
2630 | class UserGroupToPerm(Base, BaseModel): | |||
|
2631 | __tablename__ = 'users_group_to_perm' | |||
|
2632 | __table_args__ = ( | |||
|
2633 | UniqueConstraint('users_group_id', 'permission_id',), | |||
|
2634 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2635 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
2636 | ) | |||
|
2637 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2638 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |||
|
2639 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |||
|
2640 | ||||
|
2641 | users_group = relationship('UserGroup') | |||
|
2642 | permission = relationship('Permission') | |||
|
2643 | ||||
|
2644 | ||||
|
2645 | class UserRepoGroupToPerm(Base, BaseModel): | |||
|
2646 | __tablename__ = 'user_repo_group_to_perm' | |||
|
2647 | __table_args__ = ( | |||
|
2648 | UniqueConstraint('user_id', 'group_id', 'permission_id'), | |||
|
2649 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2650 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
2651 | ) | |||
|
2652 | ||||
|
2653 | group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2654 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |||
|
2655 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) | |||
|
2656 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |||
|
2657 | ||||
|
2658 | user = relationship('User') | |||
|
2659 | group = relationship('RepoGroup') | |||
|
2660 | permission = relationship('Permission') | |||
|
2661 | ||||
|
2662 | @classmethod | |||
|
2663 | def create(cls, user, repository_group, permission): | |||
|
2664 | n = cls() | |||
|
2665 | n.user = user | |||
|
2666 | n.group = repository_group | |||
|
2667 | n.permission = permission | |||
|
2668 | Session().add(n) | |||
|
2669 | return n | |||
|
2670 | ||||
|
2671 | ||||
|
2672 | class UserGroupRepoGroupToPerm(Base, BaseModel): | |||
|
2673 | __tablename__ = 'users_group_repo_group_to_perm' | |||
|
2674 | __table_args__ = ( | |||
|
2675 | UniqueConstraint('users_group_id', 'group_id'), | |||
|
2676 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2677 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
2678 | ) | |||
|
2679 | ||||
|
2680 | users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2681 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |||
|
2682 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) | |||
|
2683 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |||
|
2684 | ||||
|
2685 | users_group = relationship('UserGroup') | |||
|
2686 | permission = relationship('Permission') | |||
|
2687 | group = relationship('RepoGroup') | |||
|
2688 | ||||
|
2689 | @classmethod | |||
|
2690 | def create(cls, user_group, repository_group, permission): | |||
|
2691 | n = cls() | |||
|
2692 | n.users_group = user_group | |||
|
2693 | n.group = repository_group | |||
|
2694 | n.permission = permission | |||
|
2695 | Session().add(n) | |||
|
2696 | return n | |||
|
2697 | ||||
|
2698 | def __unicode__(self): | |||
|
2699 | return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group) | |||
|
2700 | ||||
|
2701 | ||||
|
2702 | class Statistics(Base, BaseModel): | |||
|
2703 | __tablename__ = 'statistics' | |||
|
2704 | __table_args__ = ( | |||
|
2705 | UniqueConstraint('repository_id'), | |||
|
2706 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2707 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
2708 | ) | |||
|
2709 | stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2710 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) | |||
|
2711 | stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) | |||
|
2712 | commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data | |||
|
2713 | commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data | |||
|
2714 | languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data | |||
|
2715 | ||||
|
2716 | repository = relationship('Repository', single_parent=True) | |||
|
2717 | ||||
|
2718 | ||||
|
2719 | class UserFollowing(Base, BaseModel): | |||
|
2720 | __tablename__ = 'user_followings' | |||
|
2721 | __table_args__ = ( | |||
|
2722 | UniqueConstraint('user_id', 'follows_repository_id'), | |||
|
2723 | UniqueConstraint('user_id', 'follows_user_id'), | |||
|
2724 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2725 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
2726 | ) | |||
|
2727 | ||||
|
2728 | user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2729 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |||
|
2730 | follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None) | |||
|
2731 | follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |||
|
2732 | follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) | |||
|
2733 | ||||
|
2734 | user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id') | |||
|
2735 | ||||
|
2736 | follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id') | |||
|
2737 | follows_repository = relationship('Repository', order_by='Repository.repo_name') | |||
|
2738 | ||||
|
2739 | @classmethod | |||
|
2740 | def get_repo_followers(cls, repo_id): | |||
|
2741 | return cls.query().filter(cls.follows_repo_id == repo_id) | |||
|
2742 | ||||
|
2743 | ||||
|
2744 | class CacheKey(Base, BaseModel): | |||
|
2745 | __tablename__ = 'cache_invalidation' | |||
|
2746 | __table_args__ = ( | |||
|
2747 | UniqueConstraint('cache_key'), | |||
|
2748 | Index('key_idx', 'cache_key'), | |||
|
2749 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2750 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
2751 | ) | |||
|
2752 | CACHE_TYPE_ATOM = 'ATOM' | |||
|
2753 | CACHE_TYPE_RSS = 'RSS' | |||
|
2754 | CACHE_TYPE_README = 'README' | |||
|
2755 | ||||
|
2756 | cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
2757 | cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) | |||
|
2758 | cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None) | |||
|
2759 | cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) | |||
|
2760 | ||||
|
2761 | def __init__(self, cache_key, cache_args=''): | |||
|
2762 | self.cache_key = cache_key | |||
|
2763 | self.cache_args = cache_args | |||
|
2764 | self.cache_active = False | |||
|
2765 | ||||
|
2766 | def __unicode__(self): | |||
|
2767 | return u"<%s('%s:%s[%s]')>" % ( | |||
|
2768 | self.__class__.__name__, | |||
|
2769 | self.cache_id, self.cache_key, self.cache_active) | |||
|
2770 | ||||
|
2771 | def _cache_key_partition(self): | |||
|
2772 | prefix, repo_name, suffix = self.cache_key.partition(self.cache_args) | |||
|
2773 | return prefix, repo_name, suffix | |||
|
2774 | ||||
|
2775 | def get_prefix(self): | |||
|
2776 | """ | |||
|
2777 | Try to extract prefix from existing cache key. The key could consist | |||
|
2778 | of prefix, repo_name, suffix | |||
|
2779 | """ | |||
|
2780 | # this returns prefix, repo_name, suffix | |||
|
2781 | return self._cache_key_partition()[0] | |||
|
2782 | ||||
|
2783 | def get_suffix(self): | |||
|
2784 | """ | |||
|
2785 | get suffix that might have been used in _get_cache_key to | |||
|
2786 | generate self.cache_key. Only used for informational purposes | |||
|
2787 | in repo_edit.html. | |||
|
2788 | """ | |||
|
2789 | # prefix, repo_name, suffix | |||
|
2790 | return self._cache_key_partition()[2] | |||
|
2791 | ||||
|
2792 | @classmethod | |||
|
2793 | def delete_all_cache(cls): | |||
|
2794 | """ | |||
|
2795 | Delete all cache keys from database. | |||
|
2796 | Should only be run when all instances are down and all entries | |||
|
2797 | thus stale. | |||
|
2798 | """ | |||
|
2799 | cls.query().delete() | |||
|
2800 | Session().commit() | |||
|
2801 | ||||
|
2802 | @classmethod | |||
|
2803 | def get_cache_key(cls, repo_name, cache_type): | |||
|
2804 | """ | |||
|
2805 | ||||
|
2806 | Generate a cache key for this process of the RhodeCode instance. | |||
|
2807 | The prefix will most likely be the process id, or an explicitly set | |||
|
2808 | instance_id from the .ini file. | |||
|
2809 | """ | |||
|
2810 | import rhodecode | |||
|
2811 | prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '') | |||
|
2812 | ||||
|
2813 | repo_as_unicode = safe_unicode(repo_name) | |||
|
2814 | key = u'{}_{}'.format(repo_as_unicode, cache_type) \ | |||
|
2815 | if cache_type else repo_as_unicode | |||
|
2816 | ||||
|
2817 | return u'{}{}'.format(prefix, key) | |||
|
2818 | ||||
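A small sketch of the key shapes the classmethod above produces (the instance_id value is made up; with no instance_id configured the prefix is simply empty):

    # assuming rhodecode.CONFIG['instance_id'] == 'instance1'
    CacheKey.get_cache_key('group/repo', CacheKey.CACHE_TYPE_README)  # -> u'instance1group/repo_README'
    CacheKey.get_cache_key('group/repo', None)                        # -> u'instance1group/repo'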
|
2819 | @classmethod | |||
|
2820 | def set_invalidate(cls, repo_name, delete=False): | |||
|
2821 | """ | |||
|
2822 | Mark all caches of a repo as invalid in the database. | |||
|
2823 | """ | |||
|
2824 | ||||
|
2825 | try: | |||
|
2826 | qry = Session().query(cls).filter(cls.cache_args == repo_name) | |||
|
2827 | if delete: | |||
|
2828 | log.debug('cache objects deleted for repo %s', | |||
|
2829 | safe_str(repo_name)) | |||
|
2830 | qry.delete() | |||
|
2831 | else: | |||
|
2832 | log.debug('cache objects marked as invalid for repo %s', | |||
|
2833 | safe_str(repo_name)) | |||
|
2834 | qry.update({"cache_active": False}) | |||
|
2835 | ||||
|
2836 | Session().commit() | |||
|
2837 | except Exception: | |||
|
2838 | log.exception( | |||
|
2839 | 'Cache key invalidation failed for repository %s', | |||
|
2840 | safe_str(repo_name)) | |||
|
2841 | Session().rollback() | |||
|
2842 | ||||
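Usage of the invalidation helper above is a one-liner either way (the repository name is a placeholder):

    CacheKey.set_invalidate('group/repo')               # flips cache_active to False for matching rows
    CacheKey.set_invalidate('group/repo', delete=True)  # removes the rows instead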
|
2843 | @classmethod | |||
|
2844 | def get_active_cache(cls, cache_key): | |||
|
2845 | inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar() | |||
|
2846 | if inv_obj: | |||
|
2847 | return inv_obj | |||
|
2848 | return None | |||
|
2849 | ||||
|
2850 | @classmethod | |||
|
2851 | def repo_context_cache(cls, compute_func, repo_name, cache_type, | |||
|
2852 | thread_scoped=False): | |||
|
2853 | """ | |||
|
2854 | @cache_region('long_term') | |||
|
2855 | def _heavy_calculation(cache_key): | |||
|
2856 | return 'result' | |||
|
2857 | ||||
|
2858 | cache_context = CacheKey.repo_context_cache( | |||
|
2859 | _heavy_calculation, repo_name, cache_type) | |||
|
2860 | ||||
|
2861 | with cache_context as context: | |||
|
2862 | context.invalidate() | |||
|
2863 | computed = context.compute() | |||
|
2864 | ||||
|
2865 | assert computed == 'result' | |||
|
2866 | """ | |||
|
2867 | from rhodecode.lib import caches | |||
|
2868 | return caches.InvalidationContext( | |||
|
2869 | compute_func, repo_name, cache_type, thread_scoped=thread_scoped) | |||
|
2870 | ||||
|
2871 | ||||
|
2872 | class ChangesetComment(Base, BaseModel): | |||
|
2873 | __tablename__ = 'changeset_comments' | |||
|
2874 | __table_args__ = ( | |||
|
2875 | Index('cc_revision_idx', 'revision'), | |||
|
2876 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2877 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
2878 | ) | |||
|
2879 | ||||
|
2880 | COMMENT_OUTDATED = u'comment_outdated' | |||
|
2881 | ||||
|
2882 | comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True) | |||
|
2883 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) | |||
|
2884 | revision = Column('revision', String(40), nullable=True) | |||
|
2885 | pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) | |||
|
2886 | pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True) | |||
|
2887 | line_no = Column('line_no', Unicode(10), nullable=True) | |||
|
2888 | hl_lines = Column('hl_lines', Unicode(512), nullable=True) | |||
|
2889 | f_path = Column('f_path', Unicode(1000), nullable=True) | |||
|
2890 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) | |||
|
2891 | text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False) | |||
|
2892 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
2893 | modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
2894 | renderer = Column('renderer', Unicode(64), nullable=True) | |||
|
2895 | display_state = Column('display_state', Unicode(128), nullable=True) | |||
|
2896 | ||||
|
2897 | author = relationship('User', lazy='joined') | |||
|
2898 | repo = relationship('Repository') | |||
|
2899 | status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan") | |||
|
2900 | pull_request = relationship('PullRequest', lazy='joined') | |||
|
2901 | pull_request_version = relationship('PullRequestVersion') | |||
|
2902 | ||||
|
2903 | @classmethod | |||
|
2904 | def get_users(cls, revision=None, pull_request_id=None): | |||
|
2905 | """ | |||
|
2906 | Returns users associated with this ChangesetComment, i.e. those | |||
|
2907 | who actually commented | |||
|
2908 | ||||
|
2909 | :param cls: | |||
|
2910 | :param revision: | |||
|
2911 | """ | |||
|
2912 | q = Session().query(User)\ | |||
|
2913 | .join(ChangesetComment.author) | |||
|
2914 | if revision: | |||
|
2915 | q = q.filter(cls.revision == revision) | |||
|
2916 | elif pull_request_id: | |||
|
2917 | q = q.filter(cls.pull_request_id == pull_request_id) | |||
|
2918 | return q.all() | |||
|
2919 | ||||
|
2920 | def render(self, mentions=False): | |||
|
2921 | from rhodecode.lib import helpers as h | |||
|
2922 | return h.render(self.text, renderer=self.renderer, mentions=mentions) | |||
|
2923 | ||||
|
2924 | def __repr__(self): | |||
|
2925 | if self.comment_id: | |||
|
2926 | return '<DB:ChangesetComment #%s>' % self.comment_id | |||
|
2927 | else: | |||
|
2928 | return '<DB:ChangesetComment at %#x>' % id(self) | |||
|
2929 | ||||
|
2930 | ||||
|
2931 | class ChangesetStatus(Base, BaseModel): | |||
|
2932 | __tablename__ = 'changeset_statuses' | |||
|
2933 | __table_args__ = ( | |||
|
2934 | Index('cs_revision_idx', 'revision'), | |||
|
2935 | Index('cs_version_idx', 'version'), | |||
|
2936 | UniqueConstraint('repo_id', 'revision', 'version'), | |||
|
2937 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
2938 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
2939 | ) | |||
|
2940 | STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed' | |||
|
2941 | STATUS_APPROVED = 'approved' | |||
|
2942 | STATUS_REJECTED = 'rejected' | |||
|
2943 | STATUS_UNDER_REVIEW = 'under_review' | |||
|
2944 | ||||
|
2945 | STATUSES = [ | |||
|
2946 | (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default | |||
|
2947 | (STATUS_APPROVED, _("Approved")), | |||
|
2948 | (STATUS_REJECTED, _("Rejected")), | |||
|
2949 | (STATUS_UNDER_REVIEW, _("Under Review")), | |||
|
2950 | ] | |||
|
2951 | ||||
|
2952 | changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True) | |||
|
2953 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) | |||
|
2954 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None) | |||
|
2955 | revision = Column('revision', String(40), nullable=False) | |||
|
2956 | status = Column('status', String(128), nullable=False, default=DEFAULT) | |||
|
2957 | changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id')) | |||
|
2958 | modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now) | |||
|
2959 | version = Column('version', Integer(), nullable=False, default=0) | |||
|
2960 | pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) | |||
|
2961 | ||||
|
2962 | author = relationship('User', lazy='joined') | |||
|
2963 | repo = relationship('Repository') | |||
|
2964 | comment = relationship('ChangesetComment', lazy='joined') | |||
|
2965 | pull_request = relationship('PullRequest', lazy='joined') | |||
|
2966 | ||||
|
2967 | def __unicode__(self): | |||
|
2968 | return u"<%s('%s[%s]:%s')>" % ( | |||
|
2969 | self.__class__.__name__, | |||
|
2970 | self.status, self.version, self.author | |||
|
2971 | ) | |||
|
2972 | ||||
|
2973 | @classmethod | |||
|
2974 | def get_status_lbl(cls, value): | |||
|
2975 | return dict(cls.STATUSES).get(value) | |||
|
2976 | ||||
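The label lookup above simply maps a stored status string to its (lazily translated) display label:

    ChangesetStatus.get_status_lbl('approved')   # -> the "Approved" label
    ChangesetStatus.get_status_lbl('unknown')    # -> None for values outside STATUSES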
|
2977 | @property | |||
|
2978 | def status_lbl(self): | |||
|
2979 | return ChangesetStatus.get_status_lbl(self.status) | |||
|
2980 | ||||
|
2981 | ||||
|
2982 | class _PullRequestBase(BaseModel): | |||
|
2983 | """ | |||
|
2984 | Common attributes of pull request and version entries. | |||
|
2985 | """ | |||
|
2986 | ||||
|
2987 | # .status values | |||
|
2988 | STATUS_NEW = u'new' | |||
|
2989 | STATUS_OPEN = u'open' | |||
|
2990 | STATUS_CLOSED = u'closed' | |||
|
2991 | ||||
|
2992 | title = Column('title', Unicode(255), nullable=True) | |||
|
2993 | description = Column( | |||
|
2994 | 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), | |||
|
2995 | nullable=True) | |||
|
2996 | # new/open/closed status of pull request (not approve/reject/etc) | |||
|
2997 | status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW) | |||
|
2998 | created_on = Column( | |||
|
2999 | 'created_on', DateTime(timezone=False), nullable=False, | |||
|
3000 | default=datetime.datetime.now) | |||
|
3001 | updated_on = Column( | |||
|
3002 | 'updated_on', DateTime(timezone=False), nullable=False, | |||
|
3003 | default=datetime.datetime.now) | |||
|
3004 | ||||
|
3005 | @declared_attr | |||
|
3006 | def user_id(cls): | |||
|
3007 | return Column( | |||
|
3008 | "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, | |||
|
3009 | unique=None) | |||
|
3010 | ||||
|
3011 | # 500 revisions max | |||
|
3012 | _revisions = Column( | |||
|
3013 | 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql')) | |||
|
3014 | ||||
|
3015 | @declared_attr | |||
|
3016 | def source_repo_id(cls): | |||
|
3017 | # TODO: dan: rename column to source_repo_id | |||
|
3018 | return Column( | |||
|
3019 | 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'), | |||
|
3020 | nullable=False) | |||
|
3021 | ||||
|
3022 | source_ref = Column('org_ref', Unicode(255), nullable=False) | |||
|
3023 | ||||
|
3024 | @declared_attr | |||
|
3025 | def target_repo_id(cls): | |||
|
3026 | # TODO: dan: rename column to target_repo_id | |||
|
3027 | return Column( | |||
|
3028 | 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'), | |||
|
3029 | nullable=False) | |||
|
3030 | ||||
|
3031 | target_ref = Column('other_ref', Unicode(255), nullable=False) | |||
|
3032 | ||||
|
3033 | # TODO: dan: rename column to last_merge_source_rev | |||
|
3034 | _last_merge_source_rev = Column( | |||
|
3035 | 'last_merge_org_rev', String(40), nullable=True) | |||
|
3036 | # TODO: dan: rename column to last_merge_target_rev | |||
|
3037 | _last_merge_target_rev = Column( | |||
|
3038 | 'last_merge_other_rev', String(40), nullable=True) | |||
|
3039 | _last_merge_status = Column('merge_status', Integer(), nullable=True) | |||
|
3040 | merge_rev = Column('merge_rev', String(40), nullable=True) | |||
|
3041 | ||||
|
3042 | @hybrid_property | |||
|
3043 | def revisions(self): | |||
|
3044 | return self._revisions.split(':') if self._revisions else [] | |||
|
3045 | ||||
|
3046 | @revisions.setter | |||
|
3047 | def revisions(self, val): | |||
|
3048 | self._revisions = ':'.join(val) | |||
|
3049 | ||||
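The hybrid property above stores the revision list as one colon-joined string in the revisions column; a quick round-trip sketch (the commit hashes are made up):

    pr = PullRequest()
    pr.revisions = ['deadbeef', 'cafebabe']     # persisted internally as u'deadbeef:cafebabe'
    assert pr.revisions == ['deadbeef', 'cafebabe']
    PullRequest().revisions                     # [] while nothing has been assigned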
|
3050 | @declared_attr | |||
|
3051 | def author(cls): | |||
|
3052 | return relationship('User', lazy='joined') | |||
|
3053 | ||||
|
3054 | @declared_attr | |||
|
3055 | def source_repo(cls): | |||
|
3056 | return relationship( | |||
|
3057 | 'Repository', | |||
|
3058 | primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__) | |||
|
3059 | ||||
|
3060 | @property | |||
|
3061 | def source_ref_parts(self): | |||
|
3062 | refs = self.source_ref.split(':') | |||
|
3063 | return Reference(refs[0], refs[1], refs[2]) | |||
|
3064 | ||||
|
3065 | @declared_attr | |||
|
3066 | def target_repo(cls): | |||
|
3067 | return relationship( | |||
|
3068 | 'Repository', | |||
|
3069 | primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__) | |||
|
3070 | ||||
|
3071 | @property | |||
|
3072 | def target_ref_parts(self): | |||
|
3073 | refs = self.target_ref.split(':') | |||
|
3074 | return Reference(refs[0], refs[1], refs[2]) | |||
|
3075 | ||||
|
3076 | ||||
|
3077 | class PullRequest(Base, _PullRequestBase): | |||
|
3078 | __tablename__ = 'pull_requests' | |||
|
3079 | __table_args__ = ( | |||
|
3080 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
3081 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
3082 | ) | |||
|
3083 | ||||
|
3084 | pull_request_id = Column( | |||
|
3085 | 'pull_request_id', Integer(), nullable=False, primary_key=True) | |||
|
3086 | ||||
|
3087 | def __repr__(self): | |||
|
3088 | if self.pull_request_id: | |||
|
3089 | return '<DB:PullRequest #%s>' % self.pull_request_id | |||
|
3090 | else: | |||
|
3091 | return '<DB:PullRequest at %#x>' % id(self) | |||
|
3092 | ||||
|
3093 | reviewers = relationship('PullRequestReviewers', | |||
|
3094 | cascade="all, delete, delete-orphan") | |||
|
3095 | statuses = relationship('ChangesetStatus') | |||
|
3096 | comments = relationship('ChangesetComment', | |||
|
3097 | cascade="all, delete, delete-orphan") | |||
|
3098 | versions = relationship('PullRequestVersion', | |||
|
3099 | cascade="all, delete, delete-orphan") | |||
|
3100 | ||||
|
3101 | def is_closed(self): | |||
|
3102 | return self.status == self.STATUS_CLOSED | |||
|
3103 | ||||
|
3104 | def get_api_data(self): | |||
|
3105 | from rhodecode.model.pull_request import PullRequestModel | |||
|
3106 | pull_request = self | |||
|
3107 | merge_status = PullRequestModel().merge_status(pull_request) | |||
|
3108 | data = { | |||
|
3109 | 'pull_request_id': pull_request.pull_request_id, | |||
|
3110 | 'url': url('pullrequest_show', repo_name=self.target_repo.repo_name, | |||
|
3111 | pull_request_id=self.pull_request_id, | |||
|
3112 | qualified=True), | |||
|
3113 | 'title': pull_request.title, | |||
|
3114 | 'description': pull_request.description, | |||
|
3115 | 'status': pull_request.status, | |||
|
3116 | 'created_on': pull_request.created_on, | |||
|
3117 | 'updated_on': pull_request.updated_on, | |||
|
3118 | 'commit_ids': pull_request.revisions, | |||
|
3119 | 'review_status': pull_request.calculated_review_status(), | |||
|
3120 | 'mergeable': { | |||
|
3121 | 'status': merge_status[0], | |||
|
3122 | 'message': unicode(merge_status[1]), | |||
|
3123 | }, | |||
|
3124 | 'source': { | |||
|
3125 | 'clone_url': pull_request.source_repo.clone_url(), | |||
|
3126 | 'repository': pull_request.source_repo.repo_name, | |||
|
3127 | 'reference': { | |||
|
3128 | 'name': pull_request.source_ref_parts.name, | |||
|
3129 | 'type': pull_request.source_ref_parts.type, | |||
|
3130 | 'commit_id': pull_request.source_ref_parts.commit_id, | |||
|
3131 | }, | |||
|
3132 | }, | |||
|
3133 | 'target': { | |||
|
3134 | 'clone_url': pull_request.target_repo.clone_url(), | |||
|
3135 | 'repository': pull_request.target_repo.repo_name, | |||
|
3136 | 'reference': { | |||
|
3137 | 'name': pull_request.target_ref_parts.name, | |||
|
3138 | 'type': pull_request.target_ref_parts.type, | |||
|
3139 | 'commit_id': pull_request.target_ref_parts.commit_id, | |||
|
3140 | }, | |||
|
3141 | }, | |||
|
3142 | 'author': pull_request.author.get_api_data(include_secrets=False, | |||
|
3143 | details='basic'), | |||
|
3144 | 'reviewers': [ | |||
|
3145 | { | |||
|
3146 | 'user': reviewer.get_api_data(include_secrets=False, | |||
|
3147 | details='basic'), | |||
|
3148 | 'review_status': st[0][1].status if st else 'not_reviewed', | |||
|
3149 | } | |||
|
3150 | for reviewer, st in pull_request.reviewers_statuses() | |||
|
3151 | ] | |||
|
3152 | } | |||
|
3153 | ||||
|
3154 | return data | |||
|
3155 | ||||
|
3156 | def __json__(self): | |||
|
3157 | return { | |||
|
3158 | 'revisions': self.revisions, | |||
|
3159 | } | |||
|
3160 | ||||
|
3161 | def calculated_review_status(self): | |||
|
3162 | # TODO: anderson: 13.05.15 Used only on templates/my_account_pullrequests.html | |||
|
3163 | # because it's tricky on how to use ChangesetStatusModel from there | |||
|
3164 | warnings.warn("Use calculated_review_status from ChangesetStatusModel", DeprecationWarning) | |||
|
3165 | from rhodecode.model.changeset_status import ChangesetStatusModel | |||
|
3166 | return ChangesetStatusModel().calculated_review_status(self) | |||
|
3167 | ||||
|
3168 | def reviewers_statuses(self): | |||
|
3169 | warnings.warn("Use reviewers_statuses from ChangesetStatusModel", DeprecationWarning) | |||
|
3170 | from rhodecode.model.changeset_status import ChangesetStatusModel | |||
|
3171 | return ChangesetStatusModel().reviewers_statuses(self) | |||
|
3172 | ||||
|
3173 | ||||
|
3174 | class PullRequestVersion(Base, _PullRequestBase): | |||
|
3175 | __tablename__ = 'pull_request_versions' | |||
|
3176 | __table_args__ = ( | |||
|
3177 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
3178 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
3179 | ) | |||
|
3180 | ||||
|
3181 | pull_request_version_id = Column( | |||
|
3182 | 'pull_request_version_id', Integer(), nullable=False, primary_key=True) | |||
|
3183 | pull_request_id = Column( | |||
|
3184 | 'pull_request_id', Integer(), | |||
|
3185 | ForeignKey('pull_requests.pull_request_id'), nullable=False) | |||
|
3186 | pull_request = relationship('PullRequest') | |||
|
3187 | ||||
|
3188 | def __repr__(self): | |||
|
3189 | if self.pull_request_version_id: | |||
|
3190 | return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id | |||
|
3191 | else: | |||
|
3192 | return '<DB:PullRequestVersion at %#x>' % id(self) | |||
|
3193 | ||||
|
3194 | ||||
|
3195 | class PullRequestReviewers(Base, BaseModel): | |||
|
3196 | __tablename__ = 'pull_request_reviewers' | |||
|
3197 | __table_args__ = ( | |||
|
3198 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
3199 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
3200 | ) | |||
|
3201 | ||||
|
3202 | def __init__(self, user=None, pull_request=None): | |||
|
3203 | self.user = user | |||
|
3204 | self.pull_request = pull_request | |||
|
3205 | ||||
|
3206 | pull_requests_reviewers_id = Column( | |||
|
3207 | 'pull_requests_reviewers_id', Integer(), nullable=False, | |||
|
3208 | primary_key=True) | |||
|
3209 | pull_request_id = Column( | |||
|
3210 | "pull_request_id", Integer(), | |||
|
3211 | ForeignKey('pull_requests.pull_request_id'), nullable=False) | |||
|
3212 | user_id = Column( | |||
|
3213 | "user_id", Integer(), ForeignKey('users.user_id'), nullable=True) | |||
|
3214 | ||||
|
3215 | user = relationship('User') | |||
|
3216 | pull_request = relationship('PullRequest') | |||
|
3217 | ||||
|
3218 | ||||
|
3219 | class Notification(Base, BaseModel): | |||
|
3220 | __tablename__ = 'notifications' | |||
|
3221 | __table_args__ = ( | |||
|
3222 | Index('notification_type_idx', 'type'), | |||
|
3223 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
3224 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
3225 | ) | |||
|
3226 | ||||
|
3227 | TYPE_CHANGESET_COMMENT = u'cs_comment' | |||
|
3228 | TYPE_MESSAGE = u'message' | |||
|
3229 | TYPE_MENTION = u'mention' | |||
|
3230 | TYPE_REGISTRATION = u'registration' | |||
|
3231 | TYPE_PULL_REQUEST = u'pull_request' | |||
|
3232 | TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment' | |||
|
3233 | ||||
|
3234 | notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True) | |||
|
3235 | subject = Column('subject', Unicode(512), nullable=True) | |||
|
3236 | body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True) | |||
|
3237 | created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True) | |||
|
3238 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
3239 | type_ = Column('type', Unicode(255)) | |||
|
3240 | ||||
|
3241 | created_by_user = relationship('User') | |||
|
3242 | notifications_to_users = relationship('UserNotification', lazy='joined', | |||
|
3243 | cascade="all, delete, delete-orphan") | |||
|
3244 | ||||
|
3245 | @property | |||
|
3246 | def recipients(self): | |||
|
3247 | return [x.user for x in UserNotification.query()\ | |||
|
3248 | .filter(UserNotification.notification == self)\ | |||
|
3249 | .order_by(UserNotification.user_id.asc()).all()] | |||
|
3250 | ||||
|
3251 | @classmethod | |||
|
3252 | def create(cls, created_by, subject, body, recipients, type_=None): | |||
|
3253 | if type_ is None: | |||
|
3254 | type_ = Notification.TYPE_MESSAGE | |||
|
3255 | ||||
|
3256 | notification = cls() | |||
|
3257 | notification.created_by_user = created_by | |||
|
3258 | notification.subject = subject | |||
|
3259 | notification.body = body | |||
|
3260 | notification.type_ = type_ | |||
|
3261 | notification.created_on = datetime.datetime.now() | |||
|
3262 | ||||
|
3263 | for u in recipients: | |||
|
3264 | assoc = UserNotification() | |||
|
3265 | assoc.notification = notification | |||
|
3266 | ||||
|
3267 | # if created_by is inside recipients mark his notification | |||
|
3268 | # as read | |||
|
3269 | if u.user_id == created_by.user_id: | |||
|
3270 | assoc.read = True | |||
|
3271 | ||||
|
3272 | u.notifications.append(assoc) | |||
|
3273 | Session().add(notification) | |||
|
3274 | ||||
|
3275 | return notification | |||
|
3276 | ||||
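A hedged usage sketch of the factory above; the user lookups are assumed helpers not shown in this diff, and subject/body are placeholders:

    admin = User.get_by_username('admin')             # assumed helper
    someone = User.get_by_username('example-user')    # assumed helper
    notification = Notification.create(
        created_by=admin, subject=u'subject', body=u'body text',
        recipients=[admin, someone], type_=Notification.TYPE_MESSAGE)
    Session().commit()
    # the creator's own UserNotification is pre-marked as read by the loop above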
|
3277 | @property | |||
|
3278 | def description(self): | |||
|
3279 | from rhodecode.model.notification import NotificationModel | |||
|
3280 | return NotificationModel().make_description(self) | |||
|
3281 | ||||
|
3282 | ||||
|
3283 | class UserNotification(Base, BaseModel): | |||
|
3284 | __tablename__ = 'user_to_notification' | |||
|
3285 | __table_args__ = ( | |||
|
3286 | UniqueConstraint('user_id', 'notification_id'), | |||
|
3287 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
3288 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
3289 | ) | |||
|
3290 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) | |||
|
3291 | notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True) | |||
|
3292 | read = Column('read', Boolean, default=False) | |||
|
3293 | sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None) | |||
|
3294 | ||||
|
3295 | user = relationship('User', lazy="joined") | |||
|
3296 | notification = relationship('Notification', lazy="joined", | |||
|
3297 | order_by=lambda: Notification.created_on.desc(),) | |||
|
3298 | ||||
|
3299 | def mark_as_read(self): | |||
|
3300 | self.read = True | |||
|
3301 | Session().add(self) | |||
|
3302 | ||||
|
3303 | ||||
|
3304 | class Gist(Base, BaseModel): | |||
|
3305 | __tablename__ = 'gists' | |||
|
3306 | __table_args__ = ( | |||
|
3307 | Index('g_gist_access_id_idx', 'gist_access_id'), | |||
|
3308 | Index('g_created_on_idx', 'created_on'), | |||
|
3309 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
3310 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
3311 | ) | |||
|
3312 | GIST_PUBLIC = u'public' | |||
|
3313 | GIST_PRIVATE = u'private' | |||
|
3314 | DEFAULT_FILENAME = u'gistfile1.txt' | |||
|
3315 | ||||
|
3316 | ACL_LEVEL_PUBLIC = u'acl_public' | |||
|
3317 | ACL_LEVEL_PRIVATE = u'acl_private' | |||
|
3318 | ||||
|
3319 | gist_id = Column('gist_id', Integer(), primary_key=True) | |||
|
3320 | gist_access_id = Column('gist_access_id', Unicode(250)) | |||
|
3321 | gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) | |||
|
3322 | gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True) | |||
|
3323 | gist_expires = Column('gist_expires', Float(53), nullable=False) | |||
|
3324 | gist_type = Column('gist_type', Unicode(128), nullable=False) | |||
|
3325 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
3326 | modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
3327 | acl_level = Column('acl_level', Unicode(128), nullable=True) | |||
|
3328 | ||||
|
3329 | owner = relationship('User') | |||
|
3330 | ||||
|
3331 | def __repr__(self): | |||
|
3332 | return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id) | |||
|
3333 | ||||
|
3334 | @classmethod | |||
|
3335 | def get_or_404(cls, id_): | |||
|
3336 | res = cls.query().filter(cls.gist_access_id == id_).scalar() | |||
|
3337 | if not res: | |||
|
3338 | raise HTTPNotFound | |||
|
3339 | return res | |||
|
3340 | ||||
|
3341 | @classmethod | |||
|
3342 | def get_by_access_id(cls, gist_access_id): | |||
|
3343 | return cls.query().filter(cls.gist_access_id == gist_access_id).scalar() | |||
|
3344 | ||||
|
3345 | def gist_url(self): | |||
|
3346 | import rhodecode | |||
|
3347 | alias_url = rhodecode.CONFIG.get('gist_alias_url') | |||
|
3348 | if alias_url: | |||
|
3349 | return alias_url.replace('{gistid}', self.gist_access_id) | |||
|
3350 | ||||
|
3351 | return url('gist', gist_id=self.gist_access_id, qualified=True) | |||
|
3352 | ||||
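The alias handling above is a plain string substitution on the configured template; a short sketch (the alias URL and access id are made-up values):

    # assuming rhodecode.CONFIG['gist_alias_url'] == 'https://gists.example.com/{gistid}'
    gist = Gist.get_by_access_id('abcd1234')
    gist.gist_url()   # -> 'https://gists.example.com/abcd1234'; without the alias, url('gist', ...) is used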
|
3353 | @classmethod | |||
|
3354 | def base_path(cls): | |||
|
3355 | """ | |||
|
3356 | Returns the base path where all gists are stored | |||
|
3357 | ||||
|
3358 | :param cls: | |||
|
3359 | """ | |||
|
3360 | from rhodecode.model.gist import GIST_STORE_LOC | |||
|
3361 | q = Session().query(RhodeCodeUi)\ | |||
|
3362 | .filter(RhodeCodeUi.ui_key == URL_SEP) | |||
|
3363 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) | |||
|
3364 | return os.path.join(q.one().ui_value, GIST_STORE_LOC) | |||
|
3365 | ||||
|
3366 | def get_api_data(self): | |||
|
3367 | """ | |||
|
3368 | Common function for generating gist related data for API | |||
|
3369 | """ | |||
|
3370 | gist = self | |||
|
3371 | data = { | |||
|
3372 | 'gist_id': gist.gist_id, | |||
|
3373 | 'type': gist.gist_type, | |||
|
3374 | 'access_id': gist.gist_access_id, | |||
|
3375 | 'description': gist.gist_description, | |||
|
3376 | 'url': gist.gist_url(), | |||
|
3377 | 'expires': gist.gist_expires, | |||
|
3378 | 'created_on': gist.created_on, | |||
|
3379 | 'modified_at': gist.modified_at, | |||
|
3380 | 'content': None, | |||
|
3381 | 'acl_level': gist.acl_level, | |||
|
3382 | } | |||
|
3383 | return data | |||
|
3384 | ||||
|
3385 | def __json__(self): | |||
|
3386 | data = dict( | |||
|
3387 | ) | |||
|
3388 | data.update(self.get_api_data()) | |||
|
3389 | return data | |||
|
3390 | # SCM functions | |||
|
3391 | ||||
|
3392 | def scm_instance(self, **kwargs): | |||
|
3393 | full_repo_path = os.path.join(self.base_path(), self.gist_access_id) | |||
|
3394 | return get_vcs_instance( | |||
|
3395 | repo_path=safe_str(full_repo_path), create=False) | |||
|
3396 | ||||
|
3397 | ||||
|
3398 | class DbMigrateVersion(Base, BaseModel): | |||
|
3399 | __tablename__ = 'db_migrate_version' | |||
|
3400 | __table_args__ = ( | |||
|
3401 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
3402 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |||
|
3403 | ) | |||
|
3404 | repository_id = Column('repository_id', String(250), primary_key=True) | |||
|
3405 | repository_path = Column('repository_path', Text) | |||
|
3406 | version = Column('version', Integer) | |||
|
3407 | ||||
|
3408 | ||||
|
3409 | class ExternalIdentity(Base, BaseModel): | |||
|
3410 | __tablename__ = 'external_identities' | |||
|
3411 | __table_args__ = ( | |||
|
3412 | Index('local_user_id_idx', 'local_user_id'), | |||
|
3413 | Index('external_id_idx', 'external_id'), | |||
|
3414 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
3415 | 'mysql_charset': 'utf8'}) | |||
|
3416 | ||||
|
3417 | external_id = Column('external_id', Unicode(255), default=u'', | |||
|
3418 | primary_key=True) | |||
|
3419 | external_username = Column('external_username', Unicode(1024), default=u'') | |||
|
3420 | local_user_id = Column('local_user_id', Integer(), | |||
|
3421 | ForeignKey('users.user_id'), primary_key=True) | |||
|
3422 | provider_name = Column('provider_name', Unicode(255), default=u'', | |||
|
3423 | primary_key=True) | |||
|
3424 | access_token = Column('access_token', String(1024), default=u'') | |||
|
3425 | alt_token = Column('alt_token', String(1024), default=u'') | |||
|
3426 | token_secret = Column('token_secret', String(1024), default=u'') | |||
|
3427 | ||||
|
3428 | @classmethod | |||
|
3429 | def by_external_id_and_provider(cls, external_id, provider_name, | |||
|
3430 | local_user_id=None): | |||
|
3431 | """ | |||
|
3432 | Returns ExternalIdentity instance based on search params | |||
|
3433 | ||||
|
3434 | :param external_id: | |||
|
3435 | :param provider_name: | |||
|
3436 | :return: ExternalIdentity | |||
|
3437 | """ | |||
|
3438 | query = cls.query() | |||
|
3439 | query = query.filter(cls.external_id == external_id) | |||
|
3440 | query = query.filter(cls.provider_name == provider_name) | |||
|
3441 | if local_user_id: | |||
|
3442 | query = query.filter(cls.local_user_id == local_user_id) | |||
|
3443 | return query.first() | |||
|
3444 | ||||
|
3445 | @classmethod | |||
|
3446 | def user_by_external_id_and_provider(cls, external_id, provider_name): | |||
|
3447 | """ | |||
|
3448 | Returns User instance based on search params | |||
|
3449 | ||||
|
3450 | :param external_id: | |||
|
3451 | :param provider_name: | |||
|
3452 | :return: User | |||
|
3453 | """ | |||
|
3454 | query = User.query() | |||
|
3455 | query = query.filter(cls.external_id == external_id) | |||
|
3456 | query = query.filter(cls.provider_name == provider_name) | |||
|
3457 | query = query.filter(User.user_id == cls.local_user_id) | |||
|
3458 | return query.first() | |||
|
3459 | ||||
|
3460 | @classmethod | |||
|
3461 | def by_local_user_id(cls, local_user_id): | |||
|
3462 | """ | |||
|
3463 | Returns all tokens for user | |||
|
3464 | ||||
|
3465 | :param local_user_id: | |||
|
3466 | :return: ExternalIdentity | |||
|
3467 | """ | |||
|
3468 | query = cls.query() | |||
|
3469 | query = query.filter(cls.local_user_id == local_user_id) | |||
|
3470 | return query | |||
|
3471 | ||||
|
3472 | ||||
|
3473 | class Integration(Base, BaseModel): | |||
|
3474 | __tablename__ = 'integrations' | |||
|
3475 | __table_args__ = ( | |||
|
3476 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |||
|
3477 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |||
|
3478 | ) | |||
|
3479 | ||||
|
3480 | integration_id = Column('integration_id', Integer(), primary_key=True) | |||
|
3481 | integration_type = Column('integration_type', String(255)) | |||
|
3482 | enabled = Column('enabled', Boolean(), nullable=False) | |||
|
3483 | name = Column('name', String(255), nullable=False) | |||
|
3484 | child_repos_only = Column('child_repos_only', Boolean(), nullable=True) | |||
|
3485 | ||||
|
3486 | settings = Column( | |||
|
3487 | 'settings_json', MutationObj.as_mutable( | |||
|
3488 | JsonType(dialect_map=dict(mysql=UnicodeText(16384))))) | |||
|
3489 | repo_id = Column( | |||
|
3490 | 'repo_id', Integer(), ForeignKey('repositories.repo_id'), | |||
|
3491 | nullable=True, unique=None, default=None) | |||
|
3492 | repo = relationship('Repository', lazy='joined') | |||
|
3493 | ||||
|
3494 | repo_group_id = Column( | |||
|
3495 | 'repo_group_id', Integer(), ForeignKey('groups.group_id'), | |||
|
3496 | nullable=True, unique=None, default=None) | |||
|
3497 | repo_group = relationship('RepoGroup', lazy='joined') | |||
|
3498 | ||||
|
3499 | @hybrid_property | |||
|
3500 | def scope(self): | |||
|
3501 | if self.repo: | |||
|
3502 | return self.repo | |||
|
3503 | if self.repo_group: | |||
|
3504 | return self.repo_group | |||
|
3505 | if self.child_repos_only: | |||
|
3506 | return 'root_repos' | |||
|
3507 | return 'global' | |||
|
3508 | ||||
|
3509 | @scope.setter | |||
|
3510 | def scope(self, value): | |||
|
3511 | self.repo = None | |||
|
3512 | self.repo_id = None | |||
|
3513 | self.repo_group_id = None | |||
|
3514 | self.repo_group = None | |||
|
3515 | self.child_repos_only = None | |||
|
3516 | if isinstance(value, Repository): | |||
|
3517 | self.repo = value | |||
|
3518 | elif isinstance(value, RepoGroup): | |||
|
3519 | self.repo_group = value | |||
|
3520 | elif value == 'root_repos': | |||
|
3521 | self.child_repos_only = True | |||
|
3522 | elif value == 'global': | |||
|
3523 | pass | |||
|
3524 | else: | |||
|
3525 | raise Exception("invalid scope: %s, must be one of " | |||
|
3526 | "['global', 'root_repos', <RepoGroup>. <Repository>]" % value) | |||
|
3527 | ||||
|
3528 | def __repr__(self): | |||
|
3529 | return '<Integration(%r, %r)>' % (self.integration_type, self.scope) |
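The scope setter above normalizes four kinds of input; a short sketch of what each assignment means (repo and repo_group stand for already loaded Repository / RepoGroup instances):

    integration = Integration()
    integration.scope = 'global'       # nothing bound, applies everywhere
    integration.scope = 'root_repos'   # sets child_repos_only = True
    integration.scope = repo           # a Repository instance -> repository-scoped
    integration.scope = repo_group     # a RepoGroup instance -> repository-group-scoped
    integration.scope = 'bogus'        # raises Exception('invalid scope: ...')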
@@ -0,0 +1,35 b'' | |||||
|
1 | import logging | |||
|
2 | import datetime | |||
|
3 | ||||
|
4 | from sqlalchemy import * | |||
|
5 | from sqlalchemy.exc import DatabaseError | |||
|
6 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload | |||
|
7 | from sqlalchemy.orm.session import Session | |||
|
8 | from sqlalchemy.ext.declarative import declarative_base | |||
|
9 | ||||
|
10 | from rhodecode.lib.dbmigrate.migrate import * | |||
|
11 | from rhodecode.lib.dbmigrate.migrate.changeset import * | |||
|
12 | from rhodecode.lib.utils2 import str2bool | |||
|
13 | ||||
|
14 | from rhodecode.model.meta import Base | |||
|
15 | from rhodecode.model import meta | |||
|
16 | from rhodecode.lib.dbmigrate.versions import _reset_base, notify | |||
|
17 | ||||
|
18 | log = logging.getLogger(__name__) | |||
|
19 | ||||
|
20 | ||||
|
21 | def upgrade(migrate_engine): | |||
|
22 | """ | |||
|
23 | Upgrade operations go here. | |||
|
24 | Don't create your own engine; bind migrate_engine to your metadata | |||
|
25 | """ | |||
|
26 | _reset_base(migrate_engine) | |||
|
27 | from rhodecode.lib.dbmigrate.schema import db_4_4_0_1 | |||
|
28 | ||||
|
29 | tbl = db_4_4_0_1.Integration.__table__ | |||
|
30 | child_repos_only = db_4_4_0_1.Integration.child_repos_only | |||
|
31 | child_repos_only.create(table=tbl) | |||
|
32 | ||||
|
33 | def downgrade(migrate_engine): | |||
|
34 | meta = MetaData() | |||
|
35 | meta.bind = migrate_engine |
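For context, upgrade() above relies on the sqlalchemy-migrate changeset extensions (imported at the top of the migration), which add create()/drop() methods to Column objects. A standalone sketch of that add-a-column pattern, assuming the upstream migrate package rather than RhodeCode's vendored copy and an illustrative table name:

    from sqlalchemy import MetaData, Table, Column, Boolean
    from migrate.changeset import *  # monkey-patches Column with create()/drop()

    def add_column(migrate_engine):
        meta = MetaData(bind=migrate_engine)
        integrations = Table('integrations', meta, autoload=True)    # reflect the existing table
        child_repos_only = Column('child_repos_only', Boolean(), nullable=True)
        child_repos_only.create(table=integrations)                  # emits ALTER TABLE ... ADD COLUMN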
(several additional new files in this commit are rendered as 'NO CONTENT: new file 100644'; their contents are too big for this view and were truncated)
@@ -1,62 +1,62 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 |
|
22 | |||
23 | RhodeCode, a web based repository management software |
|
23 | RhodeCode, a web based repository management software | |
24 | versioning implementation: http://www.python.org/dev/peps/pep-0386/ |
|
24 | versioning implementation: http://www.python.org/dev/peps/pep-0386/ | |
25 | """ |
|
25 | """ | |
26 |
|
26 | |||
27 | import os |
|
27 | import os | |
28 | import sys |
|
28 | import sys | |
29 | import platform |
|
29 | import platform | |
30 |
|
30 | |||
31 | VERSION = tuple(open(os.path.join( |
|
31 | VERSION = tuple(open(os.path.join( | |
32 | os.path.dirname(__file__), 'VERSION')).read().split('.')) |
|
32 | os.path.dirname(__file__), 'VERSION')).read().split('.')) | |
33 |
|
33 | |||
34 | BACKENDS = { |
|
34 | BACKENDS = { | |
35 | 'hg': 'Mercurial repository', |
|
35 | 'hg': 'Mercurial repository', | |
36 | 'git': 'Git repository', |
|
36 | 'git': 'Git repository', | |
37 | 'svn': 'Subversion repository', |
|
37 | 'svn': 'Subversion repository', | |
38 | } |
|
38 | } | |
39 |
|
39 | |||
40 | CELERY_ENABLED = False |
|
40 | CELERY_ENABLED = False | |
41 | CELERY_EAGER = False |
|
41 | CELERY_EAGER = False | |
42 |
|
42 | |||
43 | # link to config for pylons |
|
43 | # link to config for pylons | |
44 | CONFIG = {} |
|
44 | CONFIG = {} | |
45 |
|
45 | |||
46 | # Populated with the settings dictionary from application init in |
|
46 | # Populated with the settings dictionary from application init in | |
47 | # rhodecode.conf.environment.load_pyramid_environment |
|
47 | # rhodecode.conf.environment.load_pyramid_environment | |
48 | PYRAMID_SETTINGS = {} |
|
48 | PYRAMID_SETTINGS = {} | |
49 |
|
49 | |||
50 | # Linked module for extensions |
|
50 | # Linked module for extensions | |
51 | EXTENSIONS = {} |
|
51 | EXTENSIONS = {} | |
52 |
|
52 | |||
53 | __version__ = ('.'.join((str(each) for each in VERSION[:3]))) |
|
53 | __version__ = ('.'.join((str(each) for each in VERSION[:3]))) | |
54 |
__dbversion__ = 5 |
|
54 | __dbversion__ = 57 # defines current db version for migrations | |
55 | __platform__ = platform.system() |
|
55 | __platform__ = platform.system() | |
56 | __license__ = 'AGPLv3, and Commercial License' |
|
56 | __license__ = 'AGPLv3, and Commercial License' | |
57 | __author__ = 'RhodeCode GmbH' |
|
57 | __author__ = 'RhodeCode GmbH' | |
58 | __url__ = 'http://rhodecode.com' |
|
58 | __url__ = 'http://rhodecode.com' | |
59 |
|
59 | |||
60 | is_windows = __platform__ in ['Windows'] |
|
60 | is_windows = __platform__ in ['Windows'] | |
61 | is_unix = not is_windows |
|
61 | is_unix = not is_windows | |
62 | is_test = False |
|
62 | is_test = False |
@@ -1,1159 +1,1160 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Routes configuration |
|
22 | Routes configuration | |
23 |
|
23 | |||
24 | The more specific and detailed routes should be defined first so they |
|
24 | The more specific and detailed routes should be defined first so they | |
25 | may take precedent over the more generic routes. For more information |
|
25 | may take precedent over the more generic routes. For more information | |
26 | refer to the routes manual at http://routes.groovie.org/docs/ |
|
26 | refer to the routes manual at http://routes.groovie.org/docs/ | |
27 |
|
27 | |||
28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py |
|
28 | IMPORTANT: if you change any routing here, make sure to take a look at lib/base.py | |
29 | and _route_name variable which uses some of stored naming here to do redirects. |
|
29 | and _route_name variable which uses some of stored naming here to do redirects. | |
30 | """ |
|
30 | """ | |
31 | import os
32 | import re
33 | from routes import Mapper
34 |
35 | from rhodecode.config import routing_links
36 |
37 | # prefix for non-repository related links; needs to be prefixed with `/`
38 | ADMIN_PREFIX = '/_admin'
39 | STATIC_FILE_PREFIX = '/_static'
40 |
41 | # Default requirements for URL parts
42 | URL_NAME_REQUIREMENTS = {
43 |     # group names can have a slash in them, but they must not end with a slash
44 |     'group_name': r'.*?[^/]',
+ 45 |     'repo_group_name': r'.*?[^/]',
46 |     # repo names can have a slash in them, but they must not end with a slash
47 |     'repo_name': r'.*?[^/]',
48 |     # file path eats up everything at the end
49 |     'f_path': r'.*',
50 |     # reference types
51 |     'source_ref_type': '(branch|book|tag|rev|\%\(source_ref_type\)s)',
52 |     'target_ref_type': '(branch|book|tag|rev|\%\(target_ref_type\)s)',
53 | }
54 |
55 |
56 | def add_route_requirements(route_path, requirements):
57 |     """
58 |     Adds regex requirements to pyramid routes using a mapping dict
59 |
60 |     >>> add_route_requirements('/{action}/{id}', {'id': r'\d+'})
61 |     '/{action}/{id:\d+}'
62 |
63 |     """
64 |     for key, regex in requirements.items():
65 |         route_path = route_path.replace('{%s}' % key, '{%s:%s}' % (key, regex))
66 |     return route_path
67 |
68 |
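As a quick illustration (assuming add_route_requirements and URL_NAME_REQUIREMENTS defined above are in scope; the paths are made up), the helper simply inlines each regex into its placeholder and leaves keys that do not occur in the path alone:

    # the newly added 'repo_group_name' requirement behaves like the others
    print(add_route_requirements('/{repo_name}/changelog', URL_NAME_REQUIREMENTS))
    # -> '/{repo_name:.*?[^/]}/changelog'
    print(add_route_requirements('/{repo_group_name}/edit', URL_NAME_REQUIREMENTS))
    # -> '/{repo_group_name:.*?[^/]}/edit'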
69 | class JSRoutesMapper(Mapper):
70 |     """
71 |     Wrapper for routes.Mapper to make pyroutes compatible url definitions
72 |     """
73 |     _named_route_regex = re.compile(r'^[a-z-_0-9A-Z]+$')
74 |     _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')
75 |     def __init__(self, *args, **kw):
76 |         super(JSRoutesMapper, self).__init__(*args, **kw)
77 |         self._jsroutes = []
78 |
79 |     def connect(self, *args, **kw):
80 |         """
81 |         Wrapper for connect to take an extra argument jsroute=True
82 |
83 |         :param jsroute: boolean, if True will add the route to the pyroutes list
84 |         """
85 |         if kw.pop('jsroute', False):
86 |             if not self._named_route_regex.match(args[0]):
87 |                 raise Exception('only named routes can be added to pyroutes')
88 |             self._jsroutes.append(args[0])
89 |
90 |         super(JSRoutesMapper, self).connect(*args, **kw)
91 |
92 |     def _extract_route_information(self, route):
93 |         """
94 |         Convert a route into tuple(name, path, args), eg:
95 |         ('user_profile', '/profile/%(username)s', ['username'])
96 |         """
97 |         routepath = route.routepath
98 |         def replace(matchobj):
99 |             if matchobj.group(1):
100 |                 return "%%(%s)s" % matchobj.group(1).split(':')[0]
101 |             else:
102 |                 return "%%(%s)s" % matchobj.group(2)
103 |
104 |         routepath = self._argument_prog.sub(replace, routepath)
105 |         return (
106 |             route.name,
107 |             routepath,
108 |             [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
109 |              for arg in self._argument_prog.findall(route.routepath)]
110 |         )
111 |
112 |     def jsroutes(self):
113 |         """
114 |         Return a list of pyroutes.js compatible routes
115 |         """
116 |         for route_name in self._jsroutes:
117 |             yield self._extract_route_information(self._routenames[route_name])
118 |
119 |
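A rough usage sketch (standalone mapper for illustration only; the real instance is wired up in make_map() below): routes registered with jsroute=True are the ones jsroutes() later emits in the %(name)s template form that pyroutes.js consumes.

    rmap = JSRoutesMapper()
    rmap.connect('user_profile', '/_profiles/{username}',
                 controller='users', action='user_profile', jsroute=True)

    print(list(rmap.jsroutes()))
    # -> [('user_profile', '/_profiles/%(username)s', ['username'])]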
119 | def make_map(config): |
|
120 | def make_map(config): | |
120 | """Create, configure and return the routes Mapper""" |
|
121 | """Create, configure and return the routes Mapper""" | |
121 | rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'], |
|
122 | rmap = JSRoutesMapper(directory=config['pylons.paths']['controllers'], | |
122 | always_scan=config['debug']) |
|
123 | always_scan=config['debug']) | |
123 | rmap.minimization = False |
|
124 | rmap.minimization = False | |
124 | rmap.explicit = False |
|
125 | rmap.explicit = False | |
125 |
|
126 | |||
126 | from rhodecode.lib.utils2 import str2bool |
|
127 | from rhodecode.lib.utils2 import str2bool | |
127 | from rhodecode.model import repo, repo_group |
|
128 | from rhodecode.model import repo, repo_group | |
128 |
|
129 | |||
129 | def check_repo(environ, match_dict): |
|
130 | def check_repo(environ, match_dict): | |
130 | """ |
|
131 | """ | |
131 | check for valid repository for proper 404 handling |
|
132 | check for valid repository for proper 404 handling | |
132 |
|
133 | |||
133 | :param environ: |
|
134 | :param environ: | |
134 | :param match_dict: |
|
135 | :param match_dict: | |
135 | """ |
|
136 | """ | |
136 | repo_name = match_dict.get('repo_name') |
|
137 | repo_name = match_dict.get('repo_name') | |
137 |
|
138 | |||
138 | if match_dict.get('f_path'): |
|
139 | if match_dict.get('f_path'): | |
139 | # fix for multiple initial slashes that causes errors |
|
140 | # fix for multiple initial slashes that causes errors | |
140 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') |
|
141 | match_dict['f_path'] = match_dict['f_path'].lstrip('/') | |
141 | repo_model = repo.RepoModel() |
|
142 | repo_model = repo.RepoModel() | |
142 | by_name_match = repo_model.get_by_repo_name(repo_name) |
|
143 | by_name_match = repo_model.get_by_repo_name(repo_name) | |
143 | # if we match quickly from database, short circuit the operation, |
|
144 | # if we match quickly from database, short circuit the operation, | |
144 | # and validate repo based on the type. |
|
145 | # and validate repo based on the type. | |
145 | if by_name_match: |
|
146 | if by_name_match: | |
146 | return True |
|
147 | return True | |
147 |
|
148 | |||
148 | by_id_match = repo_model.get_repo_by_id(repo_name) |
|
149 | by_id_match = repo_model.get_repo_by_id(repo_name) | |
149 | if by_id_match: |
|
150 | if by_id_match: | |
150 | repo_name = by_id_match.repo_name |
|
151 | repo_name = by_id_match.repo_name | |
151 | match_dict['repo_name'] = repo_name |
|
152 | match_dict['repo_name'] = repo_name | |
152 | return True |
|
153 | return True | |
153 |
|
154 | |||
154 | return False |
|
155 | return False | |
155 |
|
156 | |||
156 | def check_group(environ, match_dict): |
|
157 | def check_group(environ, match_dict): | |
157 | """ |
|
158 | """ | |
158 | check for valid repository group path for proper 404 handling |
|
159 | check for valid repository group path for proper 404 handling | |
159 |
|
160 | |||
160 | :param environ: |
|
161 | :param environ: | |
161 | :param match_dict: |
|
162 | :param match_dict: | |
162 | """ |
|
163 | """ | |
163 | repo_group_name = match_dict.get('group_name') |
|
164 | repo_group_name = match_dict.get('group_name') | |
164 | repo_group_model = repo_group.RepoGroupModel() |
|
165 | repo_group_model = repo_group.RepoGroupModel() | |
165 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) |
|
166 | by_name_match = repo_group_model.get_by_group_name(repo_group_name) | |
166 | if by_name_match: |
|
167 | if by_name_match: | |
167 | return True |
|
168 | return True | |
168 |
|
169 | |||
169 | return False |
|
170 | return False | |
170 |
|
171 | |||
171 | def check_user_group(environ, match_dict): |
|
172 | def check_user_group(environ, match_dict): | |
172 | """ |
|
173 | """ | |
173 | check for valid user group for proper 404 handling |
|
174 | check for valid user group for proper 404 handling | |
174 |
|
175 | |||
175 | :param environ: |
|
176 | :param environ: | |
176 | :param match_dict: |
|
177 | :param match_dict: | |
177 | """ |
|
178 | """ | |
178 | return True |
|
179 | return True | |
179 |
|
180 | |||
180 | def check_int(environ, match_dict): |
|
181 | def check_int(environ, match_dict): | |
181 | return match_dict.get('id').isdigit() |
|
182 | return match_dict.get('id').isdigit() | |
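check_repo, check_group and check_user_group above all follow the routes condition-function contract: the mapper calls the function with the WSGI environ and the matched variables, a falsy return rejects the match (which is what turns an unknown repository into a clean 404), and the match_dict may be mutated in place, which is how check_repo() rewrites a by-id lookup back to the real repo_name. A toy condition function (purely hypothetical) showing just that contract:

    def check_numeric_id(environ, match_dict):
        # routes calls this during matching; True accepts the route, False rejects it
        if not match_dict.get('id', '').isdigit():
            return False
        # condition functions may also rewrite matched variables, as check_repo() does
        match_dict['id'] = int(match_dict['id'])
        return True

    print(check_numeric_id({}, {'id': '42'}))   # True  -> route matches, id normalised
    print(check_numeric_id({}, {'id': 'abc'}))  # False -> treated as "no match"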
182 |
|
183 | |||
183 |
|
184 | |||
184 | #========================================================================== |
|
185 | #========================================================================== | |
185 | # CUSTOM ROUTES HERE |
|
186 | # CUSTOM ROUTES HERE | |
186 | #========================================================================== |
|
187 | #========================================================================== | |
187 |
|
188 | |||
188 | # MAIN PAGE |
|
189 | # MAIN PAGE | |
189 | rmap.connect('home', '/', controller='home', action='index', jsroute=True) |
|
190 | rmap.connect('home', '/', controller='home', action='index', jsroute=True) | |
190 | rmap.connect('goto_switcher_data', '/_goto_data', controller='home', |
|
191 | rmap.connect('goto_switcher_data', '/_goto_data', controller='home', | |
191 | action='goto_switcher_data') |
|
192 | action='goto_switcher_data') | |
192 | rmap.connect('repo_list_data', '/_repos', controller='home', |
|
193 | rmap.connect('repo_list_data', '/_repos', controller='home', | |
193 | action='repo_list_data') |
|
194 | action='repo_list_data') | |
194 |
|
195 | |||
195 | rmap.connect('user_autocomplete_data', '/_users', controller='home', |
|
196 | rmap.connect('user_autocomplete_data', '/_users', controller='home', | |
196 | action='user_autocomplete_data', jsroute=True) |
|
197 | action='user_autocomplete_data', jsroute=True) | |
197 | rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home', |
|
198 | rmap.connect('user_group_autocomplete_data', '/_user_groups', controller='home', | |
198 | action='user_group_autocomplete_data') |
|
199 | action='user_group_autocomplete_data') | |
199 |
|
200 | |||
200 | rmap.connect( |
|
201 | rmap.connect( | |
201 | 'user_profile', '/_profiles/{username}', controller='users', |
|
202 | 'user_profile', '/_profiles/{username}', controller='users', | |
202 | action='user_profile') |
|
203 | action='user_profile') | |
203 |
|
204 | |||
204 | # TODO: johbo: Static links, to be replaced by our redirection mechanism |
|
205 | # TODO: johbo: Static links, to be replaced by our redirection mechanism | |
205 | rmap.connect('rst_help', |
|
206 | rmap.connect('rst_help', | |
206 | 'http://docutils.sourceforge.net/docs/user/rst/quickref.html', |
|
207 | 'http://docutils.sourceforge.net/docs/user/rst/quickref.html', | |
207 | _static=True) |
|
208 | _static=True) | |
208 | rmap.connect('markdown_help', |
|
209 | rmap.connect('markdown_help', | |
209 | 'http://daringfireball.net/projects/markdown/syntax', |
|
210 | 'http://daringfireball.net/projects/markdown/syntax', | |
210 | _static=True) |
|
211 | _static=True) | |
211 | rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True) |
|
212 | rmap.connect('rhodecode_official', 'https://rhodecode.com', _static=True) | |
212 | rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True) |
|
213 | rmap.connect('rhodecode_support', 'https://rhodecode.com/help/', _static=True) | |
213 | rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True) |
|
214 | rmap.connect('rhodecode_translations', 'https://rhodecode.com/translate/enterprise', _static=True) | |
214 | # TODO: anderson - making this a static link since redirect won't play |
|
215 | # TODO: anderson - making this a static link since redirect won't play | |
215 | # nice with POST requests |
|
216 | # nice with POST requests | |
216 | rmap.connect('enterprise_license_convert_from_old', |
|
217 | rmap.connect('enterprise_license_convert_from_old', | |
217 | 'https://rhodecode.com/u/license-upgrade', |
|
218 | 'https://rhodecode.com/u/license-upgrade', | |
218 | _static=True) |
|
219 | _static=True) | |
219 |
|
220 | |||
220 | routing_links.connect_redirection_links(rmap) |
|
221 | routing_links.connect_redirection_links(rmap) | |
221 |
|
222 | |||
222 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') |
|
223 | rmap.connect('ping', '%s/ping' % (ADMIN_PREFIX,), controller='home', action='ping') | |
223 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') |
|
224 | rmap.connect('error_test', '%s/error_test' % (ADMIN_PREFIX,), controller='home', action='error_test') | |
224 |
|
225 | |||
225 | # ADMIN REPOSITORY ROUTES |
|
226 | # ADMIN REPOSITORY ROUTES | |
226 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
227 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
227 | controller='admin/repos') as m: |
|
228 | controller='admin/repos') as m: | |
228 | m.connect('repos', '/repos', |
|
229 | m.connect('repos', '/repos', | |
229 | action='create', conditions={'method': ['POST']}) |
|
230 | action='create', conditions={'method': ['POST']}) | |
230 | m.connect('repos', '/repos', |
|
231 | m.connect('repos', '/repos', | |
231 | action='index', conditions={'method': ['GET']}) |
|
232 | action='index', conditions={'method': ['GET']}) | |
232 | m.connect('new_repo', '/create_repository', jsroute=True, |
|
233 | m.connect('new_repo', '/create_repository', jsroute=True, | |
233 | action='create_repository', conditions={'method': ['GET']}) |
|
234 | action='create_repository', conditions={'method': ['GET']}) | |
234 | m.connect('/repos/{repo_name}', |
|
235 | m.connect('/repos/{repo_name}', | |
235 | action='update', conditions={'method': ['PUT'], |
|
236 | action='update', conditions={'method': ['PUT'], | |
236 | 'function': check_repo}, |
|
237 | 'function': check_repo}, | |
237 | requirements=URL_NAME_REQUIREMENTS) |
|
238 | requirements=URL_NAME_REQUIREMENTS) | |
238 | m.connect('delete_repo', '/repos/{repo_name}', |
|
239 | m.connect('delete_repo', '/repos/{repo_name}', | |
239 | action='delete', conditions={'method': ['DELETE']}, |
|
240 | action='delete', conditions={'method': ['DELETE']}, | |
240 | requirements=URL_NAME_REQUIREMENTS) |
|
241 | requirements=URL_NAME_REQUIREMENTS) | |
241 | m.connect('repo', '/repos/{repo_name}', |
|
242 | m.connect('repo', '/repos/{repo_name}', | |
242 | action='show', conditions={'method': ['GET'], |
|
243 | action='show', conditions={'method': ['GET'], | |
243 | 'function': check_repo}, |
|
244 | 'function': check_repo}, | |
244 | requirements=URL_NAME_REQUIREMENTS) |
|
245 | requirements=URL_NAME_REQUIREMENTS) | |
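The submapper blocks used here are shorthand for prefixing every contained route; a minimal sketch (throwaway mapper, mirroring only the pattern above, and assuming JSRoutesMapper and ADMIN_PREFIX from this module are in scope) of what path_prefix and the shared controller do:

    m = JSRoutesMapper()
    with m.submapper(path_prefix=ADMIN_PREFIX, controller='admin/repos') as sm:
        sm.connect('repos', '/repos', action='index', conditions={'method': ['GET']})

    # the named route ends up registered under the prefixed path
    print(m._routenames['repos'].routepath)  # -> '/_admin/repos'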
245 |
|
246 | |||
246 | # ADMIN REPOSITORY GROUPS ROUTES |
|
247 | # ADMIN REPOSITORY GROUPS ROUTES | |
247 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
248 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
248 | controller='admin/repo_groups') as m: |
|
249 | controller='admin/repo_groups') as m: | |
249 | m.connect('repo_groups', '/repo_groups', |
|
250 | m.connect('repo_groups', '/repo_groups', | |
250 | action='create', conditions={'method': ['POST']}) |
|
251 | action='create', conditions={'method': ['POST']}) | |
251 | m.connect('repo_groups', '/repo_groups', |
|
252 | m.connect('repo_groups', '/repo_groups', | |
252 | action='index', conditions={'method': ['GET']}) |
|
253 | action='index', conditions={'method': ['GET']}) | |
253 | m.connect('new_repo_group', '/repo_groups/new', |
|
254 | m.connect('new_repo_group', '/repo_groups/new', | |
254 | action='new', conditions={'method': ['GET']}) |
|
255 | action='new', conditions={'method': ['GET']}) | |
255 | m.connect('update_repo_group', '/repo_groups/{group_name}', |
|
256 | m.connect('update_repo_group', '/repo_groups/{group_name}', | |
256 | action='update', conditions={'method': ['PUT'], |
|
257 | action='update', conditions={'method': ['PUT'], | |
257 | 'function': check_group}, |
|
258 | 'function': check_group}, | |
258 | requirements=URL_NAME_REQUIREMENTS) |
|
259 | requirements=URL_NAME_REQUIREMENTS) | |
259 |
|
260 | |||
260 | # EXTRAS REPO GROUP ROUTES |
|
261 | # EXTRAS REPO GROUP ROUTES | |
261 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
262 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', | |
262 | action='edit', |
|
263 | action='edit', | |
263 | conditions={'method': ['GET'], 'function': check_group}, |
|
264 | conditions={'method': ['GET'], 'function': check_group}, | |
264 | requirements=URL_NAME_REQUIREMENTS) |
|
265 | requirements=URL_NAME_REQUIREMENTS) | |
265 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', |
|
266 | m.connect('edit_repo_group', '/repo_groups/{group_name}/edit', | |
266 | action='edit', |
|
267 | action='edit', | |
267 | conditions={'method': ['PUT'], 'function': check_group}, |
|
268 | conditions={'method': ['PUT'], 'function': check_group}, | |
268 | requirements=URL_NAME_REQUIREMENTS) |
|
269 | requirements=URL_NAME_REQUIREMENTS) | |
269 |
|
270 | |||
270 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
271 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', | |
271 | action='edit_repo_group_advanced', |
|
272 | action='edit_repo_group_advanced', | |
272 | conditions={'method': ['GET'], 'function': check_group}, |
|
273 | conditions={'method': ['GET'], 'function': check_group}, | |
273 | requirements=URL_NAME_REQUIREMENTS) |
|
274 | requirements=URL_NAME_REQUIREMENTS) | |
274 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', |
|
275 | m.connect('edit_repo_group_advanced', '/repo_groups/{group_name}/edit/advanced', | |
275 | action='edit_repo_group_advanced', |
|
276 | action='edit_repo_group_advanced', | |
276 | conditions={'method': ['PUT'], 'function': check_group}, |
|
277 | conditions={'method': ['PUT'], 'function': check_group}, | |
277 | requirements=URL_NAME_REQUIREMENTS) |
|
278 | requirements=URL_NAME_REQUIREMENTS) | |
278 |
|
279 | |||
279 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
280 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', | |
280 | action='edit_repo_group_perms', |
|
281 | action='edit_repo_group_perms', | |
281 | conditions={'method': ['GET'], 'function': check_group}, |
|
282 | conditions={'method': ['GET'], 'function': check_group}, | |
282 | requirements=URL_NAME_REQUIREMENTS) |
|
283 | requirements=URL_NAME_REQUIREMENTS) | |
283 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', |
|
284 | m.connect('edit_repo_group_perms', '/repo_groups/{group_name}/edit/permissions', | |
284 | action='update_perms', |
|
285 | action='update_perms', | |
285 | conditions={'method': ['PUT'], 'function': check_group}, |
|
286 | conditions={'method': ['PUT'], 'function': check_group}, | |
286 | requirements=URL_NAME_REQUIREMENTS) |
|
287 | requirements=URL_NAME_REQUIREMENTS) | |
287 |
|
288 | |||
288 | m.connect('delete_repo_group', '/repo_groups/{group_name}', |
|
289 | m.connect('delete_repo_group', '/repo_groups/{group_name}', | |
289 | action='delete', conditions={'method': ['DELETE'], |
|
290 | action='delete', conditions={'method': ['DELETE'], | |
290 | 'function': check_group}, |
|
291 | 'function': check_group}, | |
291 | requirements=URL_NAME_REQUIREMENTS) |
|
292 | requirements=URL_NAME_REQUIREMENTS) | |
292 |
|
293 | |||
293 | # ADMIN USER ROUTES |
|
294 | # ADMIN USER ROUTES | |
294 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
295 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
295 | controller='admin/users') as m: |
|
296 | controller='admin/users') as m: | |
296 | m.connect('users', '/users', |
|
297 | m.connect('users', '/users', | |
297 | action='create', conditions={'method': ['POST']}) |
|
298 | action='create', conditions={'method': ['POST']}) | |
298 | m.connect('users', '/users', |
|
299 | m.connect('users', '/users', | |
299 | action='index', conditions={'method': ['GET']}) |
|
300 | action='index', conditions={'method': ['GET']}) | |
300 | m.connect('new_user', '/users/new', |
|
301 | m.connect('new_user', '/users/new', | |
301 | action='new', conditions={'method': ['GET']}) |
|
302 | action='new', conditions={'method': ['GET']}) | |
302 | m.connect('update_user', '/users/{user_id}', |
|
303 | m.connect('update_user', '/users/{user_id}', | |
303 | action='update', conditions={'method': ['PUT']}) |
|
304 | action='update', conditions={'method': ['PUT']}) | |
304 | m.connect('delete_user', '/users/{user_id}', |
|
305 | m.connect('delete_user', '/users/{user_id}', | |
305 | action='delete', conditions={'method': ['DELETE']}) |
|
306 | action='delete', conditions={'method': ['DELETE']}) | |
306 | m.connect('edit_user', '/users/{user_id}/edit', |
|
307 | m.connect('edit_user', '/users/{user_id}/edit', | |
307 | action='edit', conditions={'method': ['GET']}) |
|
308 | action='edit', conditions={'method': ['GET']}) | |
308 | m.connect('user', '/users/{user_id}', |
|
309 | m.connect('user', '/users/{user_id}', | |
309 | action='show', conditions={'method': ['GET']}) |
|
310 | action='show', conditions={'method': ['GET']}) | |
310 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', |
|
311 | m.connect('force_password_reset_user', '/users/{user_id}/password_reset', | |
311 | action='reset_password', conditions={'method': ['POST']}) |
|
312 | action='reset_password', conditions={'method': ['POST']}) | |
312 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', |
|
313 | m.connect('create_personal_repo_group', '/users/{user_id}/create_repo_group', | |
313 | action='create_personal_repo_group', conditions={'method': ['POST']}) |
|
314 | action='create_personal_repo_group', conditions={'method': ['POST']}) | |
314 |
|
315 | |||
315 | # EXTRAS USER ROUTES |
|
316 | # EXTRAS USER ROUTES | |
316 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
317 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', | |
317 | action='edit_advanced', conditions={'method': ['GET']}) |
|
318 | action='edit_advanced', conditions={'method': ['GET']}) | |
318 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', |
|
319 | m.connect('edit_user_advanced', '/users/{user_id}/edit/advanced', | |
319 | action='update_advanced', conditions={'method': ['PUT']}) |
|
320 | action='update_advanced', conditions={'method': ['PUT']}) | |
320 |
|
321 | |||
321 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', |
|
322 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', | |
322 | action='edit_auth_tokens', conditions={'method': ['GET']}) |
|
323 | action='edit_auth_tokens', conditions={'method': ['GET']}) | |
323 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', |
|
324 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', | |
324 | action='add_auth_token', conditions={'method': ['PUT']}) |
|
325 | action='add_auth_token', conditions={'method': ['PUT']}) | |
325 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', |
|
326 | m.connect('edit_user_auth_tokens', '/users/{user_id}/edit/auth_tokens', | |
326 | action='delete_auth_token', conditions={'method': ['DELETE']}) |
|
327 | action='delete_auth_token', conditions={'method': ['DELETE']}) | |
327 |
|
328 | |||
328 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
329 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', | |
329 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
330 | action='edit_global_perms', conditions={'method': ['GET']}) | |
330 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', |
|
331 | m.connect('edit_user_global_perms', '/users/{user_id}/edit/global_permissions', | |
331 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
332 | action='update_global_perms', conditions={'method': ['PUT']}) | |
332 |
|
333 | |||
333 | m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary', |
|
334 | m.connect('edit_user_perms_summary', '/users/{user_id}/edit/permissions_summary', | |
334 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
335 | action='edit_perms_summary', conditions={'method': ['GET']}) | |
335 |
|
336 | |||
336 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
337 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', | |
337 | action='edit_emails', conditions={'method': ['GET']}) |
|
338 | action='edit_emails', conditions={'method': ['GET']}) | |
338 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
339 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', | |
339 | action='add_email', conditions={'method': ['PUT']}) |
|
340 | action='add_email', conditions={'method': ['PUT']}) | |
340 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', |
|
341 | m.connect('edit_user_emails', '/users/{user_id}/edit/emails', | |
341 | action='delete_email', conditions={'method': ['DELETE']}) |
|
342 | action='delete_email', conditions={'method': ['DELETE']}) | |
342 |
|
343 | |||
343 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
344 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', | |
344 | action='edit_ips', conditions={'method': ['GET']}) |
|
345 | action='edit_ips', conditions={'method': ['GET']}) | |
345 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
346 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', | |
346 | action='add_ip', conditions={'method': ['PUT']}) |
|
347 | action='add_ip', conditions={'method': ['PUT']}) | |
347 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', |
|
348 | m.connect('edit_user_ips', '/users/{user_id}/edit/ips', | |
348 | action='delete_ip', conditions={'method': ['DELETE']}) |
|
349 | action='delete_ip', conditions={'method': ['DELETE']}) | |
349 |
|
350 | |||
350 | # ADMIN USER GROUPS REST ROUTES |
|
351 | # ADMIN USER GROUPS REST ROUTES | |
351 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
352 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
352 | controller='admin/user_groups') as m: |
|
353 | controller='admin/user_groups') as m: | |
353 | m.connect('users_groups', '/user_groups', |
|
354 | m.connect('users_groups', '/user_groups', | |
354 | action='create', conditions={'method': ['POST']}) |
|
355 | action='create', conditions={'method': ['POST']}) | |
355 | m.connect('users_groups', '/user_groups', |
|
356 | m.connect('users_groups', '/user_groups', | |
356 | action='index', conditions={'method': ['GET']}) |
|
357 | action='index', conditions={'method': ['GET']}) | |
357 | m.connect('new_users_group', '/user_groups/new', |
|
358 | m.connect('new_users_group', '/user_groups/new', | |
358 | action='new', conditions={'method': ['GET']}) |
|
359 | action='new', conditions={'method': ['GET']}) | |
359 | m.connect('update_users_group', '/user_groups/{user_group_id}', |
|
360 | m.connect('update_users_group', '/user_groups/{user_group_id}', | |
360 | action='update', conditions={'method': ['PUT']}) |
|
361 | action='update', conditions={'method': ['PUT']}) | |
361 | m.connect('delete_users_group', '/user_groups/{user_group_id}', |
|
362 | m.connect('delete_users_group', '/user_groups/{user_group_id}', | |
362 | action='delete', conditions={'method': ['DELETE']}) |
|
363 | action='delete', conditions={'method': ['DELETE']}) | |
363 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', |
|
364 | m.connect('edit_users_group', '/user_groups/{user_group_id}/edit', | |
364 | action='edit', conditions={'method': ['GET']}, |
|
365 | action='edit', conditions={'method': ['GET']}, | |
365 | function=check_user_group) |
|
366 | function=check_user_group) | |
366 |
|
367 | |||
367 | # EXTRAS USER GROUP ROUTES |
|
368 | # EXTRAS USER GROUP ROUTES | |
368 | m.connect('edit_user_group_global_perms', |
|
369 | m.connect('edit_user_group_global_perms', | |
369 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
370 | '/user_groups/{user_group_id}/edit/global_permissions', | |
370 | action='edit_global_perms', conditions={'method': ['GET']}) |
|
371 | action='edit_global_perms', conditions={'method': ['GET']}) | |
371 | m.connect('edit_user_group_global_perms', |
|
372 | m.connect('edit_user_group_global_perms', | |
372 | '/user_groups/{user_group_id}/edit/global_permissions', |
|
373 | '/user_groups/{user_group_id}/edit/global_permissions', | |
373 | action='update_global_perms', conditions={'method': ['PUT']}) |
|
374 | action='update_global_perms', conditions={'method': ['PUT']}) | |
374 | m.connect('edit_user_group_perms_summary', |
|
375 | m.connect('edit_user_group_perms_summary', | |
375 | '/user_groups/{user_group_id}/edit/permissions_summary', |
|
376 | '/user_groups/{user_group_id}/edit/permissions_summary', | |
376 | action='edit_perms_summary', conditions={'method': ['GET']}) |
|
377 | action='edit_perms_summary', conditions={'method': ['GET']}) | |
377 |
|
378 | |||
378 | m.connect('edit_user_group_perms', |
|
379 | m.connect('edit_user_group_perms', | |
379 | '/user_groups/{user_group_id}/edit/permissions', |
|
380 | '/user_groups/{user_group_id}/edit/permissions', | |
380 | action='edit_perms', conditions={'method': ['GET']}) |
|
381 | action='edit_perms', conditions={'method': ['GET']}) | |
381 | m.connect('edit_user_group_perms', |
|
382 | m.connect('edit_user_group_perms', | |
382 | '/user_groups/{user_group_id}/edit/permissions', |
|
383 | '/user_groups/{user_group_id}/edit/permissions', | |
383 | action='update_perms', conditions={'method': ['PUT']}) |
|
384 | action='update_perms', conditions={'method': ['PUT']}) | |
384 |
|
385 | |||
385 | m.connect('edit_user_group_advanced', |
|
386 | m.connect('edit_user_group_advanced', | |
386 | '/user_groups/{user_group_id}/edit/advanced', |
|
387 | '/user_groups/{user_group_id}/edit/advanced', | |
387 | action='edit_advanced', conditions={'method': ['GET']}) |
|
388 | action='edit_advanced', conditions={'method': ['GET']}) | |
388 |
|
389 | |||
389 | m.connect('edit_user_group_members', |
|
390 | m.connect('edit_user_group_members', | |
390 | '/user_groups/{user_group_id}/edit/members', jsroute=True, |
|
391 | '/user_groups/{user_group_id}/edit/members', jsroute=True, | |
391 | action='edit_members', conditions={'method': ['GET']}) |
|
392 | action='edit_members', conditions={'method': ['GET']}) | |
392 |
|
393 | |||
393 | # ADMIN PERMISSIONS ROUTES |
|
394 | # ADMIN PERMISSIONS ROUTES | |
394 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
395 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
395 | controller='admin/permissions') as m: |
|
396 | controller='admin/permissions') as m: | |
396 | m.connect('admin_permissions_application', '/permissions/application', |
|
397 | m.connect('admin_permissions_application', '/permissions/application', | |
397 | action='permission_application_update', conditions={'method': ['POST']}) |
|
398 | action='permission_application_update', conditions={'method': ['POST']}) | |
398 | m.connect('admin_permissions_application', '/permissions/application', |
|
399 | m.connect('admin_permissions_application', '/permissions/application', | |
399 | action='permission_application', conditions={'method': ['GET']}) |
|
400 | action='permission_application', conditions={'method': ['GET']}) | |
400 |
|
401 | |||
401 | m.connect('admin_permissions_global', '/permissions/global', |
|
402 | m.connect('admin_permissions_global', '/permissions/global', | |
402 | action='permission_global_update', conditions={'method': ['POST']}) |
|
403 | action='permission_global_update', conditions={'method': ['POST']}) | |
403 | m.connect('admin_permissions_global', '/permissions/global', |
|
404 | m.connect('admin_permissions_global', '/permissions/global', | |
404 | action='permission_global', conditions={'method': ['GET']}) |
|
405 | action='permission_global', conditions={'method': ['GET']}) | |
405 |
|
406 | |||
406 | m.connect('admin_permissions_object', '/permissions/object', |
|
407 | m.connect('admin_permissions_object', '/permissions/object', | |
407 | action='permission_objects_update', conditions={'method': ['POST']}) |
|
408 | action='permission_objects_update', conditions={'method': ['POST']}) | |
408 | m.connect('admin_permissions_object', '/permissions/object', |
|
409 | m.connect('admin_permissions_object', '/permissions/object', | |
409 | action='permission_objects', conditions={'method': ['GET']}) |
|
410 | action='permission_objects', conditions={'method': ['GET']}) | |
410 |
|
411 | |||
411 | m.connect('admin_permissions_ips', '/permissions/ips', |
|
412 | m.connect('admin_permissions_ips', '/permissions/ips', | |
412 | action='permission_ips', conditions={'method': ['POST']}) |
|
413 | action='permission_ips', conditions={'method': ['POST']}) | |
413 | m.connect('admin_permissions_ips', '/permissions/ips', |
|
414 | m.connect('admin_permissions_ips', '/permissions/ips', | |
414 | action='permission_ips', conditions={'method': ['GET']}) |
|
415 | action='permission_ips', conditions={'method': ['GET']}) | |
415 |
|
416 | |||
416 | m.connect('admin_permissions_overview', '/permissions/overview', |
|
417 | m.connect('admin_permissions_overview', '/permissions/overview', | |
417 | action='permission_perms', conditions={'method': ['GET']}) |
|
418 | action='permission_perms', conditions={'method': ['GET']}) | |
418 |
|
419 | |||
419 | # ADMIN DEFAULTS REST ROUTES |
|
420 | # ADMIN DEFAULTS REST ROUTES | |
420 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
421 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
421 | controller='admin/defaults') as m: |
|
422 | controller='admin/defaults') as m: | |
422 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
423 | m.connect('admin_defaults_repositories', '/defaults/repositories', | |
423 | action='update_repository_defaults', conditions={'method': ['POST']}) |
|
424 | action='update_repository_defaults', conditions={'method': ['POST']}) | |
424 | m.connect('admin_defaults_repositories', '/defaults/repositories', |
|
425 | m.connect('admin_defaults_repositories', '/defaults/repositories', | |
425 | action='index', conditions={'method': ['GET']}) |
|
426 | action='index', conditions={'method': ['GET']}) | |
426 |
|
427 | |||
427 | # ADMIN DEBUG STYLE ROUTES |
|
428 | # ADMIN DEBUG STYLE ROUTES | |
428 | if str2bool(config.get('debug_style')): |
|
429 | if str2bool(config.get('debug_style')): | |
429 | with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style', |
|
430 | with rmap.submapper(path_prefix=ADMIN_PREFIX + '/debug_style', | |
430 | controller='debug_style') as m: |
|
431 | controller='debug_style') as m: | |
431 | m.connect('debug_style_home', '', |
|
432 | m.connect('debug_style_home', '', | |
432 | action='index', conditions={'method': ['GET']}) |
|
433 | action='index', conditions={'method': ['GET']}) | |
433 | m.connect('debug_style_template', '/t/{t_path}', |
|
434 | m.connect('debug_style_template', '/t/{t_path}', | |
434 | action='template', conditions={'method': ['GET']}) |
|
435 | action='template', conditions={'method': ['GET']}) | |
435 |
|
436 | |||
436 | # ADMIN SETTINGS ROUTES |
|
437 | # ADMIN SETTINGS ROUTES | |
437 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
438 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
438 | controller='admin/settings') as m: |
|
439 | controller='admin/settings') as m: | |
439 |
|
440 | |||
440 | # default |
|
441 | # default | |
441 | m.connect('admin_settings', '/settings', |
|
442 | m.connect('admin_settings', '/settings', | |
442 | action='settings_global_update', |
|
443 | action='settings_global_update', | |
443 | conditions={'method': ['POST']}) |
|
444 | conditions={'method': ['POST']}) | |
444 | m.connect('admin_settings', '/settings', |
|
445 | m.connect('admin_settings', '/settings', | |
445 | action='settings_global', conditions={'method': ['GET']}) |
|
446 | action='settings_global', conditions={'method': ['GET']}) | |
446 |
|
447 | |||
447 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
448 | m.connect('admin_settings_vcs', '/settings/vcs', | |
448 | action='settings_vcs_update', |
|
449 | action='settings_vcs_update', | |
449 | conditions={'method': ['POST']}) |
|
450 | conditions={'method': ['POST']}) | |
450 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
451 | m.connect('admin_settings_vcs', '/settings/vcs', | |
451 | action='settings_vcs', |
|
452 | action='settings_vcs', | |
452 | conditions={'method': ['GET']}) |
|
453 | conditions={'method': ['GET']}) | |
453 | m.connect('admin_settings_vcs', '/settings/vcs', |
|
454 | m.connect('admin_settings_vcs', '/settings/vcs', | |
454 | action='delete_svn_pattern', |
|
455 | action='delete_svn_pattern', | |
455 | conditions={'method': ['DELETE']}) |
|
456 | conditions={'method': ['DELETE']}) | |
456 |
|
457 | |||
457 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
458 | m.connect('admin_settings_mapping', '/settings/mapping', | |
458 | action='settings_mapping_update', |
|
459 | action='settings_mapping_update', | |
459 | conditions={'method': ['POST']}) |
|
460 | conditions={'method': ['POST']}) | |
460 | m.connect('admin_settings_mapping', '/settings/mapping', |
|
461 | m.connect('admin_settings_mapping', '/settings/mapping', | |
461 | action='settings_mapping', conditions={'method': ['GET']}) |
|
462 | action='settings_mapping', conditions={'method': ['GET']}) | |
462 |
|
463 | |||
463 | m.connect('admin_settings_global', '/settings/global', |
|
464 | m.connect('admin_settings_global', '/settings/global', | |
464 | action='settings_global_update', |
|
465 | action='settings_global_update', | |
465 | conditions={'method': ['POST']}) |
|
466 | conditions={'method': ['POST']}) | |
466 | m.connect('admin_settings_global', '/settings/global', |
|
467 | m.connect('admin_settings_global', '/settings/global', | |
467 | action='settings_global', conditions={'method': ['GET']}) |
|
468 | action='settings_global', conditions={'method': ['GET']}) | |
468 |
|
469 | |||
469 | m.connect('admin_settings_visual', '/settings/visual', |
|
470 | m.connect('admin_settings_visual', '/settings/visual', | |
470 | action='settings_visual_update', |
|
471 | action='settings_visual_update', | |
471 | conditions={'method': ['POST']}) |
|
472 | conditions={'method': ['POST']}) | |
472 | m.connect('admin_settings_visual', '/settings/visual', |
|
473 | m.connect('admin_settings_visual', '/settings/visual', | |
473 | action='settings_visual', conditions={'method': ['GET']}) |
|
474 | action='settings_visual', conditions={'method': ['GET']}) | |
474 |
|
475 | |||
475 | m.connect('admin_settings_issuetracker', |
|
476 | m.connect('admin_settings_issuetracker', | |
476 | '/settings/issue-tracker', action='settings_issuetracker', |
|
477 | '/settings/issue-tracker', action='settings_issuetracker', | |
477 | conditions={'method': ['GET']}) |
|
478 | conditions={'method': ['GET']}) | |
478 | m.connect('admin_settings_issuetracker_save', |
|
479 | m.connect('admin_settings_issuetracker_save', | |
479 | '/settings/issue-tracker/save', |
|
480 | '/settings/issue-tracker/save', | |
480 | action='settings_issuetracker_save', |
|
481 | action='settings_issuetracker_save', | |
481 | conditions={'method': ['POST']}) |
|
482 | conditions={'method': ['POST']}) | |
482 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', |
|
483 | m.connect('admin_issuetracker_test', '/settings/issue-tracker/test', | |
483 | action='settings_issuetracker_test', |
|
484 | action='settings_issuetracker_test', | |
484 | conditions={'method': ['POST']}) |
|
485 | conditions={'method': ['POST']}) | |
485 | m.connect('admin_issuetracker_delete', |
|
486 | m.connect('admin_issuetracker_delete', | |
486 | '/settings/issue-tracker/delete', |
|
487 | '/settings/issue-tracker/delete', | |
487 | action='settings_issuetracker_delete', |
|
488 | action='settings_issuetracker_delete', | |
488 | conditions={'method': ['DELETE']}) |
|
489 | conditions={'method': ['DELETE']}) | |
489 |
|
490 | |||
490 | m.connect('admin_settings_email', '/settings/email', |
|
491 | m.connect('admin_settings_email', '/settings/email', | |
491 | action='settings_email_update', |
|
492 | action='settings_email_update', | |
492 | conditions={'method': ['POST']}) |
|
493 | conditions={'method': ['POST']}) | |
493 | m.connect('admin_settings_email', '/settings/email', |
|
494 | m.connect('admin_settings_email', '/settings/email', | |
494 | action='settings_email', conditions={'method': ['GET']}) |
|
495 | action='settings_email', conditions={'method': ['GET']}) | |
495 |
|
496 | |||
496 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
497 | m.connect('admin_settings_hooks', '/settings/hooks', | |
497 | action='settings_hooks_update', |
|
498 | action='settings_hooks_update', | |
498 | conditions={'method': ['POST', 'DELETE']}) |
|
499 | conditions={'method': ['POST', 'DELETE']}) | |
499 | m.connect('admin_settings_hooks', '/settings/hooks', |
|
500 | m.connect('admin_settings_hooks', '/settings/hooks', | |
500 | action='settings_hooks', conditions={'method': ['GET']}) |
|
501 | action='settings_hooks', conditions={'method': ['GET']}) | |
501 |
|
502 | |||
502 | m.connect('admin_settings_search', '/settings/search', |
|
503 | m.connect('admin_settings_search', '/settings/search', | |
503 | action='settings_search', conditions={'method': ['GET']}) |
|
504 | action='settings_search', conditions={'method': ['GET']}) | |
504 |
|
505 | |||
505 | m.connect('admin_settings_system', '/settings/system', |
|
506 | m.connect('admin_settings_system', '/settings/system', | |
506 | action='settings_system', conditions={'method': ['GET']}) |
|
507 | action='settings_system', conditions={'method': ['GET']}) | |
507 |
|
508 | |||
508 | m.connect('admin_settings_system_update', '/settings/system/updates', |
|
509 | m.connect('admin_settings_system_update', '/settings/system/updates', | |
509 | action='settings_system_update', conditions={'method': ['GET']}) |
|
510 | action='settings_system_update', conditions={'method': ['GET']}) | |
510 |
|
511 | |||
511 | m.connect('admin_settings_supervisor', '/settings/supervisor', |
|
512 | m.connect('admin_settings_supervisor', '/settings/supervisor', | |
512 | action='settings_supervisor', conditions={'method': ['GET']}) |
|
513 | action='settings_supervisor', conditions={'method': ['GET']}) | |
513 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', |
|
514 | m.connect('admin_settings_supervisor_log', '/settings/supervisor/{procid}/log', | |
514 | action='settings_supervisor_log', conditions={'method': ['GET']}) |
|
515 | action='settings_supervisor_log', conditions={'method': ['GET']}) | |
515 |
|
516 | |||
516 | m.connect('admin_settings_labs', '/settings/labs', |
|
517 | m.connect('admin_settings_labs', '/settings/labs', | |
517 | action='settings_labs_update', |
|
518 | action='settings_labs_update', | |
518 | conditions={'method': ['POST']}) |
|
519 | conditions={'method': ['POST']}) | |
519 | m.connect('admin_settings_labs', '/settings/labs', |
|
520 | m.connect('admin_settings_labs', '/settings/labs', | |
520 | action='settings_labs', conditions={'method': ['GET']}) |
|
521 | action='settings_labs', conditions={'method': ['GET']}) | |
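Nearly all of the admin routes above repeat one idiom: the same URL is connected several times under a single route name, and the HTTP method condition picks the controller action. Roughly, for the email settings pair (hypothetical standalone mapper, mirroring the real connects):

    m = JSRoutesMapper()
    with m.submapper(path_prefix=ADMIN_PREFIX, controller='admin/settings') as sm:
        # GET renders the form, POST on the very same URL saves it
        sm.connect('admin_settings_email', '/settings/email',
                   action='settings_email', conditions={'method': ['GET']})
        sm.connect('admin_settings_email', '/settings/email',
                   action='settings_email_update', conditions={'method': ['POST']})
    # url('admin_settings_email') always generates '/_admin/settings/email';
    # the request method decides which action handles it at match time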
521 |
|
522 | |||
522 | # ADMIN MY ACCOUNT |
|
523 | # ADMIN MY ACCOUNT | |
523 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
524 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
524 | controller='admin/my_account') as m: |
|
525 | controller='admin/my_account') as m: | |
525 |
|
526 | |||
526 | m.connect('my_account', '/my_account', |
|
527 | m.connect('my_account', '/my_account', | |
527 | action='my_account', conditions={'method': ['GET']}) |
|
528 | action='my_account', conditions={'method': ['GET']}) | |
528 | m.connect('my_account_edit', '/my_account/edit', |
|
529 | m.connect('my_account_edit', '/my_account/edit', | |
529 | action='my_account_edit', conditions={'method': ['GET']}) |
|
530 | action='my_account_edit', conditions={'method': ['GET']}) | |
530 | m.connect('my_account', '/my_account', |
|
531 | m.connect('my_account', '/my_account', | |
531 | action='my_account_update', conditions={'method': ['POST']}) |
|
532 | action='my_account_update', conditions={'method': ['POST']}) | |
532 |
|
533 | |||
533 | m.connect('my_account_password', '/my_account/password', |
|
534 | m.connect('my_account_password', '/my_account/password', | |
534 | action='my_account_password', conditions={'method': ['GET', 'POST']}) |
|
535 | action='my_account_password', conditions={'method': ['GET', 'POST']}) | |
535 |
|
536 | |||
536 | m.connect('my_account_repos', '/my_account/repos', |
|
537 | m.connect('my_account_repos', '/my_account/repos', | |
537 | action='my_account_repos', conditions={'method': ['GET']}) |
|
538 | action='my_account_repos', conditions={'method': ['GET']}) | |
538 |
|
539 | |||
539 | m.connect('my_account_watched', '/my_account/watched', |
|
540 | m.connect('my_account_watched', '/my_account/watched', | |
540 | action='my_account_watched', conditions={'method': ['GET']}) |
|
541 | action='my_account_watched', conditions={'method': ['GET']}) | |
541 |
|
542 | |||
542 | m.connect('my_account_pullrequests', '/my_account/pull_requests', |
|
543 | m.connect('my_account_pullrequests', '/my_account/pull_requests', | |
543 | action='my_account_pullrequests', conditions={'method': ['GET']}) |
|
544 | action='my_account_pullrequests', conditions={'method': ['GET']}) | |
544 |
|
545 | |||
545 | m.connect('my_account_perms', '/my_account/perms', |
|
546 | m.connect('my_account_perms', '/my_account/perms', | |
546 | action='my_account_perms', conditions={'method': ['GET']}) |
|
547 | action='my_account_perms', conditions={'method': ['GET']}) | |
547 |
|
548 | |||
548 | m.connect('my_account_emails', '/my_account/emails', |
|
549 | m.connect('my_account_emails', '/my_account/emails', | |
549 | action='my_account_emails', conditions={'method': ['GET']}) |
|
550 | action='my_account_emails', conditions={'method': ['GET']}) | |
550 | m.connect('my_account_emails', '/my_account/emails', |
|
551 | m.connect('my_account_emails', '/my_account/emails', | |
551 | action='my_account_emails_add', conditions={'method': ['POST']}) |
|
552 | action='my_account_emails_add', conditions={'method': ['POST']}) | |
552 | m.connect('my_account_emails', '/my_account/emails', |
|
553 | m.connect('my_account_emails', '/my_account/emails', | |
553 | action='my_account_emails_delete', conditions={'method': ['DELETE']}) |
|
554 | action='my_account_emails_delete', conditions={'method': ['DELETE']}) | |
554 |
|
555 | |||
555 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', |
|
556 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', | |
556 | action='my_account_auth_tokens', conditions={'method': ['GET']}) |
|
557 | action='my_account_auth_tokens', conditions={'method': ['GET']}) | |
557 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', |
|
558 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', | |
558 | action='my_account_auth_tokens_add', conditions={'method': ['POST']}) |
|
559 | action='my_account_auth_tokens_add', conditions={'method': ['POST']}) | |
559 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', |
|
560 | m.connect('my_account_auth_tokens', '/my_account/auth_tokens', | |
560 | action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']}) |
|
561 | action='my_account_auth_tokens_delete', conditions={'method': ['DELETE']}) | |
561 | m.connect('my_account_notifications', '/my_account/notifications', |
|
562 | m.connect('my_account_notifications', '/my_account/notifications', | |
562 | action='my_notifications', |
|
563 | action='my_notifications', | |
563 | conditions={'method': ['GET']}) |
|
564 | conditions={'method': ['GET']}) | |
564 | m.connect('my_account_notifications_toggle_visibility', |
|
565 | m.connect('my_account_notifications_toggle_visibility', | |
565 | '/my_account/toggle_visibility', |
|
566 | '/my_account/toggle_visibility', | |
566 | action='my_notifications_toggle_visibility', |
|
567 | action='my_notifications_toggle_visibility', | |
567 | conditions={'method': ['POST']}) |
|
568 | conditions={'method': ['POST']}) | |
568 |
|
569 | |||
569 | # NOTIFICATION REST ROUTES |
|
570 | # NOTIFICATION REST ROUTES | |
570 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
571 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
571 | controller='admin/notifications') as m: |
|
572 | controller='admin/notifications') as m: | |
572 | m.connect('notifications', '/notifications', |
|
573 | m.connect('notifications', '/notifications', | |
573 | action='index', conditions={'method': ['GET']}) |
|
574 | action='index', conditions={'method': ['GET']}) | |
574 | m.connect('notifications_mark_all_read', '/notifications/mark_all_read', |
|
575 | m.connect('notifications_mark_all_read', '/notifications/mark_all_read', | |
575 | action='mark_all_read', conditions={'method': ['POST']}) |
|
576 | action='mark_all_read', conditions={'method': ['POST']}) | |
576 | m.connect('/notifications/{notification_id}', |
|
577 | m.connect('/notifications/{notification_id}', | |
577 | action='update', conditions={'method': ['PUT']}) |
|
578 | action='update', conditions={'method': ['PUT']}) | |
578 | m.connect('/notifications/{notification_id}', |
|
579 | m.connect('/notifications/{notification_id}', | |
579 | action='delete', conditions={'method': ['DELETE']}) |
|
580 | action='delete', conditions={'method': ['DELETE']}) | |
580 | m.connect('notification', '/notifications/{notification_id}', |
|
581 | m.connect('notification', '/notifications/{notification_id}', | |
581 | action='show', conditions={'method': ['GET']}) |
|
582 | action='show', conditions={'method': ['GET']}) | |
582 |
|
583 | |||
583 | # ADMIN GIST |
|
584 | # ADMIN GIST | |
584 | with rmap.submapper(path_prefix=ADMIN_PREFIX, |
|
585 | with rmap.submapper(path_prefix=ADMIN_PREFIX, | |
585 | controller='admin/gists') as m: |
|
586 | controller='admin/gists') as m: | |
586 | m.connect('gists', '/gists', |
|
587 | m.connect('gists', '/gists', | |
587 | action='create', conditions={'method': ['POST']}) |
|
588 | action='create', conditions={'method': ['POST']}) | |
588 | m.connect('gists', '/gists', jsroute=True, |
|
589 | m.connect('gists', '/gists', jsroute=True, | |
589 | action='index', conditions={'method': ['GET']}) |
|
590 | action='index', conditions={'method': ['GET']}) | |
590 | m.connect('new_gist', '/gists/new', jsroute=True, |
|
591 | m.connect('new_gist', '/gists/new', jsroute=True, | |
591 | action='new', conditions={'method': ['GET']}) |
|
592 | action='new', conditions={'method': ['GET']}) | |
592 |
|
593 | |||
593 | m.connect('/gists/{gist_id}', |
|
594 | m.connect('/gists/{gist_id}', | |
594 | action='delete', conditions={'method': ['DELETE']}) |
|
595 | action='delete', conditions={'method': ['DELETE']}) | |
595 | m.connect('edit_gist', '/gists/{gist_id}/edit', |
|
596 | m.connect('edit_gist', '/gists/{gist_id}/edit', | |
596 | action='edit_form', conditions={'method': ['GET']}) |
|
597 | action='edit_form', conditions={'method': ['GET']}) | |
597 | m.connect('edit_gist', '/gists/{gist_id}/edit', |
|
598 | m.connect('edit_gist', '/gists/{gist_id}/edit', | |
598 | action='edit', conditions={'method': ['POST']}) |
|
599 | action='edit', conditions={'method': ['POST']}) | |
599 | m.connect( |
|
600 | m.connect( | |
600 | 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision', |
|
601 | 'edit_gist_check_revision', '/gists/{gist_id}/edit/check_revision', | |
601 | action='check_revision', conditions={'method': ['GET']}) |
|
602 | action='check_revision', conditions={'method': ['GET']}) | |
602 |
|
603 | |||
603 | m.connect('gist', '/gists/{gist_id}', |
|
604 | m.connect('gist', '/gists/{gist_id}', | |
604 | action='show', conditions={'method': ['GET']}) |
|
605 | action='show', conditions={'method': ['GET']}) | |
605 | m.connect('gist_rev', '/gists/{gist_id}/{revision}', |
|
606 | m.connect('gist_rev', '/gists/{gist_id}/{revision}', | |
606 | revision='tip', |
|
607 | revision='tip', | |
607 | action='show', conditions={'method': ['GET']}) |
|
608 | action='show', conditions={'method': ['GET']}) | |
608 | m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}', |
|
609 | m.connect('formatted_gist', '/gists/{gist_id}/{revision}/{format}', | |
609 | revision='tip', |
|
610 | revision='tip', | |
610 | action='show', conditions={'method': ['GET']}) |
|
611 | action='show', conditions={'method': ['GET']}) | |
611 | m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}', |
|
612 | m.connect('formatted_gist_file', '/gists/{gist_id}/{revision}/{format}/{f_path}', | |
612 | revision='tip', |
|
613 | revision='tip', | |
613 | action='show', conditions={'method': ['GET']}, |
|
614 | action='show', conditions={'method': ['GET']}, | |
614 | requirements=URL_NAME_REQUIREMENTS) |
|
615 | requirements=URL_NAME_REQUIREMENTS) | |
615 |
|
616 | |||
    # ADMIN MAIN PAGES
    with rmap.submapper(path_prefix=ADMIN_PREFIX,
                        controller='admin/admin') as m:
        m.connect('admin_home', '', action='index')
        m.connect('admin_add_repo', '/add_repo/{new_repo:[a-z0-9\. _-]*}',
                  action='add_repo')
        m.connect(
            'pull_requests_global_0', '/pull_requests/{pull_request_id:[0-9]+}',
            action='pull_requests')
        m.connect(
            'pull_requests_global', '/pull-requests/{pull_request_id:[0-9]+}',
            action='pull_requests')


    # USER JOURNAL
    rmap.connect('journal', '%s/journal' % (ADMIN_PREFIX,),
                 controller='journal', action='index')
    rmap.connect('journal_rss', '%s/journal/rss' % (ADMIN_PREFIX,),
                 controller='journal', action='journal_rss')
    rmap.connect('journal_atom', '%s/journal/atom' % (ADMIN_PREFIX,),
                 controller='journal', action='journal_atom')

    rmap.connect('public_journal', '%s/public_journal' % (ADMIN_PREFIX,),
                 controller='journal', action='public_journal')

    rmap.connect('public_journal_rss', '%s/public_journal/rss' % (ADMIN_PREFIX,),
                 controller='journal', action='public_journal_rss')

    rmap.connect('public_journal_rss_old', '%s/public_journal_rss' % (ADMIN_PREFIX,),
                 controller='journal', action='public_journal_rss')

    rmap.connect('public_journal_atom',
                 '%s/public_journal/atom' % (ADMIN_PREFIX,), controller='journal',
                 action='public_journal_atom')

    rmap.connect('public_journal_atom_old',
                 '%s/public_journal_atom' % (ADMIN_PREFIX,), controller='journal',
                 action='public_journal_atom')

    rmap.connect('toggle_following', '%s/toggle_following' % (ADMIN_PREFIX,),
                 controller='journal', action='toggle_following', jsroute=True,
                 conditions={'method': ['POST']})
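The admin block above uses `rmap.submapper(...)` as a context manager so that every route inside it shares the same URL prefix and controller, and each `m.connect()` only states what differs per route. A simplified sketch of that idea (assumed behaviour, not the real implementation; '/_admin' is used here only as an example prefix):

import contextlib

class SketchMapper(object):
    def __init__(self):
        self.routes = []

    def connect(self, name, pattern, **options):
        self.routes.append((name, pattern, options))

    @contextlib.contextmanager
    def submapper(self, path_prefix='', **defaults):
        parent = self

        class _Sub(object):
            def connect(self, name, pattern, **options):
                # Merge the shared defaults with per-route options and prefix the path.
                merged = dict(defaults, **options)
                parent.connect(name, path_prefix + pattern, **merged)

        yield _Sub()


rmap = SketchMapper()
with rmap.submapper(path_prefix='/_admin', controller='admin/admin') as m:
    m.connect('admin_home', '', action='index')

print(rmap.routes)
# [('admin_home', '/_admin', {'controller': 'admin/admin', 'action': 'index'})]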
    # FULL TEXT SEARCH
    rmap.connect('search', '%s/search' % (ADMIN_PREFIX,),
                 controller='search')
    rmap.connect('search_repo_home', '/{repo_name}/search',
                 controller='search',
                 action='index',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    # FEEDS
    rmap.connect('rss_feed_home', '/{repo_name}/feed/rss',
                 controller='feed', action='rss',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('atom_feed_home', '/{repo_name}/feed/atom',
                 controller='feed', action='atom',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
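Many of the repository-scoped routes gate matching on `conditions={'function': check_repo}`: the condition function receives the WSGI environ and the match dictionary and must return a truthy value for the route to be accepted, which is how `{repo_name}` URLs are rejected when no such repository exists. A hedged sketch of that contract (the real `check_repo` lives earlier in this module and does a repository lookup, not a set membership test):

# Sketch of the conditions-function contract assumed by the routes above.
def check_repo_sketch(environ, match_dict):
    """Return True when the matched repo_name looks like a real repository."""
    known_repos = {'rhodecode-enterprise-ce', 'docs'}  # stand-in data
    repo_name = match_dict.get('repo_name', '')
    return repo_name in known_repos


# A mapper calls the predicate only after the URL pattern itself matches.
print(check_repo_sketch({}, {'repo_name': 'docs'}))      # True
print(check_repo_sketch({}, {'repo_name': 'missing'}))   # False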
    #==========================================================================
    # REPOSITORY ROUTES
    #==========================================================================

    rmap.connect('repo_creating_home', '/{repo_name}/repo_creating',
                 controller='admin/repos', action='repo_creating',
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_check_home', '/{repo_name}/crepo_check',
                 controller='admin/repos', action='repo_check',
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('repo_stats', '/{repo_name}/repo_stats/{commit_id}',
                 controller='summary', action='repo_stats',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('repo_refs_data', '/{repo_name}/refs-data',
                 controller='summary', action='repo_refs_data', jsroute=True,
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_refs_changelog_data', '/{repo_name}/refs-data-changelog',
                 controller='summary', action='repo_refs_changelog_data',
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('changeset_home', '/{repo_name}/changeset/{revision}',
                 controller='changeset', revision='tip', jsroute=True,
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('changeset_children', '/{repo_name}/changeset_children/{revision}',
                 controller='changeset', revision='tip', action='changeset_children',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('changeset_parents', '/{repo_name}/changeset_parents/{revision}',
                 controller='changeset', revision='tip', action='changeset_parents',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
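Several routes here carry `jsroute=True`. The flag is just an extra keyword stored with the route; it appears to mark which named routes get exported to the JavaScript side so client code can build URLs from the same definitions. A rough sketch of such an export step, purely illustrative and not the actual RhodeCode implementation:

import json

# Pretend registry of (name, pattern, options) tuples as collected by the mapper.
routes = [
    ('changeset_home', '/{repo_name}/changeset/{revision}', {'jsroute': True}),
    ('repo_check_home', '/{repo_name}/crepo_check', {}),
]

# Keep only routes flagged for JavaScript and emit them as JSON the frontend
# could load to generate URLs client-side.
js_routes = {name: pattern for name, pattern, opts in routes if opts.get('jsroute')}
print(json.dumps(js_routes, indent=2))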
    # repo edit options
    rmap.connect('edit_repo', '/{repo_name}/settings', jsroute=True,
                 controller='admin/repos', action='edit',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_perms', '/{repo_name}/settings/permissions',
                 jsroute=True,
                 controller='admin/repos', action='edit_permissions',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_perms_update', '/{repo_name}/settings/permissions',
                 controller='admin/repos', action='edit_permissions_update',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_fields', '/{repo_name}/settings/fields',
                 controller='admin/repos', action='edit_fields',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('create_repo_fields', '/{repo_name}/settings/fields/new',
                 controller='admin/repos', action='create_repo_field',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('delete_repo_fields', '/{repo_name}/settings/fields/{field_id}',
                 controller='admin/repos', action='delete_repo_field',
                 conditions={'method': ['DELETE'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_advanced', '/{repo_name}/settings/advanced',
                 controller='admin/repos', action='edit_advanced',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_advanced_locking', '/{repo_name}/settings/advanced/locking',
                 controller='admin/repos', action='edit_advanced_locking',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('toggle_locking', '/{repo_name}/settings/advanced/locking_toggle',
                 controller='admin/repos', action='toggle_locking',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_advanced_journal', '/{repo_name}/settings/advanced/journal',
                 controller='admin/repos', action='edit_advanced_journal',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_advanced_fork', '/{repo_name}/settings/advanced/fork',
                 controller='admin/repos', action='edit_advanced_fork',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
                 controller='admin/repos', action='edit_caches_form',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_caches', '/{repo_name}/settings/caches',
                 controller='admin/repos', action='edit_caches',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
                 controller='admin/repos', action='edit_remote_form',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_remote', '/{repo_name}/settings/remote',
                 controller='admin/repos', action='edit_remote',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
                 controller='admin/repos', action='edit_statistics_form',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('edit_repo_statistics', '/{repo_name}/settings/statistics',
                 controller='admin/repos', action='edit_statistics',
                 conditions={'method': ['PUT'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_settings_issuetracker',
                 '/{repo_name}/settings/issue-tracker',
                 controller='admin/repos', action='repo_issuetracker',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_issuetracker_test',
                 '/{repo_name}/settings/issue-tracker/test',
                 controller='admin/repos', action='repo_issuetracker_test',
                 conditions={'method': ['POST'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_issuetracker_delete',
                 '/{repo_name}/settings/issue-tracker/delete',
                 controller='admin/repos', action='repo_issuetracker_delete',
                 conditions={'method': ['DELETE'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_issuetracker_save',
                 '/{repo_name}/settings/issue-tracker/save',
                 controller='admin/repos', action='repo_issuetracker_save',
                 conditions={'method': ['POST'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
                 controller='admin/repos', action='repo_settings_vcs_update',
                 conditions={'method': ['POST'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
                 controller='admin/repos', action='repo_settings_vcs',
                 conditions={'method': ['GET'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('repo_vcs_settings', '/{repo_name}/settings/vcs',
                 controller='admin/repos', action='repo_delete_svn_pattern',
                 conditions={'method': ['DELETE'], 'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
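Note how the same URL can be registered several times with different method conditions: `/{repo_name}/settings/vcs` above maps GET, POST and DELETE requests to three different controller actions. A simplified dispatch table illustrating the effect (illustration only, not how the mapper is implemented):

# Sketch: the HTTP method condition decides which action handles the request.
VCS_SETTINGS_ACTIONS = {
    'GET': 'repo_settings_vcs',
    'POST': 'repo_settings_vcs_update',
    'DELETE': 'repo_delete_svn_pattern',
}

def dispatch_vcs_settings(method):
    """Return the controller action a request method would be routed to."""
    return VCS_SETTINGS_ACTIONS.get(method.upper(), 'method not allowed')

print(dispatch_vcs_settings('get'))     # repo_settings_vcs
print(dispatch_vcs_settings('delete'))  # repo_delete_svn_pattern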
    # still working url for backward compat.
    rmap.connect('raw_changeset_home_depraced',
                 '/{repo_name}/raw-changeset/{revision}',
                 controller='changeset', action='changeset_raw',
                 revision='tip', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    # new URLs
    rmap.connect('changeset_raw_home',
                 '/{repo_name}/changeset-diff/{revision}',
                 controller='changeset', action='changeset_raw',
                 revision='tip', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_patch_home',
                 '/{repo_name}/changeset-patch/{revision}',
                 controller='changeset', action='changeset_patch',
                 revision='tip', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_download_home',
                 '/{repo_name}/changeset-download/{revision}',
                 controller='changeset', action='changeset_download',
                 revision='tip', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_comment',
                 '/{repo_name}/changeset/{revision}/comment', jsroute=True,
                 controller='changeset', revision='tip', action='comment',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_comment_preview',
                 '/{repo_name}/changeset/comment/preview', jsroute=True,
                 controller='changeset', action='preview_comment',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changeset_comment_delete',
                 '/{repo_name}/changeset/comment/{comment_id}/delete',
                 controller='changeset', action='delete_comment',
                 conditions={'function': check_repo, 'method': ['DELETE']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('changeset_info', '/{repo_name}/changeset_info/{revision}',
                 controller='changeset', action='changeset_info',
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('compare_home',
                 '/{repo_name}/compare',
                 controller='compare', action='index',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('compare_url',
                 '/{repo_name}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}',
                 controller='compare', action='compare',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
    rmap.connect('pullrequest_home',
                 '/{repo_name}/pull-request/new', controller='pullrequests',
                 action='index', conditions={'function': check_repo,
                                             'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest',
                 '/{repo_name}/pull-request/new', controller='pullrequests',
                 action='create', conditions={'function': check_repo,
                                              'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_repo_refs',
                 '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
                 controller='pullrequests',
                 action='get_repo_refs',
                 conditions={'function': check_repo, 'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_repo_destinations',
                 '/{repo_name}/pull-request/repo-destinations',
                 controller='pullrequests',
                 action='get_repo_destinations',
                 conditions={'function': check_repo, 'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_show',
                 '/{repo_name}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='show', conditions={'function': check_repo,
                                            'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('pullrequest_update',
                 '/{repo_name}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='update', conditions={'function': check_repo,
                                              'method': ['PUT']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_merge',
                 '/{repo_name}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='merge', conditions={'function': check_repo,
                                             'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('pullrequest_delete',
                 '/{repo_name}/pull-request/{pull_request_id}',
                 controller='pullrequests',
                 action='delete', conditions={'function': check_repo,
                                              'method': ['DELETE']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('pullrequest_show_all',
                 '/{repo_name}/pull-request',
                 controller='pullrequests',
                 action='show_all', conditions={'function': check_repo,
                                                'method': ['GET']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_comment',
                 '/{repo_name}/pull-request-comment/{pull_request_id}',
                 controller='pullrequests',
                 action='comment', conditions={'function': check_repo,
                                               'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('pullrequest_comment_delete',
                 '/{repo_name}/pull-request-comment/{comment_id}/delete',
                 controller='pullrequests', action='delete_comment',
                 conditions={'function': check_repo, 'method': ['DELETE']},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
    rmap.connect('summary_home_explicit', '/{repo_name}/summary',
                 controller='summary', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('branches_home', '/{repo_name}/branches',
                 controller='branches', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('tags_home', '/{repo_name}/tags',
                 controller='tags', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('bookmarks_home', '/{repo_name}/bookmarks',
                 controller='bookmarks', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changelog_home', '/{repo_name}/changelog', jsroute=True,
                 controller='changelog', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changelog_summary_home', '/{repo_name}/changelog_summary',
                 controller='changelog', action='changelog_summary',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('changelog_file_home',
                 '/{repo_name}/changelog/{revision}/{f_path}',
                 controller='changelog', f_path=None,
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('changelog_details', '/{repo_name}/changelog_details/{cs}',
                 controller='changelog', action='changelog_details',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)
    rmap.connect('files_home', '/{repo_name}/files/{revision}/{f_path}',
                 controller='files', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_home_simple_catchrev',
                 '/{repo_name}/files/{revision}',
                 controller='files', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_home_simple_catchall',
                 '/{repo_name}/files',
                 controller='files', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_history_home',
                 '/{repo_name}/history/{revision}/{f_path}',
                 controller='files', action='history', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_authors_home',
                 '/{repo_name}/authors/{revision}/{f_path}',
                 controller='files', action='authors', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_diff_home', '/{repo_name}/diff/{f_path}',
                 controller='files', action='diff', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_diff_2way_home',
                 '/{repo_name}/diff-2way/{f_path}',
                 controller='files', action='diff_2way', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_rawfile_home',
                 '/{repo_name}/rawfile/{revision}/{f_path}',
                 controller='files', action='rawfile', revision='tip',
                 f_path='', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_raw_home',
                 '/{repo_name}/raw/{revision}/{f_path}',
                 controller='files', action='raw', revision='tip', f_path='',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_render_home',
                 '/{repo_name}/render/{revision}/{f_path}',
                 controller='files', action='index', revision='tip', f_path='',
                 rendered=True, conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_annotate_home',
                 '/{repo_name}/annotate/{revision}/{f_path}',
                 controller='files', action='index', revision='tip',
                 f_path='', annotate=True, conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_edit',
                 '/{repo_name}/edit/{revision}/{f_path}',
                 controller='files', action='edit', revision='tip',
                 f_path='',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_edit_home',
                 '/{repo_name}/edit/{revision}/{f_path}',
                 controller='files', action='edit_home', revision='tip',
                 f_path='', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_add',
                 '/{repo_name}/add/{revision}/{f_path}',
                 controller='files', action='add', revision='tip',
                 f_path='',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_add_home',
                 '/{repo_name}/add/{revision}/{f_path}',
                 controller='files', action='add_home', revision='tip',
                 f_path='', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_delete',
                 '/{repo_name}/delete/{revision}/{f_path}',
                 controller='files', action='delete', revision='tip',
                 f_path='',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_delete_home',
                 '/{repo_name}/delete/{revision}/{f_path}',
                 controller='files', action='delete_home', revision='tip',
                 f_path='', conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('files_archive_home', '/{repo_name}/archive/{fname}',
                 controller='files', action='archivefile',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_nodelist_home',
                 '/{repo_name}/nodelist/{revision}/{f_path}',
                 controller='files', action='nodelist',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)

    rmap.connect('files_nodetree_full',
                 '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
                 controller='files', action='nodetree_full',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS, jsroute=True)
    rmap.connect('repo_fork_create_home', '/{repo_name}/fork',
                 controller='forks', action='fork_create',
                 conditions={'function': check_repo, 'method': ['POST']},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('repo_fork_home', '/{repo_name}/fork',
                 controller='forks', action='fork',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('repo_forks_home', '/{repo_name}/forks',
                 controller='forks', action='forks',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    rmap.connect('repo_followers_home', '/{repo_name}/followers',
                 controller='followers', action='followers',
                 conditions={'function': check_repo},
                 requirements=URL_NAME_REQUIREMENTS)

    # must be here for proper group/repo catching pattern
    _connect_with_slash(
        rmap, 'repo_group_home', '/{group_name}',
        controller='home', action='index_repo_group',
        conditions={'function': check_group},
        requirements=URL_NAME_REQUIREMENTS)

    # catch all, at the end
    _connect_with_slash(
        rmap, 'summary_home', '/{repo_name}', jsroute=True,
        controller='summary', action='index',
        conditions={'function': check_repo},
        requirements=URL_NAME_REQUIREMENTS)

    return rmap


def _connect_with_slash(mapper, name, path, *args, **kwargs):
    """
    Connect a route with an optional trailing slash in `path`.
    """
    mapper.connect(name + '_slash', path + '/', *args, **kwargs)
    mapper.connect(name, path, *args, **kwargs)
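`_connect_with_slash` simply registers the same route twice, once with and once without a trailing slash, under two names, so both `/{repo_name}` and `/{repo_name}/` resolve. A minimal demonstration with a stub mapper, assuming the helper above is in scope:

class StubMapper(object):
    def __init__(self):
        self.connected = []

    def connect(self, name, path, *args, **kwargs):
        # Record only what matters for the demonstration: name and path.
        self.connected.append((name, path))


stub = StubMapper()
_connect_with_slash(stub, 'summary_home', '/{repo_name}', controller='summary')
print(stub.connected)
# [('summary_home_slash', '/{repo_name}/'), ('summary_home', '/{repo_name}')]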
@@ -1,200 +1,238 b'' | |||||
# -*- coding: utf-8 -*-

# Copyright (C) 2012-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging

from rhodecode.model.db import Repository, Integration, RepoGroup
from rhodecode.config.routing import (
    ADMIN_PREFIX, add_route_requirements, URL_NAME_REQUIREMENTS)
from rhodecode.integrations import integration_type_registry

log = logging.getLogger(__name__)


def includeme(config):

    # global integrations

    config.add_route('global_integrations_new',
                     ADMIN_PREFIX + '/integrations/new')
    config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
                    attr='new_integration',
                    renderer='rhodecode:templates/admin/integrations/new.html',
                    request_method='GET',
                    route_name='global_integrations_new')

    config.add_route('global_integrations_home',
                     ADMIN_PREFIX + '/integrations')
    config.add_route('global_integrations_list',
                     ADMIN_PREFIX + '/integrations/{integration}')
    for route_name in ['global_integrations_home', 'global_integrations_list']:
        config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
                        attr='index',
                        renderer='rhodecode:templates/admin/integrations/list.html',
                        request_method='GET',
                        route_name=route_name)

    config.add_route('global_integrations_create',
                     ADMIN_PREFIX + '/integrations/{integration}/new',
                     custom_predicates=(valid_integration,))
    config.add_route('global_integrations_edit',
                     ADMIN_PREFIX + '/integrations/{integration}/{integration_id}',
                     custom_predicates=(valid_integration,))


    for route_name in ['global_integrations_create', 'global_integrations_edit']:
        config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
                        attr='settings_get',
                        renderer='rhodecode:templates/admin/integrations/form.html',
                        request_method='GET',
                        route_name=route_name)
        config.add_view('rhodecode.integrations.views.GlobalIntegrationsView',
                        attr='settings_post',
                        renderer='rhodecode:templates/admin/integrations/form.html',
                        request_method='POST',
                        route_name=route_name)


    # repo group integrations
    config.add_route('repo_group_integrations_home',
                     add_route_requirements(
                         '{repo_group_name}/settings/integrations',
                         URL_NAME_REQUIREMENTS
                     ),
                     custom_predicates=(valid_repo_group,)
                     )
    config.add_route('repo_group_integrations_list',
                     add_route_requirements(
                         '{repo_group_name}/settings/integrations/{integration}',
                         URL_NAME_REQUIREMENTS
                     ),
                     custom_predicates=(valid_repo_group, valid_integration))
    for route_name in ['repo_group_integrations_home', 'repo_group_integrations_list']:
        config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView',
                        attr='index',
                        renderer='rhodecode:templates/admin/integrations/list.html',
                        request_method='GET',
                        route_name=route_name)

    config.add_route('repo_group_integrations_new',
                     add_route_requirements(
                         '{repo_group_name}/settings/integrations/new',
                         URL_NAME_REQUIREMENTS
                     ),
                     custom_predicates=(valid_repo_group,))
    config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView',
                    attr='new_integration',
                    renderer='rhodecode:templates/admin/integrations/new.html',
                    request_method='GET',
                    route_name='repo_group_integrations_new')

    config.add_route('repo_group_integrations_create',
                     add_route_requirements(
                         '{repo_group_name}/settings/integrations/{integration}/new',
                         URL_NAME_REQUIREMENTS
                     ),
                     custom_predicates=(valid_repo_group, valid_integration))
    config.add_route('repo_group_integrations_edit',
                     add_route_requirements(
                         '{repo_group_name}/settings/integrations/{integration}/{integration_id}',
                         URL_NAME_REQUIREMENTS
                     ),
                     custom_predicates=(valid_repo_group, valid_integration))
|
120 | for route_name in ['repo_group_integrations_edit', 'repo_group_integrations_create']: | |||
|
121 | config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView', | |||
|
122 | attr='settings_get', | |||
|
123 | renderer='rhodecode:templates/admin/integrations/form.html', | |||
|
124 | request_method='GET', | |||
|
125 | route_name=route_name) | |||
|
126 | config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView', | |||
|
127 | attr='settings_post', | |||
|
128 | renderer='rhodecode:templates/admin/integrations/form.html', | |||
60 | request_method='POST', |
|
129 | request_method='POST', | |
61 | route_name=route_name) |
|
130 | route_name=route_name) | |
62 |
|
131 | |||
63 |
|
132 | |||
64 | # repo integrations |
|
133 | # repo integrations | |
65 | config.add_route('repo_integrations_home', |
|
134 | config.add_route('repo_integrations_home', | |
66 | add_route_requirements( |
|
135 | add_route_requirements( | |
67 | '{repo_name}/settings/integrations', |
|
136 | '{repo_name}/settings/integrations', | |
68 | URL_NAME_REQUIREMENTS |
|
137 | URL_NAME_REQUIREMENTS | |
69 | ), |
|
138 | ), | |
70 | custom_predicates=(valid_repo,)) |
|
139 | custom_predicates=(valid_repo,)) | |
71 | config.add_route('repo_integrations_list', |
|
140 | config.add_route('repo_integrations_list', | |
72 | add_route_requirements( |
|
141 | add_route_requirements( | |
73 | '{repo_name}/settings/integrations/{integration}', |
|
142 | '{repo_name}/settings/integrations/{integration}', | |
74 | URL_NAME_REQUIREMENTS |
|
143 | URL_NAME_REQUIREMENTS | |
75 | ), |
|
144 | ), | |
76 | custom_predicates=(valid_repo, valid_integration)) |
|
145 | custom_predicates=(valid_repo, valid_integration)) | |
77 | for route_name in ['repo_integrations_home', 'repo_integrations_list']: |
|
146 | for route_name in ['repo_integrations_home', 'repo_integrations_list']: | |
78 | config.add_view('rhodecode.integrations.views.RepoIntegrationsView', |
|
147 | config.add_view('rhodecode.integrations.views.RepoIntegrationsView', | |
79 | attr='index', |
|
148 | attr='index', | |
80 | request_method='GET', |
|
149 | request_method='GET', | |
|
150 | renderer='rhodecode:templates/admin/integrations/list.html', | |||
81 | route_name=route_name) |
|
151 | route_name=route_name) | |
82 |
|
152 | |||
|
153 | config.add_route('repo_integrations_new', | |||
|
154 | add_route_requirements( | |||
|
155 | '{repo_name}/settings/integrations/new', | |||
|
156 | URL_NAME_REQUIREMENTS | |||
|
157 | ), | |||
|
158 | custom_predicates=(valid_repo,)) | |||
|
159 | config.add_view('rhodecode.integrations.views.RepoIntegrationsView', | |||
|
160 | attr='new_integration', | |||
|
161 | renderer='rhodecode:templates/admin/integrations/new.html', | |||
|
162 | request_method='GET', | |||
|
163 | route_name='repo_integrations_new') | |||
|
164 | ||||
83 | config.add_route('repo_integrations_create', |
|
165 | config.add_route('repo_integrations_create', | |
84 | add_route_requirements( |
|
166 | add_route_requirements( | |
85 | '{repo_name}/settings/integrations/{integration}/new', |
|
167 | '{repo_name}/settings/integrations/{integration}/new', | |
86 | URL_NAME_REQUIREMENTS |
|
168 | URL_NAME_REQUIREMENTS | |
87 | ), |
|
169 | ), | |
88 | custom_predicates=(valid_repo, valid_integration)) |
|
170 | custom_predicates=(valid_repo, valid_integration)) | |
89 | config.add_route('repo_integrations_edit', |
|
171 | config.add_route('repo_integrations_edit', | |
90 | add_route_requirements( |
|
172 | add_route_requirements( | |
91 | '{repo_name}/settings/integrations/{integration}/{integration_id}', |
|
173 | '{repo_name}/settings/integrations/{integration}/{integration_id}', | |
92 | URL_NAME_REQUIREMENTS |
|
174 | URL_NAME_REQUIREMENTS | |
93 | ), |
|
175 | ), | |
94 | custom_predicates=(valid_repo, valid_integration)) |
|
176 | custom_predicates=(valid_repo, valid_integration)) | |
95 | for route_name in ['repo_integrations_edit', 'repo_integrations_create']: |
|
177 | for route_name in ['repo_integrations_edit', 'repo_integrations_create']: | |
96 | config.add_view('rhodecode.integrations.views.RepoIntegrationsView', |
|
178 | config.add_view('rhodecode.integrations.views.RepoIntegrationsView', | |
97 | attr='settings_get', |
|
179 | attr='settings_get', | |
98 |
renderer='rhodecode:templates/admin/integrations/ |
|
180 | renderer='rhodecode:templates/admin/integrations/form.html', | |
99 | request_method='GET', |
|
181 | request_method='GET', | |
100 | route_name=route_name) |
|
182 | route_name=route_name) | |
101 | config.add_view('rhodecode.integrations.views.RepoIntegrationsView', |
|
183 | config.add_view('rhodecode.integrations.views.RepoIntegrationsView', | |
102 | attr='settings_post', |
|
184 | attr='settings_post', | |
103 |
renderer='rhodecode:templates/admin/integrations/ |
|
185 | renderer='rhodecode:templates/admin/integrations/form.html', | |
104 | request_method='POST', |
|
|||
105 | route_name=route_name) |
|
|||
106 |
|
||||
107 |
|
||||
108 | # repo group integrations |
|
|||
109 | config.add_route('repo_group_integrations_home', |
|
|||
110 | add_route_requirements( |
|
|||
111 | '{repo_group_name}/settings/integrations', |
|
|||
112 | URL_NAME_REQUIREMENTS |
|
|||
113 | ), |
|
|||
114 | custom_predicates=(valid_repo_group,)) |
|
|||
115 | config.add_route('repo_group_integrations_list', |
|
|||
116 | add_route_requirements( |
|
|||
117 | '{repo_group_name}/settings/integrations/{integration}', |
|
|||
118 | URL_NAME_REQUIREMENTS |
|
|||
119 | ), |
|
|||
120 | custom_predicates=(valid_repo_group, valid_integration)) |
|
|||
121 | for route_name in ['repo_group_integrations_home', 'repo_group_integrations_list']: |
|
|||
122 | config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView', |
|
|||
123 | attr='index', |
|
|||
124 | request_method='GET', |
|
|||
125 | route_name=route_name) |
|
|||
126 |
|
||||
127 | config.add_route('repo_group_integrations_create', |
|
|||
128 | add_route_requirements( |
|
|||
129 | '{repo_group_name}/settings/integrations/{integration}/new', |
|
|||
130 | URL_NAME_REQUIREMENTS |
|
|||
131 | ), |
|
|||
132 | custom_predicates=(valid_repo_group, valid_integration)) |
|
|||
133 | config.add_route('repo_group_integrations_edit', |
|
|||
134 | add_route_requirements( |
|
|||
135 | '{repo_group_name}/settings/integrations/{integration}/{integration_id}', |
|
|||
136 | URL_NAME_REQUIREMENTS |
|
|||
137 | ), |
|
|||
138 | custom_predicates=(valid_repo_group, valid_integration)) |
|
|||
139 | for route_name in ['repo_group_integrations_edit', 'repo_group_integrations_create']: |
|
|||
140 | config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView', |
|
|||
141 | attr='settings_get', |
|
|||
142 | renderer='rhodecode:templates/admin/integrations/edit.html', |
|
|||
143 | request_method='GET', |
|
|||
144 | route_name=route_name) |
|
|||
145 | config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView', |
|
|||
146 | attr='settings_post', |
|
|||
147 | renderer='rhodecode:templates/admin/integrations/edit.html', |
|
|||
148 | request_method='POST', |
|
186 | request_method='POST', | |
149 | route_name=route_name) |
|
187 | route_name=route_name) | |
150 |
|
188 | |||
151 |
|
189 | |||
152 | def valid_repo(info, request): |
|
190 | def valid_repo(info, request): | |
153 | repo = Repository.get_by_repo_name(info['match']['repo_name']) |
|
191 | repo = Repository.get_by_repo_name(info['match']['repo_name']) | |
154 | if repo: |
|
192 | if repo: | |
155 | return True |
|
193 | return True | |
156 |
|
194 | |||
157 |
|
195 | |||
158 | def valid_repo_group(info, request): |
|
196 | def valid_repo_group(info, request): | |
159 | repo_group = RepoGroup.get_by_group_name(info['match']['repo_group_name']) |
|
197 | repo_group = RepoGroup.get_by_group_name(info['match']['repo_group_name']) | |
160 | if repo_group: |
|
198 | if repo_group: | |
161 | return True |
|
199 | return True | |
162 | return False |
|
200 | return False | |
163 |
|
201 | |||
164 |
|
202 | |||
165 | def valid_integration(info, request): |
|
203 | def valid_integration(info, request): | |
166 | integration_type = info['match']['integration'] |
|
204 | integration_type = info['match']['integration'] | |
167 | integration_id = info['match'].get('integration_id') |
|
205 | integration_id = info['match'].get('integration_id') | |
168 | repo_name = info['match'].get('repo_name') |
|
206 | repo_name = info['match'].get('repo_name') | |
169 | repo_group_name = info['match'].get('repo_group_name') |
|
207 | repo_group_name = info['match'].get('repo_group_name') | |
170 |
|
208 | |||
171 | if integration_type not in integration_type_registry: |
|
209 | if integration_type not in integration_type_registry: | |
172 | return False |
|
210 | return False | |
173 |
|
211 | |||
174 | repo, repo_group = None, None |
|
212 | repo, repo_group = None, None | |
175 | if repo_name: |
|
213 | if repo_name: | |
176 | repo = Repository.get_by_repo_name(repo_name) |
|
214 | repo = Repository.get_by_repo_name(repo_name) | |
177 | if not repo: |
|
215 | if not repo: | |
178 | return False |
|
216 | return False | |
179 |
|
217 | |||
180 | if repo_group_name: |
|
218 | if repo_group_name: | |
181 | repo_group = RepoGroup.get_by_group_name(repo_group_name) |
|
219 | repo_group = RepoGroup.get_by_group_name(repo_group_name) | |
182 | if not repo_group: |
|
220 | if not repo_group: | |
183 | return False |
|
221 | return False | |
184 |
|
222 | |||
185 | if repo_name and repo_group: |
|
223 | if repo_name and repo_group: | |
186 | raise Exception('Either repo or repo_group can be set, not both') |
|
224 | raise Exception('Either repo or repo_group can be set, not both') | |
187 |
|
225 | |||
188 |
|
226 | |||
189 | if integration_id: |
|
227 | if integration_id: | |
190 | integration = Integration.get(integration_id) |
|
228 | integration = Integration.get(integration_id) | |
191 | if not integration: |
|
229 | if not integration: | |
192 | return False |
|
230 | return False | |
193 | if integration.integration_type != integration_type: |
|
231 | if integration.integration_type != integration_type: | |
194 | return False |
|
232 | return False | |
195 | if repo and repo.repo_id != integration.repo_id: |
|
233 | if repo and repo.repo_id != integration.repo_id: | |
196 | return False |
|
234 | return False | |
197 |
if repo_group and repo_group. |
|
235 | if repo_group and repo_group.group_id != integration.repo_group_id: | |
198 | return False |
|
236 | return False | |
199 |
|
237 | |||
200 | return True |
|
238 | return True |
@@ -1,45 +1,71 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import colander |
|
21 | import colander | |
22 |
|
22 | |||
23 |
from rhodecode.translation import |
|
23 | from rhodecode.translation import _ | |
24 |
|
24 | |||
25 |
|
25 | |||
26 |
class Integration |
|
26 | class IntegrationOptionsSchemaBase(colander.MappingSchema): | |
27 | """ |
|
|||
28 | This base schema is intended for use in integrations. |
|
|||
29 | It adds a few default settings (e.g., "enabled"), so that integration |
|
|||
30 | authors don't have to maintain a bunch of boilerplate. |
|
|||
31 | """ |
|
|||
32 | enabled = colander.SchemaNode( |
|
27 | enabled = colander.SchemaNode( | |
33 | colander.Bool(), |
|
28 | colander.Bool(), | |
34 | default=True, |
|
29 | default=True, | |
35 |
description= |
|
30 | description=_('Enable or disable this integration.'), | |
36 | missing=False, |
|
31 | missing=False, | |
37 |
title= |
|
32 | title=_('Enabled'), | |
38 | ) |
|
33 | ) | |
39 |
|
34 | |||
40 | name = colander.SchemaNode( |
|
35 | name = colander.SchemaNode( | |
41 | colander.String(), |
|
36 | colander.String(), | |
42 |
description= |
|
37 | description=_('Short name for this integration.'), | |
43 | missing=colander.required, |
|
38 | missing=colander.required, | |
44 |
title= |
|
39 | title=_('Integration name'), | |
45 | ) |
|
40 | ) | |
|
41 | ||||
|
42 | ||||
|
43 | class RepoIntegrationOptionsSchema(IntegrationOptionsSchemaBase): | |||
|
44 | pass | |||
|
45 | ||||
|
46 | ||||
|
47 | class RepoGroupIntegrationOptionsSchema(IntegrationOptionsSchemaBase): | |||
|
48 | child_repos_only = colander.SchemaNode( | |||
|
49 | colander.Bool(), | |||
|
50 | default=True, | |||
|
51 | description=_( | |||
|
52 | 'Limit integrations to to work only on the direct children ' | |||
|
53 | 'repositories of this repository group (no subgroups)'), | |||
|
54 | missing=False, | |||
|
55 | title=_('Limit to childen repos only'), | |||
|
56 | ) | |||
|
57 | ||||
|
58 | ||||
|
59 | class GlobalIntegrationOptionsSchema(IntegrationOptionsSchemaBase): | |||
|
60 | child_repos_only = colander.SchemaNode( | |||
|
61 | colander.Bool(), | |||
|
62 | default=False, | |||
|
63 | description=_( | |||
|
64 | 'Limit integrations to to work only on root level repositories'), | |||
|
65 | missing=False, | |||
|
66 | title=_('Root repositories only'), | |||
|
67 | ) | |||
|
68 | ||||
|
69 | ||||
|
70 | class IntegrationSettingsSchemaBase(colander.MappingSchema): | |||
|
71 | pass |
@@ -1,42 +1,101 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | from rhodecode.integrations.schema import IntegrationSettingsSchemaBase |
|
21 | import colander | |
|
22 | from rhodecode.translation import _ | |||
22 |
|
23 | |||
23 |
|
24 | |||
24 | class IntegrationTypeBase(object): |
|
25 | class IntegrationTypeBase(object): | |
25 | """ Base class for IntegrationType plugins """ |
|
26 | """ Base class for IntegrationType plugins """ | |
26 |
|
27 | |||
|
28 | description = '' | |||
|
29 | icon = ''' | |||
|
30 | <?xml version="1.0" encoding="UTF-8" standalone="no"?> | |||
|
31 | <svg | |||
|
32 | xmlns:dc="http://purl.org/dc/elements/1.1/" | |||
|
33 | xmlns:cc="http://creativecommons.org/ns#" | |||
|
34 | xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" | |||
|
35 | xmlns:svg="http://www.w3.org/2000/svg" | |||
|
36 | xmlns="http://www.w3.org/2000/svg" | |||
|
37 | xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | |||
|
38 | xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | |||
|
39 | viewBox="0 -256 1792 1792" | |||
|
40 | id="svg3025" | |||
|
41 | version="1.1" | |||
|
42 | inkscape:version="0.48.3.1 r9886" | |||
|
43 | width="100%" | |||
|
44 | height="100%" | |||
|
45 | sodipodi:docname="cog_font_awesome.svg"> | |||
|
46 | <metadata | |||
|
47 | id="metadata3035"> | |||
|
48 | <rdf:RDF> | |||
|
49 | <cc:Work | |||
|
50 | rdf:about=""> | |||
|
51 | <dc:format>image/svg+xml</dc:format> | |||
|
52 | <dc:type | |||
|
53 | rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> | |||
|
54 | </cc:Work> | |||
|
55 | </rdf:RDF> | |||
|
56 | </metadata> | |||
|
57 | <defs | |||
|
58 | id="defs3033" /> | |||
|
59 | <sodipodi:namedview | |||
|
60 | pagecolor="#ffffff" | |||
|
61 | bordercolor="#666666" | |||
|
62 | borderopacity="1" | |||
|
63 | objecttolerance="10" | |||
|
64 | gridtolerance="10" | |||
|
65 | guidetolerance="10" | |||
|
66 | inkscape:pageopacity="0" | |||
|
67 | inkscape:pageshadow="2" | |||
|
68 | inkscape:window-width="640" | |||
|
69 | inkscape:window-height="480" | |||
|
70 | id="namedview3031" | |||
|
71 | showgrid="false" | |||
|
72 | inkscape:zoom="0.13169643" | |||
|
73 | inkscape:cx="896" | |||
|
74 | inkscape:cy="896" | |||
|
75 | inkscape:window-x="0" | |||
|
76 | inkscape:window-y="25" | |||
|
77 | inkscape:window-maximized="0" | |||
|
78 | inkscape:current-layer="svg3025" /> | |||
|
79 | <g | |||
|
80 | transform="matrix(1,0,0,-1,121.49153,1285.4237)" | |||
|
81 | id="g3027"> | |||
|
82 | <path | |||
|
83 | d="m 1024,640 q 0,106 -75,181 -75,75 -181,75 -106,0 -181,-75 -75,-75 -75,-181 0,-106 75,-181 75,-75 181,-75 106,0 181,75 75,75 75,181 z m 512,109 V 527 q 0,-12 -8,-23 -8,-11 -20,-13 l -185,-28 q -19,-54 -39,-91 35,-50 107,-138 10,-12 10,-25 0,-13 -9,-23 -27,-37 -99,-108 -72,-71 -94,-71 -12,0 -26,9 l -138,108 q -44,-23 -91,-38 -16,-136 -29,-186 -7,-28 -36,-28 H 657 q -14,0 -24.5,8.5 Q 622,-111 621,-98 L 593,86 q -49,16 -90,37 L 362,16 Q 352,7 337,7 323,7 312,18 186,132 147,186 q -7,10 -7,23 0,12 8,23 15,21 51,66.5 36,45.5 54,70.5 -27,50 -41,99 L 29,495 Q 16,497 8,507.5 0,518 0,531 v 222 q 0,12 8,23 8,11 19,13 l 186,28 q 14,46 39,92 -40,57 -107,138 -10,12 -10,24 0,10 9,23 26,36 98.5,107.5 72.5,71.5 94.5,71.5 13,0 26,-10 l 138,-107 q 44,23 91,38 16,136 29,186 7,28 36,28 h 222 q 14,0 24.5,-8.5 Q 914,1391 915,1378 l 28,-184 q 49,-16 90,-37 l 142,107 q 9,9 24,9 13,0 25,-10 129,-119 165,-170 7,-8 7,-22 0,-12 -8,-23 -15,-21 -51,-66.5 -36,-45.5 -54,-70.5 26,-50 41,-98 l 183,-28 q 13,-2 21,-12.5 8,-10.5 8,-23.5 z" | |||
|
84 | id="path3029" | |||
|
85 | inkscape:connector-curvature="0" | |||
|
86 | style="fill:currentColor" /> | |||
|
87 | </g> | |||
|
88 | </svg> | |||
|
89 | ''' | |||
|
90 | ||||
27 | def __init__(self, settings): |
|
91 | def __init__(self, settings): | |
28 | """ |
|
92 | """ | |
29 | :param settings: dict of settings to be used for the integration |
|
93 | :param settings: dict of settings to be used for the integration | |
30 | """ |
|
94 | """ | |
31 | self.settings = settings |
|
95 | self.settings = settings | |
32 |
|
96 | |||
33 |
|
||||
34 | def settings_schema(self): |
|
97 | def settings_schema(self): | |
35 | """ |
|
98 | """ | |
36 | A colander schema of settings for the integration type |
|
99 | A colander schema of settings for the integration type | |
37 |
|
||||
38 | Subclasses can return their own schema but should always |
|
|||
39 | inherit from IntegrationSettingsSchemaBase |
|
|||
40 | """ |
|
100 | """ | |
41 |
return |
|
101 | return colander.Schema() | |
42 |
|
@@ -1,222 +1,283 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | from __future__ import unicode_literals |
|
21 | from __future__ import unicode_literals | |
22 | import deform |
|
22 | import deform | |
23 | import logging |
|
23 | import logging | |
24 | import colander |
|
24 | import colander | |
25 |
|
25 | |||
26 | from mako.template import Template |
|
26 | from mako.template import Template | |
27 |
|
27 | |||
28 | from rhodecode import events |
|
28 | from rhodecode import events | |
29 |
from rhodecode.translation import _ |
|
29 | from rhodecode.translation import _ | |
30 | from rhodecode.lib.celerylib import run_task |
|
30 | from rhodecode.lib.celerylib import run_task | |
31 | from rhodecode.lib.celerylib import tasks |
|
31 | from rhodecode.lib.celerylib import tasks | |
32 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
32 | from rhodecode.integrations.types.base import IntegrationTypeBase | |
33 | from rhodecode.integrations.schema import IntegrationSettingsSchemaBase |
|
|||
34 |
|
33 | |||
35 |
|
34 | |||
36 | log = logging.getLogger(__name__) |
|
35 | log = logging.getLogger(__name__) | |
37 |
|
36 | |||
38 | repo_push_template_plaintext = Template(''' |
|
37 | repo_push_template_plaintext = Template(''' | |
39 | Commits: |
|
38 | Commits: | |
40 |
|
39 | |||
41 | % for commit in data['push']['commits']: |
|
40 | % for commit in data['push']['commits']: | |
42 | ${commit['url']} by ${commit['author']} at ${commit['date']} |
|
41 | ${commit['url']} by ${commit['author']} at ${commit['date']} | |
43 | ${commit['message']} |
|
42 | ${commit['message']} | |
44 | ---- |
|
43 | ---- | |
45 |
|
44 | |||
46 | % endfor |
|
45 | % endfor | |
47 | ''') |
|
46 | ''') | |
48 |
|
47 | |||
49 | ## TODO (marcink): think about putting this into a file, or use base.mako email template |
|
48 | ## TODO (marcink): think about putting this into a file, or use base.mako email template | |
50 |
|
49 | |||
51 | repo_push_template_html = Template(''' |
|
50 | repo_push_template_html = Template(''' | |
52 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> |
|
51 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> | |
53 | <html xmlns="http://www.w3.org/1999/xhtml"> |
|
52 | <html xmlns="http://www.w3.org/1999/xhtml"> | |
54 | <head> |
|
53 | <head> | |
55 | <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> |
|
54 | <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> | |
56 | <meta name="viewport" content="width=device-width, initial-scale=1.0"/> |
|
55 | <meta name="viewport" content="width=device-width, initial-scale=1.0"/> | |
57 | <title>${subject}</title> |
|
56 | <title>${subject}</title> | |
58 | <style type="text/css"> |
|
57 | <style type="text/css"> | |
59 | /* Based on The MailChimp Reset INLINE: Yes. */ |
|
58 | /* Based on The MailChimp Reset INLINE: Yes. */ | |
60 | #outlook a {padding:0;} /* Force Outlook to provide a "view in browser" menu link. */ |
|
59 | #outlook a {padding:0;} /* Force Outlook to provide a "view in browser" menu link. */ | |
61 | body{width:100% !important; -webkit-text-size-adjust:100%; -ms-text-size-adjust:100%; margin:0; padding:0;} |
|
60 | body{width:100% !important; -webkit-text-size-adjust:100%; -ms-text-size-adjust:100%; margin:0; padding:0;} | |
62 | /* Prevent Webkit and Windows Mobile platforms from changing default font sizes.*/ |
|
61 | /* Prevent Webkit and Windows Mobile platforms from changing default font sizes.*/ | |
63 | .ExternalClass {width:100%;} /* Force Hotmail to display emails at full width */ |
|
62 | .ExternalClass {width:100%;} /* Force Hotmail to display emails at full width */ | |
64 | .ExternalClass, .ExternalClass p, .ExternalClass span, .ExternalClass font, .ExternalClass td, .ExternalClass div {line-height: 100%;} |
|
63 | .ExternalClass, .ExternalClass p, .ExternalClass span, .ExternalClass font, .ExternalClass td, .ExternalClass div {line-height: 100%;} | |
65 | /* Forces Hotmail to display normal line spacing. More on that: http://www.emailonacid.com/forum/viewthread/43/ */ |
|
64 | /* Forces Hotmail to display normal line spacing. More on that: http://www.emailonacid.com/forum/viewthread/43/ */ | |
66 | #backgroundTable {margin:0; padding:0; line-height: 100% !important;} |
|
65 | #backgroundTable {margin:0; padding:0; line-height: 100% !important;} | |
67 | /* End reset */ |
|
66 | /* End reset */ | |
68 |
|
67 | |||
69 | /* defaults for images*/ |
|
68 | /* defaults for images*/ | |
70 | img {outline:none; text-decoration:none; -ms-interpolation-mode: bicubic;} |
|
69 | img {outline:none; text-decoration:none; -ms-interpolation-mode: bicubic;} | |
71 | a img {border:none;} |
|
70 | a img {border:none;} | |
72 | .image_fix {display:block;} |
|
71 | .image_fix {display:block;} | |
73 |
|
72 | |||
74 | body {line-height:1.2em;} |
|
73 | body {line-height:1.2em;} | |
75 | p {margin: 0 0 20px;} |
|
74 | p {margin: 0 0 20px;} | |
76 | h1, h2, h3, h4, h5, h6 {color:#323232!important;} |
|
75 | h1, h2, h3, h4, h5, h6 {color:#323232!important;} | |
77 | a {color:#427cc9;text-decoration:none;outline:none;cursor:pointer;} |
|
76 | a {color:#427cc9;text-decoration:none;outline:none;cursor:pointer;} | |
78 | a:focus {outline:none;} |
|
77 | a:focus {outline:none;} | |
79 | a:hover {color: #305b91;} |
|
78 | a:hover {color: #305b91;} | |
80 | h1 a, h2 a, h3 a, h4 a, h5 a, h6 a {color:#427cc9!important;text-decoration:none!important;} |
|
79 | h1 a, h2 a, h3 a, h4 a, h5 a, h6 a {color:#427cc9!important;text-decoration:none!important;} | |
81 | h1 a:active, h2 a:active, h3 a:active, h4 a:active, h5 a:active, h6 a:active {color: #305b91!important;} |
|
80 | h1 a:active, h2 a:active, h3 a:active, h4 a:active, h5 a:active, h6 a:active {color: #305b91!important;} | |
82 | h1 a:visited, h2 a:visited, h3 a:visited, h4 a:visited, h5 a:visited, h6 a:visited {color: #305b91!important;} |
|
81 | h1 a:visited, h2 a:visited, h3 a:visited, h4 a:visited, h5 a:visited, h6 a:visited {color: #305b91!important;} | |
83 | table {font-size:13px;border-collapse:collapse;mso-table-lspace:0pt;mso-table-rspace:0pt;} |
|
82 | table {font-size:13px;border-collapse:collapse;mso-table-lspace:0pt;mso-table-rspace:0pt;} | |
84 | table td {padding:.65em 1em .65em 0;border-collapse:collapse;vertical-align:top;text-align:left;} |
|
83 | table td {padding:.65em 1em .65em 0;border-collapse:collapse;vertical-align:top;text-align:left;} | |
85 | input {display:inline;border-radius:2px;border-style:solid;border: 1px solid #dbd9da;padding:.5em;} |
|
84 | input {display:inline;border-radius:2px;border-style:solid;border: 1px solid #dbd9da;padding:.5em;} | |
86 | input:focus {outline: 1px solid #979797} |
|
85 | input:focus {outline: 1px solid #979797} | |
87 | @media only screen and (-webkit-min-device-pixel-ratio: 2) { |
|
86 | @media only screen and (-webkit-min-device-pixel-ratio: 2) { | |
88 | /* Put your iPhone 4g styles in here */ |
|
87 | /* Put your iPhone 4g styles in here */ | |
89 | } |
|
88 | } | |
90 |
|
89 | |||
91 | /* Android targeting */ |
|
90 | /* Android targeting */ | |
92 | @media only screen and (-webkit-device-pixel-ratio:.75){ |
|
91 | @media only screen and (-webkit-device-pixel-ratio:.75){ | |
93 | /* Put CSS for low density (ldpi) Android layouts in here */ |
|
92 | /* Put CSS for low density (ldpi) Android layouts in here */ | |
94 | } |
|
93 | } | |
95 | @media only screen and (-webkit-device-pixel-ratio:1){ |
|
94 | @media only screen and (-webkit-device-pixel-ratio:1){ | |
96 | /* Put CSS for medium density (mdpi) Android layouts in here */ |
|
95 | /* Put CSS for medium density (mdpi) Android layouts in here */ | |
97 | } |
|
96 | } | |
98 | @media only screen and (-webkit-device-pixel-ratio:1.5){ |
|
97 | @media only screen and (-webkit-device-pixel-ratio:1.5){ | |
99 | /* Put CSS for high density (hdpi) Android layouts in here */ |
|
98 | /* Put CSS for high density (hdpi) Android layouts in here */ | |
100 | } |
|
99 | } | |
101 | /* end Android targeting */ |
|
100 | /* end Android targeting */ | |
102 |
|
101 | |||
103 | </style> |
|
102 | </style> | |
104 |
|
103 | |||
105 | <!-- Targeting Windows Mobile --> |
|
104 | <!-- Targeting Windows Mobile --> | |
106 | <!--[if IEMobile 7]> |
|
105 | <!--[if IEMobile 7]> | |
107 | <style type="text/css"> |
|
106 | <style type="text/css"> | |
108 |
|
107 | |||
109 | </style> |
|
108 | </style> | |
110 | <![endif]--> |
|
109 | <![endif]--> | |
111 |
|
110 | |||
112 | <!--[if gte mso 9]> |
|
111 | <!--[if gte mso 9]> | |
113 | <style> |
|
112 | <style> | |
114 | /* Target Outlook 2007 and 2010 */ |
|
113 | /* Target Outlook 2007 and 2010 */ | |
115 | </style> |
|
114 | </style> | |
116 | <![endif]--> |
|
115 | <![endif]--> | |
117 | </head> |
|
116 | </head> | |
118 | <body> |
|
117 | <body> | |
119 | <!-- Wrapper/Container Table: Use a wrapper table to control the width and the background color consistently of your email. Use this approach instead of setting attributes on the body tag. --> |
|
118 | <!-- Wrapper/Container Table: Use a wrapper table to control the width and the background color consistently of your email. Use this approach instead of setting attributes on the body tag. --> | |
120 | <table cellpadding="0" cellspacing="0" border="0" id="backgroundTable" align="left" style="margin:1%;width:97%;padding:0;font-family:sans-serif;font-weight:100;border:1px solid #dbd9da"> |
|
119 | <table cellpadding="0" cellspacing="0" border="0" id="backgroundTable" align="left" style="margin:1%;width:97%;padding:0;font-family:sans-serif;font-weight:100;border:1px solid #dbd9da"> | |
121 | <tr> |
|
120 | <tr> | |
122 | <td valign="top" style="padding:0;"> |
|
121 | <td valign="top" style="padding:0;"> | |
123 | <table cellpadding="0" cellspacing="0" border="0" align="left" width="100%"> |
|
122 | <table cellpadding="0" cellspacing="0" border="0" align="left" width="100%"> | |
124 | <tr><td style="width:100%;padding:7px;background-color:#202020" valign="top"> |
|
123 | <tr><td style="width:100%;padding:7px;background-color:#202020" valign="top"> | |
125 | <a style="color:#eeeeee;text-decoration:none;" href="${instance_url}"> |
|
124 | <a style="color:#eeeeee;text-decoration:none;" href="${instance_url}"> | |
126 | ${'RhodeCode'} |
|
125 | ${'RhodeCode'} | |
127 | </a> |
|
126 | </a> | |
128 | </td></tr> |
|
127 | </td></tr> | |
129 | <tr> |
|
128 | <tr> | |
130 | <td style="padding:15px;" valign="top"> |
|
129 | <td style="padding:15px;" valign="top"> | |
131 | % for commit in data['push']['commits']: |
|
130 | % for commit in data['push']['commits']: | |
132 | <a href="${commit['url']}">${commit['short_id']}</a> by ${commit['author']} at ${commit['date']} <br/> |
|
131 | <a href="${commit['url']}">${commit['short_id']}</a> by ${commit['author']} at ${commit['date']} <br/> | |
133 | ${commit['message_html']} <br/> |
|
132 | ${commit['message_html']} <br/> | |
134 | <br/> |
|
133 | <br/> | |
135 | % endfor |
|
134 | % endfor | |
136 | </td> |
|
135 | </td> | |
137 | </tr> |
|
136 | </tr> | |
138 | </table> |
|
137 | </table> | |
139 | </td> |
|
138 | </td> | |
140 | </tr> |
|
139 | </tr> | |
141 | </table> |
|
140 | </table> | |
142 | <!-- End of wrapper table --> |
|
141 | <!-- End of wrapper table --> | |
143 | <p><a style="margin-top:15px;margin-left:1%;font-family:sans-serif;font-weight:100;font-size:11px;color:#666666;text-decoration:none;" href="${instance_url}"> |
|
142 | <p><a style="margin-top:15px;margin-left:1%;font-family:sans-serif;font-weight:100;font-size:11px;color:#666666;text-decoration:none;" href="${instance_url}"> | |
144 | ${'This is a notification from RhodeCode. %(instance_url)s' % {'instance_url': instance_url}} |
|
143 | ${'This is a notification from RhodeCode. %(instance_url)s' % {'instance_url': instance_url}} | |
145 | </a></p> |
|
144 | </a></p> | |
146 | </body> |
|
145 | </body> | |
147 | </html> |
|
146 | </html> | |
148 | ''') |
|
147 | ''') | |
149 |
|
148 | |||
|
149 | email_icon = ''' | |||
|
150 | <?xml version="1.0" encoding="UTF-8" standalone="no"?> | |||
|
151 | <svg | |||
|
152 | xmlns:dc="http://purl.org/dc/elements/1.1/" | |||
|
153 | xmlns:cc="http://creativecommons.org/ns#" | |||
|
154 | xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" | |||
|
155 | xmlns:svg="http://www.w3.org/2000/svg" | |||
|
156 | xmlns="http://www.w3.org/2000/svg" | |||
|
157 | xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | |||
|
158 | xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | |||
|
159 | viewBox="0 -256 1850 1850" | |||
|
160 | id="svg2989" | |||
|
161 | version="1.1" | |||
|
162 | inkscape:version="0.48.3.1 r9886" | |||
|
163 | width="100%" | |||
|
164 | height="100%" | |||
|
165 | sodipodi:docname="envelope_font_awesome.svg"> | |||
|
166 | <metadata | |||
|
167 | id="metadata2999"> | |||
|
168 | <rdf:RDF> | |||
|
169 | <cc:Work | |||
|
170 | rdf:about=""> | |||
|
171 | <dc:format>image/svg+xml</dc:format> | |||
|
172 | <dc:type | |||
|
173 | rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> | |||
|
174 | </cc:Work> | |||
|
175 | </rdf:RDF> | |||
|
176 | </metadata> | |||
|
177 | <defs | |||
|
178 | id="defs2997" /> | |||
|
179 | <sodipodi:namedview | |||
|
180 | pagecolor="#ffffff" | |||
|
181 | bordercolor="#666666" | |||
|
182 | borderopacity="1" | |||
|
183 | objecttolerance="10" | |||
|
184 | gridtolerance="10" | |||
|
185 | guidetolerance="10" | |||
|
186 | inkscape:pageopacity="0" | |||
|
187 | inkscape:pageshadow="2" | |||
|
188 | inkscape:window-width="640" | |||
|
189 | inkscape:window-height="480" | |||
|
190 | id="namedview2995" | |||
|
191 | showgrid="false" | |||
|
192 | inkscape:zoom="0.13169643" | |||
|
193 | inkscape:cx="896" | |||
|
194 | inkscape:cy="896" | |||
|
195 | inkscape:window-x="0" | |||
|
196 | inkscape:window-y="25" | |||
|
197 | inkscape:window-maximized="0" | |||
|
198 | inkscape:current-layer="svg2989" /> | |||
|
199 | <g | |||
|
200 | transform="matrix(1,0,0,-1,37.966102,1282.678)" | |||
|
201 | id="g2991"> | |||
|
202 | <path | |||
|
203 | d="m 1664,32 v 768 q -32,-36 -69,-66 -268,-206 -426,-338 -51,-43 -83,-67 -32,-24 -86.5,-48.5 Q 945,256 897,256 h -1 -1 Q 847,256 792.5,280.5 738,305 706,329 674,353 623,396 465,528 197,734 160,764 128,800 V 32 Q 128,19 137.5,9.5 147,0 160,0 h 1472 q 13,0 22.5,9.5 9.5,9.5 9.5,22.5 z m 0,1051 v 11 13.5 q 0,0 -0.5,13 -0.5,13 -3,12.5 -2.5,-0.5 -5.5,9 -3,9.5 -9,7.5 -6,-2 -14,2.5 H 160 q -13,0 -22.5,-9.5 Q 128,1133 128,1120 128,952 275,836 468,684 676,519 682,514 711,489.5 740,465 757,452 774,439 801.5,420.5 829,402 852,393 q 23,-9 43,-9 h 1 1 q 20,0 43,9 23,9 50.5,27.5 27.5,18.5 44.5,31.5 17,13 46,37.5 29,24.5 35,29.5 208,165 401,317 54,43 100.5,115.5 46.5,72.5 46.5,131.5 z m 128,37 V 32 q 0,-66 -47,-113 -47,-47 -113,-47 H 160 Q 94,-128 47,-81 0,-34 0,32 v 1088 q 0,66 47,113 47,47 113,47 h 1472 q 66,0 113,-47 47,-47 47,-113 z" | |||
|
204 | id="path2993" | |||
|
205 | inkscape:connector-curvature="0" | |||
|
206 | style="fill:currentColor" /> | |||
|
207 | </g> | |||
|
208 | </svg> | |||
|
209 | ''' | |||
150 |
|
210 | |||
151 |
class EmailSettingsSchema( |
|
211 | class EmailSettingsSchema(colander.Schema): | |
152 | @colander.instantiate(validator=colander.Length(min=1)) |
|
212 | @colander.instantiate(validator=colander.Length(min=1)) | |
153 | class recipients(colander.SequenceSchema): |
|
213 | class recipients(colander.SequenceSchema): | |
154 |
title = |
|
214 | title = _('Recipients') | |
155 |
description = |
|
215 | description = _('Email addresses to send push events to') | |
156 | widget = deform.widget.SequenceWidget(min_len=1) |
|
216 | widget = deform.widget.SequenceWidget(min_len=1) | |
157 |
|
217 | |||
158 | recipient = colander.SchemaNode( |
|
218 | recipient = colander.SchemaNode( | |
159 | colander.String(), |
|
219 | colander.String(), | |
160 |
title= |
|
220 | title=_('Email address'), | |
161 |
description= |
|
221 | description=_('Email address'), | |
162 | default='', |
|
222 | default='', | |
163 | validator=colander.Email(), |
|
223 | validator=colander.Email(), | |
164 | widget=deform.widget.TextInputWidget( |
|
224 | widget=deform.widget.TextInputWidget( | |
165 | placeholder='user@domain.com', |
|
225 | placeholder='user@domain.com', | |
166 | ), |
|
226 | ), | |
167 | ) |
|
227 | ) | |
168 |
|
228 | |||
169 |
|
229 | |||
170 | class EmailIntegrationType(IntegrationTypeBase): |
|
230 | class EmailIntegrationType(IntegrationTypeBase): | |
171 | key = 'email' |
|
231 | key = 'email' | |
172 |
display_name = |
|
232 | display_name = _('Email') | |
173 | SettingsSchema = EmailSettingsSchema |
|
233 | description = _('Send repo push summaries to a list of recipients via email') | |
|
234 | icon = email_icon | |||
174 |
|
235 | |||
175 | def settings_schema(self): |
|
236 | def settings_schema(self): | |
176 | schema = EmailSettingsSchema() |
|
237 | schema = EmailSettingsSchema() | |
177 | return schema |
|
238 | return schema | |
178 |
|
239 | |||
179 | def send_event(self, event): |
|
240 | def send_event(self, event): | |
180 | data = event.as_dict() |
|
241 | data = event.as_dict() | |
181 | log.debug('got event: %r', event) |
|
242 | log.debug('got event: %r', event) | |
182 |
|
243 | |||
183 | if isinstance(event, events.RepoPushEvent): |
|
244 | if isinstance(event, events.RepoPushEvent): | |
184 | repo_push_handler(data, self.settings) |
|
245 | repo_push_handler(data, self.settings) | |
185 | else: |
|
246 | else: | |
186 | log.debug('ignoring event: %r', event) |
|
247 | log.debug('ignoring event: %r', event) | |
187 |
|
248 | |||
188 |
|
249 | |||
189 | def repo_push_handler(data, settings): |
|
250 | def repo_push_handler(data, settings): | |
190 | commit_num = len(data['push']['commits']) |
|
251 | commit_num = len(data['push']['commits']) | |
191 | server_url = data['server_url'] |
|
252 | server_url = data['server_url'] | |
192 |
|
253 | |||
193 | if commit_num == 0: |
|
254 | if commit_num == 0: | |
194 | subject = '[{repo_name}] {author} pushed {commit_num} commit on branches: {branches}'.format( |
|
255 | subject = '[{repo_name}] {author} pushed {commit_num} commit on branches: {branches}'.format( | |
195 | author=data['actor']['username'], |
|
256 | author=data['actor']['username'], | |
196 | repo_name=data['repo']['repo_name'], |
|
257 | repo_name=data['repo']['repo_name'], | |
197 | commit_num=commit_num, |
|
258 | commit_num=commit_num, | |
198 | branches=', '.join( |
|
259 | branches=', '.join( | |
199 | branch['name'] for branch in data['push']['branches']) |
|
260 | branch['name'] for branch in data['push']['branches']) | |
200 | ) |
|
261 | ) | |
201 | else: |
|
262 | else: | |
202 | subject = '[{repo_name}] {author} pushed {commit_num} commits on branches: {branches}'.format( |
|
263 | subject = '[{repo_name}] {author} pushed {commit_num} commits on branches: {branches}'.format( | |
203 | author=data['actor']['username'], |
|
264 | author=data['actor']['username'], | |
204 | repo_name=data['repo']['repo_name'], |
|
265 | repo_name=data['repo']['repo_name'], | |
205 | commit_num=commit_num, |
|
266 | commit_num=commit_num, | |
206 | branches=', '.join( |
|
267 | branches=', '.join( | |
207 | branch['name'] for branch in data['push']['branches'])) |
|
268 | branch['name'] for branch in data['push']['branches'])) | |
208 |
|
269 | |||
209 | email_body_plaintext = repo_push_template_plaintext.render( |
|
270 | email_body_plaintext = repo_push_template_plaintext.render( | |
210 | data=data, |
|
271 | data=data, | |
211 | subject=subject, |
|
272 | subject=subject, | |
212 | instance_url=server_url) |
|
273 | instance_url=server_url) | |
213 |
|
274 | |||
214 | email_body_html = repo_push_template_html.render( |
|
275 | email_body_html = repo_push_template_html.render( | |
215 | data=data, |
|
276 | data=data, | |
216 | subject=subject, |
|
277 | subject=subject, | |
217 | instance_url=server_url) |
|
278 | instance_url=server_url) | |
218 |
|
279 | |||
219 | for email_address in settings['recipients']: |
|
280 | for email_address in settings['recipients']: | |
220 | run_task( |
|
281 | run_task( | |
221 | tasks.send_email, email_address, subject, |
|
282 | tasks.send_email, email_address, subject, | |
222 | email_body_plaintext, email_body_html) |
|
283 | email_body_plaintext, email_body_html) |
@@ -1,242 +1,243 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | from __future__ import unicode_literals |
|
21 | from __future__ import unicode_literals | |
22 | import deform |
|
22 | import deform | |
23 | import re |
|
23 | import re | |
24 | import logging |
|
24 | import logging | |
25 | import requests |
|
25 | import requests | |
26 | import colander |
|
26 | import colander | |
27 | import textwrap |
|
27 | import textwrap | |
28 | from celery.task import task |
|
28 | from celery.task import task | |
29 | from mako.template import Template |
|
29 | from mako.template import Template | |
30 |
|
30 | |||
31 | from rhodecode import events |
|
31 | from rhodecode import events | |
32 |
from rhodecode.translation import |
|
32 | from rhodecode.translation import _ | |
33 | from rhodecode.lib import helpers as h |
|
33 | from rhodecode.lib import helpers as h | |
34 | from rhodecode.lib.celerylib import run_task |
|
34 | from rhodecode.lib.celerylib import run_task | |
35 | from rhodecode.lib.colander_utils import strip_whitespace |
|
35 | from rhodecode.lib.colander_utils import strip_whitespace | |
36 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
36 | from rhodecode.integrations.types.base import IntegrationTypeBase | |
37 | from rhodecode.integrations.schema import IntegrationSettingsSchemaBase |
|
|||
38 |
|
37 | |||
39 | log = logging.getLogger(__name__) |
|
38 | log = logging.getLogger(__name__) | |
40 |
|
39 | |||
41 |
|
40 | |||
42 |
class HipchatSettingsSchema( |
|
41 | class HipchatSettingsSchema(colander.Schema): | |
43 | color_choices = [ |
|
42 | color_choices = [ | |
44 |
('yellow', |
|
43 | ('yellow', _('Yellow')), | |
45 |
('red', |
|
44 | ('red', _('Red')), | |
46 |
('green', |
|
45 | ('green', _('Green')), | |
47 |
('purple', |
|
46 | ('purple', _('Purple')), | |
48 |
('gray', |
|
47 | ('gray', _('Gray')), | |
49 | ] |
|
48 | ] | |
50 |
|
49 | |||
51 | server_url = colander.SchemaNode( |
|
50 | server_url = colander.SchemaNode( | |
52 | colander.String(), |
|
51 | colander.String(), | |
53 |
title= |
|
52 | title=_('Hipchat server URL'), | |
54 |
description= |
|
53 | description=_('Hipchat integration url.'), | |
55 | default='', |
|
54 | default='', | |
56 | preparer=strip_whitespace, |
|
55 | preparer=strip_whitespace, | |
57 | validator=colander.url, |
|
56 | validator=colander.url, | |
58 | widget=deform.widget.TextInputWidget( |
|
57 | widget=deform.widget.TextInputWidget( | |
59 | placeholder='https://?.hipchat.com/v2/room/?/notification?auth_token=?', |
|
58 | placeholder='https://?.hipchat.com/v2/room/?/notification?auth_token=?', | |
60 | ), |
|
59 | ), | |
61 | ) |
|
60 | ) | |
62 | notify = colander.SchemaNode( |
|
61 | notify = colander.SchemaNode( | |
63 | colander.Bool(), |
|
62 | colander.Bool(), | |
64 |
title= |
|
63 | title=_('Notify'), | |
65 |
description= |
|
64 | description=_('Make a notification to the users in room.'), | |
66 | missing=False, |
|
65 | missing=False, | |
67 | default=False, |
|
66 | default=False, | |
68 | ) |
|
67 | ) | |
69 | color = colander.SchemaNode( |
|
68 | color = colander.SchemaNode( | |
70 | colander.String(), |
|
69 | colander.String(), | |
71 |
title= |
|
70 | title=_('Color'), | |
72 |
description= |
|
71 | description=_('Background color of message.'), | |
73 | missing='', |
|
72 | missing='', | |
74 | validator=colander.OneOf([x[0] for x in color_choices]), |
|
73 | validator=colander.OneOf([x[0] for x in color_choices]), | |
75 | widget=deform.widget.Select2Widget( |
|
74 | widget=deform.widget.Select2Widget( | |
76 | values=color_choices, |
|
75 | values=color_choices, | |
77 | ), |
|
76 | ), | |
78 | ) |
|
77 | ) | |
79 |
|
78 | |||
80 |
|
79 | |||
81 | repo_push_template = Template(''' |
|
80 | repo_push_template = Template(''' | |
82 | <b>${data['actor']['username']}</b> pushed to |
|
81 | <b>${data['actor']['username']}</b> pushed to | |
%if data['push']['branches']:
${len(data['push']['branches']) > 1 and 'branches' or 'branch'}
${', '.join('<a href="%s">%s</a>' % (branch['url'], branch['name']) for branch in data['push']['branches'])}
%else:
unknown branch
%endif
in <a href="${data['repo']['url']}">${data['repo']['repo_name']}</a>
<br>
<ul>
%for commit in data['push']['commits']:
<li>
<a href="${commit['url']}">${commit['short_id']}</a> - ${commit['message_html']}
</li>
%endfor
</ul>
''')


-
class HipchatIntegrationType(IntegrationTypeBase):
    key = 'hipchat'
    display_name = _('Hipchat')
+    description = _('Send events such as repo pushes and pull requests to '
+                    'your hipchat channel.')
+    icon = '''<?xml version="1.0" encoding="utf-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1000 1000" enable-background="new 0 0 1000 1000" xml:space="preserve"><g><g transform="translate(0.000000,511.000000) scale(0.100000,-0.100000)"><path fill="#205281" d="M4197.1,4662.4c-1661.5-260.4-3018-1171.6-3682.6-2473.3C219.9,1613.6,100,1120.3,100,462.6c0-1014,376.8-1918.4,1127-2699.4C2326.7-3377.6,3878.5-3898.3,5701-3730.5l486.5,44.5l208.9-123.3c637.2-373.4,1551.8-640.6,2240.4-650.9c304.9-6.9,335.7,0,417.9,75.4c185,174.7,147.3,411.1-89.1,548.1c-315.2,181.6-620,544.7-733.1,870.1l-51.4,157.6l472.7,472.7c349.4,349.4,520.7,551.5,657.7,774.2c784.5,1281.2,784.5,2788.5,0,4052.6c-236.4,376.8-794.8,966-1178.4,1236.7c-572.1,407.7-1264.1,709.1-1993.7,870.1c-267.2,58.2-479.6,75.4-1038,82.2C4714.4,4686.4,4310.2,4679.6,4197.1,4662.4z M5947.6,3740.9c1856.7-380.3,3127.6-1709.4,3127.6-3275c0-1000.3-534.4-1949.2-1466.2-2600.1c-188.4-133.6-287.8-226.1-301.5-284.4c-41.1-157.6,263.8-938.6,397.4-1020.8c20.5-10.3,34.3-44.5,34.3-75.4c0-167.8-811.9,195.3-1363.4,609.8l-181.6,137l-332.3-58.2c-445.3-78.8-1281.2-78.8-1702.6,0C2796-2569.2,1734.1-1832.6,1220.2-801.5C983.8-318.5,905,51.5,929,613.3c27.4,640.6,243.2,1192.1,685.1,1740.3c620,770.8,1661.5,1305.2,2822.8,1452.5C4806.9,3854,5553.7,3819.7,5947.6,3740.9z"/><path fill="#205281" d="M2381.5-345.9c-75.4-106.2-68.5-167.8,34.3-322c332.3-500.2,1010.6-928.4,1760.8-1120.2c417.9-106.2,1226.4-106.2,1644.3,0c712.5,181.6,1270.9,517.3,1685.4,1014C7681-561.7,7715.3-424.7,7616-325.4c-89.1,89.1-167.9,65.1-431.7-133.6c-835.8-630.3-2028-856.4-3086.5-585.8C3683.3-938.6,3142-685,2830.3-448.7C2576.8-253.4,2463.7-229.4,2381.5-345.9z"/></g></g><!-- Svg Vector Icons : http://www.onlinewebfonts.com/icon --></svg>'''
    valid_events = [
        events.PullRequestCloseEvent,
        events.PullRequestMergeEvent,
        events.PullRequestUpdateEvent,
        events.PullRequestCommentEvent,
        events.PullRequestReviewEvent,
        events.PullRequestCreateEvent,
        events.RepoPushEvent,
        events.RepoCreateEvent,
    ]

    def send_event(self, event):
        if event.__class__ not in self.valid_events:
            log.debug('event not valid: %r' % event)
            return

        if event.name not in self.settings['events']:
            log.debug('event ignored: %r' % event)
            return

        data = event.as_dict()

        text = '<b>%s<b> caused a <b>%s</b> event' % (
            data['actor']['username'], event.name)

        log.debug('handling hipchat event for %s' % event.name)

        if isinstance(event, events.PullRequestCommentEvent):
            text = self.format_pull_request_comment_event(event, data)
        elif isinstance(event, events.PullRequestReviewEvent):
            text = self.format_pull_request_review_event(event, data)
        elif isinstance(event, events.PullRequestEvent):
            text = self.format_pull_request_event(event, data)
        elif isinstance(event, events.RepoPushEvent):
            text = self.format_repo_push_event(data)
        elif isinstance(event, events.RepoCreateEvent):
            text = self.format_repo_create_event(data)
        else:
            log.error('unhandled event type: %r' % event)

        run_task(post_text_to_hipchat, self.settings, text)

    def settings_schema(self):
        schema = HipchatSettingsSchema()
        schema.add(colander.SchemaNode(
            colander.Set(),
            widget=deform.widget.CheckboxChoiceWidget(
                values=sorted(
                    [(e.name, e.display_name) for e in self.valid_events]
                )
            ),
            description="Events activated for this integration",
            name='events'
        ))

        return schema

    def format_pull_request_comment_event(self, event, data):
        comment_text = data['comment']['text']
        if len(comment_text) > 200:
            comment_text = '{comment_text}<a href="{comment_url}">...<a/>'.format(
                comment_text=comment_text[:200],
                comment_url=data['comment']['url'],
            )

        comment_status = ''
        if data['comment']['status']:
            comment_status = '[{}]: '.format(data['comment']['status'])

        return (textwrap.dedent(
            '''
            {user} commented on pull request <a href="{pr_url}">{number}</a> - {pr_title}:
            >>> {comment_status}{comment_text}
            ''').format(
                comment_status=comment_status,
                user=data['actor']['username'],
                number=data['pullrequest']['pull_request_id'],
                pr_url=data['pullrequest']['url'],
                pr_status=data['pullrequest']['status'],
                pr_title=data['pullrequest']['title'],
                comment_text=comment_text
            )
        )

    def format_pull_request_review_event(self, event, data):
        return (textwrap.dedent(
            '''
            Status changed to {pr_status} for pull request <a href="{pr_url}">#{number}</a> - {pr_title}
            ''').format(
                user=data['actor']['username'],
                number=data['pullrequest']['pull_request_id'],
                pr_url=data['pullrequest']['url'],
                pr_status=data['pullrequest']['status'],
                pr_title=data['pullrequest']['title'],
            )
        )

    def format_pull_request_event(self, event, data):
        action = {
            events.PullRequestCloseEvent: 'closed',
            events.PullRequestMergeEvent: 'merged',
            events.PullRequestUpdateEvent: 'updated',
            events.PullRequestCreateEvent: 'created',
        }.get(event.__class__, str(event.__class__))

        return ('Pull request <a href="{url}">#{number}</a> - {title} '
                '{action} by {user}').format(
            user=data['actor']['username'],
            number=data['pullrequest']['pull_request_id'],
            url=data['pullrequest']['url'],
            title=data['pullrequest']['title'],
            action=action
        )

    def format_repo_push_event(self, data):
        result = repo_push_template.render(
            data=data,
        )
        return result

    def format_repo_create_event(self, data):
        return '<a href="{}">{}</a> ({}) repository created by <b>{}</b>'.format(
            data['repo']['url'],
            data['repo']['repo_name'],
            data['repo']['repo_type'],
            data['actor']['username'],
        )


@task(ignore_result=True)
def post_text_to_hipchat(settings, text):
    log.debug('sending %s to hipchat %s' % (text, settings['server_url']))
    resp = requests.post(settings['server_url'], json={
        "message": text,
        "color": settings.get('color', 'yellow'),
        "notify": settings.get('notify', False),
    })
    resp.raise_for_status()  # raise exception on a failed request
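
Note: the Hipchat type above ultimately just POSTs a small JSON body to whatever server_url is stored in the integration settings. A minimal standalone sketch of that request, useful for exercising the settings outside of Celery (the URL below is a placeholder, not a value from this changeset):

    import requests

    settings = {
        'server_url': 'https://hipchat.example.com/room/notification',  # placeholder endpoint
        'color': 'green',   # optional, post_text_to_hipchat() falls back to 'yellow'
        'notify': True,     # optional, falls back to False
    }
    text = 'test message from the integration'

    # same payload shape as post_text_to_hipchat() in the diff above
    resp = requests.post(settings['server_url'], json={
        'message': text,
        'color': settings.get('color', 'yellow'),
        'notify': settings.get('notify', False),
    })
    resp.raise_for_status()  # surface a failed request immediately
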
@@ -1,253 +1,256 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2012-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

from __future__ import unicode_literals
import deform
import re
import logging
import requests
import colander
import textwrap
from celery.task import task
from mako.template import Template

from rhodecode import events
from rhodecode.translation import _
from rhodecode.lib import helpers as h
from rhodecode.lib.celerylib import run_task
from rhodecode.lib.colander_utils import strip_whitespace
from rhodecode.integrations.types.base import IntegrationTypeBase
-from rhodecode.integrations.schema import IntegrationSettingsSchemaBase

log = logging.getLogger(__name__)


class SlackSettingsSchema(colander.Schema):
    service = colander.SchemaNode(
        colander.String(),
        title=_('Slack service URL'),
        description=h.literal(_(
            'This can be setup at the '
            '<a href="https://my.slack.com/services/new/incoming-webhook/">'
            'slack app manager</a>')),
        default='',
        preparer=strip_whitespace,
        validator=colander.url,
        widget=deform.widget.TextInputWidget(
            placeholder='https://hooks.slack.com/services/...',
        ),
    )
    username = colander.SchemaNode(
        colander.String(),
        title=_('Username'),
        description=_('Username to show notifications coming from.'),
        missing='Rhodecode',
        preparer=strip_whitespace,
        widget=deform.widget.TextInputWidget(
            placeholder='Rhodecode'
        ),
    )
    channel = colander.SchemaNode(
        colander.String(),
        title=_('Channel'),
        description=_('Channel to send notifications to.'),
        missing='',
        preparer=strip_whitespace,
        widget=deform.widget.TextInputWidget(
            placeholder='#general'
        ),
    )
    icon_emoji = colander.SchemaNode(
        colander.String(),
        title=_('Emoji'),
        description=_('Emoji to use eg. :studio_microphone:'),
        missing='',
        preparer=strip_whitespace,
        widget=deform.widget.TextInputWidget(
            placeholder=':studio_microphone:'
        ),
    )


repo_push_template = Template(r'''
*${data['actor']['username']}* pushed to \
%if data['push']['branches']:
${len(data['push']['branches']) > 1 and 'branches' or 'branch'} \
${', '.join('<%s|%s>' % (branch['url'], branch['name']) for branch in data['push']['branches'])} \
%else:
unknown branch \
%endif
in <${data['repo']['url']}|${data['repo']['repo_name']}>
>>>
%for commit in data['push']['commits']:
<${commit['url']}|${commit['short_id']}> - ${commit['message_html']|html_to_slack_links}
%endfor
''')


+
+
class SlackIntegrationType(IntegrationTypeBase):
    key = 'slack'
    display_name = _('Slack')
-    SettingsSchema = SlackSettingsSchema
+    description = _('Send events such as repo pushes and pull requests to '
+                    'your slack channel.')
+    icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M165.963541,15.8384262 C162.07318,3.86308197 149.212328,-2.69009836 137.239082,1.20236066 C125.263738,5.09272131 118.710557,17.9535738 122.603016,29.9268197 L181.550164,211.292328 C185.597902,222.478689 197.682361,228.765377 209.282098,225.426885 C221.381246,221.943607 228.756984,209.093246 224.896,197.21023 C224.749115,196.756984 165.963541,15.8384262 165.963541,15.8384262" fill="#DFA22F"></path><path d="M74.6260984,45.515541 C70.7336393,33.5422951 57.8727869,26.9891148 45.899541,30.8794754 C33.9241967,34.7698361 27.3710164,47.6306885 31.2634754,59.6060328 L90.210623,240.971541 C94.2583607,252.157902 106.34282,258.44459 117.942557,255.104 C130.041705,251.62282 137.417443,238.772459 133.556459,226.887344 C133.409574,226.436197 74.6260984,45.515541 74.6260984,45.515541" fill="#3CB187"></path><path d="M240.161574,166.045377 C252.136918,162.155016 258.688,149.294164 254.797639,137.31882 C250.907279,125.345574 238.046426,118.792393 226.07318,122.682754 L44.7076721,181.632 C33.5213115,185.677639 27.234623,197.762098 30.5731148,209.361836 C34.0563934,221.460984 46.9067541,228.836721 58.7897705,224.975738 C59.2430164,224.828852 240.161574,166.045377 240.161574,166.045377" fill="#CE1E5B"></path><path d="M82.507541,217.270557 C94.312918,213.434754 109.528131,208.491016 125.855475,203.186361 C122.019672,191.380984 117.075934,176.163672 111.76918,159.83423 L68.4191475,173.924721 L82.507541,217.270557" fill="#392538"></path><path d="M173.847082,187.591344 C190.235279,182.267803 205.467279,177.31777 217.195016,173.507148 C213.359213,161.70177 208.413377,146.480262 203.106623,130.146623 L159.75659,144.237115 L173.847082,187.591344" fill="#BB242A"></path><path d="M210.484459,74.7058361 C222.457705,70.8154754 229.010885,57.954623 225.120525,45.9792787 C221.230164,34.0060328 208.369311,27.4528525 196.393967,31.3432131 L15.028459,90.292459 C3.84209836,94.3380984 -2.44459016,106.422557 0.896,118.022295 C4.37718033,130.121443 17.227541,137.49718 29.1126557,133.636197 C29.5638033,133.489311 210.484459,74.7058361 210.484459,74.7058361" fill="#72C5CD"></path><path d="M52.8220328,125.933115 C64.6274098,122.097311 79.8468197,117.151475 96.1762623,111.84682 C90.8527213,95.4565246 85.9026885,80.2245246 82.0920656,68.4946885 L38.731541,82.5872787 L52.8220328,125.933115" fill="#248C73"></path><path d="M144.159475,96.256 C160.551869,90.9303607 175.785967,85.9803279 187.515803,82.1676066 C182.190164,65.7752131 177.240131,50.5390164 173.42741,38.807082 L130.068984,52.8996721 L144.159475,96.256" fill="#62803A"></path></g></svg>'''
    valid_events = [
        events.PullRequestCloseEvent,
        events.PullRequestMergeEvent,
        events.PullRequestUpdateEvent,
        events.PullRequestCommentEvent,
        events.PullRequestReviewEvent,
        events.PullRequestCreateEvent,
        events.RepoPushEvent,
        events.RepoCreateEvent,
    ]

    def send_event(self, event):
        if event.__class__ not in self.valid_events:
            log.debug('event not valid: %r' % event)
            return

        if event.name not in self.settings['events']:
            log.debug('event ignored: %r' % event)
            return

        data = event.as_dict()

        text = '*%s* caused a *%s* event' % (
            data['actor']['username'], event.name)

        log.debug('handling slack event for %s' % event.name)

        if isinstance(event, events.PullRequestCommentEvent):
            text = self.format_pull_request_comment_event(event, data)
        elif isinstance(event, events.PullRequestReviewEvent):
            text = self.format_pull_request_review_event(event, data)
        elif isinstance(event, events.PullRequestEvent):
            text = self.format_pull_request_event(event, data)
        elif isinstance(event, events.RepoPushEvent):
            text = self.format_repo_push_event(data)
        elif isinstance(event, events.RepoCreateEvent):
            text = self.format_repo_create_event(data)
        else:
            log.error('unhandled event type: %r' % event)

        run_task(post_text_to_slack, self.settings, text)

    def settings_schema(self):
        schema = SlackSettingsSchema()
        schema.add(colander.SchemaNode(
            colander.Set(),
            widget=deform.widget.CheckboxChoiceWidget(
                values=sorted(
                    [(e.name, e.display_name) for e in self.valid_events]
                )
            ),
            description="Events activated for this integration",
            name='events'
        ))

        return schema

    def format_pull_request_comment_event(self, event, data):
        comment_text = data['comment']['text']
        if len(comment_text) > 200:
            comment_text = '<{comment_url}|{comment_text}...>'.format(
                comment_text=comment_text[:200],
                comment_url=data['comment']['url'],
            )

        comment_status = ''
        if data['comment']['status']:
            comment_status = '[{}]: '.format(data['comment']['status'])

        return (textwrap.dedent(
            '''
            {user} commented on pull request <{pr_url}|#{number}> - {pr_title}:
            >>> {comment_status}{comment_text}
            ''').format(
                comment_status=comment_status,
                user=data['actor']['username'],
                number=data['pullrequest']['pull_request_id'],
                pr_url=data['pullrequest']['url'],
                pr_status=data['pullrequest']['status'],
                pr_title=data['pullrequest']['title'],
                comment_text=comment_text
            )
        )

    def format_pull_request_review_event(self, event, data):
        return (textwrap.dedent(
            '''
            Status changed to {pr_status} for pull request <{pr_url}|#{number}> - {pr_title}
            ''').format(
                user=data['actor']['username'],
                number=data['pullrequest']['pull_request_id'],
                pr_url=data['pullrequest']['url'],
                pr_status=data['pullrequest']['status'],
                pr_title=data['pullrequest']['title'],
            )
        )

    def format_pull_request_event(self, event, data):
        action = {
            events.PullRequestCloseEvent: 'closed',
            events.PullRequestMergeEvent: 'merged',
            events.PullRequestUpdateEvent: 'updated',
            events.PullRequestCreateEvent: 'created',
        }.get(event.__class__, str(event.__class__))

        return ('Pull request <{url}|#{number}> - {title} '
                '{action} by {user}').format(
            user=data['actor']['username'],
            number=data['pullrequest']['pull_request_id'],
            url=data['pullrequest']['url'],
            title=data['pullrequest']['title'],
            action=action
        )

    def format_repo_push_event(self, data):
        result = repo_push_template.render(
            data=data,
            html_to_slack_links=html_to_slack_links,
        )
        return result

    def format_repo_create_event(self, data):
        return '<{}|{}> ({}) repository created by *{}*'.format(
            data['repo']['url'],
            data['repo']['repo_name'],
            data['repo']['repo_type'],
            data['actor']['username'],
        )


def html_to_slack_links(message):
    return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub(
        r'<\1|\2>', message)


@task(ignore_result=True)
def post_text_to_slack(settings, text):
    log.debug('sending %s to slack %s' % (text, settings['service']))
    resp = requests.post(settings['service'], json={
        "channel": settings.get('channel', ''),
        "username": settings.get('username', 'Rhodecode'),
        "text": text,
        "icon_emoji": settings.get('icon_emoji', ':studio_microphone:')
    })
    resp.raise_for_status()  # raise exception on a failed request
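
Note: html_to_slack_links() above is a plain regex substitution over the commit message HTML, so its behaviour is easy to check in isolation; a small usage sketch (the URL is just an illustrative value):

    import re

    def html_to_slack_links(message):
        # rewrite <a href="url">label</a> into Slack's <url|label> markup
        return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub(
            r'<\1|\2>', message)

    message = 'fixes <a href="https://example.com/repo/issue/42">#42</a>'
    print(html_to_slack_links(message))
    # -> fixes <https://example.com/repo/issue/42|#42>
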
@@ -1,111 +1,117 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2012-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

from __future__ import unicode_literals

import deform
import logging
import requests
import colander
from celery.task import task
from mako.template import Template

from rhodecode import events
from rhodecode.translation import _
from rhodecode.integrations.types.base import IntegrationTypeBase
-from rhodecode.integrations.schema import IntegrationSettingsSchemaBase

log = logging.getLogger(__name__)


class WebhookSettingsSchema(colander.Schema):
    url = colander.SchemaNode(
        colander.String(),
        title=_('Webhook URL'),
        description=_('URL of the webhook to receive POST event.'),
-        default='',
+        missing=colander.required,
+        required=True,
        validator=colander.url,
        widget=deform.widget.TextInputWidget(
            placeholder='https://www.example.com/webhook'
        ),
    )
    secret_token = colander.SchemaNode(
        colander.String(),
        title=_('Secret Token'),
        description=_('String used to validate received payloads.'),
        default='',
+        missing='',
        widget=deform.widget.TextInputWidget(
            placeholder='secret_token'
        ),
    )


+
+
class WebhookIntegrationType(IntegrationTypeBase):
    key = 'webhook'
    display_name = _('Webhook')
+    description = _('Post json events to a webhook endpoint')
+    icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>'''
+
    valid_events = [
        events.PullRequestCloseEvent,
        events.PullRequestMergeEvent,
        events.PullRequestUpdateEvent,
        events.PullRequestCommentEvent,
        events.PullRequestReviewEvent,
        events.PullRequestCreateEvent,
        events.RepoPushEvent,
        events.RepoCreateEvent,
    ]

    def settings_schema(self):
        schema = WebhookSettingsSchema()
        schema.add(colander.SchemaNode(
            colander.Set(),
            widget=deform.widget.CheckboxChoiceWidget(
                values=sorted(
                    [(e.name, e.display_name) for e in self.valid_events]
                )
            ),
            description="Events activated for this integration",
            name='events'
        ))
        return schema

    def send_event(self, event):
        log.debug('handling event %s with webhook integration %s',
                  event.name, self)

        if event.__class__ not in self.valid_events:
            log.debug('event not valid: %r' % event)
            return

        if event.name not in self.settings['events']:
            log.debug('event ignored: %r' % event)
            return

        data = event.as_dict()
        post_to_webhook(data, self.settings)


@task(ignore_result=True)
def post_to_webhook(data, settings):
    log.debug('sending event:%s to webhook %s', data['name'], settings['url'])
    resp = requests.post(settings['url'], json={
        'token': settings['secret_token'],
        'event': data
    })
    resp.raise_for_status()  # raise exception on a failed request
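
Note: the webhook type above posts a JSON document with exactly two keys, 'token' (the configured secret_token) and 'event' (the event.as_dict() payload). A minimal sketch of a receiving endpoint that validates the shared secret; the host, port and expected token are placeholders, and the exact fields inside 'event' depend on the event class:

    import json
    from wsgiref.simple_server import make_server

    EXPECTED_TOKEN = 'secret_token'  # placeholder, must match the integration's secret_token

    def app(environ, start_response):
        length = int(environ.get('CONTENT_LENGTH') or 0)
        payload = json.loads(environ['wsgi.input'].read(length))

        if payload.get('token') != EXPECTED_TOKEN:
            start_response('403 Forbidden', [('Content-Type', 'text/plain')])
            return [b'bad token']

        event = payload['event']
        print('received %s triggered by %s' % (event['name'], event['actor']['username']))
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'ok']

    if __name__ == '__main__':
        make_server('', 8080, app).serve_forever()
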
@@ -1,299 +1,385 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2012-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

-import colander
-import logging
import pylons
import deform
+import logging
+import colander
+import peppercorn
+import webhelpers.paginate

-from pyramid.httpexceptions import HTTPFound, HTTPForbidden
+from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest
from pyramid.renderers import render
from pyramid.response import Response

from rhodecode.lib import auth
from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator
+from rhodecode.lib.utils2 import safe_int
+from rhodecode.lib.helpers import Page
from rhodecode.model.db import Repository, RepoGroup, Session, Integration
from rhodecode.model.scm import ScmModel
from rhodecode.model.integration import IntegrationModel
from rhodecode.admin.navigation import navigation_list
from rhodecode.translation import _
from rhodecode.integrations import integration_type_registry
+from rhodecode.model.validation_schema.schemas.integration_schema import (
+    make_integration_schema)

log = logging.getLogger(__name__)


class IntegrationSettingsViewBase(object):
    """ Base Integration settings view used by both repo / global settings """

    def __init__(self, context, request):
        self.context = context
        self.request = request
        self._load_general_context()

        if not self.perm_check(request.user):
            raise HTTPForbidden()

    def _load_general_context(self):
        """
        This avoids boilerplate for repo/global+list/edit+views/templates
        by doing all possible contexts at the same time however it should
        be split up into separate functions once more "contexts" exist
        """

        self.IntegrationType = None
        self.repo = None
        self.repo_group = None
        self.integration = None
        self.integrations = {}

        request = self.request

        if 'repo_name' in request.matchdict:  # in repo settings context
            repo_name = request.matchdict['repo_name']
            self.repo = Repository.get_by_repo_name(repo_name)

        if 'repo_group_name' in request.matchdict:  # in group settings context
            repo_group_name = request.matchdict['repo_group_name']
            self.repo_group = RepoGroup.get_by_group_name(repo_group_name)

-        if 'integration' in request.matchdict:  # we're in integration context
+
+        if 'integration' in request.matchdict:  # integration type context
            integration_type = request.matchdict['integration']
            self.IntegrationType = integration_type_registry[integration_type]

        if 'integration_id' in request.matchdict:  # single integration context
            integration_id = request.matchdict['integration_id']
            self.integration = Integration.get(integration_id)
-        else:  # list integrations context
-            integrations = IntegrationModel().get_integrations(
-                repo=self.repo, repo_group=self.repo_group)

-            for integration in integrations:
-                self.integrations.setdefault(integration.integration_type, []
-                ).append(integration)
+            # extra perms check just in case
+            if not self._has_perms_for_integration(self.integration):
+                raise HTTPForbidden()

        self.settings = self.integration and self.integration.settings or {}
+        self.admin_view = not (self.repo or self.repo_group)
+
+    def _has_perms_for_integration(self, integration):
+        perms = self.request.user.permissions
+
+        if 'hg.admin' in perms['global']:
+            return True
+
+        if integration.repo:
+            return perms['repositories'].get(
+                integration.repo.repo_name) == 'repository.admin'
+
+        if integration.repo_group:
+            return perms['repositories_groups'].get(
+                integration.repo_group.group_name) == 'group.admin'
+
+        return False

    def _template_c_context(self):
        # TODO: dan: this is a stopgap in order to inherit from current pylons
        # based admin/repo settings templates - this should be removed entirely
        # after port to pyramid

        c = pylons.tmpl_context
        c.active = 'integrations'
        c.rhodecode_user = self.request.user
        c.repo = self.repo
        c.repo_group = self.repo_group
        c.repo_name = self.repo and self.repo.repo_name or None
        c.repo_group_name = self.repo_group and self.repo_group.group_name or None
+
        if self.repo:
            c.repo_info = self.repo
            c.rhodecode_db_repo = self.repo
            c.repository_pull_requests = ScmModel().get_pull_requests(self.repo)
        else:
            c.navlist = navigation_list(self.request)

        return c

    def _form_schema(self):
-        if self.integration:
-            settings = self.integration.settings
-        else:
-            settings = {}
-        return self.IntegrationType(settings=settings).settings_schema()
+        schema = make_integration_schema(IntegrationType=self.IntegrationType,
+                                         settings=self.settings)

-    def settings_get(self, defaults=None, errors=None, form=None):
-        """
-        View that displays the plugin settings as a form.
-        """
-        defaults = defaults or {}
-        errors = errors or {}
+        # returns a clone, important if mutating the schema later
+        return schema.bind(
+            permissions=self.request.user.permissions,
+            no_scope=not self.admin_view)
+
+
+    def _form_defaults(self):
+        defaults = {}

        if self.integration:
-            defaults = self.integration.settings or {}
+            defaults['settings'] = self.integration.settings or {}
+            defaults['options'] = {
+                'name': self.integration.name,
+                'enabled': self.integration.enabled,
+                'scope': self.integration.scope,
+            }
        else:
            if self.repo:
                scope = _('{repo_name} repository').format(
                    repo_name=self.repo.repo_name)
            elif self.repo_group:
                scope = _('{repo_group_name} repo group').format(
                    repo_group_name=self.repo_group.group_name)
            else:
                scope = _('Global')

-            defaults['name'] = '{} {} integration'.format(scope,
-                self.IntegrationType.display_name)
-            defaults['enabled'] = True
+            defaults['options'] = {
+                'enabled': True,
+                'name': _('{name} integration').format(
+                    name=self.IntegrationType.display_name),
+            }
+            if self.repo:
+                defaults['options']['scope'] = self.repo
+            elif self.repo_group:
+                defaults['options']['scope'] = self.repo_group
+
+        return defaults

-        schema = self._form_schema().bind(request=self.request)
+    def _delete_integration(self, integration):
+        Session().delete(self.integration)
+        Session().commit()
+        self.request.session.flash(
+            _('Integration {integration_name} deleted successfully.').format(
+                integration_name=self.integration.name),
+            queue='success')
+
+        if self.repo:
+            redirect_to = self.request.route_url(
+                'repo_integrations_home', repo_name=self.repo.repo_name)
+        elif self.repo_group:
+            redirect_to = self.request.route_url(
+                'repo_group_integrations_home',
+                repo_group_name=self.repo_group.group_name)
+        else:
+            redirect_to = self.request.route_url('global_integrations_home')
+        raise HTTPFound(redirect_to)
+
+    def settings_get(self, defaults=None, form=None):
+        """
+        View that displays the integration settings as a form.
+        """
+
+        defaults = defaults or self._form_defaults()
+        schema = self._form_schema()

        if self.integration:
            buttons = ('submit', 'delete')
        else:
            buttons = ('submit',)

        form = form or deform.Form(schema, appstruct=defaults, buttons=buttons)

-        for node in schema:
-            setting = self.settings.get(node.name)
-            if setting is not None:
-                defaults.setdefault(node.name, setting)
-            else:
-                if node.default:
-                    defaults.setdefault(node.name, node.default)
-
        template_context = {
            'form': form,
-            'defaults': defaults,
-            'errors': errors,
-            'schema': schema,
            'current_IntegrationType': self.IntegrationType,
            'integration': self.integration,
-            'settings': self.settings,
-            'resource': self.context,
            'c': self._template_c_context(),
        }

        return template_context

    @auth.CSRFRequired()
    def settings_post(self):
        """
-        View that validates and stores the plugin settings.
+        View that validates and stores the integration settings.
        """
-        if self.request.params.get('delete'):
+        controls = self.request.POST.items()
183 | Session().delete(self.integration) |
|
227 | pstruct = peppercorn.parse(controls) | |
184 | Session().commit() |
|
228 | ||
185 | self.request.session.flash( |
|
229 | if self.integration and pstruct.get('delete'): | |
186 | _('Integration {integration_name} deleted successfully.').format( |
|
230 | return self._delete_integration(self.integration) | |
187 | integration_name=self.integration.name), |
|
231 | ||
188 | queue='success') |
|
232 | schema = self._form_schema() | |
189 | if self.repo: |
|
233 | ||
190 | redirect_to = self.request.route_url( |
|
234 | skip_settings_validation = False | |
191 | 'repo_integrations_home', repo_name=self.repo.repo_name) |
|
235 | if self.integration and 'enabled' not in pstruct.get('options', {}): | |
192 | else: |
|
236 | skip_settings_validation = True | |
193 | redirect_to = self.request.route_url('global_integrations_home') |
|
237 | schema['settings'].validator = None | |
194 | raise HTTPFound(redirect_to) |
|
238 | for field in schema['settings'].children: | |
|
239 | field.validator = None | |||
|
240 | field.missing = '' | |||
195 |
|
241 | |||
196 | schema = self._form_schema().bind(request=self.request) |
|
242 | if self.integration: | |
|
243 | buttons = ('submit', 'delete') | |||
|
244 | else: | |||
|
245 | buttons = ('submit',) | |||
197 |
|
246 | |||
198 |
form = deform.Form(schema, buttons= |
|
247 | form = deform.Form(schema, buttons=buttons) | |
199 |
|
248 | |||
200 | params = {} |
|
249 | if not self.admin_view: | |
201 | for node in schema.children: |
|
250 | # scope is read only field in these cases, and has to be added | |
202 | if type(node.typ) in (colander.Set, colander.List): |
|
251 | options = pstruct.setdefault('options', {}) | |
203 | val = self.request.params.getall(node.name) |
|
252 | if 'scope' not in options: | |
204 |
|
|
253 | if self.repo: | |
205 | val = self.request.params.get(node.name) |
|
254 | options['scope'] = 'repo:{}'.format(self.repo.repo_name) | |
206 |
if |
|
255 | elif self.repo_group: | |
207 | params[node.name] = val |
|
256 | options['scope'] = 'repogroup:{}'.format( | |
|
257 | self.repo_group.group_name) | |||
208 |
|
258 | |||
209 | controls = self.request.POST.items() |
|
|||
210 | try: |
|
259 | try: | |
211 |
valid_data = form.validate(c |
|
260 | valid_data = form.validate_pstruct(pstruct) | |
212 | except deform.ValidationFailure as e: |
|
261 | except deform.ValidationFailure as e: | |
213 | self.request.session.flash( |
|
262 | self.request.session.flash( | |
214 | _('Errors exist when saving integration settings. ' |
|
263 | _('Errors exist when saving integration settings. ' | |
215 | 'Please check the form inputs.'), |
|
264 | 'Please check the form inputs.'), | |
216 | queue='error') |
|
265 | queue='error') | |
217 |
return self.settings_get( |
|
266 | return self.settings_get(form=e) | |
218 |
|
267 | |||
219 | if not self.integration: |
|
268 | if not self.integration: | |
220 | self.integration = Integration() |
|
269 | self.integration = Integration() | |
221 | self.integration.integration_type = self.IntegrationType.key |
|
270 | self.integration.integration_type = self.IntegrationType.key | |
222 | if self.repo: |
|
|||
223 | self.integration.repo = self.repo |
|
|||
224 | elif self.repo_group: |
|
|||
225 | self.integration.repo_group = self.repo_group |
|
|||
226 | Session().add(self.integration) |
|
271 | Session().add(self.integration) | |
227 |
|
272 | |||
228 | self.integration.enabled = valid_data.pop('enabled', False) |
|
273 | scope = valid_data['options']['scope'] | |
229 | self.integration.name = valid_data.pop('name') |
|
|||
230 | self.integration.settings = valid_data |
|
|||
231 |
|
274 | |||
|
275 | IntegrationModel().update_integration(self.integration, | |||
|
276 | name=valid_data['options']['name'], | |||
|
277 | enabled=valid_data['options']['enabled'], | |||
|
278 | settings=valid_data['settings'], | |||
|
279 | scope=scope) | |||
|
280 | ||||
|
281 | self.integration.settings = valid_data['settings'] | |||
232 | Session().commit() |
|
282 | Session().commit() | |
233 |
|
||||
234 | # Display success message and redirect. |
|
283 | # Display success message and redirect. | |
235 | self.request.session.flash( |
|
284 | self.request.session.flash( | |
236 | _('Integration {integration_name} updated successfully.').format( |
|
285 | _('Integration {integration_name} updated successfully.').format( | |
237 | integration_name=self.IntegrationType.display_name), |
|
286 | integration_name=self.IntegrationType.display_name), | |
238 | queue='success') |
|
287 | queue='success') | |
239 |
|
288 | |||
240 | if self.repo: |
|
289 | ||
241 | redirect_to = self.request.route_url( |
|
290 | # if integration scope changes, we must redirect to the right place | |
242 | 'repo_integrations_edit', repo_name=self.repo.repo_name, |
|
291 | # keeping in mind if the original view was for /repo/ or /_admin/ | |
|
292 | admin_view = not (self.repo or self.repo_group) | |||
|
293 | ||||
|
294 | if isinstance(self.integration.scope, Repository) and not admin_view: | |||
|
295 | redirect_to = self.request.route_path( | |||
|
296 | 'repo_integrations_edit', | |||
|
297 | repo_name=self.integration.scope.repo_name, | |||
243 | integration=self.integration.integration_type, |
|
298 | integration=self.integration.integration_type, | |
244 | integration_id=self.integration.integration_id) |
|
299 | integration_id=self.integration.integration_id) | |
245 | elif self.repo: |
|
300 | elif isinstance(self.integration.scope, RepoGroup) and not admin_view: | |
246 |
redirect_to = self.request.route_ |
|
301 | redirect_to = self.request.route_path( | |
247 | 'repo_group_integrations_edit', |
|
302 | 'repo_group_integrations_edit', | |
248 |
repo_group_name=self. |
|
303 | repo_group_name=self.integration.scope.group_name, | |
249 | integration=self.integration.integration_type, |
|
304 | integration=self.integration.integration_type, | |
250 | integration_id=self.integration.integration_id) |
|
305 | integration_id=self.integration.integration_id) | |
251 | else: |
|
306 | else: | |
252 |
redirect_to = self.request.route_ |
|
307 | redirect_to = self.request.route_path( | |
253 | 'global_integrations_edit', |
|
308 | 'global_integrations_edit', | |
254 | integration=self.integration.integration_type, |
|
309 | integration=self.integration.integration_type, | |
255 | integration_id=self.integration.integration_id) |
|
310 | integration_id=self.integration.integration_id) | |
256 |
|
311 | |||
257 | return HTTPFound(redirect_to) |
|
312 | return HTTPFound(redirect_to) | |
258 |
|
313 | |||
259 | def index(self): |
|
314 | def index(self): | |
260 | current_integrations = self.integrations |
|
315 | """ List integrations """ | |
261 |
if self. |
|
316 | if self.repo: | |
262 | current_integrations = { |
|
317 | scope = self.repo | |
263 | self.IntegrationType.key: self.integrations.get( |
|
318 | elif self.repo_group: | |
264 | self.IntegrationType.key, []) |
|
319 | scope = self.repo_group | |
265 |
|
|
320 | else: | |
|
321 | scope = 'all' | |||
|
322 | ||||
|
323 | integrations = [] | |||
|
324 | ||||
|
325 | for integration in IntegrationModel().get_integrations( | |||
|
326 | scope=scope, IntegrationType=self.IntegrationType): | |||
|
327 | ||||
|
328 | # extra permissions check *just in case* | |||
|
329 | if not self._has_perms_for_integration(integration): | |||
|
330 | continue | |||
|
331 | integrations.append(integration) | |||
|
332 | ||||
|
333 | sort_arg = self.request.GET.get('sort', 'name:asc') | |||
|
334 | if ':' in sort_arg: | |||
|
335 | sort_field, sort_dir = sort_arg.split(':') | |||
|
336 | else: | |||
|
337 | sort_field = sort_arg, 'asc' | |||
|
338 | ||||
|
339 | assert sort_field in ('name', 'integration_type', 'enabled', 'scope') | |||
|
340 | ||||
|
341 | integrations.sort( | |||
|
342 | key=lambda x: getattr(x[1], sort_field), reverse=(sort_dir=='desc')) | |||
|
343 | ||||
|
344 | ||||
|
345 | page_url = webhelpers.paginate.PageURL( | |||
|
346 | self.request.path, self.request.GET) | |||
|
347 | page = safe_int(self.request.GET.get('page', 1), 1) | |||
|
348 | ||||
|
349 | integrations = Page(integrations, page=page, items_per_page=10, | |||
|
350 | url=page_url) | |||
266 |
|
351 | |||
267 | template_context = { |
|
352 | template_context = { | |
|
353 | 'sort_field': sort_field, | |||
|
354 | 'rev_sort_dir': sort_dir != 'desc' and 'desc' or 'asc', | |||
268 | 'current_IntegrationType': self.IntegrationType, |
|
355 | 'current_IntegrationType': self.IntegrationType, | |
269 |
' |
|
356 | 'integrations_list': integrations, | |
270 | 'available_integrations': integration_type_registry, |
|
357 | 'available_integrations': integration_type_registry, | |
271 | 'c': self._template_c_context() |
|
358 | 'c': self._template_c_context(), | |
|
359 | 'request': self.request, | |||
272 | } |
|
360 | } | |
|
361 | return template_context | |||
273 |
|
362 | |||
274 | if self.repo: |
|
363 | def new_integration(self): | |
275 | html = render('rhodecode:templates/admin/integrations/list.html', |
|
364 | template_context = { | |
276 | template_context, |
|
365 | 'available_integrations': integration_type_registry, | |
277 | request=self.request) |
|
366 | 'c': self._template_c_context(), | |
278 |
|
|
367 | } | |
279 | html = render('rhodecode:templates/admin/integrations/list.html', |
|
368 | return template_context | |
280 | template_context, |
|
|||
281 | request=self.request) |
|
|||
282 |
|
||||
283 | return Response(html) |
|
|||
284 |
|
||||
285 |
|
369 | |||
286 | class GlobalIntegrationsView(IntegrationSettingsViewBase): |
|
370 | class GlobalIntegrationsView(IntegrationSettingsViewBase): | |
287 | def perm_check(self, user): |
|
371 | def perm_check(self, user): | |
288 | return auth.HasPermissionAll('hg.admin').check_permissions(user=user) |
|
372 | return auth.HasPermissionAll('hg.admin').check_permissions(user=user) | |
289 |
|
373 | |||
290 |
|
374 | |||
291 | class RepoIntegrationsView(IntegrationSettingsViewBase): |
|
375 | class RepoIntegrationsView(IntegrationSettingsViewBase): | |
292 | def perm_check(self, user): |
|
376 | def perm_check(self, user): | |
293 | return auth.HasRepoPermissionAll('repository.admin' |
|
377 | return auth.HasRepoPermissionAll('repository.admin' | |
294 | )(repo_name=self.repo.repo_name, user=user) |
|
378 | )(repo_name=self.repo.repo_name, user=user) | |
295 |
|
379 | |||
|
380 | ||||
296 | class RepoGroupIntegrationsView(IntegrationSettingsViewBase): |
|
381 | class RepoGroupIntegrationsView(IntegrationSettingsViewBase): | |
297 | def perm_check(self, user): |
|
382 | def perm_check(self, user): | |
298 | return auth.HasRepoGroupPermissionAll('group.admin' |
|
383 | return auth.HasRepoGroupPermissionAll('group.admin' | |
299 | )(group_name=self.repo_group.group_name, user=user) |
|
384 | )(group_name=self.repo_group.group_name, user=user) | |
|
385 |
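A minimal sketch (not part of the changeset) of the nested form data that the
reworked settings_post() above consumes after peppercorn.parse(): an 'options'
mapping (name/enabled/scope) plus an integration-specific 'settings' mapping.
The 'url' field and the sample values are hypothetical, used only to show the
pstruct shape passed to form.validate_pstruct().

    import peppercorn

    # Simulated POST controls as a deform form with two mappings would submit.
    controls = [
        ('__start__', 'options:mapping'),
        ('name', 'my repo integration'),
        ('enabled', 'true'),
        ('scope', 'repo:some/repo'),          # matches the 'repo:{}' format above
        ('__end__', 'options:mapping'),
        ('__start__', 'settings:mapping'),
        ('url', 'https://example.com/hook'),  # hypothetical integration setting
        ('__end__', 'settings:mapping'),
    ]

    pstruct = peppercorn.parse(controls)
    # pstruct == {
    #     'options': {'name': 'my repo integration', 'enabled': 'true',
    #                 'scope': 'repo:some/repo'},
    #     'settings': {'url': 'https://example.com/hook'},
    # }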
@@ -1,3506 +1,3505 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | Database Models for RhodeCode Enterprise |
|
22 | Database Models for RhodeCode Enterprise | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import os |
|
25 | import os | |
26 | import sys |
|
26 | import sys | |
27 | import time |
|
27 | import time | |
28 | import hashlib |
|
28 | import hashlib | |
29 | import logging |
|
29 | import logging | |
30 | import datetime |
|
30 | import datetime | |
31 | import warnings |
|
31 | import warnings | |
32 | import ipaddress |
|
32 | import ipaddress | |
33 | import functools |
|
33 | import functools | |
34 | import traceback |
|
34 | import traceback | |
35 | import collections |
|
35 | import collections | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | from sqlalchemy import * |
|
38 | from sqlalchemy import * | |
39 | from sqlalchemy.exc import IntegrityError |
|
39 | from sqlalchemy.exc import IntegrityError | |
40 | from sqlalchemy.ext.declarative import declared_attr |
|
40 | from sqlalchemy.ext.declarative import declared_attr | |
41 | from sqlalchemy.ext.hybrid import hybrid_property |
|
41 | from sqlalchemy.ext.hybrid import hybrid_property | |
42 | from sqlalchemy.orm import ( |
|
42 | from sqlalchemy.orm import ( | |
43 | relationship, joinedload, class_mapper, validates, aliased) |
|
43 | relationship, joinedload, class_mapper, validates, aliased) | |
44 | from sqlalchemy.sql.expression import true |
|
44 | from sqlalchemy.sql.expression import true | |
45 | from beaker.cache import cache_region, region_invalidate |
|
45 | from beaker.cache import cache_region, region_invalidate | |
46 | from webob.exc import HTTPNotFound |
|
46 | from webob.exc import HTTPNotFound | |
47 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
47 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
48 |
|
48 | |||
49 | from pylons import url |
|
49 | from pylons import url | |
50 | from pylons.i18n.translation import lazy_ugettext as _ |
|
50 | from pylons.i18n.translation import lazy_ugettext as _ | |
51 |
|
51 | |||
52 | from rhodecode.lib.vcs import get_backend, get_vcs_instance |
|
52 | from rhodecode.lib.vcs import get_backend, get_vcs_instance | |
53 | from rhodecode.lib.vcs.utils.helpers import get_scm |
|
53 | from rhodecode.lib.vcs.utils.helpers import get_scm | |
54 | from rhodecode.lib.vcs.exceptions import VCSError |
|
54 | from rhodecode.lib.vcs.exceptions import VCSError | |
55 | from rhodecode.lib.vcs.backends.base import ( |
|
55 | from rhodecode.lib.vcs.backends.base import ( | |
56 | EmptyCommit, Reference, MergeFailureReason) |
|
56 | EmptyCommit, Reference, MergeFailureReason) | |
57 | from rhodecode.lib.utils2 import ( |
|
57 | from rhodecode.lib.utils2 import ( | |
58 | str2bool, safe_str, get_commit_safe, safe_unicode, remove_prefix, md5_safe, |
|
58 | str2bool, safe_str, get_commit_safe, safe_unicode, remove_prefix, md5_safe, | |
59 | time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict) |
|
59 | time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict) | |
60 | from rhodecode.lib.jsonalchemy import MutationObj, JsonType, JSONDict |
|
60 | from rhodecode.lib.jsonalchemy import MutationObj, JsonType, JSONDict | |
61 | from rhodecode.lib.ext_json import json |
|
61 | from rhodecode.lib.ext_json import json | |
62 | from rhodecode.lib.caching_query import FromCache |
|
62 | from rhodecode.lib.caching_query import FromCache | |
63 | from rhodecode.lib.encrypt import AESCipher |
|
63 | from rhodecode.lib.encrypt import AESCipher | |
64 |
|
64 | |||
65 | from rhodecode.model.meta import Base, Session |
|
65 | from rhodecode.model.meta import Base, Session | |
66 |
|
66 | |||
67 | URL_SEP = '/' |
|
67 | URL_SEP = '/' | |
68 | log = logging.getLogger(__name__) |
|
68 | log = logging.getLogger(__name__) | |
69 |
|
69 | |||
70 | # ============================================================================= |
|
70 | # ============================================================================= | |
71 | # BASE CLASSES |
|
71 | # BASE CLASSES | |
72 | # ============================================================================= |
|
72 | # ============================================================================= | |
73 |
|
73 | |||
74 | # this is propagated from .ini file rhodecode.encrypted_values.secret or |
|
74 | # this is propagated from .ini file rhodecode.encrypted_values.secret or | |
75 | # beaker.session.secret if first is not set. |
|
75 | # beaker.session.secret if first is not set. | |
76 | # and initialized at environment.py |
|
76 | # and initialized at environment.py | |
77 | ENCRYPTION_KEY = None |
|
77 | ENCRYPTION_KEY = None | |
78 |
|
78 | |||
79 | # used to sort permissions by types, '#' used here is not allowed to be in |
|
79 | # used to sort permissions by types, '#' used here is not allowed to be in | |
80 | # usernames, and it's very early in sorted string.printable table. |
|
80 | # usernames, and it's very early in sorted string.printable table. | |
81 | PERMISSION_TYPE_SORT = { |
|
81 | PERMISSION_TYPE_SORT = { | |
82 | 'admin': '####', |
|
82 | 'admin': '####', | |
83 | 'write': '###', |
|
83 | 'write': '###', | |
84 | 'read': '##', |
|
84 | 'read': '##', | |
85 | 'none': '#', |
|
85 | 'none': '#', | |
86 | } |
|
86 | } | |
87 |
|
87 | |||
88 |
|
88 | |||
89 | def display_sort(obj): |
|
89 | def display_sort(obj): | |
90 | """ |
|
90 | """ | |
91 | Sort function used to sort permissions in .permissions() function of |
|
91 | Sort function used to sort permissions in .permissions() function of | |
92 | Repository, RepoGroup, UserGroup. Also it put the default user in front |
|
92 | Repository, RepoGroup, UserGroup. Also it put the default user in front | |
93 | of all other resources |
|
93 | of all other resources | |
94 | """ |
|
94 | """ | |
95 |
|
95 | |||
96 | if obj.username == User.DEFAULT_USER: |
|
96 | if obj.username == User.DEFAULT_USER: | |
97 | return '#####' |
|
97 | return '#####' | |
98 | prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '') |
|
98 | prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '') | |
99 | return prefix + obj.username |
|
99 | return prefix + obj.username | |
100 |
|
100 | |||
101 |
|
101 | |||
102 | def _hash_key(k): |
|
102 | def _hash_key(k): | |
103 | return md5_safe(k) |
|
103 | return md5_safe(k) | |
104 |
|
104 | |||
105 |
|
105 | |||
106 | class EncryptedTextValue(TypeDecorator): |
|
106 | class EncryptedTextValue(TypeDecorator): | |
107 | """ |
|
107 | """ | |
108 | Special column for encrypted long text data, use like:: |
|
108 | Special column for encrypted long text data, use like:: | |
109 |
|
109 | |||
110 | value = Column("encrypted_value", EncryptedValue(), nullable=False) |
|
110 | value = Column("encrypted_value", EncryptedValue(), nullable=False) | |
111 |
|
111 | |||
112 | This column is intelligent so if value is in unencrypted form it return |
|
112 | This column is intelligent so if value is in unencrypted form it return | |
113 | unencrypted form, but on save it always encrypts |
|
113 | unencrypted form, but on save it always encrypts | |
114 | """ |
|
114 | """ | |
115 | impl = Text |
|
115 | impl = Text | |
116 |
|
116 | |||
117 | def process_bind_param(self, value, dialect): |
|
117 | def process_bind_param(self, value, dialect): | |
118 | if not value: |
|
118 | if not value: | |
119 | return value |
|
119 | return value | |
120 | if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'): |
|
120 | if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'): | |
121 | # protect against double encrypting if someone manually starts |
|
121 | # protect against double encrypting if someone manually starts | |
122 | # doing |
|
122 | # doing | |
123 | raise ValueError('value needs to be in unencrypted format, ie. ' |
|
123 | raise ValueError('value needs to be in unencrypted format, ie. ' | |
124 | 'not starting with enc$aes') |
|
124 | 'not starting with enc$aes') | |
125 | return 'enc$aes_hmac$%s' % AESCipher( |
|
125 | return 'enc$aes_hmac$%s' % AESCipher( | |
126 | ENCRYPTION_KEY, hmac=True).encrypt(value) |
|
126 | ENCRYPTION_KEY, hmac=True).encrypt(value) | |
127 |
|
127 | |||
128 | def process_result_value(self, value, dialect): |
|
128 | def process_result_value(self, value, dialect): | |
129 | import rhodecode |
|
129 | import rhodecode | |
130 |
|
130 | |||
131 | if not value: |
|
131 | if not value: | |
132 | return value |
|
132 | return value | |
133 |
|
133 | |||
134 | parts = value.split('$', 3) |
|
134 | parts = value.split('$', 3) | |
135 | if not len(parts) == 3: |
|
135 | if not len(parts) == 3: | |
136 | # probably not encrypted values |
|
136 | # probably not encrypted values | |
137 | return value |
|
137 | return value | |
138 | else: |
|
138 | else: | |
139 | if parts[0] != 'enc': |
|
139 | if parts[0] != 'enc': | |
140 | # parts ok but without our header ? |
|
140 | # parts ok but without our header ? | |
141 | return value |
|
141 | return value | |
142 | enc_strict_mode = str2bool(rhodecode.CONFIG.get( |
|
142 | enc_strict_mode = str2bool(rhodecode.CONFIG.get( | |
143 | 'rhodecode.encrypted_values.strict') or True) |
|
143 | 'rhodecode.encrypted_values.strict') or True) | |
144 | # at that stage we know it's our encryption |
|
144 | # at that stage we know it's our encryption | |
145 | if parts[1] == 'aes': |
|
145 | if parts[1] == 'aes': | |
146 | decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2]) |
|
146 | decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2]) | |
147 | elif parts[1] == 'aes_hmac': |
|
147 | elif parts[1] == 'aes_hmac': | |
148 | decrypted_data = AESCipher( |
|
148 | decrypted_data = AESCipher( | |
149 | ENCRYPTION_KEY, hmac=True, |
|
149 | ENCRYPTION_KEY, hmac=True, | |
150 | strict_verification=enc_strict_mode).decrypt(parts[2]) |
|
150 | strict_verification=enc_strict_mode).decrypt(parts[2]) | |
151 | else: |
|
151 | else: | |
152 | raise ValueError( |
|
152 | raise ValueError( | |
153 | 'Encryption type part is wrong, must be `aes` ' |
|
153 | 'Encryption type part is wrong, must be `aes` ' | |
154 | 'or `aes_hmac`, got `%s` instead' % (parts[1])) |
|
154 | 'or `aes_hmac`, got `%s` instead' % (parts[1])) | |
155 | return decrypted_data |
|
155 | return decrypted_data | |
156 |
|
156 | |||
157 |
|
157 | |||
158 | class BaseModel(object): |
|
158 | class BaseModel(object): | |
159 | """ |
|
159 | """ | |
160 | Base Model for all classes |
|
160 | Base Model for all classes | |
161 | """ |
|
161 | """ | |
162 |
|
162 | |||
163 | @classmethod |
|
163 | @classmethod | |
164 | def _get_keys(cls): |
|
164 | def _get_keys(cls): | |
165 | """return column names for this model """ |
|
165 | """return column names for this model """ | |
166 | return class_mapper(cls).c.keys() |
|
166 | return class_mapper(cls).c.keys() | |
167 |
|
167 | |||
168 | def get_dict(self): |
|
168 | def get_dict(self): | |
169 | """ |
|
169 | """ | |
170 | return dict with keys and values corresponding |
|
170 | return dict with keys and values corresponding | |
171 | to this model data """ |
|
171 | to this model data """ | |
172 |
|
172 | |||
173 | d = {} |
|
173 | d = {} | |
174 | for k in self._get_keys(): |
|
174 | for k in self._get_keys(): | |
175 | d[k] = getattr(self, k) |
|
175 | d[k] = getattr(self, k) | |
176 |
|
176 | |||
177 | # also use __json__() if present to get additional fields |
|
177 | # also use __json__() if present to get additional fields | |
178 | _json_attr = getattr(self, '__json__', None) |
|
178 | _json_attr = getattr(self, '__json__', None) | |
179 | if _json_attr: |
|
179 | if _json_attr: | |
180 | # update with attributes from __json__ |
|
180 | # update with attributes from __json__ | |
181 | if callable(_json_attr): |
|
181 | if callable(_json_attr): | |
182 | _json_attr = _json_attr() |
|
182 | _json_attr = _json_attr() | |
183 | for k, val in _json_attr.iteritems(): |
|
183 | for k, val in _json_attr.iteritems(): | |
184 | d[k] = val |
|
184 | d[k] = val | |
185 | return d |
|
185 | return d | |
186 |
|
186 | |||
187 | def get_appstruct(self): |
|
187 | def get_appstruct(self): | |
188 | """return list with keys and values tuples corresponding |
|
188 | """return list with keys and values tuples corresponding | |
189 | to this model data """ |
|
189 | to this model data """ | |
190 |
|
190 | |||
191 | l = [] |
|
191 | l = [] | |
192 | for k in self._get_keys(): |
|
192 | for k in self._get_keys(): | |
193 | l.append((k, getattr(self, k),)) |
|
193 | l.append((k, getattr(self, k),)) | |
194 | return l |
|
194 | return l | |
195 |
|
195 | |||
196 | def populate_obj(self, populate_dict): |
|
196 | def populate_obj(self, populate_dict): | |
197 | """populate model with data from given populate_dict""" |
|
197 | """populate model with data from given populate_dict""" | |
198 |
|
198 | |||
199 | for k in self._get_keys(): |
|
199 | for k in self._get_keys(): | |
200 | if k in populate_dict: |
|
200 | if k in populate_dict: | |
201 | setattr(self, k, populate_dict[k]) |
|
201 | setattr(self, k, populate_dict[k]) | |
202 |
|
202 | |||
203 | @classmethod |
|
203 | @classmethod | |
204 | def query(cls): |
|
204 | def query(cls): | |
205 | return Session().query(cls) |
|
205 | return Session().query(cls) | |
206 |
|
206 | |||
207 | @classmethod |
|
207 | @classmethod | |
208 | def get(cls, id_): |
|
208 | def get(cls, id_): | |
209 | if id_: |
|
209 | if id_: | |
210 | return cls.query().get(id_) |
|
210 | return cls.query().get(id_) | |
211 |
|
211 | |||
212 | @classmethod |
|
212 | @classmethod | |
213 | def get_or_404(cls, id_): |
|
213 | def get_or_404(cls, id_): | |
214 | try: |
|
214 | try: | |
215 | id_ = int(id_) |
|
215 | id_ = int(id_) | |
216 | except (TypeError, ValueError): |
|
216 | except (TypeError, ValueError): | |
217 | raise HTTPNotFound |
|
217 | raise HTTPNotFound | |
218 |
|
218 | |||
219 | res = cls.query().get(id_) |
|
219 | res = cls.query().get(id_) | |
220 | if not res: |
|
220 | if not res: | |
221 | raise HTTPNotFound |
|
221 | raise HTTPNotFound | |
222 | return res |
|
222 | return res | |
223 |
|
223 | |||
224 | @classmethod |
|
224 | @classmethod | |
225 | def getAll(cls): |
|
225 | def getAll(cls): | |
226 | # deprecated and left for backward compatibility |
|
226 | # deprecated and left for backward compatibility | |
227 | return cls.get_all() |
|
227 | return cls.get_all() | |
228 |
|
228 | |||
229 | @classmethod |
|
229 | @classmethod | |
230 | def get_all(cls): |
|
230 | def get_all(cls): | |
231 | return cls.query().all() |
|
231 | return cls.query().all() | |
232 |
|
232 | |||
233 | @classmethod |
|
233 | @classmethod | |
234 | def delete(cls, id_): |
|
234 | def delete(cls, id_): | |
235 | obj = cls.query().get(id_) |
|
235 | obj = cls.query().get(id_) | |
236 | Session().delete(obj) |
|
236 | Session().delete(obj) | |
237 |
|
237 | |||
238 | @classmethod |
|
238 | @classmethod | |
239 | def identity_cache(cls, session, attr_name, value): |
|
239 | def identity_cache(cls, session, attr_name, value): | |
240 | exist_in_session = [] |
|
240 | exist_in_session = [] | |
241 | for (item_cls, pkey), instance in session.identity_map.items(): |
|
241 | for (item_cls, pkey), instance in session.identity_map.items(): | |
242 | if cls == item_cls and getattr(instance, attr_name) == value: |
|
242 | if cls == item_cls and getattr(instance, attr_name) == value: | |
243 | exist_in_session.append(instance) |
|
243 | exist_in_session.append(instance) | |
244 | if exist_in_session: |
|
244 | if exist_in_session: | |
245 | if len(exist_in_session) == 1: |
|
245 | if len(exist_in_session) == 1: | |
246 | return exist_in_session[0] |
|
246 | return exist_in_session[0] | |
247 | log.exception( |
|
247 | log.exception( | |
248 | 'multiple objects with attr %s and ' |
|
248 | 'multiple objects with attr %s and ' | |
249 | 'value %s found with same name: %r', |
|
249 | 'value %s found with same name: %r', | |
250 | attr_name, value, exist_in_session) |
|
250 | attr_name, value, exist_in_session) | |
251 |
|
251 | |||
252 | def __repr__(self): |
|
252 | def __repr__(self): | |
253 | if hasattr(self, '__unicode__'): |
|
253 | if hasattr(self, '__unicode__'): | |
254 | # python repr needs to return str |
|
254 | # python repr needs to return str | |
255 | try: |
|
255 | try: | |
256 | return safe_str(self.__unicode__()) |
|
256 | return safe_str(self.__unicode__()) | |
257 | except UnicodeDecodeError: |
|
257 | except UnicodeDecodeError: | |
258 | pass |
|
258 | pass | |
259 | return '<DB:%s>' % (self.__class__.__name__) |
|
259 | return '<DB:%s>' % (self.__class__.__name__) | |
260 |
|
260 | |||
261 |
|
261 | |||
262 | class RhodeCodeSetting(Base, BaseModel): |
|
262 | class RhodeCodeSetting(Base, BaseModel): | |
263 | __tablename__ = 'rhodecode_settings' |
|
263 | __tablename__ = 'rhodecode_settings' | |
264 | __table_args__ = ( |
|
264 | __table_args__ = ( | |
265 | UniqueConstraint('app_settings_name'), |
|
265 | UniqueConstraint('app_settings_name'), | |
266 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
266 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
267 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
267 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
268 | ) |
|
268 | ) | |
269 |
|
269 | |||
270 | SETTINGS_TYPES = { |
|
270 | SETTINGS_TYPES = { | |
271 | 'str': safe_str, |
|
271 | 'str': safe_str, | |
272 | 'int': safe_int, |
|
272 | 'int': safe_int, | |
273 | 'unicode': safe_unicode, |
|
273 | 'unicode': safe_unicode, | |
274 | 'bool': str2bool, |
|
274 | 'bool': str2bool, | |
275 | 'list': functools.partial(aslist, sep=',') |
|
275 | 'list': functools.partial(aslist, sep=',') | |
276 | } |
|
276 | } | |
277 | DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions' |
|
277 | DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions' | |
278 | GLOBAL_CONF_KEY = 'app_settings' |
|
278 | GLOBAL_CONF_KEY = 'app_settings' | |
279 |
|
279 | |||
280 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
280 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
281 | app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None) |
|
281 | app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None) | |
282 | _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None) |
|
282 | _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None) | |
283 | _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None) |
|
283 | _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None) | |
284 |
|
284 | |||
285 | def __init__(self, key='', val='', type='unicode'): |
|
285 | def __init__(self, key='', val='', type='unicode'): | |
286 | self.app_settings_name = key |
|
286 | self.app_settings_name = key | |
287 | self.app_settings_type = type |
|
287 | self.app_settings_type = type | |
288 | self.app_settings_value = val |
|
288 | self.app_settings_value = val | |
289 |
|
289 | |||
290 | @validates('_app_settings_value') |
|
290 | @validates('_app_settings_value') | |
291 | def validate_settings_value(self, key, val): |
|
291 | def validate_settings_value(self, key, val): | |
292 | assert type(val) == unicode |
|
292 | assert type(val) == unicode | |
293 | return val |
|
293 | return val | |
294 |
|
294 | |||
295 | @hybrid_property |
|
295 | @hybrid_property | |
296 | def app_settings_value(self): |
|
296 | def app_settings_value(self): | |
297 | v = self._app_settings_value |
|
297 | v = self._app_settings_value | |
298 | _type = self.app_settings_type |
|
298 | _type = self.app_settings_type | |
299 | if _type: |
|
299 | if _type: | |
300 | _type = self.app_settings_type.split('.')[0] |
|
300 | _type = self.app_settings_type.split('.')[0] | |
301 | # decode the encrypted value |
|
301 | # decode the encrypted value | |
302 | if 'encrypted' in self.app_settings_type: |
|
302 | if 'encrypted' in self.app_settings_type: | |
303 | cipher = EncryptedTextValue() |
|
303 | cipher = EncryptedTextValue() | |
304 | v = safe_unicode(cipher.process_result_value(v, None)) |
|
304 | v = safe_unicode(cipher.process_result_value(v, None)) | |
305 |
|
305 | |||
306 | converter = self.SETTINGS_TYPES.get(_type) or \ |
|
306 | converter = self.SETTINGS_TYPES.get(_type) or \ | |
307 | self.SETTINGS_TYPES['unicode'] |
|
307 | self.SETTINGS_TYPES['unicode'] | |
308 | return converter(v) |
|
308 | return converter(v) | |
309 |
|
309 | |||
310 | @app_settings_value.setter |
|
310 | @app_settings_value.setter | |
311 | def app_settings_value(self, val): |
|
311 | def app_settings_value(self, val): | |
312 | """ |
|
312 | """ | |
313 | Setter that will always make sure we use unicode in app_settings_value |
|
313 | Setter that will always make sure we use unicode in app_settings_value | |
314 |
|
314 | |||
315 | :param val: |
|
315 | :param val: | |
316 | """ |
|
316 | """ | |
317 | val = safe_unicode(val) |
|
317 | val = safe_unicode(val) | |
318 | # encode the encrypted value |
|
318 | # encode the encrypted value | |
319 | if 'encrypted' in self.app_settings_type: |
|
319 | if 'encrypted' in self.app_settings_type: | |
320 | cipher = EncryptedTextValue() |
|
320 | cipher = EncryptedTextValue() | |
321 | val = safe_unicode(cipher.process_bind_param(val, None)) |
|
321 | val = safe_unicode(cipher.process_bind_param(val, None)) | |
322 | self._app_settings_value = val |
|
322 | self._app_settings_value = val | |
323 |
|
323 | |||
324 | @hybrid_property |
|
324 | @hybrid_property | |
325 | def app_settings_type(self): |
|
325 | def app_settings_type(self): | |
326 | return self._app_settings_type |
|
326 | return self._app_settings_type | |
327 |
|
327 | |||
328 | @app_settings_type.setter |
|
328 | @app_settings_type.setter | |
329 | def app_settings_type(self, val): |
|
329 | def app_settings_type(self, val): | |
330 | if val.split('.')[0] not in self.SETTINGS_TYPES: |
|
330 | if val.split('.')[0] not in self.SETTINGS_TYPES: | |
331 | raise Exception('type must be one of %s got %s' |
|
331 | raise Exception('type must be one of %s got %s' | |
332 | % (self.SETTINGS_TYPES.keys(), val)) |
|
332 | % (self.SETTINGS_TYPES.keys(), val)) | |
333 | self._app_settings_type = val |
|
333 | self._app_settings_type = val | |
334 |
|
334 | |||
335 | def __unicode__(self): |
|
335 | def __unicode__(self): | |
336 | return u"<%s('%s:%s[%s]')>" % ( |
|
336 | return u"<%s('%s:%s[%s]')>" % ( | |
337 | self.__class__.__name__, |
|
337 | self.__class__.__name__, | |
338 | self.app_settings_name, self.app_settings_value, |
|
338 | self.app_settings_name, self.app_settings_value, | |
339 | self.app_settings_type |
|
339 | self.app_settings_type | |
340 | ) |
|
340 | ) | |
341 |
|
341 | |||
342 |
|
342 | |||
343 | class RhodeCodeUi(Base, BaseModel): |
|
343 | class RhodeCodeUi(Base, BaseModel): | |
344 | __tablename__ = 'rhodecode_ui' |
|
344 | __tablename__ = 'rhodecode_ui' | |
345 | __table_args__ = ( |
|
345 | __table_args__ = ( | |
346 | UniqueConstraint('ui_key'), |
|
346 | UniqueConstraint('ui_key'), | |
347 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
347 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
348 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
348 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
349 | ) |
|
349 | ) | |
350 |
|
350 | |||
351 | HOOK_REPO_SIZE = 'changegroup.repo_size' |
|
351 | HOOK_REPO_SIZE = 'changegroup.repo_size' | |
352 | # HG |
|
352 | # HG | |
353 | HOOK_PRE_PULL = 'preoutgoing.pre_pull' |
|
353 | HOOK_PRE_PULL = 'preoutgoing.pre_pull' | |
354 | HOOK_PULL = 'outgoing.pull_logger' |
|
354 | HOOK_PULL = 'outgoing.pull_logger' | |
355 | HOOK_PRE_PUSH = 'prechangegroup.pre_push' |
|
355 | HOOK_PRE_PUSH = 'prechangegroup.pre_push' | |
356 | HOOK_PUSH = 'changegroup.push_logger' |
|
356 | HOOK_PUSH = 'changegroup.push_logger' | |
357 |
|
357 | |||
358 | # TODO: johbo: Unify way how hooks are configured for git and hg, |
|
358 | # TODO: johbo: Unify way how hooks are configured for git and hg, | |
359 | # git part is currently hardcoded. |
|
359 | # git part is currently hardcoded. | |
360 |
|
360 | |||
361 | # SVN PATTERNS |
|
361 | # SVN PATTERNS | |
362 | SVN_BRANCH_ID = 'vcs_svn_branch' |
|
362 | SVN_BRANCH_ID = 'vcs_svn_branch' | |
363 | SVN_TAG_ID = 'vcs_svn_tag' |
|
363 | SVN_TAG_ID = 'vcs_svn_tag' | |
364 |
|
364 | |||
365 | ui_id = Column( |
|
365 | ui_id = Column( | |
366 | "ui_id", Integer(), nullable=False, unique=True, default=None, |
|
366 | "ui_id", Integer(), nullable=False, unique=True, default=None, | |
367 | primary_key=True) |
|
367 | primary_key=True) | |
368 | ui_section = Column( |
|
368 | ui_section = Column( | |
369 | "ui_section", String(255), nullable=True, unique=None, default=None) |
|
369 | "ui_section", String(255), nullable=True, unique=None, default=None) | |
370 | ui_key = Column( |
|
370 | ui_key = Column( | |
371 | "ui_key", String(255), nullable=True, unique=None, default=None) |
|
371 | "ui_key", String(255), nullable=True, unique=None, default=None) | |
372 | ui_value = Column( |
|
372 | ui_value = Column( | |
373 | "ui_value", String(255), nullable=True, unique=None, default=None) |
|
373 | "ui_value", String(255), nullable=True, unique=None, default=None) | |
374 | ui_active = Column( |
|
374 | ui_active = Column( | |
375 | "ui_active", Boolean(), nullable=True, unique=None, default=True) |
|
375 | "ui_active", Boolean(), nullable=True, unique=None, default=True) | |
376 |
|
376 | |||
377 | def __repr__(self): |
|
377 | def __repr__(self): | |
378 | return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section, |
|
378 | return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section, | |
379 | self.ui_key, self.ui_value) |
|
379 | self.ui_key, self.ui_value) | |
380 |
|
380 | |||
381 |
|
381 | |||
382 | class RepoRhodeCodeSetting(Base, BaseModel): |
|
382 | class RepoRhodeCodeSetting(Base, BaseModel): | |
383 | __tablename__ = 'repo_rhodecode_settings' |
|
383 | __tablename__ = 'repo_rhodecode_settings' | |
384 | __table_args__ = ( |
|
384 | __table_args__ = ( | |
385 | UniqueConstraint( |
|
385 | UniqueConstraint( | |
386 | 'app_settings_name', 'repository_id', |
|
386 | 'app_settings_name', 'repository_id', | |
387 | name='uq_repo_rhodecode_setting_name_repo_id'), |
|
387 | name='uq_repo_rhodecode_setting_name_repo_id'), | |
388 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
388 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
389 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
389 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
390 | ) |
|
390 | ) | |
391 |
|
391 | |||
392 | repository_id = Column( |
|
392 | repository_id = Column( | |
393 | "repository_id", Integer(), ForeignKey('repositories.repo_id'), |
|
393 | "repository_id", Integer(), ForeignKey('repositories.repo_id'), | |
394 | nullable=False) |
|
394 | nullable=False) | |
395 | app_settings_id = Column( |
|
395 | app_settings_id = Column( | |
396 | "app_settings_id", Integer(), nullable=False, unique=True, |
|
396 | "app_settings_id", Integer(), nullable=False, unique=True, | |
397 | default=None, primary_key=True) |
|
397 | default=None, primary_key=True) | |
398 | app_settings_name = Column( |
|
398 | app_settings_name = Column( | |
399 | "app_settings_name", String(255), nullable=True, unique=None, |
|
399 | "app_settings_name", String(255), nullable=True, unique=None, | |
400 | default=None) |
|
400 | default=None) | |
401 | _app_settings_value = Column( |
|
401 | _app_settings_value = Column( | |
402 | "app_settings_value", String(4096), nullable=True, unique=None, |
|
402 | "app_settings_value", String(4096), nullable=True, unique=None, | |
403 | default=None) |
|
403 | default=None) | |
404 | _app_settings_type = Column( |
|
404 | _app_settings_type = Column( | |
405 | "app_settings_type", String(255), nullable=True, unique=None, |
|
405 | "app_settings_type", String(255), nullable=True, unique=None, | |
406 | default=None) |
|
406 | default=None) | |
407 |
|
407 | |||
408 | repository = relationship('Repository') |
|
408 | repository = relationship('Repository') | |
409 |
|
409 | |||
410 | def __init__(self, repository_id, key='', val='', type='unicode'): |
|
410 | def __init__(self, repository_id, key='', val='', type='unicode'): | |
411 | self.repository_id = repository_id |
|
411 | self.repository_id = repository_id | |
412 | self.app_settings_name = key |
|
412 | self.app_settings_name = key | |
413 | self.app_settings_type = type |
|
413 | self.app_settings_type = type | |
414 | self.app_settings_value = val |
|
414 | self.app_settings_value = val | |
415 |
|
415 | |||
416 | @validates('_app_settings_value') |
|
416 | @validates('_app_settings_value') | |
417 | def validate_settings_value(self, key, val): |
|
417 | def validate_settings_value(self, key, val): | |
418 | assert type(val) == unicode |
|
418 | assert type(val) == unicode | |
419 | return val |
|
419 | return val | |
420 |
|
420 | |||
421 | @hybrid_property |
|
421 | @hybrid_property | |
422 | def app_settings_value(self): |
|
422 | def app_settings_value(self): | |
423 | v = self._app_settings_value |
|
423 | v = self._app_settings_value | |
424 | type_ = self.app_settings_type |
|
424 | type_ = self.app_settings_type | |
425 | SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES |
|
425 | SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES | |
426 | converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode'] |
|
426 | converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode'] | |
427 | return converter(v) |
|
427 | return converter(v) | |
428 |
|
428 | |||
429 | @app_settings_value.setter |
|
429 | @app_settings_value.setter | |
430 | def app_settings_value(self, val): |
|
430 | def app_settings_value(self, val): | |
431 | """ |
|
431 | """ | |
432 | Setter that will always make sure we use unicode in app_settings_value |
|
432 | Setter that will always make sure we use unicode in app_settings_value | |
433 |
|
433 | |||
434 | :param val: |
|
434 | :param val: | |
435 | """ |
|
435 | """ | |
436 | self._app_settings_value = safe_unicode(val) |
|
436 | self._app_settings_value = safe_unicode(val) | |
437 |
|
437 | |||
438 | @hybrid_property |
|
438 | @hybrid_property | |
439 | def app_settings_type(self): |
|
439 | def app_settings_type(self): | |
440 | return self._app_settings_type |
|
440 | return self._app_settings_type | |
441 |
|
441 | |||
442 | @app_settings_type.setter |
|
442 | @app_settings_type.setter | |
443 | def app_settings_type(self, val): |
|
443 | def app_settings_type(self, val): | |
444 | SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES |
|
444 | SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES | |
445 | if val not in SETTINGS_TYPES: |
|
445 | if val not in SETTINGS_TYPES: | |
446 | raise Exception('type must be one of %s got %s' |
|
446 | raise Exception('type must be one of %s got %s' | |
447 | % (SETTINGS_TYPES.keys(), val)) |
|
447 | % (SETTINGS_TYPES.keys(), val)) | |
448 | self._app_settings_type = val |
|
448 | self._app_settings_type = val | |
449 |
|
449 | |||
450 | def __unicode__(self): |
|
450 | def __unicode__(self): | |
451 | return u"<%s('%s:%s:%s[%s]')>" % ( |
|
451 | return u"<%s('%s:%s:%s[%s]')>" % ( | |
452 | self.__class__.__name__, self.repository.repo_name, |
|
452 | self.__class__.__name__, self.repository.repo_name, | |
453 | self.app_settings_name, self.app_settings_value, |
|
453 | self.app_settings_name, self.app_settings_value, | |
454 | self.app_settings_type |
|
454 | self.app_settings_type | |
455 | ) |
|
455 | ) | |
456 |
|
456 | |||
457 |
|
457 | |||
458 | class RepoRhodeCodeUi(Base, BaseModel): |
|
458 | class RepoRhodeCodeUi(Base, BaseModel): | |
459 | __tablename__ = 'repo_rhodecode_ui' |
|
459 | __tablename__ = 'repo_rhodecode_ui' | |
460 | __table_args__ = ( |
|
460 | __table_args__ = ( | |
461 | UniqueConstraint( |
|
461 | UniqueConstraint( | |
462 | 'repository_id', 'ui_section', 'ui_key', |
|
462 | 'repository_id', 'ui_section', 'ui_key', | |
463 | name='uq_repo_rhodecode_ui_repository_id_section_key'), |
|
463 | name='uq_repo_rhodecode_ui_repository_id_section_key'), | |
464 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
464 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
465 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
465 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
466 | ) |
|
466 | ) | |
467 |
|
467 | |||
468 | repository_id = Column( |
|
468 | repository_id = Column( | |
469 | "repository_id", Integer(), ForeignKey('repositories.repo_id'), |
|
469 | "repository_id", Integer(), ForeignKey('repositories.repo_id'), | |
470 | nullable=False) |
|
470 | nullable=False) | |
471 | ui_id = Column( |
|
471 | ui_id = Column( | |
472 | "ui_id", Integer(), nullable=False, unique=True, default=None, |
|
472 | "ui_id", Integer(), nullable=False, unique=True, default=None, | |
473 | primary_key=True) |
|
473 | primary_key=True) | |
474 | ui_section = Column( |
|
474 | ui_section = Column( | |
475 | "ui_section", String(255), nullable=True, unique=None, default=None) |
|
475 | "ui_section", String(255), nullable=True, unique=None, default=None) | |
476 | ui_key = Column( |
|
476 | ui_key = Column( | |
477 | "ui_key", String(255), nullable=True, unique=None, default=None) |
|
477 | "ui_key", String(255), nullable=True, unique=None, default=None) | |
478 | ui_value = Column( |
|
478 | ui_value = Column( | |
479 | "ui_value", String(255), nullable=True, unique=None, default=None) |
|
479 | "ui_value", String(255), nullable=True, unique=None, default=None) | |
480 | ui_active = Column( |
|
480 | ui_active = Column( | |
481 | "ui_active", Boolean(), nullable=True, unique=None, default=True) |
|
481 | "ui_active", Boolean(), nullable=True, unique=None, default=True) | |
482 |
|
482 | |||
483 | repository = relationship('Repository') |
|
483 | repository = relationship('Repository') | |
484 |
|
484 | |||
485 | def __repr__(self): |
|
485 | def __repr__(self): | |
486 | return '<%s[%s:%s]%s=>%s]>' % ( |
|
486 | return '<%s[%s:%s]%s=>%s]>' % ( | |
487 | self.__class__.__name__, self.repository.repo_name, |
|
487 | self.__class__.__name__, self.repository.repo_name, | |
488 | self.ui_section, self.ui_key, self.ui_value) |
|
488 | self.ui_section, self.ui_key, self.ui_value) | |
489 |
|
489 | |||
490 |
|
490 | |||
491 | class User(Base, BaseModel): |
|
491 | class User(Base, BaseModel): | |
492 | __tablename__ = 'users' |
|
492 | __tablename__ = 'users' | |
493 | __table_args__ = ( |
|
493 | __table_args__ = ( | |
494 | UniqueConstraint('username'), UniqueConstraint('email'), |
|
494 | UniqueConstraint('username'), UniqueConstraint('email'), | |
495 | Index('u_username_idx', 'username'), |
|
495 | Index('u_username_idx', 'username'), | |
496 | Index('u_email_idx', 'email'), |
|
496 | Index('u_email_idx', 'email'), | |
497 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
497 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
498 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
498 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
499 | ) |
|
499 | ) | |
500 | DEFAULT_USER = 'default' |
|
500 | DEFAULT_USER = 'default' | |
501 | DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org' |
|
501 | DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org' | |
502 | DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}' |
|
502 | DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}' | |
503 |
|
503 | |||
504 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
504 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
505 | username = Column("username", String(255), nullable=True, unique=None, default=None) |
|
505 | username = Column("username", String(255), nullable=True, unique=None, default=None) | |
506 | password = Column("password", String(255), nullable=True, unique=None, default=None) |
|
506 | password = Column("password", String(255), nullable=True, unique=None, default=None) | |
507 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) |
|
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')

    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')
    # external identities
    extenal_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.user_id, self.username)

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        self._email = val.lower() if val else None

    @property
    def firstname(self):
        # alias for future
        return self.name

    @property
    def emails(self):
        other = UserEmailMap.query().filter(UserEmailMap.user == self).all()
        return [self.email] + [x.email for x in other]

    @property
    def auth_tokens(self):
        return [self.api_key] + [x.api_key for x in self.extra_auth_tokens]

    @property
    def extra_auth_tokens(self):
        return UserApiKeys.query().filter(UserApiKeys.user == self).all()

    @property
    def feed_token(self):
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
            .all()
        if feed_tokens:
            return feed_tokens[0].api_key
        else:
            # use the main token so we don't end up with nothing...
            return self.api_key

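    # Illustrative sketch (not part of the original model): how feed_token is
    # typically consumed. Feed views look the token up per user; if no
    # dedicated ROLE_FEED token exists, the main api_key is returned instead.
    #
    #   >>> user = User.get_by_username('admin')   # hypothetical lookup
    #   >>> token = user.feed_token
    #   >>> # token is either a ROLE_FEED UserApiKeys.api_key or user.api_key
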
    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()

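    # Illustrative sketch (assumption, not original code): filtering valid
    # tokens by role. Tokens with expires == -1 never expire; anything else
    # is compared against the current unix timestamp.
    #
    #   >>> vcs_tokens = User.extra_valid_auth_tokens(user, role=UserApiKeys.ROLE_VCS)
    #   >>> [t.api_key for t in vcs_tokens]   # ROLE_VCS plus ROLE_ALL tokens
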
    @property
    def ip_addresses(self):
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]

    @property
    def username_and_name(self):
        return '%s (%s %s)' % (self.username, self.firstname, self.lastname)

    @property
    def username_or_name_or_email(self):
        full_name = self.full_name if self.full_name != ' ' else None
        return self.username or full_name or self.email

    @property
    def full_name(self):
        return '%s %s' % (self.firstname, self.lastname)

    @property
    def full_name_or_username(self):
        return ('%s %s' % (self.firstname, self.lastname)
                if (self.firstname and self.lastname) else self.username)

    @property
    def full_contact(self):
        return '%s %s <%s>' % (self.firstname, self.lastname, self.email)

    @property
    def short_contact(self):
        return '%s %s' % (self.firstname, self.lastname)

    @property
    def is_admin(self):
        return self.admin

    @property
    def AuthUser(self):
        """
        Returns instance of AuthUser for this user
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, api_key=self.api_key,
                        username=self.username)

    @hybrid_property
    def user_data(self):
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data)
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

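    # Illustrative sketch (not part of the original model): user_data stores a
    # plain dict serialized to JSON in the LargeBinary column, so reads always
    # yield a dict even when the column is empty.
    #
    #   >>> user.user_data = {'language': 'en'}
    #   >>> user.user_data.get('language')
    #   'en'
    #   >>> user.user_data = 'not-a-dict'   # raises Exception
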
    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                q = q.options(
                    FromCache("sql_cache_short",
                              "get_user_by_name_%s" % _hash_key(username)))

        return q.scalar()

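    # Illustrative sketch (assumption, not original code): typical lookups.
    # With cache=True the query result goes through the 'sql_cache_short'
    # region; identity_cache=True short-circuits via the identity cache.
    #
    #   >>> User.get_by_username('Admin', case_insensitive=True)
    #   <User('id:2:admin')>                      # hypothetical result
    #   >>> User.get_by_username('admin', cache=True)   # cached variant
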
    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False, fallback=True):
        q = cls.query().filter(cls.api_key == auth_token)

        if cache:
            q = q.options(FromCache("sql_cache_short",
                                    "get_auth_token_%s" % auth_token))
        res = q.scalar()

        if fallback and not res:
            # fallback to additional keys
            _res = UserApiKeys.query()\
                .filter(UserApiKeys.api_key == auth_token)\
                .filter(or_(UserApiKeys.expires == -1,
                            UserApiKeys.expires >= time.time()))\
                .first()
            if _res:
                res = _res.user
        return res

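    # Illustrative sketch (not part of the original model): resolution order.
    # The builtin api_key column is checked first; with fallback=True a still
    # valid extra token from UserApiKeys resolves to its owning user.
    #
    #   >>> User.get_by_auth_token('d4b0...')     # hypothetical token value
    #   <User('id:2:admin')>
    #   >>> User.get_by_auth_token('unknown', fallback=False) is None
    #   True
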
    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):

        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        if cache:
            q = q.options(FromCache("sql_cache_short",
                                    "get_email_key_%s" % _hash_key(email)))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(FromCache("sql_cache_short",
                                        "get_email_map_key_%s" % email))
            ret = getattr(q.scalar(), 'user', None)

        return ret

    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get User objects out of commit author string

        :param author:
        """
        from rhodecode.lib.helpers import email, author_name
        # Valid email in the attribute passed, see if they're in the system
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user:
            return user

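    # Illustrative sketch (assumption, not original code): resolving a VCS
    # author string. The email part is tried first, then the bare author name
    # as a username; None is returned implicitly when neither matches.
    #
    #   >>> User.get_from_cs_author('Jane Doe <jane@example.com>')
    #   <User('id:7:jane')>                       # hypothetical match
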
    def update_userdata(self, **kwargs):
        usr = self
        old = usr.user_data
        old.update(**kwargs)
        usr.user_data = old
        Session().add(usr)
        log.debug('updated userdata with %s', kwargs)

    def update_lastlogin(self):
        """Update user lastlogin"""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)

    def update_lastactivity(self):
        """Update user lastactivity"""
        usr = self
        old = usr.user_data
        old.update({'last_activity': time.time()})
        usr.user_data = old
        Session().add(usr)
        log.debug('updated user %s lastactivity', usr.username)

    def update_password(self, new_password, change_api_key=False):
        from rhodecode.lib.auth import get_crypt_password, generate_auth_token

        self.password = get_crypt_password(new_password)
        if change_api_key:
            self.api_key = generate_auth_token(self.username)
        Session().add(self)

    @classmethod
    def get_first_super_admin(cls):
        user = User.query().filter(User.admin == true()).first()
        if user is None:
            raise Exception('FATAL: Missing administrative account!')
        return user

    @classmethod
    def get_all_super_admins(cls):
        """
        Returns all admin accounts sorted by username
        """
        return User.query().filter(User.admin == true())\
            .order_by(User.username.asc()).all()

    @classmethod
    def get_default_user(cls, cache=False):
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('FATAL: Missing default account!')
        return user

    def _get_default_perms(self, user, suffix=''):
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, include_secrets=False, details='full'):
        """
        Common function for generating user related data for API

        :param include_secrets: By default secrets in the API data will be replaced
            by a placeholder value to prevent exposing this data by accident. In case
            this data shall be exposed, set this flag to ``True``.

        :param details: details can be 'basic|full' basic gives only a subset of
            the available user information that includes user_id, name and emails.
        """
        user = self
        user_data = self.user_data
        data = {
            'user_id': user.user_id,
            'username': user.username,
            'firstname': user.name,
            'lastname': user.lastname,
            'email': user.email,
            'emails': user.emails,
        }
        if details == 'basic':
            return data

        api_key_length = 40
        api_key_replacement = '*' * api_key_length

        extras = {
            'api_key': api_key_replacement,
            'api_keys': [api_key_replacement],
            'active': user.active,
            'admin': user.admin,
            'extern_type': user.extern_type,
            'extern_name': user.extern_name,
            'last_login': user.last_login,
            'ip_addresses': user.ip_addresses,
            'language': user_data.get('language')
        }
        data.update(extras)

        if include_secrets:
            data['api_key'] = user.api_key
            data['api_keys'] = user.auth_tokens
        return data

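    # Illustrative sketch (not part of the original model): shape of the API
    # payload. details='basic' stops after the identity fields; the full form
    # masks api_key/api_keys with asterisks unless include_secrets=True.
    #
    #   >>> sorted(user.get_api_data(details='basic'))
    #   ['email', 'emails', 'firstname', 'lastname', 'user_id', 'username']
    #   >>> user.get_api_data()['api_key'] == '*' * 40
    #   True
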
    def __json__(self):
        data = {
            'full_name': self.full_name,
            'full_name_or_username': self.full_name_or_username,
            'short_contact': self.short_contact,
            'full_contact': self.full_contact,
        }
        data.update(self.get_api_data())
        return data


class UserApiKeys(Base, BaseModel):
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        UniqueConstraint('api_key'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user = relationship('User', lazy='joined')

    @classmethod
    def _get_role_name(cls, role):
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
        }.get(role, role)

    @property
    def expired(self):
        if self.expires == -1:
            return False
        return time.time() > self.expires

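    # Illustrative sketch (not part of the original model): expiry semantics.
    # expires stores a unix timestamp as float; -1 is the sentinel for tokens
    # that never expire.
    #
    #   >>> key = UserApiKeys(expires=-1)
    #   >>> key.expired
    #   False
    #   >>> key = UserApiKeys(expires=time.time() - 1)
    #   >>> key.expired
    #   True
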
    @property
    def role_humanized(self):
        return self._get_role_name(self.role)


class UserEmailMap(Base, BaseModel):
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        # check if this email is not the main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            raise AttributeError('email %s is already present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        self._email = val.lower() if val else None


class UserIpMap(Base, BaseModel):
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @classmethod
    def _get_ip_range(cls, ip_addr):
        net = ipaddress.ip_network(ip_addr, strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

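    # Illustrative sketch (not part of the original model): expanding a stored
    # ip_addr (single address or CIDR) into its network range.
    #
    #   >>> UserIpMap._get_ip_range(u'192.168.1.0/24')
    #   ['192.168.1.0', '192.168.1.255']
    #   >>> UserIpMap._get_ip_range(u'10.0.0.5')
    #   ['10.0.0.5', '10.0.0.5']
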
    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)

class UserLog(Base, BaseModel):
    __tablename__ = 'user_logs'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.repository_name,
                                      self.action)

    @property
    def action_as_day(self):
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')


class UserGroup(Base, BaseModel):
    __tablename__ = 'users_groups'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user = relationship('User')

    @hybrid_property
    def group_data(self):
        if not self._group_data:
            return {}

        try:
            return json.loads(self._group_data)
        except TypeError:
            return {}

    @group_data.setter
    def group_data(self, val):
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(FromCache(
                "sql_cache_short",
                "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        user_group = cls.query()
        if cache:
            user_group = user_group.options(FromCache(
                "sql_cache_short",
                "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True):
        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples because the sqlalchemy
        # session has a global reference and changing one object propagates to
        # all others. This means if an admin is also an owner, an admin_row
        # change would propagate to both objects.
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_sort)

        _admin_perm = 'usergroup.admin'
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        return super_admin_rows + owner_row + perm_rows

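    # Illustrative sketch (not part of the original model): permissions()
    # returns plain AttributeDict rows rather than ORM objects, so flags like
    # owner_row/admin_row can be set without touching session-bound instances.
    #
    #   >>> rows = user_group.permissions(with_admins=True, with_owner=True)
    #   >>> [(r.username, r.permission) for r in rows]   # hypothetical data
    #   [('admin', 'usergroup.admin'), ('bob', 'usergroup.read')]
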
    def permission_user_groups(self):
        q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.user_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.

        """
        user_group = self

        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
        }
        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data


class UserGroupMember(Base, BaseModel):
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        self.users_group_id = gr_id
        self.user_id = u_id


class RepositoryField(Base, BaseModel):
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
    )
    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        return 'ex_%s' % self.field_key

    @classmethod
    def un_prefix_key(cls, key):
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

1228 | @classmethod |
|
1228 | @classmethod | |
1229 | def get_by_key_name(cls, key, repo): |
|
1229 | def get_by_key_name(cls, key, repo): | |
1230 | row = cls.query()\ |
|
1230 | row = cls.query()\ | |
1231 | .filter(cls.repository == repo)\ |
|
1231 | .filter(cls.repository == repo)\ | |
1232 | .filter(cls.field_key == key).scalar() |
|
1232 | .filter(cls.field_key == key).scalar() | |
1233 | return row |
|
1233 | return row | |
1234 |
|
1234 | |||
1235 |
|
1235 | |||
1236 | class Repository(Base, BaseModel): |
|
1236 | class Repository(Base, BaseModel): | |
1237 | __tablename__ = 'repositories' |
|
1237 | __tablename__ = 'repositories' | |
1238 | __table_args__ = ( |
|
1238 | __table_args__ = ( | |
1239 | Index('r_repo_name_idx', 'repo_name', mysql_length=255), |
|
1239 | Index('r_repo_name_idx', 'repo_name', mysql_length=255), | |
1240 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1240 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
1241 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
1241 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
1242 | ) |
|
1242 | ) | |
1243 | DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}' |
|
1243 | DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}' | |
1244 | DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}' |
|
1244 | DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}' | |
1245 |
|
1245 | |||
1246 | STATE_CREATED = 'repo_state_created' |
|
1246 | STATE_CREATED = 'repo_state_created' | |
1247 | STATE_PENDING = 'repo_state_pending' |
|
1247 | STATE_PENDING = 'repo_state_pending' | |
1248 | STATE_ERROR = 'repo_state_error' |
|
1248 | STATE_ERROR = 'repo_state_error' | |
1249 |
|
1249 | |||
1250 | LOCK_AUTOMATIC = 'lock_auto' |
|
1250 | LOCK_AUTOMATIC = 'lock_auto' | |
1251 | LOCK_API = 'lock_api' |
|
1251 | LOCK_API = 'lock_api' | |
1252 | LOCK_WEB = 'lock_web' |
|
1252 | LOCK_WEB = 'lock_web' | |
1253 | LOCK_PULL = 'lock_pull' |
|
1253 | LOCK_PULL = 'lock_pull' | |
1254 |
|
1254 | |||
1255 | NAME_SEP = URL_SEP |
|
1255 | NAME_SEP = URL_SEP | |
1256 |
|
1256 | |||
1257 | repo_id = Column( |
|
1257 | repo_id = Column( | |
1258 | "repo_id", Integer(), nullable=False, unique=True, default=None, |
|
1258 | "repo_id", Integer(), nullable=False, unique=True, default=None, | |
1259 | primary_key=True) |
|
1259 | primary_key=True) | |
1260 | _repo_name = Column( |
|
1260 | _repo_name = Column( | |
1261 | "repo_name", Text(), nullable=False, default=None) |
|
1261 | "repo_name", Text(), nullable=False, default=None) | |
1262 | _repo_name_hash = Column( |
|
1262 | _repo_name_hash = Column( | |
1263 | "repo_name_hash", String(255), nullable=False, unique=True) |
|
1263 | "repo_name_hash", String(255), nullable=False, unique=True) | |
1264 | repo_state = Column("repo_state", String(255), nullable=True) |
|
1264 | repo_state = Column("repo_state", String(255), nullable=True) | |
1265 |
|
1265 | |||
1266 | clone_uri = Column( |
|
1266 | clone_uri = Column( | |
1267 | "clone_uri", EncryptedTextValue(), nullable=True, unique=False, |
|
1267 | "clone_uri", EncryptedTextValue(), nullable=True, unique=False, | |
1268 | default=None) |
|
1268 | default=None) | |
1269 | repo_type = Column( |
|
1269 | repo_type = Column( | |
1270 | "repo_type", String(255), nullable=False, unique=False, default=None) |
|
1270 | "repo_type", String(255), nullable=False, unique=False, default=None) | |
1271 | user_id = Column( |
|
1271 | user_id = Column( | |
1272 | "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, |
|
1272 | "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, | |
1273 | unique=False, default=None) |
|
1273 | unique=False, default=None) | |
1274 | private = Column( |
|
1274 | private = Column( | |
1275 | "private", Boolean(), nullable=True, unique=None, default=None) |
|
1275 | "private", Boolean(), nullable=True, unique=None, default=None) | |
1276 | enable_statistics = Column( |
|
1276 | enable_statistics = Column( | |
1277 | "statistics", Boolean(), nullable=True, unique=None, default=True) |
|
1277 | "statistics", Boolean(), nullable=True, unique=None, default=True) | |
1278 | enable_downloads = Column( |
|
1278 | enable_downloads = Column( | |
1279 | "downloads", Boolean(), nullable=True, unique=None, default=True) |
|
1279 | "downloads", Boolean(), nullable=True, unique=None, default=True) | |
1280 | description = Column( |
|
1280 | description = Column( | |
1281 | "description", String(10000), nullable=True, unique=None, default=None) |
|
1281 | "description", String(10000), nullable=True, unique=None, default=None) | |
1282 | created_on = Column( |
|
1282 | created_on = Column( | |
1283 | 'created_on', DateTime(timezone=False), nullable=True, unique=None, |
|
1283 | 'created_on', DateTime(timezone=False), nullable=True, unique=None, | |
1284 | default=datetime.datetime.now) |
|
1284 | default=datetime.datetime.now) | |
1285 | updated_on = Column( |
|
1285 | updated_on = Column( | |
1286 | 'updated_on', DateTime(timezone=False), nullable=True, unique=None, |
|
1286 | 'updated_on', DateTime(timezone=False), nullable=True, unique=None, | |
1287 | default=datetime.datetime.now) |
|
1287 | default=datetime.datetime.now) | |
1288 | _landing_revision = Column( |
|
1288 | _landing_revision = Column( | |
1289 | "landing_revision", String(255), nullable=False, unique=False, |
|
1289 | "landing_revision", String(255), nullable=False, unique=False, | |
1290 | default=None) |
|
1290 | default=None) | |
1291 | enable_locking = Column( |
|
1291 | enable_locking = Column( | |
1292 | "enable_locking", Boolean(), nullable=False, unique=None, |
|
1292 | "enable_locking", Boolean(), nullable=False, unique=None, | |
1293 | default=False) |
|
1293 | default=False) | |
1294 | _locked = Column( |
|
1294 | _locked = Column( | |
1295 | "locked", String(255), nullable=True, unique=False, default=None) |
|
1295 | "locked", String(255), nullable=True, unique=False, default=None) | |
1296 | _changeset_cache = Column( |
|
1296 | _changeset_cache = Column( | |
1297 | "changeset_cache", LargeBinary(), nullable=True) # JSON data |
|
1297 | "changeset_cache", LargeBinary(), nullable=True) # JSON data | |
1298 |
|
1298 | |||
1299 | fork_id = Column( |
|
1299 | fork_id = Column( | |
1300 | "fork_id", Integer(), ForeignKey('repositories.repo_id'), |
|
1300 | "fork_id", Integer(), ForeignKey('repositories.repo_id'), | |
1301 | nullable=True, unique=False, default=None) |
|
1301 | nullable=True, unique=False, default=None) | |
1302 | group_id = Column( |
|
1302 | group_id = Column( | |
1303 | "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, |
|
1303 | "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, | |
1304 | unique=False, default=None) |
|
1304 | unique=False, default=None) | |
1305 |
|
1305 | |||
1306 | user = relationship('User', lazy='joined') |
|
1306 | user = relationship('User', lazy='joined') | |
1307 | fork = relationship('Repository', remote_side=repo_id, lazy='joined') |
|
1307 | fork = relationship('Repository', remote_side=repo_id, lazy='joined') | |
1308 | group = relationship('RepoGroup', lazy='joined') |
|
1308 | group = relationship('RepoGroup', lazy='joined') | |
1309 | repo_to_perm = relationship( |
|
1309 | repo_to_perm = relationship( | |
1310 | 'UserRepoToPerm', cascade='all', |
|
1310 | 'UserRepoToPerm', cascade='all', | |
1311 | order_by='UserRepoToPerm.repo_to_perm_id') |
|
1311 | order_by='UserRepoToPerm.repo_to_perm_id') | |
1312 | users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') |
|
1312 | users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') | |
1313 | stats = relationship('Statistics', cascade='all', uselist=False) |
|
1313 | stats = relationship('Statistics', cascade='all', uselist=False) | |
1314 |
|
1314 | |||
1315 | followers = relationship( |
|
1315 | followers = relationship( | |
1316 | 'UserFollowing', |
|
1316 | 'UserFollowing', | |
1317 | primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', |
|
1317 | primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', | |
1318 | cascade='all') |
|
1318 | cascade='all') | |
1319 | extra_fields = relationship( |
|
1319 | extra_fields = relationship( | |
1320 | 'RepositoryField', cascade="all, delete, delete-orphan") |
|
1320 | 'RepositoryField', cascade="all, delete, delete-orphan") | |
1321 | logs = relationship('UserLog') |
|
1321 | logs = relationship('UserLog') | |
1322 | comments = relationship( |
|
1322 | comments = relationship( | |
1323 | 'ChangesetComment', cascade="all, delete, delete-orphan") |
|
1323 | 'ChangesetComment', cascade="all, delete, delete-orphan") | |
1324 | pull_requests_source = relationship( |
|
1324 | pull_requests_source = relationship( | |
1325 | 'PullRequest', |
|
1325 | 'PullRequest', | |
1326 | primaryjoin='PullRequest.source_repo_id==Repository.repo_id', |
|
1326 | primaryjoin='PullRequest.source_repo_id==Repository.repo_id', | |
1327 | cascade="all, delete, delete-orphan") |
|
1327 | cascade="all, delete, delete-orphan") | |
1328 | pull_requests_target = relationship( |
|
1328 | pull_requests_target = relationship( | |
1329 | 'PullRequest', |
|
1329 | 'PullRequest', | |
1330 | primaryjoin='PullRequest.target_repo_id==Repository.repo_id', |
|
1330 | primaryjoin='PullRequest.target_repo_id==Repository.repo_id', | |
1331 | cascade="all, delete, delete-orphan") |
|
1331 | cascade="all, delete, delete-orphan") | |
1332 | ui = relationship('RepoRhodeCodeUi', cascade="all") |
|
1332 | ui = relationship('RepoRhodeCodeUi', cascade="all") | |
1333 | settings = relationship('RepoRhodeCodeSetting', cascade="all") |
|
1333 | settings = relationship('RepoRhodeCodeSetting', cascade="all") | |
1334 | integrations = relationship('Integration', |
|
1334 | integrations = relationship('Integration', | |
1335 | cascade="all, delete, delete-orphan") |
|
1335 | cascade="all, delete, delete-orphan") | |
1336 |
|
1336 | |||
1337 | def __unicode__(self): |
|
1337 | def __unicode__(self): | |
1338 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, |
|
1338 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, | |
1339 | safe_unicode(self.repo_name)) |
|
1339 | safe_unicode(self.repo_name)) | |
1340 |
|
1340 | |||
1341 | @hybrid_property |
|
1341 | @hybrid_property | |
1342 | def landing_rev(self): |
|
1342 | def landing_rev(self): | |
1343 | # always should return [rev_type, rev] |
|
1343 | # always should return [rev_type, rev] | |
1344 | if self._landing_revision: |
|
1344 | if self._landing_revision: | |
1345 | _rev_info = self._landing_revision.split(':') |
|
1345 | _rev_info = self._landing_revision.split(':') | |
1346 | if len(_rev_info) < 2: |
|
1346 | if len(_rev_info) < 2: | |
1347 | _rev_info.insert(0, 'rev') |
|
1347 | _rev_info.insert(0, 'rev') | |
1348 | return [_rev_info[0], _rev_info[1]] |
|
1348 | return [_rev_info[0], _rev_info[1]] | |
1349 | return [None, None] |
|
1349 | return [None, None] | |
1350 |
|
1350 | |||
1351 | @landing_rev.setter |
|
1351 | @landing_rev.setter | |
1352 | def landing_rev(self, val): |
|
1352 | def landing_rev(self, val): | |
1353 | if ':' not in val: |
|
1353 | if ':' not in val: | |
1354 | raise ValueError('value must be delimited with `:` and consist ' |
|
1354 | raise ValueError('value must be delimited with `:` and consist ' | |
1355 | 'of <rev_type>:<rev>, got %s instead' % val) |
|
1355 | 'of <rev_type>:<rev>, got %s instead' % val) | |
1356 | self._landing_revision = val |
|
1356 | self._landing_revision = val | |
1357 |
|
1357 | |||
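A minimal usage sketch of the `landing_rev` hybrid property above (illustrative only, not part of the file; it assumes a `repo` instance of this model inside a configured session):

    repo.landing_rev = 'branch:default'   # persisted as the single string "branch:default"
    rev_type, rev = repo.landing_rev      # -> ['branch', 'default']
    repo.landing_rev = 'tip'              # raises ValueError: missing the ':' delimiter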
1358 |     @hybrid_property
1359 |     def locked(self):
1360 |         if self._locked:
1361 |             user_id, timelocked, reason = self._locked.split(':')
1362 |             lock_values = int(user_id), timelocked, reason
1363 |         else:
1364 |             lock_values = [None, None, None]
1365 |         return lock_values
1366 |
1367 |     @locked.setter
1368 |     def locked(self, val):
1369 |         if val and isinstance(val, (list, tuple)):
1370 |             self._locked = ':'.join(map(str, val))
1371 |         else:
1372 |             self._locked = None
1373 |
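The `locked` hybrid property above round-trips a `[user_id, lock_time, reason]` triple through one colon-delimited string. A small sketch (illustrative only; the user id is made up):

    repo.locked = [2, time.time(), Repository.LOCK_API]   # stored as "2:<timestamp>:lock_api"
    user_id, lock_time, reason = repo.locked               # user_id comes back as an int
    repo.locked = None                                     # clears the lock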
1374 |     @hybrid_property
1375 |     def changeset_cache(self):
1376 |         from rhodecode.lib.vcs.backends.base import EmptyCommit
1377 |         dummy = EmptyCommit().__json__()
1378 |         if not self._changeset_cache:
1379 |             return dummy
1380 |         try:
1381 |             return json.loads(self._changeset_cache)
1382 |         except TypeError:
1383 |             return dummy
1384 |         except Exception:
1385 |             log.error(traceback.format_exc())
1386 |             return dummy
1387 |
1388 |     @changeset_cache.setter
1389 |     def changeset_cache(self, val):
1390 |         try:
1391 |             self._changeset_cache = json.dumps(val)
1392 |         except Exception:
1393 |             log.error(traceback.format_exc())
1394 |
1395 |     @hybrid_property
1396 |     def repo_name(self):
1397 |         return self._repo_name
1398 |
1399 |     @repo_name.setter
1400 |     def repo_name(self, value):
1401 |         self._repo_name = value
1402 |         self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1403 |
1404 |     @classmethod
1405 |     def normalize_repo_name(cls, repo_name):
1406 |         """
1407 |         Normalizes os specific repo_name to the format internally stored inside
1408 |         database using URL_SEP
1409 |
1410 |         :param cls:
1411 |         :param repo_name:
1412 |         """
1413 |         return cls.NAME_SEP.join(repo_name.split(os.sep))
1414 |
1415 |     @classmethod
1416 |     def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1417 |         session = Session()
1418 |         q = session.query(cls).filter(cls.repo_name == repo_name)
1419 |
1420 |         if cache:
1421 |             if identity_cache:
1422 |                 val = cls.identity_cache(session, 'repo_name', repo_name)
1423 |                 if val:
1424 |                     return val
1425 |             else:
1426 |                 q = q.options(
1427 |                     FromCache("sql_cache_short",
1428 |                               "get_repo_by_name_%s" % _hash_key(repo_name)))
1429 |
1430 |         return q.scalar()
1431 |
1432 |     @classmethod
1433 |     def get_by_full_path(cls, repo_full_path):
1434 |         repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1435 |         repo_name = cls.normalize_repo_name(repo_name)
1436 |         return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1437 |
1438 |     @classmethod
1439 |     def get_repo_forks(cls, repo_id):
1440 |         return cls.query().filter(Repository.fork_id == repo_id)
1441 |
1442 |     @classmethod
1443 |     def base_path(cls):
1444 | """ |
|
1444 | """ | |
1445 | Returns base path when all repos are stored |
|
1445 | Returns base path when all repos are stored | |
1446 |
|
1446 | |||
1447 | :param cls: |
|
1447 | :param cls: | |
1448 | """ |
|
1448 | """ | |
1449 |         q = Session().query(RhodeCodeUi)\
1450 |             .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1451 |         q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1452 |         return q.one().ui_value
1453 |
1454 |     @classmethod
1455 |     def is_valid(cls, repo_name):
1456 |         """
1457 |         returns True if given repo name is a valid filesystem repository
1458 |
1459 |         :param cls:
1460 |         :param repo_name:
1461 |         """
1462 |         from rhodecode.lib.utils import is_valid_repo
1463 |
1464 |         return is_valid_repo(repo_name, cls.base_path())
1465 |
1466 |     @classmethod
1467 |     def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1468 |                       case_insensitive=True):
1469 |         q = Repository.query()
1470 |
1471 |         if not isinstance(user_id, Optional):
1472 |             q = q.filter(Repository.user_id == user_id)
1473 |
1474 |         if not isinstance(group_id, Optional):
1475 |             q = q.filter(Repository.group_id == group_id)
1476 |
1477 |         if case_insensitive:
1478 |             q = q.order_by(func.lower(Repository.repo_name))
1479 |         else:
1480 |             q = q.order_by(Repository.repo_name)
1481 |         return q.all()
1482 |
1483 |     @property
1484 |     def forks(self):
1485 |         """
1486 |         Return forks of this repo
1487 |         """
1488 |         return Repository.get_repo_forks(self.repo_id)
1489 |
1490 |     @property
1491 |     def parent(self):
1492 |         """
1493 |         Returns fork parent
1494 |         """
1495 |         return self.fork
1496 |
1497 |     @property
1498 |     def just_name(self):
1499 |         return self.repo_name.split(self.NAME_SEP)[-1]
1500 |
1501 |     @property
1502 |     def groups_with_parents(self):
1503 |         groups = []
1504 |         if self.group is None:
1505 |             return groups
1506 |
1507 |         cur_gr = self.group
1508 |         groups.insert(0, cur_gr)
1509 |         while 1:
1510 |             gr = getattr(cur_gr, 'parent_group', None)
1511 |             cur_gr = cur_gr.parent_group
1512 |             if gr is None:
1513 |                 break
1514 |             groups.insert(0, gr)
1515 |
1516 |         return groups
1517 |
1518 |     @property
1519 |     def groups_and_repo(self):
1520 |         return self.groups_with_parents, self
1521 |
1522 |     @LazyProperty
1523 |     def repo_path(self):
1524 | """ |
|
1524 | """ | |
1525 | Returns base full path for that repository means where it actually |
|
1525 | Returns base full path for that repository means where it actually | |
1526 | exists on a filesystem |
|
1526 | exists on a filesystem | |
1527 | """ |
|
1527 | """ | |
1528 |         q = Session().query(RhodeCodeUi).filter(
1529 |             RhodeCodeUi.ui_key == self.NAME_SEP)
1530 |         q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1531 |         return q.one().ui_value
1532 |
1533 |     @property
1534 |     def repo_full_path(self):
1535 |         p = [self.repo_path]
1536 |         # we need to split the name by / since this is how we store the
1537 |         # names in the database, but that eventually needs to be converted
1538 |         # into a valid system path
1539 |         p += self.repo_name.split(self.NAME_SEP)
1540 |         return os.path.join(*map(safe_unicode, p))
1541 |
1542 |     @property
1543 |     def cache_keys(self):
1544 |         """
1545 |         Returns associated cache keys for that repo
1546 |         """
1547 |         return CacheKey.query()\
1548 |             .filter(CacheKey.cache_args == self.repo_name)\
1549 |             .order_by(CacheKey.cache_key)\
1550 |             .all()
1551 |
1552 |     def get_new_name(self, repo_name):
1553 | """ |
|
1553 | """ | |
1554 | returns new full repository name based on assigned group and new new |
|
1554 | returns new full repository name based on assigned group and new new | |
1555 |
|
1555 | |||
1556 | :param group_name: |
|
1556 | :param group_name: | |
1557 | """ |
|
1557 | """ | |
1558 |         path_prefix = self.group.full_path_splitted if self.group else []
1559 |         return self.NAME_SEP.join(path_prefix + [repo_name])
1560 |
1561 |     @property
1562 |     def _config(self):
1563 |         """
1564 |         Returns db based config object.
1565 |         """
1566 |         from rhodecode.lib.utils import make_db_config
1567 |         return make_db_config(clear_session=False, repo=self)
1568 |
1569 |     def permissions(self, with_admins=True, with_owner=True):
1570 |         q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1571 |         q = q.options(joinedload(UserRepoToPerm.repository),
1572 |                       joinedload(UserRepoToPerm.user),
1573 |                       joinedload(UserRepoToPerm.permission),)
1574 |
1575 |         # get owners and admins and permissions. We do a trick of re-writing
1576 |         # objects from sqlalchemy to named-tuples due to sqlalchemy session
1577 |         # has a global reference and changing one object propagates to all
1578 |         # others. This means if admin is also an owner admin_row that change
1579 |         # would propagate to both objects
1580 |         perm_rows = []
1581 |         for _usr in q.all():
1582 |             usr = AttributeDict(_usr.user.get_dict())
1583 |             usr.permission = _usr.permission.permission_name
1584 |             perm_rows.append(usr)
1585 |
1586 |         # filter the perm rows by 'default' first and then sort them by
1587 |         # admin,write,read,none permissions sorted again alphabetically in
1588 |         # each group
1589 |         perm_rows = sorted(perm_rows, key=display_sort)
1590 |
1591 |         _admin_perm = 'repository.admin'
1592 |         owner_row = []
1593 |         if with_owner:
1594 |             usr = AttributeDict(self.user.get_dict())
1595 |             usr.owner_row = True
1596 |             usr.permission = _admin_perm
1597 |             owner_row.append(usr)
1598 |
1599 |         super_admin_rows = []
1600 |         if with_admins:
1601 |             for usr in User.get_all_super_admins():
1602 |                 # if this admin is also owner, don't double the record
1603 |                 if usr.user_id == owner_row[0].user_id:
1604 |                     owner_row[0].admin_row = True
1605 |                 else:
1606 |                     usr = AttributeDict(usr.get_dict())
1607 |                     usr.admin_row = True
1608 |                     usr.permission = _admin_perm
1609 |                     super_admin_rows.append(usr)
1610 |
1611 |         return super_admin_rows + owner_row + perm_rows
1612 |
1613 |     def permission_user_groups(self):
1614 |         q = UserGroupRepoToPerm.query().filter(
1615 |             UserGroupRepoToPerm.repository == self)
1616 |         q = q.options(joinedload(UserGroupRepoToPerm.repository),
1617 |                       joinedload(UserGroupRepoToPerm.users_group),
1618 |                       joinedload(UserGroupRepoToPerm.permission),)
1619 |
1620 |         perm_rows = []
1621 |         for _user_group in q.all():
1622 |             usr = AttributeDict(_user_group.users_group.get_dict())
1623 |             usr.permission = _user_group.permission.permission_name
1624 |             perm_rows.append(usr)
1625 |
1626 |         return perm_rows
1627 |
1628 |     def get_api_data(self, include_secrets=False):
1629 |         """
1630 |         Common function for generating repo api data
1631 |
1632 |         :param include_secrets: See :meth:`User.get_api_data`.
1633 |
1634 |         """
1635 |         # TODO: mikhail: Here there is an anti-pattern, we probably need to
1636 |         # move these methods to the model level.
1637 |         from rhodecode.model.settings import SettingsModel
1638 |
1639 |         repo = self
1640 |         _user_id, _time, _reason = self.locked
1641 |
1642 |         data = {
1643 |             'repo_id': repo.repo_id,
1644 |             'repo_name': repo.repo_name,
1645 |             'repo_type': repo.repo_type,
1646 |             'clone_uri': repo.clone_uri or '',
1647 |             'url': url('summary_home', repo_name=self.repo_name, qualified=True),
1648 |             'private': repo.private,
1649 |             'created_on': repo.created_on,
1650 |             'description': repo.description,
1651 |             'landing_rev': repo.landing_rev,
1652 |             'owner': repo.user.username,
1653 |             'fork_of': repo.fork.repo_name if repo.fork else None,
1654 |             'enable_statistics': repo.enable_statistics,
1655 |             'enable_locking': repo.enable_locking,
1656 |             'enable_downloads': repo.enable_downloads,
1657 |             'last_changeset': repo.changeset_cache,
1658 |             'locked_by': User.get(_user_id).get_api_data(
1659 |                 include_secrets=include_secrets) if _user_id else None,
1660 |             'locked_date': time_to_datetime(_time) if _time else None,
1661 |             'lock_reason': _reason if _reason else None,
1662 |         }
1663 |
1664 |         # TODO: mikhail: should be per-repo settings here
1665 |         rc_config = SettingsModel().get_all_settings()
1666 |         repository_fields = str2bool(
1667 |             rc_config.get('rhodecode_repository_fields'))
1668 |         if repository_fields:
1669 |             for f in self.extra_fields:
1670 |                 data[f.field_key_prefixed] = f.field_value
1671 |
1672 |         return data
1673 |
1674 |     @classmethod
1675 |     def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1676 |         if not lock_time:
1677 |             lock_time = time.time()
1678 |         if not lock_reason:
1679 |             lock_reason = cls.LOCK_AUTOMATIC
1680 |         repo.locked = [user_id, lock_time, lock_reason]
1681 |         Session().add(repo)
1682 |         Session().commit()
1683 |
1684 |     @classmethod
1685 |     def unlock(cls, repo):
1686 |         repo.locked = None
1687 |         Session().add(repo)
1688 |         Session().commit()
1689 |
1690 |     @classmethod
1691 |     def getlock(cls, repo):
1692 |         return repo.locked
1693 |
1694 |     def is_user_lock(self, user_id):
1695 |         if self.locked[0]:
1696 |             lock_user_id = safe_int(self.locked[0])
1697 |             user_id = safe_int(user_id)
1698 |             # both are ints, and they are equal
1699 |             return all([lock_user_id, user_id]) and lock_user_id == user_id
1700 |
1701 |         return False
1702 |
1703 |     def get_locking_state(self, action, user_id, only_when_enabled=True):
1704 |         """
1705 |         Checks locking on this repository. If locking is enabled and a lock
1706 |         is present, returns a tuple of (make_lock, locked, locked_by).
1707 |         make_lock can have 3 states: None (do nothing), True (make a lock),
1708 |         False (release a lock). This value is later propagated to hooks,
1709 |         which do the locking. Think of it as a signal passed to hooks about what to do.
1710 |
1711 |         """
1712 |         # TODO: johbo: This is part of the business logic and should be moved
1713 |         # into the RepositoryModel.
1714 |
1715 |         if action not in ('push', 'pull'):
1716 |             raise ValueError("Invalid action value: %s" % repr(action))
1717 |
1718 |         # defines if locked error should be thrown to user
1719 |         currently_locked = False
1720 |         # defines if new lock should be made, tri-state
1721 |         make_lock = None
1722 |         repo = self
1723 |         user = User.get(user_id)
1724 |
1725 |         lock_info = repo.locked
1726 |
1727 |         if repo and (repo.enable_locking or not only_when_enabled):
1728 |             if action == 'push':
1729 |                 # check if it's already locked !, if it is compare users
1730 |                 locked_by_user_id = lock_info[0]
1731 |                 if user.user_id == locked_by_user_id:
1732 |                     log.debug(
1733 |                         'Got `push` action from user %s, now unlocking', user)
1734 |                     # unlock if we have push from user who locked
1735 |                     make_lock = False
1736 |                 else:
1737 |                     # we're not the same user who locked, ban with
1738 |                     # code defined in settings (default is 423 HTTP Locked) !
1739 |                     log.debug('Repo %s is currently locked by %s', repo, user)
1740 |                     currently_locked = True
1741 |             elif action == 'pull':
1742 |                 # [0] user [1] date
1743 |                 if lock_info[0] and lock_info[1]:
1744 |                     log.debug('Repo %s is currently locked by %s', repo, user)
1745 |                     currently_locked = True
1746 |                 else:
1747 |                     log.debug('Setting lock on repo %s by %s', repo, user)
1748 |                     make_lock = True
1749 |
1750 |         else:
1751 |             log.debug('Repository %s do not have locking enabled', repo)
1752 |
1753 |         log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1754 |                   make_lock, currently_locked, lock_info)
1755 |
1756 |         from rhodecode.lib.auth import HasRepoPermissionAny
1757 |         perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1758 |         if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1759 |             # if we don't have at least write permission we cannot make a lock
1760 |             log.debug('lock state reset back to FALSE due to lack '
1761 |                       'of at least write permission')
1762 |             make_lock = False
1763 |
1764 |         return make_lock, currently_locked, lock_info
1765 |
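A sketch of how the locking helpers above fit together (illustrative only; the user id is made up, and the classmethods commit the session themselves):

    make_lock, currently_locked, lock_info = repo.get_locking_state('push', user_id=2)
    if make_lock is True:
        Repository.lock(repo, user_id=2)   # lock_reason defaults to LOCK_AUTOMATIC
    elif make_lock is False:
        Repository.unlock(repo)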
1766 |     @property
1767 |     def last_db_change(self):
1768 |         return self.updated_on
1769 |
1770 |     @property
1771 |     def clone_uri_hidden(self):
1772 |         clone_uri = self.clone_uri
1773 |         if clone_uri:
1774 |             import urlobject
1775 |             url_obj = urlobject.URLObject(clone_uri)
1776 |             if url_obj.password:
1777 |                 clone_uri = url_obj.with_password('*****')
1778 |         return clone_uri
1779 |
1780 |     def clone_url(self, **override):
1781 |         qualified_home_url = url('home', qualified=True)
1782 |
1783 |         uri_tmpl = None
1784 |         if 'with_id' in override:
1785 |             uri_tmpl = self.DEFAULT_CLONE_URI_ID
1786 |             del override['with_id']
1787 |
1788 |         if 'uri_tmpl' in override:
1789 |             uri_tmpl = override['uri_tmpl']
1790 |             del override['uri_tmpl']
1791 |
1792 |         # we didn't override our tmpl from **overrides
1793 |         if not uri_tmpl:
1794 |             uri_tmpl = self.DEFAULT_CLONE_URI
1795 |             try:
1796 |                 from pylons import tmpl_context as c
1797 |                 uri_tmpl = c.clone_uri_tmpl
1798 |             except Exception:
1799 |                 # in any case if we call this outside of request context,
1800 |                 # ie, not having tmpl_context set up
1801 |                 pass
1802 |
1803 |         return get_clone_url(uri_tmpl=uri_tmpl,
1804 |                              qualifed_home_url=qualified_home_url,
1805 |                              repo_name=self.repo_name,
1806 |                              repo_id=self.repo_id, **override)
1807 |
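A sketch of the `clone_url` template handling above (illustrative only; the ssh template is a made-up example of the `{scheme}`, `{user}`, `{netloc}` and `{repo}` placeholders):

    repo.clone_url()                  # DEFAULT_CLONE_URI, or clone_uri_tmpl inside a request context
    repo.clone_url(with_id=True)      # DEFAULT_CLONE_URI_ID, i.e. the /_{repoid} form
    repo.clone_url(uri_tmpl='ssh://{user}@{netloc}/{repo}')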
1808 |     def set_state(self, state):
1809 |         self.repo_state = state
1810 |         Session().add(self)
1811 |     #==========================================================================
1812 |     # SCM PROPERTIES
1813 |     #==========================================================================
1814 |
1815 |     def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
1816 |         return get_commit_safe(
1817 |             self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1818 |
1819 |     def get_changeset(self, rev=None, pre_load=None):
1820 |         warnings.warn("Use get_commit", DeprecationWarning)
1821 |         commit_id = None
1822 |         commit_idx = None
1823 |         if isinstance(rev, basestring):
1824 |             commit_id = rev
1825 |         else:
1826 |             commit_idx = rev
1827 |         return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
1828 |                                pre_load=pre_load)
1829 |
1830 |     def get_landing_commit(self):
1831 |         """
1832 |         Returns landing commit, or if that doesn't exist returns the tip
1833 |         """
1834 |         _rev_type, _rev = self.landing_rev
1835 |         commit = self.get_commit(_rev)
1836 |         if isinstance(commit, EmptyCommit):
1837 |             return self.get_commit()
1838 |         return commit
1839 |
1840 |     def update_commit_cache(self, cs_cache=None, config=None):
1841 |         """
1842 |         Update cache of last changeset for repository, keys should be::
1843 |
1844 |             short_id
1845 |             raw_id
1846 |             revision
1847 |             parents
1848 |             message
1849 |             date
1850 |             author
1851 |
1852 |         :param cs_cache:
1853 |         """
1854 |         from rhodecode.lib.vcs.backends.base import BaseChangeset
1855 |         if cs_cache is None:
1856 |             # use no-cache version here
1857 |             scm_repo = self.scm_instance(cache=False, config=config)
1858 |             if scm_repo:
1859 |                 cs_cache = scm_repo.get_commit(
1860 |                     pre_load=["author", "date", "message", "parents"])
1861 |             else:
1862 |                 cs_cache = EmptyCommit()
1863 |
1864 |         if isinstance(cs_cache, BaseChangeset):
1865 |             cs_cache = cs_cache.__json__()
1866 |
1867 |         def is_outdated(new_cs_cache):
1868 |             if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
1869 |                     new_cs_cache['revision'] != self.changeset_cache['revision']):
1870 |                 return True
1871 |             return False
1872 |
1873 |         # check if we have maybe already latest cached revision
1874 |         if is_outdated(cs_cache) or not self.changeset_cache:
1875 |             _default = datetime.datetime.fromtimestamp(0)
1876 |             last_change = cs_cache.get('date') or _default
1877 |             log.debug('updated repo %s with new cs cache %s',
1878 |                       self.repo_name, cs_cache)
1879 |             self.updated_on = last_change
1880 |             self.changeset_cache = cs_cache
1881 |             Session().add(self)
1882 |             Session().commit()
1883 |         else:
1884 |             log.debug('Skipping update_commit_cache for repo:`%s` '
1885 |                       'commit already with latest changes', self.repo_name)
1886 |
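`update_commit_cache` above is what keeps `changeset_cache` and `updated_on` in step with the repository tip. A usage sketch (illustrative only; 'some/repo' is a made-up name):

    repo = Repository.get_by_repo_name('some/repo')
    repo.update_commit_cache()     # reads the tip via scm_instance(cache=False) and stores its JSON summary
    last = repo.changeset_cache    # dict with raw_id, revision, date, author, ...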
1887 | @property |
|
1887 | @property | |
1888 | def tip(self): |
|
1888 | def tip(self): | |
1889 | return self.get_commit('tip') |
|
1889 | return self.get_commit('tip') | |
1890 |
|
1890 | |||
1891 | @property |
|
1891 | @property | |
1892 | def author(self): |
|
1892 | def author(self): | |
1893 | return self.tip.author |
|
1893 | return self.tip.author | |
1894 |
|
1894 | |||
1895 | @property |
|
1895 | @property | |
1896 | def last_change(self): |
|
1896 | def last_change(self): | |
1897 | return self.scm_instance().last_change |
|
1897 | return self.scm_instance().last_change | |
1898 |
|
1898 | |||
1899 | def get_comments(self, revisions=None): |
|
1899 | def get_comments(self, revisions=None): | |
1900 | """ |
|
1900 | """ | |
1901 | Returns comments for this repository grouped by revisions |
|
1901 | Returns comments for this repository grouped by revisions | |
1902 |
|
1902 | |||
1903 | :param revisions: filter query by revisions only |
|
1903 | :param revisions: filter query by revisions only | |
1904 | """ |
|
1904 | """ | |
1905 | cmts = ChangesetComment.query()\ |
|
1905 | cmts = ChangesetComment.query()\ | |
1906 | .filter(ChangesetComment.repo == self) |
|
1906 | .filter(ChangesetComment.repo == self) | |
1907 | if revisions: |
|
1907 | if revisions: | |
1908 | cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) |
|
1908 | cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) | |
1909 | grouped = collections.defaultdict(list) |
|
1909 | grouped = collections.defaultdict(list) | |
1910 | for cmt in cmts.all(): |
|
1910 | for cmt in cmts.all(): | |
1911 | grouped[cmt.revision].append(cmt) |
|
1911 | grouped[cmt.revision].append(cmt) | |
1912 | return grouped |
|
1912 | return grouped | |
1913 |
|
1913 | |||
1914 | def statuses(self, revisions=None): |
|
1914 | def statuses(self, revisions=None): | |
1915 | """ |
|
1915 | """ | |
1916 | Returns statuses for this repository |
|
1916 | Returns statuses for this repository | |
1917 |
|
1917 | |||
1918 | :param revisions: list of revisions to get statuses for |
|
1918 | :param revisions: list of revisions to get statuses for | |
1919 | """ |
|
1919 | """ | |
1920 | statuses = ChangesetStatus.query()\ |
|
1920 | statuses = ChangesetStatus.query()\ | |
1921 | .filter(ChangesetStatus.repo == self)\ |
|
1921 | .filter(ChangesetStatus.repo == self)\ | |
1922 | .filter(ChangesetStatus.version == 0) |
|
1922 | .filter(ChangesetStatus.version == 0) | |
1923 |
|
1923 | |||
1924 | if revisions: |
|
1924 | if revisions: | |
1925 | # Try doing the filtering in chunks to avoid hitting limits |
|
1925 | # Try doing the filtering in chunks to avoid hitting limits | |
1926 | size = 500 |
|
1926 | size = 500 | |
1927 | status_results = [] |
|
1927 | status_results = [] | |
1928 | for chunk in xrange(0, len(revisions), size): |
|
1928 | for chunk in xrange(0, len(revisions), size): | |
1929 | status_results += statuses.filter( |
|
1929 | status_results += statuses.filter( | |
1930 | ChangesetStatus.revision.in_( |
|
1930 | ChangesetStatus.revision.in_( | |
1931 | revisions[chunk: chunk+size]) |
|
1931 | revisions[chunk: chunk+size]) | |
1932 | ).all() |
|
1932 | ).all() | |
1933 | else: |
|
1933 | else: | |
1934 | status_results = statuses.all() |
|
1934 | status_results = statuses.all() | |
1935 |
|
1935 | |||
1936 | grouped = {} |
|
1936 | grouped = {} | |
1937 |
|
1937 | |||
1938 | # maybe we have open new pullrequest without a status? |
|
1938 | # maybe we have open new pullrequest without a status? | |
1939 | stat = ChangesetStatus.STATUS_UNDER_REVIEW |
|
1939 | stat = ChangesetStatus.STATUS_UNDER_REVIEW | |
1940 | status_lbl = ChangesetStatus.get_status_lbl(stat) |
|
1940 | status_lbl = ChangesetStatus.get_status_lbl(stat) | |
1941 | for pr in PullRequest.query().filter(PullRequest.source_repo == self).all(): |
|
1941 | for pr in PullRequest.query().filter(PullRequest.source_repo == self).all(): | |
1942 | for rev in pr.revisions: |
|
1942 | for rev in pr.revisions: | |
1943 | pr_id = pr.pull_request_id |
|
1943 | pr_id = pr.pull_request_id | |
1944 | pr_repo = pr.target_repo.repo_name |
|
1944 | pr_repo = pr.target_repo.repo_name | |
1945 | grouped[rev] = [stat, status_lbl, pr_id, pr_repo] |
|
1945 | grouped[rev] = [stat, status_lbl, pr_id, pr_repo] | |
1946 |
|
1946 | |||
1947 | for stat in status_results: |
|
1947 | for stat in status_results: | |
1948 | pr_id = pr_repo = None |
|
1948 | pr_id = pr_repo = None | |
1949 | if stat.pull_request: |
|
1949 | if stat.pull_request: | |
1950 | pr_id = stat.pull_request.pull_request_id |
|
1950 | pr_id = stat.pull_request.pull_request_id | |
1951 | pr_repo = stat.pull_request.target_repo.repo_name |
|
1951 | pr_repo = stat.pull_request.target_repo.repo_name | |
1952 | grouped[stat.revision] = [str(stat.status), stat.status_lbl, |
|
1952 | grouped[stat.revision] = [str(stat.status), stat.status_lbl, | |
1953 | pr_id, pr_repo] |
|
1953 | pr_id, pr_repo] | |
1954 | return grouped |
|
1954 | return grouped | |
1955 |
|
1955 | |||
1956 | # ========================================================================== |
|
1956 | # ========================================================================== | |
1957 | # SCM CACHE INSTANCE |
|
1957 | # SCM CACHE INSTANCE | |
1958 | # ========================================================================== |
|
1958 | # ========================================================================== | |
1959 |
|
1959 | |||
1960 | def scm_instance(self, **kwargs): |
|
1960 | def scm_instance(self, **kwargs): | |
1961 | import rhodecode |
|
1961 | import rhodecode | |
1962 |
|
1962 | |||
1963 | # Passing a config will not hit the cache; currently this is only used
|
1963 | # Passing a config will not hit the cache; currently this is only used | |
1964 | # for repo2dbmapper
|
1964 | # for repo2dbmapper | |
1965 | config = kwargs.pop('config', None) |
|
1965 | config = kwargs.pop('config', None) | |
1966 | cache = kwargs.pop('cache', None) |
|
1966 | cache = kwargs.pop('cache', None) | |
1967 | full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache')) |
|
1967 | full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache')) | |
1968 | # if cache is NOT defined, use the default global setting; otherwise the
|
1968 | # if cache is NOT defined, use the default global setting; otherwise the | |
1969 | # caller has full control over the cache behaviour
|
1969 | # caller has full control over the cache behaviour | |
1970 | if cache is None and full_cache and not config: |
|
1970 | if cache is None and full_cache and not config: | |
1971 | return self._get_instance_cached() |
|
1971 | return self._get_instance_cached() | |
1972 | return self._get_instance(cache=bool(cache), config=config) |
|
1972 | return self._get_instance(cache=bool(cache), config=config) | |
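A simplified restatement of the cache-selection rule in `scm_instance` above (the helper name is hypothetical; the real method returns the cached or fresh VCS instance accordingly): the cached path is taken only when the caller did not pin `cache`, the global `vcs_full_cache` option is enabled, and no custom config was passed.

    def use_cached_instance(cache, full_cache, config):
        # mirrors the condition: cache is None and full_cache and not config
        return cache is None and full_cache and not config

    assert use_cached_instance(None, True, None)
    assert not use_cached_instance(False, True, None)       # caller pinned cache
    assert not use_cached_instance(None, False, None)       # global cache disabled
    assert not use_cached_instance(None, True, {'k': 'v'})  # custom config given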
1973 |
|
1973 | |||
1974 | def _get_instance_cached(self): |
|
1974 | def _get_instance_cached(self): | |
1975 | @cache_region('long_term') |
|
1975 | @cache_region('long_term') | |
1976 | def _get_repo(cache_key): |
|
1976 | def _get_repo(cache_key): | |
1977 | return self._get_instance() |
|
1977 | return self._get_instance() | |
1978 |
|
1978 | |||
1979 | invalidator_context = CacheKey.repo_context_cache( |
|
1979 | invalidator_context = CacheKey.repo_context_cache( | |
1980 | _get_repo, self.repo_name, None, thread_scoped=True) |
|
1980 | _get_repo, self.repo_name, None, thread_scoped=True) | |
1981 |
|
1981 | |||
1982 | with invalidator_context as context: |
|
1982 | with invalidator_context as context: | |
1983 | context.invalidate() |
|
1983 | context.invalidate() | |
1984 | repo = context.compute() |
|
1984 | repo = context.compute() | |
1985 |
|
1985 | |||
1986 | return repo |
|
1986 | return repo | |
1987 |
|
1987 | |||
1988 | def _get_instance(self, cache=True, config=None): |
|
1988 | def _get_instance(self, cache=True, config=None): | |
1989 | config = config or self._config |
|
1989 | config = config or self._config | |
1990 | custom_wire = { |
|
1990 | custom_wire = { | |
1991 | 'cache': cache # controls the vcs.remote cache |
|
1991 | 'cache': cache # controls the vcs.remote cache | |
1992 | } |
|
1992 | } | |
1993 |
|
1993 | |||
1994 | repo = get_vcs_instance( |
|
1994 | repo = get_vcs_instance( | |
1995 | repo_path=safe_str(self.repo_full_path), |
|
1995 | repo_path=safe_str(self.repo_full_path), | |
1996 | config=config, |
|
1996 | config=config, | |
1997 | with_wire=custom_wire, |
|
1997 | with_wire=custom_wire, | |
1998 | create=False) |
|
1998 | create=False) | |
1999 |
|
1999 | |||
2000 | return repo |
|
2000 | return repo | |
2001 |
|
2001 | |||
2002 | def __json__(self): |
|
2002 | def __json__(self): | |
2003 | return {'landing_rev': self.landing_rev} |
|
2003 | return {'landing_rev': self.landing_rev} | |
2004 |
|
2004 | |||
2005 | def get_dict(self): |
|
2005 | def get_dict(self): | |
2006 |
|
2006 | |||
2007 | # Since we transformed `repo_name` into a hybrid property, we need to
|
2007 | # Since we transformed `repo_name` into a hybrid property, we need to | |
2008 | # keep compatibility with code that uses the `repo_name` field.
|
2008 | # keep compatibility with code that uses the `repo_name` field. | |
2009 |
|
2009 | |||
2010 | result = super(Repository, self).get_dict() |
|
2010 | result = super(Repository, self).get_dict() | |
2011 | result['repo_name'] = result.pop('_repo_name', None) |
|
2011 | result['repo_name'] = result.pop('_repo_name', None) | |
2012 | return result |
|
2012 | return result | |
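An illustration, with made-up values, of the key rename done by `get_dict` above: the private backing field of the hybrid property is re-exposed under its public name.

    raw = {'_repo_name': 'projects/backend/api', 'repo_id': 42}
    data = dict(raw)
    data['repo_name'] = data.pop('_repo_name', None)
    assert data == {'repo_name': 'projects/backend/api', 'repo_id': 42}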
2013 |
|
2013 | |||
2014 |
|
2014 | |||
2015 | class RepoGroup(Base, BaseModel): |
|
2015 | class RepoGroup(Base, BaseModel): | |
2016 | __tablename__ = 'groups' |
|
2016 | __tablename__ = 'groups' | |
2017 | __table_args__ = ( |
|
2017 | __table_args__ = ( | |
2018 | UniqueConstraint('group_name', 'group_parent_id'), |
|
2018 | UniqueConstraint('group_name', 'group_parent_id'), | |
2019 | CheckConstraint('group_id != group_parent_id'), |
|
2019 | CheckConstraint('group_id != group_parent_id'), | |
2020 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2020 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
2021 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
2021 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
2022 | ) |
|
2022 | ) | |
2023 | __mapper_args__ = {'order_by': 'group_name'} |
|
2023 | __mapper_args__ = {'order_by': 'group_name'} | |
2024 |
|
2024 | |||
2025 | CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups |
|
2025 | CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups | |
2026 |
|
2026 | |||
2027 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2027 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
2028 | group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) |
|
2028 | group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) | |
2029 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) |
|
2029 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) | |
2030 | group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) |
|
2030 | group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) | |
2031 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) |
|
2031 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) | |
2032 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) |
|
2032 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) | |
2033 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
2033 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
2034 |
|
2034 | |||
2035 | repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') |
|
2035 | repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') | |
2036 | users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') |
|
2036 | users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') | |
2037 | parent_group = relationship('RepoGroup', remote_side=group_id) |
|
2037 | parent_group = relationship('RepoGroup', remote_side=group_id) | |
2038 | user = relationship('User') |
|
2038 | user = relationship('User') | |
2039 |
|
2039 | |||
2040 | def __init__(self, group_name='', parent_group=None): |
|
2040 | def __init__(self, group_name='', parent_group=None): | |
2041 | self.group_name = group_name |
|
2041 | self.group_name = group_name | |
2042 | self.parent_group = parent_group |
|
2042 | self.parent_group = parent_group | |
2043 |
|
2043 | |||
2044 | def __unicode__(self): |
|
2044 | def __unicode__(self): | |
2045 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id, |
|
2045 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id, | |
2046 | self.group_name) |
|
2046 | self.group_name) | |
2047 |
|
2047 | |||
2048 | @classmethod |
|
2048 | @classmethod | |
2049 | def _generate_choice(cls, repo_group): |
|
2049 | def _generate_choice(cls, repo_group): | |
2050 | from webhelpers.html import literal as _literal |
|
2050 | from webhelpers.html import literal as _literal | |
2051 | _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) |
|
2051 | _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) | |
2052 | return repo_group.group_id, _name(repo_group.full_path_splitted) |
|
2052 | return repo_group.group_id, _name(repo_group.full_path_splitted) | |
2053 |
|
2053 | |||
2054 | @classmethod |
|
2054 | @classmethod | |
2055 | def groups_choices(cls, groups=None, show_empty_group=True): |
|
2055 | def groups_choices(cls, groups=None, show_empty_group=True): | |
2056 | if not groups: |
|
2056 | if not groups: | |
2057 | groups = cls.query().all() |
|
2057 | groups = cls.query().all() | |
2058 |
|
2058 | |||
2059 | repo_groups = [] |
|
2059 | repo_groups = [] | |
2060 | if show_empty_group: |
|
2060 | if show_empty_group: | |
2061 | repo_groups = [('-1', u'-- %s --' % _('No parent'))] |
|
2061 | repo_groups = [('-1', u'-- %s --' % _('No parent'))] | |
2062 |
|
2062 | |||
2063 | repo_groups.extend([cls._generate_choice(x) for x in groups]) |
|
2063 | repo_groups.extend([cls._generate_choice(x) for x in groups]) | |
2064 |
|
2064 | |||
2065 | repo_groups = sorted( |
|
2065 | repo_groups = sorted( | |
2066 | repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0]) |
|
2066 | repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0]) | |
2067 | return repo_groups |
|
2067 | return repo_groups | |
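A self-contained sketch of what `groups_choices` builds for nested groups, using hypothetical data in place of real `RepoGroup` rows (the real method derives the labels from `full_path_splitted`):

    SEP = '/'
    groups = [(1, ['projects']), (2, ['projects', 'backend']), (3, ['docs'])]
    choices = [('-1', u'-- No parent --')]
    choices.extend((gid, SEP.join(path)) for gid, path in groups)
    choices = sorted(choices, key=lambda t: t[1].split(SEP)[0])
    assert [label for _gid, label in choices] == [
        u'-- No parent --', u'docs', u'projects', u'projects/backend']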
2068 |
|
2068 | |||
2069 | @classmethod |
|
2069 | @classmethod | |
2070 | def url_sep(cls): |
|
2070 | def url_sep(cls): | |
2071 | return URL_SEP |
|
2071 | return URL_SEP | |
2072 |
|
2072 | |||
2073 | @classmethod |
|
2073 | @classmethod | |
2074 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): |
|
2074 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): | |
2075 | if case_insensitive: |
|
2075 | if case_insensitive: | |
2076 | gr = cls.query().filter(func.lower(cls.group_name) |
|
2076 | gr = cls.query().filter(func.lower(cls.group_name) | |
2077 | == func.lower(group_name)) |
|
2077 | == func.lower(group_name)) | |
2078 | else: |
|
2078 | else: | |
2079 | gr = cls.query().filter(cls.group_name == group_name) |
|
2079 | gr = cls.query().filter(cls.group_name == group_name) | |
2080 | if cache: |
|
2080 | if cache: | |
2081 | gr = gr.options(FromCache( |
|
2081 | gr = gr.options(FromCache( | |
2082 | "sql_cache_short", |
|
2082 | "sql_cache_short", | |
2083 | "get_group_%s" % _hash_key(group_name))) |
|
2083 | "get_group_%s" % _hash_key(group_name))) | |
2084 | return gr.scalar() |
|
2084 | return gr.scalar() | |
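The case-insensitive branch lowercases both sides of the comparison before matching; the equivalent check in plain Python, with hypothetical group names:

    names = ['Projects/Backend', 'docs', 'Tools']

    def find_ci(name):
        # mirrors: filter(func.lower(group_name) == func.lower(name))
        matches = [n for n in names if n.lower() == name.lower()]
        return matches[0] if matches else None

    assert find_ci('projects/backend') == 'Projects/Backend'
    assert find_ci('TOOLS') == 'Tools'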
2085 |
|
2085 | |||
2086 | @classmethod |
|
2086 | @classmethod | |
2087 | def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None), |
|
2087 | def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None), | |
2088 | case_insensitive=True): |
|
2088 | case_insensitive=True): | |
2089 | q = RepoGroup.query() |
|
2089 | q = RepoGroup.query() | |
2090 |
|
2090 | |||
2091 | if not isinstance(user_id, Optional): |
|
2091 | if not isinstance(user_id, Optional): | |
2092 | q = q.filter(RepoGroup.user_id == user_id) |
|
2092 | q = q.filter(RepoGroup.user_id == user_id) | |
2093 |
|
2093 | |||
2094 | if not isinstance(group_id, Optional): |
|
2094 | if not isinstance(group_id, Optional): | |
2095 | q = q.filter(RepoGroup.group_parent_id == group_id) |
|
2095 | q = q.filter(RepoGroup.group_parent_id == group_id) | |
2096 |
|
2096 | |||
2097 | if case_insensitive: |
|
2097 | if case_insensitive: | |
2098 | q = q.order_by(func.lower(RepoGroup.group_name)) |
|
2098 | q = q.order_by(func.lower(RepoGroup.group_name)) | |
2099 | else: |
|
2099 | else: | |
2100 | q = q.order_by(RepoGroup.group_name) |
|
2100 | q = q.order_by(RepoGroup.group_name) | |
2101 | return q.all() |
|
2101 | return q.all() | |
2102 |
|
2102 | |||
2103 | @property |
|
2103 | @property | |
2104 | def parents(self): |
|
2104 | def parents(self): | |
2105 | parents_recursion_limit = 10 |
|
2105 | parents_recursion_limit = 10 | |
2106 | groups = [] |
|
2106 | groups = [] | |
2107 | if self.parent_group is None: |
|
2107 | if self.parent_group is None: | |
2108 | return groups |
|
2108 | return groups | |
2109 | cur_gr = self.parent_group |
|
2109 | cur_gr = self.parent_group | |
2110 | groups.insert(0, cur_gr) |
|
2110 | groups.insert(0, cur_gr) | |
2111 | cnt = 0 |
|
2111 | cnt = 0 | |
2112 | while 1: |
|
2112 | while 1: | |
2113 | cnt += 1 |
|
2113 | cnt += 1 | |
2114 | gr = getattr(cur_gr, 'parent_group', None) |
|
2114 | gr = getattr(cur_gr, 'parent_group', None) | |
2115 | cur_gr = cur_gr.parent_group |
|
2115 | cur_gr = cur_gr.parent_group | |
2116 | if gr is None: |
|
2116 | if gr is None: | |
2117 | break |
|
2117 | break | |
2118 | if cnt == parents_recursion_limit: |
|
2118 | if cnt == parents_recursion_limit: | |
2119 | # this will prevent accidental infinite loops
|
2119 | # this will prevent accidental infinite loops | |
2120 | log.error(('more than %s parents found for group %s, stopping ' |
|
2120 | log.error(('more than %s parents found for group %s, stopping ' | |
2121 | 'recursive parent fetching' % (parents_recursion_limit, self))) |
|
2121 | 'recursive parent fetching' % (parents_recursion_limit, self))) | |
2122 | break |
|
2122 | break | |
2123 |
|
2123 | |||
2124 | groups.insert(0, gr) |
|
2124 | groups.insert(0, gr) | |
2125 | return groups |
|
2125 | return groups | |
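A standalone sketch of the bounded upward walk performed by `parents` above, using a tiny stand-in class instead of the ORM model (all names here are illustrative):

    class Node(object):
        def __init__(self, name, parent_group=None):
            self.name = name
            self.parent_group = parent_group

    root = Node('root')
    mid = Node('mid', root)
    leaf = Node('leaf', mid)

    chain, cur, limit = [], leaf.parent_group, 10
    while cur is not None and len(chain) < limit:
        chain.insert(0, cur)            # oldest ancestor ends up first
        cur = cur.parent_group
    assert [n.name for n in chain] == ['root', 'mid']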
2126 |
|
2126 | |||
2127 | @property |
|
2127 | @property | |
2128 | def children(self): |
|
2128 | def children(self): | |
2129 | return RepoGroup.query().filter(RepoGroup.parent_group == self) |
|
2129 | return RepoGroup.query().filter(RepoGroup.parent_group == self) | |
2130 |
|
2130 | |||
2131 | @property |
|
2131 | @property | |
2132 | def name(self): |
|
2132 | def name(self): | |
2133 | return self.group_name.split(RepoGroup.url_sep())[-1] |
|
2133 | return self.group_name.split(RepoGroup.url_sep())[-1] | |
2134 |
|
2134 | |||
2135 | @property |
|
2135 | @property | |
2136 | def full_path(self): |
|
2136 | def full_path(self): | |
2137 | return self.group_name |
|
2137 | return self.group_name | |
2138 |
|
2138 | |||
2139 | @property |
|
2139 | @property | |
2140 | def full_path_splitted(self): |
|
2140 | def full_path_splitted(self): | |
2141 | return self.group_name.split(RepoGroup.url_sep()) |
|
2141 | return self.group_name.split(RepoGroup.url_sep()) | |
2142 |
|
2142 | |||
2143 | @property |
|
2143 | @property | |
2144 | def repositories(self): |
|
2144 | def repositories(self): | |
2145 | return Repository.query()\ |
|
2145 | return Repository.query()\ | |
2146 | .filter(Repository.group == self)\ |
|
2146 | .filter(Repository.group == self)\ | |
2147 | .order_by(Repository.repo_name) |
|
2147 | .order_by(Repository.repo_name) | |
2148 |
|
2148 | |||
2149 | @property |
|
2149 | @property | |
2150 | def repositories_recursive_count(self): |
|
2150 | def repositories_recursive_count(self): | |
2151 | cnt = self.repositories.count() |
|
2151 | cnt = self.repositories.count() | |
2152 |
|
2152 | |||
2153 | def children_count(group): |
|
2153 | def children_count(group): | |
2154 | cnt = 0 |
|
2154 | cnt = 0 | |
2155 | for child in group.children: |
|
2155 | for child in group.children: | |
2156 | cnt += child.repositories.count() |
|
2156 | cnt += child.repositories.count() | |
2157 | cnt += children_count(child) |
|
2157 | cnt += children_count(child) | |
2158 | return cnt |
|
2158 | return cnt | |
2159 |
|
2159 | |||
2160 | return cnt + children_count(self) |
|
2160 | return cnt + children_count(self) | |
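The recursive count above adds the group's own repositories to those of every descendant group. The same traversal over a toy nested structure (hypothetical data, plain dicts instead of ORM objects):

    tree = {'repos': 2, 'children': [
        {'repos': 1, 'children': []},
        {'repos': 0, 'children': [{'repos': 3, 'children': []}]},
    ]}

    def count_repos(group):
        return group['repos'] + sum(count_repos(c) for c in group['children'])

    assert count_repos(tree) == 6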
2161 |
|
2161 | |||
2162 | def _recursive_objects(self, include_repos=True): |
|
2162 | def _recursive_objects(self, include_repos=True): | |
2163 | all_ = [] |
|
2163 | all_ = [] | |
2164 |
|
2164 | |||
2165 | def _get_members(root_gr): |
|
2165 | def _get_members(root_gr): | |
2166 | if include_repos: |
|
2166 | if include_repos: | |
2167 | for r in root_gr.repositories: |
|
2167 | for r in root_gr.repositories: | |
2168 | all_.append(r) |
|
2168 | all_.append(r) | |
2169 | childs = root_gr.children.all() |
|
2169 | childs = root_gr.children.all() | |
2170 | if childs: |
|
2170 | if childs: | |
2171 | for gr in childs: |
|
2171 | for gr in childs: | |
2172 | all_.append(gr) |
|
2172 | all_.append(gr) | |
2173 | _get_members(gr) |
|
2173 | _get_members(gr) | |
2174 |
|
2174 | |||
2175 | _get_members(self) |
|
2175 | _get_members(self) | |
2176 | return [self] + all_ |
|
2176 | return [self] + all_ | |
2177 |
|
2177 | |||
2178 | def recursive_groups_and_repos(self): |
|
2178 | def recursive_groups_and_repos(self): | |
2179 | """ |
|
2179 | """ | |
2180 | Recursively returns all groups, with repositories in those groups
|
2180 | Recursively returns all groups, with repositories in those groups | |
2181 | """ |
|
2181 | """ | |
2182 | return self._recursive_objects() |
|
2182 | return self._recursive_objects() | |
2183 |
|
2183 | |||
2184 | def recursive_groups(self): |
|
2184 | def recursive_groups(self): | |
2185 | """ |
|
2185 | """ | |
2186 | Returns all children groups for this group including children of children |
|
2186 | Returns all children groups for this group including children of children | |
2187 | """ |
|
2187 | """ | |
2188 | return self._recursive_objects(include_repos=False) |
|
2188 | return self._recursive_objects(include_repos=False) | |
2189 |
|
2189 | |||
2190 | def get_new_name(self, group_name): |
|
2190 | def get_new_name(self, group_name): | |
2191 | """ |
|
2191 | """ | |
2192 | returns new full group name based on parent and new name |
|
2192 | returns new full group name based on parent and new name | |
2193 |
|
2193 | |||
2194 | :param group_name: |
|
2194 | :param group_name: | |
2195 | """ |
|
2195 | """ | |
2196 | path_prefix = (self.parent_group.full_path_splitted if |
|
2196 | path_prefix = (self.parent_group.full_path_splitted if | |
2197 | self.parent_group else []) |
|
2197 | self.parent_group else []) | |
2198 | return RepoGroup.url_sep().join(path_prefix + [group_name]) |
|
2198 | return RepoGroup.url_sep().join(path_prefix + [group_name]) | |
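For example, renaming a group whose parent path splits into ['projects', 'backend'] to 'api' would produce the following full name (illustrative values):

    path_prefix = ['projects', 'backend']   # parent_group.full_path_splitted
    group_name = 'api'
    assert '/'.join(path_prefix + [group_name]) == 'projects/backend/api'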
2199 |
|
2199 | |||
2200 | def permissions(self, with_admins=True, with_owner=True): |
|
2200 | def permissions(self, with_admins=True, with_owner=True): | |
2201 | q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self) |
|
2201 | q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self) | |
2202 | q = q.options(joinedload(UserRepoGroupToPerm.group), |
|
2202 | q = q.options(joinedload(UserRepoGroupToPerm.group), | |
2203 | joinedload(UserRepoGroupToPerm.user), |
|
2203 | joinedload(UserRepoGroupToPerm.user), | |
2204 | joinedload(UserRepoGroupToPerm.permission),) |
|
2204 | joinedload(UserRepoGroupToPerm.permission),) | |
2205 |
|
2205 | |||
2206 | # get owners, admins and their permissions. We rewrite the SQLAlchemy
|
2206 | # get owners, admins and their permissions. We rewrite the SQLAlchemy | |
2207 | # objects into detached AttributeDict copies, because the SQLAlchemy
|
2207 | # objects into detached AttributeDict copies, because the SQLAlchemy | |
2208 | # session keeps a global reference to each object and changing one object
|
2208 | # session keeps a global reference to each object and changing one object | |
2209 | # propagates to all others. Without the copy, an admin who is also the
|
2209 | # propagates to all others. Without the copy, an admin who is also the | |
2210 | # owner would get the admin_row change applied to both result rows.
|
2210 | # owner would get the admin_row change applied to both result rows. | |
2211 | perm_rows = [] |
|
2211 | perm_rows = [] | |
2212 | for _usr in q.all(): |
|
2212 | for _usr in q.all(): | |
2213 | usr = AttributeDict(_usr.user.get_dict()) |
|
2213 | usr = AttributeDict(_usr.user.get_dict()) | |
2214 | usr.permission = _usr.permission.permission_name |
|
2214 | usr.permission = _usr.permission.permission_name | |
2215 | perm_rows.append(usr) |
|
2215 | perm_rows.append(usr) | |
2216 |
|
2216 | |||
2217 | # sort the perm rows so the 'default' user comes first, then order them
|
2217 | # sort the perm rows so the 'default' user comes first, then order them | |
2218 | # by admin, write, read, none permission level, alphabetically within
|
2218 | # by admin, write, read, none permission level, alphabetically within | |
2219 | # each group
|
2219 | # each group | |
2220 | perm_rows = sorted(perm_rows, key=display_sort) |
|
2220 | perm_rows = sorted(perm_rows, key=display_sort) | |
2221 |
|
2221 | |||
2222 | _admin_perm = 'group.admin' |
|
2222 | _admin_perm = 'group.admin' | |
2223 | owner_row = [] |
|
2223 | owner_row = [] | |
2224 | if with_owner: |
|
2224 | if with_owner: | |
2225 | usr = AttributeDict(self.user.get_dict()) |
|
2225 | usr = AttributeDict(self.user.get_dict()) | |
2226 | usr.owner_row = True |
|
2226 | usr.owner_row = True | |
2227 | usr.permission = _admin_perm |
|
2227 | usr.permission = _admin_perm | |
2228 | owner_row.append(usr) |
|
2228 | owner_row.append(usr) | |
2229 |
|
2229 | |||
2230 | super_admin_rows = [] |
|
2230 | super_admin_rows = [] | |
2231 | if with_admins: |
|
2231 | if with_admins: | |
2232 | for usr in User.get_all_super_admins(): |
|
2232 | for usr in User.get_all_super_admins(): | |
2233 | # if this admin is also owner, don't double the record |
|
2233 | # if this admin is also owner, don't double the record | |
2234 | if usr.user_id == owner_row[0].user_id: |
|
2234 | if usr.user_id == owner_row[0].user_id: | |
2235 | owner_row[0].admin_row = True |
|
2235 | owner_row[0].admin_row = True | |
2236 | else: |
|
2236 | else: | |
2237 | usr = AttributeDict(usr.get_dict()) |
|
2237 | usr = AttributeDict(usr.get_dict()) | |
2238 | usr.admin_row = True |
|
2238 | usr.admin_row = True | |
2239 | usr.permission = _admin_perm |
|
2239 | usr.permission = _admin_perm | |
2240 | super_admin_rows.append(usr) |
|
2240 | super_admin_rows.append(usr) | |
2241 |
|
2241 | |||
2242 | return super_admin_rows + owner_row + perm_rows |
|
2242 | return super_admin_rows + owner_row + perm_rows | |
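The reason for copying each user into an `AttributeDict` before tagging it can be shown with plain dicts: only an independent copy keeps the owner flag from leaking into other rows built from the same user (illustration only):

    user = {'user_id': 1, 'username': 'admin'}

    owner_row = dict(user)              # independent copy
    owner_row['permission'] = 'group.admin'
    owner_row['owner_row'] = True

    perm_row = dict(user)               # another independent copy of the same user
    perm_row['permission'] = 'group.read'

    assert 'owner_row' not in perm_row  # the copies do not share state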
2243 |
|
2243 | |||
2244 | def permission_user_groups(self): |
|
2244 | def permission_user_groups(self): | |
2245 | q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self) |
|
2245 | q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self) | |
2246 | q = q.options(joinedload(UserGroupRepoGroupToPerm.group), |
|
2246 | q = q.options(joinedload(UserGroupRepoGroupToPerm.group), | |
2247 | joinedload(UserGroupRepoGroupToPerm.users_group), |
|
2247 | joinedload(UserGroupRepoGroupToPerm.users_group), | |
2248 | joinedload(UserGroupRepoGroupToPerm.permission),) |
|
2248 | joinedload(UserGroupRepoGroupToPerm.permission),) | |
2249 |
|
2249 | |||
2250 | perm_rows = [] |
|
2250 | perm_rows = [] | |
2251 | for _user_group in q.all(): |
|
2251 | for _user_group in q.all(): | |
2252 | usr = AttributeDict(_user_group.users_group.get_dict()) |
|
2252 | usr = AttributeDict(_user_group.users_group.get_dict()) | |
2253 | usr.permission = _user_group.permission.permission_name |
|
2253 | usr.permission = _user_group.permission.permission_name | |
2254 | perm_rows.append(usr) |
|
2254 | perm_rows.append(usr) | |
2255 |
|
2255 | |||
2256 | return perm_rows |
|
2256 | return perm_rows | |
2257 |
|
2257 | |||
2258 | def get_api_data(self): |
|
2258 | def get_api_data(self): | |
2259 | """ |
|
2259 | """ | |
2260 | Common function for generating api data |
|
2260 | Common function for generating api data | |
2261 |
|
2261 | |||
2262 | """ |
|
2262 | """ | |
2263 | group = self |
|
2263 | group = self | |
2264 | data = { |
|
2264 | data = { | |
2265 | 'group_id': group.group_id, |
|
2265 | 'group_id': group.group_id, | |
2266 | 'group_name': group.group_name, |
|
2266 | 'group_name': group.group_name, | |
2267 | 'group_description': group.group_description, |
|
2267 | 'group_description': group.group_description, | |
2268 | 'parent_group': group.parent_group.group_name if group.parent_group else None, |
|
2268 | 'parent_group': group.parent_group.group_name if group.parent_group else None, | |
2269 | 'repositories': [x.repo_name for x in group.repositories], |
|
2269 | 'repositories': [x.repo_name for x in group.repositories], | |
2270 | 'owner': group.user.username, |
|
2270 | 'owner': group.user.username, | |
2271 | } |
|
2271 | } | |
2272 | return data |
|
2272 | return data | |
2273 |
|
2273 | |||
2274 |
|
2274 | |||
2275 | class Permission(Base, BaseModel): |
|
2275 | class Permission(Base, BaseModel): | |
2276 | __tablename__ = 'permissions' |
|
2276 | __tablename__ = 'permissions' | |
2277 | __table_args__ = ( |
|
2277 | __table_args__ = ( | |
2278 | Index('p_perm_name_idx', 'permission_name'), |
|
2278 | Index('p_perm_name_idx', 'permission_name'), | |
2279 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2279 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
2280 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, |
|
2280 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
2281 | ) |
|
2281 | ) | |
2282 | PERMS = [ |
|
2282 | PERMS = [ | |
2283 | ('hg.admin', _('RhodeCode Super Administrator')), |
|
2283 | ('hg.admin', _('RhodeCode Super Administrator')), | |
2284 |
|
2284 | |||
2285 | ('repository.none', _('Repository no access')), |
|
2285 | ('repository.none', _('Repository no access')), | |
2286 | ('repository.read', _('Repository read access')), |
|
2286 | ('repository.read', _('Repository read access')), | |
2287 | ('repository.write', _('Repository write access')), |
|
2287 | ('repository.write', _('Repository write access')), | |
2288 | ('repository.admin', _('Repository admin access')), |
|
2288 | ('repository.admin', _('Repository admin access')), | |
2289 |
|
2289 | |||
2290 | ('group.none', _('Repository group no access')), |
|
2290 | ('group.none', _('Repository group no access')), | |
2291 | ('group.read', _('Repository group read access')), |
|
2291 | ('group.read', _('Repository group read access')), | |
2292 | ('group.write', _('Repository group write access')), |
|
2292 | ('group.write', _('Repository group write access')), | |
2293 | ('group.admin', _('Repository group admin access')), |
|
2293 | ('group.admin', _('Repository group admin access')), | |
2294 |
|
2294 | |||
2295 | ('usergroup.none', _('User group no access')), |
|
2295 | ('usergroup.none', _('User group no access')), | |
2296 | ('usergroup.read', _('User group read access')), |
|
2296 | ('usergroup.read', _('User group read access')), | |
2297 | ('usergroup.write', _('User group write access')), |
|
2297 | ('usergroup.write', _('User group write access')), | |
2298 | ('usergroup.admin', _('User group admin access')), |
|
2298 | ('usergroup.admin', _('User group admin access')), | |
2299 |
|
2299 | |||
2300 | ('hg.repogroup.create.false', _('Repository Group creation disabled')), |
|
2300 | ('hg.repogroup.create.false', _('Repository Group creation disabled')), | |
2301 | ('hg.repogroup.create.true', _('Repository Group creation enabled')), |
|
2301 | ('hg.repogroup.create.true', _('Repository Group creation enabled')), | |
2302 |
|
2302 | |||
2303 | ('hg.usergroup.create.false', _('User Group creation disabled')), |
|
2303 | ('hg.usergroup.create.false', _('User Group creation disabled')), | |
2304 | ('hg.usergroup.create.true', _('User Group creation enabled')), |
|
2304 | ('hg.usergroup.create.true', _('User Group creation enabled')), | |
2305 |
|
2305 | |||
2306 | ('hg.create.none', _('Repository creation disabled')), |
|
2306 | ('hg.create.none', _('Repository creation disabled')), | |
2307 | ('hg.create.repository', _('Repository creation enabled')), |
|
2307 | ('hg.create.repository', _('Repository creation enabled')), | |
2308 | ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')), |
|
2308 | ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')), | |
2309 | ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')), |
|
2309 | ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')), | |
2310 |
|
2310 | |||
2311 | ('hg.fork.none', _('Repository forking disabled')), |
|
2311 | ('hg.fork.none', _('Repository forking disabled')), | |
2312 | ('hg.fork.repository', _('Repository forking enabled')), |
|
2312 | ('hg.fork.repository', _('Repository forking enabled')), | |
2313 |
|
2313 | |||
2314 | ('hg.register.none', _('Registration disabled')), |
|
2314 | ('hg.register.none', _('Registration disabled')), | |
2315 | ('hg.register.manual_activate', _('User Registration with manual account activation')), |
|
2315 | ('hg.register.manual_activate', _('User Registration with manual account activation')), | |
2316 | ('hg.register.auto_activate', _('User Registration with automatic account activation')), |
|
2316 | ('hg.register.auto_activate', _('User Registration with automatic account activation')), | |
2317 |
|
2317 | |||
2318 | ('hg.extern_activate.manual', _('Manual activation of external account')), |
|
2318 | ('hg.extern_activate.manual', _('Manual activation of external account')), | |
2319 | ('hg.extern_activate.auto', _('Automatic activation of external account')), |
|
2319 | ('hg.extern_activate.auto', _('Automatic activation of external account')), | |
2320 |
|
2320 | |||
2321 | ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')), |
|
2321 | ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')), | |
2322 | ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')), |
|
2322 | ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')), | |
2323 | ] |
|
2323 | ] | |
2324 |
|
2324 | |||
2325 | # definition of system default permissions for DEFAULT user |
|
2325 | # definition of system default permissions for DEFAULT user | |
2326 | DEFAULT_USER_PERMISSIONS = [ |
|
2326 | DEFAULT_USER_PERMISSIONS = [ | |
2327 | 'repository.read', |
|
2327 | 'repository.read', | |
2328 | 'group.read', |
|
2328 | 'group.read', | |
2329 | 'usergroup.read', |
|
2329 | 'usergroup.read', | |
2330 | 'hg.create.repository', |
|
2330 | 'hg.create.repository', | |
2331 | 'hg.repogroup.create.false', |
|
2331 | 'hg.repogroup.create.false', | |
2332 | 'hg.usergroup.create.false', |
|
2332 | 'hg.usergroup.create.false', | |
2333 | 'hg.create.write_on_repogroup.true', |
|
2333 | 'hg.create.write_on_repogroup.true', | |
2334 | 'hg.fork.repository', |
|
2334 | 'hg.fork.repository', | |
2335 | 'hg.register.manual_activate', |
|
2335 | 'hg.register.manual_activate', | |
2336 | 'hg.extern_activate.auto', |
|
2336 | 'hg.extern_activate.auto', | |
2337 | 'hg.inherit_default_perms.true', |
|
2337 | 'hg.inherit_default_perms.true', | |
2338 | ] |
|
2338 | ] | |
2339 |
|
2339 | |||
2340 | # Weight defines which permissions are more important;
|
2340 | # Weight defines which permissions are more important; | |
2341 | # the higher the number, the more important the permission
|
2341 | # the higher the number, the more important the permission | |
2342 | # (e.g. repository.admin outweighs repository.write).
|
2342 | # (e.g. repository.admin outweighs repository.write). | |
2343 | PERM_WEIGHTS = { |
|
2343 | PERM_WEIGHTS = { | |
2344 | 'repository.none': 0, |
|
2344 | 'repository.none': 0, | |
2345 | 'repository.read': 1, |
|
2345 | 'repository.read': 1, | |
2346 | 'repository.write': 3, |
|
2346 | 'repository.write': 3, | |
2347 | 'repository.admin': 4, |
|
2347 | 'repository.admin': 4, | |
2348 |
|
2348 | |||
2349 | 'group.none': 0, |
|
2349 | 'group.none': 0, | |
2350 | 'group.read': 1, |
|
2350 | 'group.read': 1, | |
2351 | 'group.write': 3, |
|
2351 | 'group.write': 3, | |
2352 | 'group.admin': 4, |
|
2352 | 'group.admin': 4, | |
2353 |
|
2353 | |||
2354 | 'usergroup.none': 0, |
|
2354 | 'usergroup.none': 0, | |
2355 | 'usergroup.read': 1, |
|
2355 | 'usergroup.read': 1, | |
2356 | 'usergroup.write': 3, |
|
2356 | 'usergroup.write': 3, | |
2357 | 'usergroup.admin': 4, |
|
2357 | 'usergroup.admin': 4, | |
2358 |
|
2358 | |||
2359 | 'hg.repogroup.create.false': 0, |
|
2359 | 'hg.repogroup.create.false': 0, | |
2360 | 'hg.repogroup.create.true': 1, |
|
2360 | 'hg.repogroup.create.true': 1, | |
2361 |
|
2361 | |||
2362 | 'hg.usergroup.create.false': 0, |
|
2362 | 'hg.usergroup.create.false': 0, | |
2363 | 'hg.usergroup.create.true': 1, |
|
2363 | 'hg.usergroup.create.true': 1, | |
2364 |
|
2364 | |||
2365 | 'hg.fork.none': 0, |
|
2365 | 'hg.fork.none': 0, | |
2366 | 'hg.fork.repository': 1, |
|
2366 | 'hg.fork.repository': 1, | |
2367 | 'hg.create.none': 0, |
|
2367 | 'hg.create.none': 0, | |
2368 | 'hg.create.repository': 1 |
|
2368 | 'hg.create.repository': 1 | |
2369 | } |
|
2369 | } | |
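A hypothetical helper showing how such weights can be used to pick the strongest of several permissions; the helper itself is not part of this module:

    weights = {'repository.none': 0, 'repository.read': 1,
               'repository.write': 3, 'repository.admin': 4}

    def strongest(perms):
        return max(perms, key=lambda p: weights.get(p, 0))

    assert strongest(['repository.read', 'repository.write']) == 'repository.write'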
2370 |
|
2370 | |||
2371 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2371 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
2372 | permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) |
|
2372 | permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) | |
2373 | permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) |
|
2373 | permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) | |
2374 |
|
2374 | |||
2375 | def __unicode__(self): |
|
2375 | def __unicode__(self): | |
2376 | return u"<%s('%s:%s')>" % ( |
|
2376 | return u"<%s('%s:%s')>" % ( | |
2377 | self.__class__.__name__, self.permission_id, self.permission_name |
|
2377 | self.__class__.__name__, self.permission_id, self.permission_name | |
2378 | ) |
|
2378 | ) | |
2379 |
|
2379 | |||
2380 | @classmethod |
|
2380 | @classmethod | |
2381 | def get_by_key(cls, key): |
|
2381 | def get_by_key(cls, key): | |
2382 | return cls.query().filter(cls.permission_name == key).scalar() |
|
2382 | return cls.query().filter(cls.permission_name == key).scalar() | |
2383 |
|
2383 | |||
2384 | @classmethod |
|
2384 | @classmethod | |
2385 | def get_default_repo_perms(cls, user_id, repo_id=None): |
|
2385 | def get_default_repo_perms(cls, user_id, repo_id=None): | |
2386 | q = Session().query(UserRepoToPerm, Repository, Permission)\ |
|
2386 | q = Session().query(UserRepoToPerm, Repository, Permission)\ | |
2387 | .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\ |
|
2387 | .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\ | |
2388 | .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ |
|
2388 | .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ | |
2389 | .filter(UserRepoToPerm.user_id == user_id) |
|
2389 | .filter(UserRepoToPerm.user_id == user_id) | |
2390 | if repo_id: |
|
2390 | if repo_id: | |
2391 | q = q.filter(UserRepoToPerm.repository_id == repo_id) |
|
2391 | q = q.filter(UserRepoToPerm.repository_id == repo_id) | |
2392 | return q.all() |
|
2392 | return q.all() | |
2393 |
|
2393 | |||
2394 | @classmethod |
|
2394 | @classmethod | |
2395 | def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None): |
|
2395 | def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None): | |
2396 | q = Session().query(UserGroupRepoToPerm, Repository, Permission)\ |
|
2396 | q = Session().query(UserGroupRepoToPerm, Repository, Permission)\ | |
2397 | .join( |
|
2397 | .join( | |
2398 | Permission, |
|
2398 | Permission, | |
2399 | UserGroupRepoToPerm.permission_id == Permission.permission_id)\ |
|
2399 | UserGroupRepoToPerm.permission_id == Permission.permission_id)\ | |
2400 | .join( |
|
2400 | .join( | |
2401 | Repository, |
|
2401 | Repository, | |
2402 | UserGroupRepoToPerm.repository_id == Repository.repo_id)\ |
|
2402 | UserGroupRepoToPerm.repository_id == Repository.repo_id)\ | |
2403 | .join( |
|
2403 | .join( | |
2404 | UserGroup, |
|
2404 | UserGroup, | |
2405 | UserGroupRepoToPerm.users_group_id == |
|
2405 | UserGroupRepoToPerm.users_group_id == | |
2406 | UserGroup.users_group_id)\ |
|
2406 | UserGroup.users_group_id)\ | |
2407 | .join( |
|
2407 | .join( | |
2408 | UserGroupMember, |
|
2408 | UserGroupMember, | |
2409 | UserGroupRepoToPerm.users_group_id == |
|
2409 | UserGroupRepoToPerm.users_group_id == | |
2410 | UserGroupMember.users_group_id)\ |
|
2410 | UserGroupMember.users_group_id)\ | |
2411 | .filter( |
|
2411 | .filter( | |
2412 | UserGroupMember.user_id == user_id, |
|
2412 | UserGroupMember.user_id == user_id, | |
2413 | UserGroup.users_group_active == true()) |
|
2413 | UserGroup.users_group_active == true()) | |
2414 | if repo_id: |
|
2414 | if repo_id: | |
2415 | q = q.filter(UserGroupRepoToPerm.repository_id == repo_id) |
|
2415 | q = q.filter(UserGroupRepoToPerm.repository_id == repo_id) | |
2416 | return q.all() |
|
2416 | return q.all() | |
2417 |
|
2417 | |||
2418 | @classmethod |
|
2418 | @classmethod | |
2419 | def get_default_group_perms(cls, user_id, repo_group_id=None): |
|
2419 | def get_default_group_perms(cls, user_id, repo_group_id=None): | |
2420 | q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\ |
|
2420 | q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\ | |
2421 | .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\ |
|
2421 | .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\ | |
2422 | .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\ |
|
2422 | .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\ | |
2423 | .filter(UserRepoGroupToPerm.user_id == user_id) |
|
2423 | .filter(UserRepoGroupToPerm.user_id == user_id) | |
2424 | if repo_group_id: |
|
2424 | if repo_group_id: | |
2425 | q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id) |
|
2425 | q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id) | |
2426 | return q.all() |
|
2426 | return q.all() | |
2427 |
|
2427 | |||
2428 | @classmethod |
|
2428 | @classmethod | |
2429 | def get_default_group_perms_from_user_group( |
|
2429 | def get_default_group_perms_from_user_group( | |
2430 | cls, user_id, repo_group_id=None): |
|
2430 | cls, user_id, repo_group_id=None): | |
2431 | q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\ |
|
2431 | q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\ | |
2432 | .join( |
|
2432 | .join( | |
2433 | Permission, |
|
2433 | Permission, | |
2434 | UserGroupRepoGroupToPerm.permission_id == |
|
2434 | UserGroupRepoGroupToPerm.permission_id == | |
2435 | Permission.permission_id)\ |
|
2435 | Permission.permission_id)\ | |
2436 | .join( |
|
2436 | .join( | |
2437 | RepoGroup, |
|
2437 | RepoGroup, | |
2438 | UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\ |
|
2438 | UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\ | |
2439 | .join( |
|
2439 | .join( | |
2440 | UserGroup, |
|
2440 | UserGroup, | |
2441 | UserGroupRepoGroupToPerm.users_group_id == |
|
2441 | UserGroupRepoGroupToPerm.users_group_id == | |
2442 | UserGroup.users_group_id)\ |
|
2442 | UserGroup.users_group_id)\ | |
2443 | .join( |
|
2443 | .join( | |
2444 | UserGroupMember, |
|
2444 | UserGroupMember, | |
2445 | UserGroupRepoGroupToPerm.users_group_id == |
|
2445 | UserGroupRepoGroupToPerm.users_group_id == | |
2446 | UserGroupMember.users_group_id)\ |
|
2446 | UserGroupMember.users_group_id)\ | |
2447 | .filter( |
|
2447 | .filter( | |
2448 | UserGroupMember.user_id == user_id, |
|
2448 | UserGroupMember.user_id == user_id, | |
2449 | UserGroup.users_group_active == true()) |
|
2449 | UserGroup.users_group_active == true()) | |
2450 | if repo_group_id: |
|
2450 | if repo_group_id: | |
2451 | q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id) |
|
2451 | q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id) | |
2452 | return q.all() |
|
2452 | return q.all() | |
2453 |
|
2453 | |||
2454 | @classmethod |
|
2454 | @classmethod | |
2455 | def get_default_user_group_perms(cls, user_id, user_group_id=None): |
|
2455 | def get_default_user_group_perms(cls, user_id, user_group_id=None): | |
2456 | q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\ |
|
2456 | q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\ | |
2457 | .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\ |
|
2457 | .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\ | |
2458 | .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\ |
|
2458 | .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\ | |
2459 | .filter(UserUserGroupToPerm.user_id == user_id) |
|
2459 | .filter(UserUserGroupToPerm.user_id == user_id) | |
2460 | if user_group_id: |
|
2460 | if user_group_id: | |
2461 | q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id) |
|
2461 | q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id) | |
2462 | return q.all() |
|
2462 | return q.all() | |
2463 |
|
2463 | |||
2464 | @classmethod |
|
2464 | @classmethod | |
2465 | def get_default_user_group_perms_from_user_group( |
|
2465 | def get_default_user_group_perms_from_user_group( | |
2466 | cls, user_id, user_group_id=None): |
|
2466 | cls, user_id, user_group_id=None): | |
2467 | TargetUserGroup = aliased(UserGroup, name='target_user_group') |
|
2467 | TargetUserGroup = aliased(UserGroup, name='target_user_group') | |
2468 | q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\ |
|
2468 | q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\ | |
2469 | .join( |
|
2469 | .join( | |
2470 | Permission, |
|
2470 | Permission, | |
2471 | UserGroupUserGroupToPerm.permission_id == |
|
2471 | UserGroupUserGroupToPerm.permission_id == | |
2472 | Permission.permission_id)\ |
|
2472 | Permission.permission_id)\ | |
2473 | .join( |
|
2473 | .join( | |
2474 | TargetUserGroup, |
|
2474 | TargetUserGroup, | |
2475 | UserGroupUserGroupToPerm.target_user_group_id == |
|
2475 | UserGroupUserGroupToPerm.target_user_group_id == | |
2476 | TargetUserGroup.users_group_id)\ |
|
2476 | TargetUserGroup.users_group_id)\ | |
2477 | .join( |
|
2477 | .join( | |
2478 | UserGroup, |
|
2478 | UserGroup, | |
2479 | UserGroupUserGroupToPerm.user_group_id == |
|
2479 | UserGroupUserGroupToPerm.user_group_id == | |
2480 | UserGroup.users_group_id)\ |
|
2480 | UserGroup.users_group_id)\ | |
2481 | .join( |
|
2481 | .join( | |
2482 | UserGroupMember, |
|
2482 | UserGroupMember, | |
2483 | UserGroupUserGroupToPerm.user_group_id == |
|
2483 | UserGroupUserGroupToPerm.user_group_id == | |
2484 | UserGroupMember.users_group_id)\ |
|
2484 | UserGroupMember.users_group_id)\ | |
2485 | .filter( |
|
2485 | .filter( | |
2486 | UserGroupMember.user_id == user_id, |
|
2486 | UserGroupMember.user_id == user_id, | |
2487 | UserGroup.users_group_active == true()) |
|
2487 | UserGroup.users_group_active == true()) | |
2488 | if user_group_id: |
|
2488 | if user_group_id: | |
2489 | q = q.filter( |
|
2489 | q = q.filter( | |
2490 | UserGroupUserGroupToPerm.user_group_id == user_group_id) |
|
2490 | UserGroupUserGroupToPerm.user_group_id == user_group_id) | |
2491 |
|
2491 | |||
2492 | return q.all() |
|
2492 | return q.all() | |
2493 |
|
2493 | |||
2494 |
|
2494 | |||
2495 | class UserRepoToPerm(Base, BaseModel): |
|
2495 | class UserRepoToPerm(Base, BaseModel): | |
2496 | __tablename__ = 'repo_to_perm' |
|
2496 | __tablename__ = 'repo_to_perm' | |
2497 | __table_args__ = ( |
|
2497 | __table_args__ = ( | |
2498 | UniqueConstraint('user_id', 'repository_id', 'permission_id'), |
|
2498 | UniqueConstraint('user_id', 'repository_id', 'permission_id'), | |
2499 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2499 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
2500 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2500 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
2501 | ) |
|
2501 | ) | |
2502 | repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2502 | repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
2503 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
2503 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
2504 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
2504 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
2505 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) |
|
2505 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |
2506 |
|
2506 | |||
2507 | user = relationship('User') |
|
2507 | user = relationship('User') | |
2508 | repository = relationship('Repository') |
|
2508 | repository = relationship('Repository') | |
2509 | permission = relationship('Permission') |
|
2509 | permission = relationship('Permission') | |
2510 |
|
2510 | |||
2511 | @classmethod |
|
2511 | @classmethod | |
2512 | def create(cls, user, repository, permission): |
|
2512 | def create(cls, user, repository, permission): | |
2513 | n = cls() |
|
2513 | n = cls() | |
2514 | n.user = user |
|
2514 | n.user = user | |
2515 | n.repository = repository |
|
2515 | n.repository = repository | |
2516 | n.permission = permission |
|
2516 | n.permission = permission | |
2517 | Session().add(n) |
|
2517 | Session().add(n) | |
2518 | return n |
|
2518 | return n | |
2519 |
|
2519 | |||
2520 | def __unicode__(self): |
|
2520 | def __unicode__(self): | |
2521 | return u'<%s => %s >' % (self.user, self.repository) |
|
2521 | return u'<%s => %s >' % (self.user, self.repository) | |
2522 |
|
2522 | |||
2523 |
|
2523 | |||
2524 | class UserUserGroupToPerm(Base, BaseModel): |
|
2524 | class UserUserGroupToPerm(Base, BaseModel): | |
2525 | __tablename__ = 'user_user_group_to_perm' |
|
2525 | __tablename__ = 'user_user_group_to_perm' | |
2526 | __table_args__ = ( |
|
2526 | __table_args__ = ( | |
2527 | UniqueConstraint('user_id', 'user_group_id', 'permission_id'), |
|
2527 | UniqueConstraint('user_id', 'user_group_id', 'permission_id'), | |
2528 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2528 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
2529 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2529 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
2530 | ) |
|
2530 | ) | |
2531 | user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2531 | user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
2532 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
2532 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
2533 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
2533 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
2534 | user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
2534 | user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
2535 |
|
2535 | |||
2536 | user = relationship('User') |
|
2536 | user = relationship('User') | |
2537 | user_group = relationship('UserGroup') |
|
2537 | user_group = relationship('UserGroup') | |
2538 | permission = relationship('Permission') |
|
2538 | permission = relationship('Permission') | |
2539 |
|
2539 | |||
2540 | @classmethod |
|
2540 | @classmethod | |
2541 | def create(cls, user, user_group, permission): |
|
2541 | def create(cls, user, user_group, permission): | |
2542 | n = cls() |
|
2542 | n = cls() | |
2543 | n.user = user |
|
2543 | n.user = user | |
2544 | n.user_group = user_group |
|
2544 | n.user_group = user_group | |
2545 | n.permission = permission |
|
2545 | n.permission = permission | |
2546 | Session().add(n) |
|
2546 | Session().add(n) | |
2547 | return n |
|
2547 | return n | |
2548 |
|
2548 | |||
2549 | def __unicode__(self): |
|
2549 | def __unicode__(self): | |
2550 | return u'<%s => %s >' % (self.user, self.user_group) |
|
2550 | return u'<%s => %s >' % (self.user, self.user_group) | |
2551 |
|
2551 | |||
2552 |
|
2552 | |||
2553 | class UserToPerm(Base, BaseModel): |
|
2553 | class UserToPerm(Base, BaseModel): | |
2554 | __tablename__ = 'user_to_perm' |
|
2554 | __tablename__ = 'user_to_perm' | |
2555 | __table_args__ = ( |
|
2555 | __table_args__ = ( | |
2556 | UniqueConstraint('user_id', 'permission_id'), |
|
2556 | UniqueConstraint('user_id', 'permission_id'), | |
2557 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2557 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
2558 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2558 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
2559 | ) |
|
2559 | ) | |
2560 | user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2560 | user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
2561 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) |
|
2561 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
2562 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
2562 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
2563 |
|
2563 | |||
2564 | user = relationship('User') |
|
2564 | user = relationship('User') | |
2565 | permission = relationship('Permission', lazy='joined') |
|
2565 | permission = relationship('Permission', lazy='joined') | |
2566 |
|
2566 | |||
2567 | def __unicode__(self): |
|
2567 | def __unicode__(self): | |
2568 | return u'<%s => %s >' % (self.user, self.permission) |
|
2568 | return u'<%s => %s >' % (self.user, self.permission) | |
2569 |
|
2569 | |||
2570 |
|
2570 | |||
2571 | class UserGroupRepoToPerm(Base, BaseModel): |
|
2571 | class UserGroupRepoToPerm(Base, BaseModel): | |
2572 | __tablename__ = 'users_group_repo_to_perm' |
|
2572 | __tablename__ = 'users_group_repo_to_perm' | |
2573 | __table_args__ = ( |
|
2573 | __table_args__ = ( | |
2574 | UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), |
|
2574 | UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), | |
2575 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
2575 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
2576 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} |
|
2576 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
2577 | ) |
|
2577 | ) | |
2578 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2578 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
2579 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) |
|
2579 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
2580 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) |
|
2580 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
2581 |     repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2582 |
2583 |     users_group = relationship('UserGroup')
2584 |     permission = relationship('Permission')
2585 |     repository = relationship('Repository')
2586 |
2587 |     @classmethod
2588 |     def create(cls, users_group, repository, permission):
2589 |         n = cls()
2590 |         n.users_group = users_group
2591 |         n.repository = repository
2592 |         n.permission = permission
2593 |         Session().add(n)
2594 |         return n
2595 |
2596 |     def __unicode__(self):
2597 |         return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2598 |
2599 |
2600 | class UserGroupUserGroupToPerm(Base, BaseModel):
2601 |     __tablename__ = 'user_group_user_group_to_perm'
2602 |     __table_args__ = (
2603 |         UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2604 |         CheckConstraint('target_user_group_id != user_group_id'),
2605 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
2606 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2607 |     )
2608 |     user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2609 |     target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2610 |     permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2611 |     user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2612 |
2613 |     target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2614 |     user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2615 |     permission = relationship('Permission')
2616 |
2617 |     @classmethod
2618 |     def create(cls, target_user_group, user_group, permission):
2619 |         n = cls()
2620 |         n.target_user_group = target_user_group
2621 |         n.user_group = user_group
2622 |         n.permission = permission
2623 |         Session().add(n)
2624 |         return n
2625 |
2626 |     def __unicode__(self):
2627 |         return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2628 |
2629 |
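# Illustrative sketch (not part of the original file): granting one user group
# a permission on another through the create() classmethod above. The
# `target_group`, `member_group` and `perm` objects are assumed to be
# already-loaded UserGroup and Permission rows; create() only adds the new row
# to the session, so the caller is expected to commit.
grant = UserGroupUserGroupToPerm.create(target_group, member_group, perm)
Session().commit()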
2630 | class UserGroupToPerm(Base, BaseModel):
2631 |     __tablename__ = 'users_group_to_perm'
2632 |     __table_args__ = (
2633 |         UniqueConstraint('users_group_id', 'permission_id',),
2634 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
2635 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2636 |     )
2637 |     users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2638 |     users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2639 |     permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2640 |
2641 |     users_group = relationship('UserGroup')
2642 |     permission = relationship('Permission')
2643 |
2644 |
2645 | class UserRepoGroupToPerm(Base, BaseModel):
2646 |     __tablename__ = 'user_repo_group_to_perm'
2647 |     __table_args__ = (
2648 |         UniqueConstraint('user_id', 'group_id', 'permission_id'),
2649 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
2650 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2651 |     )
2652 |
2653 |     group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2654 |     user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2655 |     group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2656 |     permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2657 |
2658 |     user = relationship('User')
2659 |     group = relationship('RepoGroup')
2660 |     permission = relationship('Permission')
2661 |
2662 |     @classmethod
2663 |     def create(cls, user, repository_group, permission):
2664 |         n = cls()
2665 |         n.user = user
2666 |         n.group = repository_group
2667 |         n.permission = permission
2668 |         Session().add(n)
2669 |         return n
2670 |
2671 |
2672 | class UserGroupRepoGroupToPerm(Base, BaseModel):
2673 |     __tablename__ = 'users_group_repo_group_to_perm'
2674 |     __table_args__ = (
2675 |         UniqueConstraint('users_group_id', 'group_id'),
2676 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
2677 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2678 |     )
2679 |
2680 |     users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2681 |     users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2682 |     group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2683 |     permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2684 |
2685 |     users_group = relationship('UserGroup')
2686 |     permission = relationship('Permission')
2687 |     group = relationship('RepoGroup')
2688 |
2689 |     @classmethod
2690 |     def create(cls, user_group, repository_group, permission):
2691 |         n = cls()
2692 |         n.users_group = user_group
2693 |         n.group = repository_group
2694 |         n.permission = permission
2695 |         Session().add(n)
2696 |         return n
2697 |
2698 |     def __unicode__(self):
2699 |         return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
2700 |
2701 |
2702 | class Statistics(Base, BaseModel):
2703 |     __tablename__ = 'statistics'
2704 |     __table_args__ = (
2705 |         UniqueConstraint('repository_id'),
2706 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
2707 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2708 |     )
2709 |     stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2710 |     repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
2711 |     stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
2712 |     commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
2713 |     commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
2714 |     languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
2715 |
2716 |     repository = relationship('Repository', single_parent=True)
2717 |
2718 |
2719 | class UserFollowing(Base, BaseModel):
2720 |     __tablename__ = 'user_followings'
2721 |     __table_args__ = (
2722 |         UniqueConstraint('user_id', 'follows_repository_id'),
2723 |         UniqueConstraint('user_id', 'follows_user_id'),
2724 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
2725 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2726 |     )
2727 |
2728 |     user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2729 |     user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2730 |     follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
2731 |     follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
2732 |     follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2733 |
2734 |     user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
2735 |
2736 |     follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
2737 |     follows_repository = relationship('Repository', order_by='Repository.repo_name')
2738 |
2739 |     @classmethod
2740 |     def get_repo_followers(cls, repo_id):
2741 |         return cls.query().filter(cls.follows_repo_id == repo_id)
2742 |
2743 |
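# Illustrative sketch (not part of the original file): get_repo_followers()
# returns a query rather than a list, so it can be counted or refined before
# being materialized. `repo` is assumed to be an already-loaded Repository row.
followers_query = UserFollowing.get_repo_followers(repo.repo_id)
follower_count = followers_query.count()
follower_users = [following.user for following in followers_query.all()]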
2744 | class CacheKey(Base, BaseModel):
2745 |     __tablename__ = 'cache_invalidation'
2746 |     __table_args__ = (
2747 |         UniqueConstraint('cache_key'),
2748 |         Index('key_idx', 'cache_key'),
2749 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
2750 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2751 |     )
2752 |     CACHE_TYPE_ATOM = 'ATOM'
2753 |     CACHE_TYPE_RSS = 'RSS'
2754 |     CACHE_TYPE_README = 'README'
2755 |
2756 |     cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2757 |     cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
2758 |     cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
2759 |     cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
2760 |
2761 |     def __init__(self, cache_key, cache_args=''):
2762 |         self.cache_key = cache_key
2763 |         self.cache_args = cache_args
2764 |         self.cache_active = False
2765 |
2766 |     def __unicode__(self):
2767 |         return u"<%s('%s:%s[%s]')>" % (
2768 |             self.__class__.__name__,
2769 |             self.cache_id, self.cache_key, self.cache_active)
2770 |
2771 |     def _cache_key_partition(self):
2772 |         prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
2773 |         return prefix, repo_name, suffix
2774 |
2775 |     def get_prefix(self):
2776 |         """
2777 |         Try to extract the prefix from an existing cache key. The key can
2778 |         consist of a prefix, repo_name and suffix.
2779 |         """
2780 |         # this returns prefix, repo_name, suffix
2781 |         return self._cache_key_partition()[0]
2782 |
2783 |     def get_suffix(self):
2784 |         """
2785 |         Get the suffix that might have been used in _get_cache_key to
2786 |         generate self.cache_key. Only used for informational purposes
2787 |         in repo_edit.html.
2788 |         """
2789 |         # prefix, repo_name, suffix
2790 |         return self._cache_key_partition()[2]
2791 |
2792 |     @classmethod
2793 |     def delete_all_cache(cls):
2794 |         """
2795 |         Delete all cache keys from the database.
2796 |         Should only be run when all instances are down and all entries
2797 |         are thus stale.
2798 |         """
2799 |         cls.query().delete()
2800 |         Session().commit()
2801 |
2802 |     @classmethod
2803 |     def get_cache_key(cls, repo_name, cache_type):
2804 |         """
2805 |
2806 |         Generate a cache key for this process of the RhodeCode instance.
2807 |         The prefix will most likely be the process id, or an instance_id
2808 |         explicitly set in the .ini file.
2809 |         """
2810 |         import rhodecode
2811 |         prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
2812 |
2813 |         repo_as_unicode = safe_unicode(repo_name)
2814 |         key = u'{}_{}'.format(repo_as_unicode, cache_type) \
2815 |             if cache_type else repo_as_unicode
2816 |
2817 |         return u'{}{}'.format(prefix, key)
2818 |
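# Illustrative sketch (not part of the original file): with a hypothetical
# `instance_id = prod-1` set in the .ini file, get_cache_key() simply glues the
# prefix onto '<repo_name>_<cache_type>':
#
#     CacheKey.get_cache_key('group/myrepo', CacheKey.CACHE_TYPE_README)
#     # -> u'prod-1group/myrepo_README'
#
# and when cache_type is falsy the key is just the prefixed repository name.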
2819 |     @classmethod
2820 |     def set_invalidate(cls, repo_name, delete=False):
2821 |         """
2822 |         Mark all caches of a repo as invalid in the database.
2823 |         """
2824 |
2825 |         try:
2826 |             qry = Session().query(cls).filter(cls.cache_args == repo_name)
2827 |             if delete:
2828 |                 log.debug('cache objects deleted for repo %s',
2829 |                           safe_str(repo_name))
2830 |                 qry.delete()
2831 |             else:
2832 |                 log.debug('cache objects marked as invalid for repo %s',
2833 |                           safe_str(repo_name))
2834 |                 qry.update({"cache_active": False})
2835 |
2836 |             Session().commit()
2837 |         except Exception:
2838 |             log.exception(
2839 |                 'Cache key invalidation failed for repository %s',
2840 |                 safe_str(repo_name))
2841 |             Session().rollback()
2842 |
2843 |     @classmethod
2844 |     def get_active_cache(cls, cache_key):
2845 |         inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
2846 |         if inv_obj:
2847 |             return inv_obj
2848 |         return None
2849 |
2850 |     @classmethod
2851 |     def repo_context_cache(cls, compute_func, repo_name, cache_type,
2852 |                            thread_scoped=False):
2853 |         """
2854 |         @cache_region('long_term')
2855 |         def _heavy_calculation(cache_key):
2856 |             return 'result'
2857 |
2858 |         cache_context = CacheKey.repo_context_cache(
2859 |             _heavy_calculation, repo_name, cache_type)
2860 |
2861 |         with cache_context as context:
2862 |             context.invalidate()
2863 |             computed = context.compute()
2864 |
2865 |         assert computed == 'result'
2866 |         """
2867 |         from rhodecode.lib import caches
2868 |         return caches.InvalidationContext(
2869 |             compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
2870 |
2871 |
2872 | class ChangesetComment(Base, BaseModel):
2873 |     __tablename__ = 'changeset_comments'
2874 |     __table_args__ = (
2875 |         Index('cc_revision_idx', 'revision'),
2876 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
2877 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2878 |     )
2879 |
2880 |     COMMENT_OUTDATED = u'comment_outdated'
2881 |
2882 |     comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
2883 |     repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2884 |     revision = Column('revision', String(40), nullable=True)
2885 |     pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2886 |     pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
2887 |     line_no = Column('line_no', Unicode(10), nullable=True)
2888 |     hl_lines = Column('hl_lines', Unicode(512), nullable=True)
2889 |     f_path = Column('f_path', Unicode(1000), nullable=True)
2890 |     user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
2891 |     text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
2892 |     created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2893 |     modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2894 |     renderer = Column('renderer', Unicode(64), nullable=True)
2895 |     display_state = Column('display_state', Unicode(128), nullable=True)
2896 |
2897 |     author = relationship('User', lazy='joined')
2898 |     repo = relationship('Repository')
2899 |     status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
2900 |     pull_request = relationship('PullRequest', lazy='joined')
2901 |     pull_request_version = relationship('PullRequestVersion')
2902 |
2903 |     @classmethod
2904 |     def get_users(cls, revision=None, pull_request_id=None):
2905 |         """
2906 |         Returns users associated with this ChangesetComment, i.e. those
2907 |         who actually commented.
2908 |
2909 |         :param revision: commit hash of the changeset the comments belong to
2910 |         :param pull_request_id: id of the pull request the comments belong to
2911 |         """
2912 |         q = Session().query(User)\
2913 |             .join(ChangesetComment.author)
2914 |         if revision:
2915 |             q = q.filter(cls.revision == revision)
2916 |         elif pull_request_id:
2917 |             q = q.filter(cls.pull_request_id == pull_request_id)
2918 |         return q.all()
2919 |
2920 |     def render(self, mentions=False):
2921 |         from rhodecode.lib import helpers as h
2922 |         return h.render(self.text, renderer=self.renderer, mentions=mentions)
2923 |
2924 |     def __repr__(self):
2925 |         if self.comment_id:
2926 |             return '<DB:ChangesetComment #%s>' % self.comment_id
2927 |         else:
2928 |             return '<DB:ChangesetComment at %#x>' % id(self)
2929 |
2930 |
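# Illustrative sketch (not part of the original file): get_users() filters on
# either a commit hash or a pull request id, never both. The values below are
# hypothetical placeholders.
commit_commenters = ChangesetComment.get_users(revision='a' * 40)
pr_commenters = ChangesetComment.get_users(pull_request_id=42)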
2931 | class ChangesetStatus(Base, BaseModel):
2932 |     __tablename__ = 'changeset_statuses'
2933 |     __table_args__ = (
2934 |         Index('cs_revision_idx', 'revision'),
2935 |         Index('cs_version_idx', 'version'),
2936 |         UniqueConstraint('repo_id', 'revision', 'version'),
2937 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
2938 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2939 |     )
2940 |     STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
2941 |     STATUS_APPROVED = 'approved'
2942 |     STATUS_REJECTED = 'rejected'
2943 |     STATUS_UNDER_REVIEW = 'under_review'
2944 |
2945 |     STATUSES = [
2946 |         (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
2947 |         (STATUS_APPROVED, _("Approved")),
2948 |         (STATUS_REJECTED, _("Rejected")),
2949 |         (STATUS_UNDER_REVIEW, _("Under Review")),
2950 |     ]
2951 |
2952 |     changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
2953 |     repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2954 |     user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
2955 |     revision = Column('revision', String(40), nullable=False)
2956 |     status = Column('status', String(128), nullable=False, default=DEFAULT)
2957 |     changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
2958 |     modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
2959 |     version = Column('version', Integer(), nullable=False, default=0)
2960 |     pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2961 |
2962 |     author = relationship('User', lazy='joined')
2963 |     repo = relationship('Repository')
2964 |     comment = relationship('ChangesetComment', lazy='joined')
2965 |     pull_request = relationship('PullRequest', lazy='joined')
2966 |
2967 |     def __unicode__(self):
2968 |         return u"<%s('%s[%s]:%s')>" % (
2969 |             self.__class__.__name__,
2970 |             self.status, self.version, self.author
2971 |         )
2972 |
2973 |     @classmethod
2974 |     def get_status_lbl(cls, value):
2975 |         return dict(cls.STATUSES).get(value)
2976 |
2977 |     @property
2978 |     def status_lbl(self):
2979 |         return ChangesetStatus.get_status_lbl(self.status)
2980 |
2981 |
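# Illustrative sketch (not part of the original file): STATUSES pairs each
# machine value with a translated label, so the lookup returns the label for a
# known value and None for anything else:
#
#     ChangesetStatus.get_status_lbl(ChangesetStatus.STATUS_APPROVED)
#     # -> _("Approved")
#     ChangesetStatus.get_status_lbl('unknown_value')
#     # -> None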
2982 | class _PullRequestBase(BaseModel):
2983 |     """
2984 |     Common attributes of pull request and version entries.
2985 |     """
2986 |
2987 |     # .status values
2988 |     STATUS_NEW = u'new'
2989 |     STATUS_OPEN = u'open'
2990 |     STATUS_CLOSED = u'closed'
2991 |
2992 |     title = Column('title', Unicode(255), nullable=True)
2993 |     description = Column(
2994 |         'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
2995 |         nullable=True)
2996 |     # new/open/closed status of pull request (not approve/reject/etc)
2997 |     status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
2998 |     created_on = Column(
2999 |         'created_on', DateTime(timezone=False), nullable=False,
3000 |         default=datetime.datetime.now)
3001 |     updated_on = Column(
3002 |         'updated_on', DateTime(timezone=False), nullable=False,
3003 |         default=datetime.datetime.now)
3004 |
3005 |     @declared_attr
3006 |     def user_id(cls):
3007 |         return Column(
3008 |             "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3009 |             unique=None)
3010 |
3011 |     # 500 revisions max
3012 |     _revisions = Column(
3013 |         'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3014 |
3015 |     @declared_attr
3016 |     def source_repo_id(cls):
3017 |         # TODO: dan: rename column to source_repo_id
3018 |         return Column(
3019 |             'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3020 |             nullable=False)
3021 |
3022 |     source_ref = Column('org_ref', Unicode(255), nullable=False)
3023 |
3024 |     @declared_attr
3025 |     def target_repo_id(cls):
3026 |         # TODO: dan: rename column to target_repo_id
3027 |         return Column(
3028 |             'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3029 |             nullable=False)
3030 |
3031 |     target_ref = Column('other_ref', Unicode(255), nullable=False)
3032 |
3033 |     # TODO: dan: rename column to last_merge_source_rev
3034 |     _last_merge_source_rev = Column(
3035 |         'last_merge_org_rev', String(40), nullable=True)
3036 |     # TODO: dan: rename column to last_merge_target_rev
3037 |     _last_merge_target_rev = Column(
3038 |         'last_merge_other_rev', String(40), nullable=True)
3039 |     _last_merge_status = Column('merge_status', Integer(), nullable=True)
3040 |     merge_rev = Column('merge_rev', String(40), nullable=True)
3041 |
3042 |     @hybrid_property
3043 |     def revisions(self):
3044 |         return self._revisions.split(':') if self._revisions else []
3045 |
3046 |     @revisions.setter
3047 |     def revisions(self, val):
3048 |         self._revisions = ':'.join(val)
3049 |
3050 |     @declared_attr
3051 |     def author(cls):
3052 |         return relationship('User', lazy='joined')
3053 |
3054 |     @declared_attr
3055 |     def source_repo(cls):
3056 |         return relationship(
3057 |             'Repository',
3058 |             primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3059 |
3060 |     @property
3061 |     def source_ref_parts(self):
3062 |         refs = self.source_ref.split(':')
3063 |         return Reference(refs[0], refs[1], refs[2])
3064 |
3065 |     @declared_attr
3066 |     def target_repo(cls):
3067 |         return relationship(
3068 |             'Repository',
3069 |             primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3070 |
3071 |     @property
3072 |     def target_ref_parts(self):
3073 |         refs = self.target_ref.split(':')
3074 |         return Reference(refs[0], refs[1], refs[2])
3075 |
3076 |
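# Illustrative sketch (not part of the original file): the ref columns hold
# 'type:name:commit_id' strings and _revisions holds a ':'-joined list, which
# the properties above unpack. The values are hypothetical, and the field
# order assumes the imported Reference tuple is (type, name, commit_id):
#
#     source_ref = u'branch:feature-x:1ab2c3d4e5f6'
#     source_ref_parts  # -> Reference('branch', 'feature-x', '1ab2c3d4e5f6')
#     revisions = ['1ab2c3d4e5f6', '9f8e7d6c5b4a']
#     _revisions        # -> u'1ab2c3d4e5f6:9f8e7d6c5b4a'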
3077 | class PullRequest(Base, _PullRequestBase):
3078 |     __tablename__ = 'pull_requests'
3079 |     __table_args__ = (
3080 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
3081 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3082 |     )
3083 |
3084 |     pull_request_id = Column(
3085 |         'pull_request_id', Integer(), nullable=False, primary_key=True)
3086 |
3087 |     def __repr__(self):
3088 |         if self.pull_request_id:
3089 |             return '<DB:PullRequest #%s>' % self.pull_request_id
3090 |         else:
3091 |             return '<DB:PullRequest at %#x>' % id(self)
3092 |
3093 |     reviewers = relationship('PullRequestReviewers',
3094 |                              cascade="all, delete, delete-orphan")
3095 |     statuses = relationship('ChangesetStatus')
3096 |     comments = relationship('ChangesetComment',
3097 |                             cascade="all, delete, delete-orphan")
3098 |     versions = relationship('PullRequestVersion',
3099 |                             cascade="all, delete, delete-orphan")
3100 |
3101 |     def is_closed(self):
3102 |         return self.status == self.STATUS_CLOSED
3103 |
3104 |     def get_api_data(self):
3105 |         from rhodecode.model.pull_request import PullRequestModel
3106 |         pull_request = self
3107 |         merge_status = PullRequestModel().merge_status(pull_request)
3108 |         data = {
3109 |             'pull_request_id': pull_request.pull_request_id,
3110 |             'url': url('pullrequest_show', repo_name=self.target_repo.repo_name,
3111 |                        pull_request_id=self.pull_request_id,
3112 |                        qualified=True),
3113 |             'title': pull_request.title,
3114 |             'description': pull_request.description,
3115 |             'status': pull_request.status,
3116 |             'created_on': pull_request.created_on,
3117 |             'updated_on': pull_request.updated_on,
3118 |             'commit_ids': pull_request.revisions,
3119 |             'review_status': pull_request.calculated_review_status(),
3120 |             'mergeable': {
3121 |                 'status': merge_status[0],
3122 |                 'message': unicode(merge_status[1]),
3123 |             },
3124 |             'source': {
3125 |                 'clone_url': pull_request.source_repo.clone_url(),
3126 |                 'repository': pull_request.source_repo.repo_name,
3127 |                 'reference': {
3128 |                     'name': pull_request.source_ref_parts.name,
3129 |                     'type': pull_request.source_ref_parts.type,
3130 |                     'commit_id': pull_request.source_ref_parts.commit_id,
3131 |                 },
3132 |             },
3133 |             'target': {
3134 |                 'clone_url': pull_request.target_repo.clone_url(),
3135 |                 'repository': pull_request.target_repo.repo_name,
3136 |                 'reference': {
3137 |                     'name': pull_request.target_ref_parts.name,
3138 |                     'type': pull_request.target_ref_parts.type,
3139 |                     'commit_id': pull_request.target_ref_parts.commit_id,
3140 |                 },
3141 |             },
3142 |             'author': pull_request.author.get_api_data(include_secrets=False,
3143 |                                                        details='basic'),
3144 |             'reviewers': [
3145 |                 {
3146 |                     'user': reviewer.get_api_data(include_secrets=False,
3147 |                                                   details='basic'),
3148 |                     'review_status': st[0][1].status if st else 'not_reviewed',
3149 |                 }
3150 |                 for reviewer, st in pull_request.reviewers_statuses()
3151 |             ]
3152 |         }
3153 |
3154 |         return data
3155 |
3156 |     def __json__(self):
3157 |         return {
3158 |             'revisions': self.revisions,
3159 |         }
3160 |
3161 |     def calculated_review_status(self):
3162 |         # TODO: anderson: 13.05.15 Used only on templates/my_account_pullrequests.html
3163 |         # because it's tricky on how to use ChangesetStatusModel from there
3164 |         warnings.warn("Use calculated_review_status from ChangesetStatusModel", DeprecationWarning)
3165 |         from rhodecode.model.changeset_status import ChangesetStatusModel
3166 |         return ChangesetStatusModel().calculated_review_status(self)
3167 |
3168 |     def reviewers_statuses(self):
3169 |         warnings.warn("Use reviewers_statuses from ChangesetStatusModel", DeprecationWarning)
3170 |         from rhodecode.model.changeset_status import ChangesetStatusModel
3171 |         return ChangesetStatusModel().reviewers_statuses(self)
3172 |
3173 |
3174 | class PullRequestVersion(Base, _PullRequestBase):
3175 |     __tablename__ = 'pull_request_versions'
3176 |     __table_args__ = (
3177 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
3178 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3179 |     )
3180 |
3181 |     pull_request_version_id = Column(
3182 |         'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3183 |     pull_request_id = Column(
3184 |         'pull_request_id', Integer(),
3185 |         ForeignKey('pull_requests.pull_request_id'), nullable=False)
3186 |     pull_request = relationship('PullRequest')
3187 |
3188 |     def __repr__(self):
3189 |         if self.pull_request_version_id:
3190 |             return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3191 |         else:
3192 |             return '<DB:PullRequestVersion at %#x>' % id(self)
3193 |
3194 |
3195 | class PullRequestReviewers(Base, BaseModel):
3196 |     __tablename__ = 'pull_request_reviewers'
3197 |     __table_args__ = (
3198 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
3199 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3200 |     )
3201 |
3202 |     def __init__(self, user=None, pull_request=None):
3203 |         self.user = user
3204 |         self.pull_request = pull_request
3205 |
3206 |     pull_requests_reviewers_id = Column(
3207 |         'pull_requests_reviewers_id', Integer(), nullable=False,
3208 |         primary_key=True)
3209 |     pull_request_id = Column(
3210 |         "pull_request_id", Integer(),
3211 |         ForeignKey('pull_requests.pull_request_id'), nullable=False)
3212 |     user_id = Column(
3213 |         "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3214 |
3215 |     user = relationship('User')
3216 |     pull_request = relationship('PullRequest')
3217 |
3218 |
3219 | class Notification(Base, BaseModel):
3220 |     __tablename__ = 'notifications'
3221 |     __table_args__ = (
3222 |         Index('notification_type_idx', 'type'),
3223 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
3224 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3225 |     )
3226 |
3227 |     TYPE_CHANGESET_COMMENT = u'cs_comment'
3228 |     TYPE_MESSAGE = u'message'
3229 |     TYPE_MENTION = u'mention'
3230 |     TYPE_REGISTRATION = u'registration'
3231 |     TYPE_PULL_REQUEST = u'pull_request'
3232 |     TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3233 |
3234 |     notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3235 |     subject = Column('subject', Unicode(512), nullable=True)
3236 |     body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3237 |     created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3238 |     created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3239 |     type_ = Column('type', Unicode(255))
3240 |
3241 |     created_by_user = relationship('User')
3242 |     notifications_to_users = relationship('UserNotification', lazy='joined',
3243 |                                           cascade="all, delete, delete-orphan")
3244 |
3245 |     @property
3246 |     def recipients(self):
3247 |         return [x.user for x in UserNotification.query()\
3248 |                 .filter(UserNotification.notification == self)\
3249 |                 .order_by(UserNotification.user_id.asc()).all()]
3250 |
3251 |     @classmethod
3252 |     def create(cls, created_by, subject, body, recipients, type_=None):
3253 |         if type_ is None:
3254 |             type_ = Notification.TYPE_MESSAGE
3255 |
3256 |         notification = cls()
3257 |         notification.created_by_user = created_by
3258 |         notification.subject = subject
3259 |         notification.body = body
3260 |         notification.type_ = type_
3261 |         notification.created_on = datetime.datetime.now()
3262 |
3263 |         for u in recipients:
3264 |             assoc = UserNotification()
3265 |             assoc.notification = notification
3266 |
3267 |             # if created_by is among the recipients, mark their own copy
3268 |             # of the notification as read
3269 |             if u.user_id == created_by.user_id:
3270 |                 assoc.read = True
3271 |
3272 |             u.notifications.append(assoc)
3273 |         Session().add(notification)
3274 |
3275 |         return notification
3276 |
3277 | @property |
|
3277 | @property | |
3278 | def description(self): |
|
3278 | def description(self): | |
3279 | from rhodecode.model.notification import NotificationModel |
|
3279 | from rhodecode.model.notification import NotificationModel | |
3280 | return NotificationModel().make_description(self) |
|
3280 | return NotificationModel().make_description(self) | |
3281 |
|
3281 | |||
3282 |
|
3282 | |||
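Note (not part of the diff): a hedged usage sketch for Notification.create(). The sender and reviewer variables are placeholders; create() only add()s the new rows, so the caller commits, and the sender's own UserNotification row is pre-marked as read by the loop above.

notification = Notification.create(
    created_by=sender,
    subject=u'build finished',
    body=u'all tests passed on the default branch',
    recipients=[sender, reviewer],       # sender's own copy is marked read
    type_=Notification.TYPE_MESSAGE)     # also the default when type_ is omitted
Session().commit()

# per-recipient read state lives in UserNotification; ordered by user_id here
people = notification.recipients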
3283 | class UserNotification(Base, BaseModel):
3284 |     __tablename__ = 'user_to_notification'
3285 |     __table_args__ = (
3286 |         UniqueConstraint('user_id', 'notification_id'),
3287 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
3288 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3289 |     )
3290 |     user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3291 |     notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3292 |     read = Column('read', Boolean, default=False)
3293 |     sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3294 |
3295 |     user = relationship('User', lazy="joined")
3296 |     notification = relationship('Notification', lazy="joined",
3297 |                                 order_by=lambda: Notification.created_on.desc(),)
3298 |
3299 |     def mark_as_read(self):
3300 |         self.read = True
3301 |         Session().add(self)
3302 |
3303 |
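Note (not part of the diff): UserNotification is the per-recipient flag row; marking something as read touches only this table, never the shared Notification. A sketch, with user as a placeholder User instance:

unread = UserNotification.query()\
    .filter(UserNotification.user == user)\
    .filter(UserNotification.read == False).all()
for row in unread:
    row.mark_as_read()      # sets read=True and add()s the row to the session
Session().commit()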
3304 | class Gist(Base, BaseModel):
3305 |     __tablename__ = 'gists'
3306 |     __table_args__ = (
3307 |         Index('g_gist_access_id_idx', 'gist_access_id'),
3308 |         Index('g_created_on_idx', 'created_on'),
3309 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
3310 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3311 |     )
3312 |     GIST_PUBLIC = u'public'
3313 |     GIST_PRIVATE = u'private'
3314 |     DEFAULT_FILENAME = u'gistfile1.txt'
3315 |
3316 |     ACL_LEVEL_PUBLIC = u'acl_public'
3317 |     ACL_LEVEL_PRIVATE = u'acl_private'
3318 |
3319 |     gist_id = Column('gist_id', Integer(), primary_key=True)
3320 |     gist_access_id = Column('gist_access_id', Unicode(250))
3321 |     gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3322 |     gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3323 |     gist_expires = Column('gist_expires', Float(53), nullable=False)
3324 |     gist_type = Column('gist_type', Unicode(128), nullable=False)
3325 |     created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3326 |     modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3327 |     acl_level = Column('acl_level', Unicode(128), nullable=True)
3328 |
3329 |     owner = relationship('User')
3330 |
3331 |     def __repr__(self):
3332 |         return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3333 |
3334 |     @classmethod
3335 |     def get_or_404(cls, id_):
3336 |         res = cls.query().filter(cls.gist_access_id == id_).scalar()
3337 |         if not res:
3338 |             raise HTTPNotFound
3339 |         return res
3340 |
3341 |     @classmethod
3342 |     def get_by_access_id(cls, gist_access_id):
3343 |         return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3344 |
3345 |     def gist_url(self):
3346 |         import rhodecode
3347 |         alias_url = rhodecode.CONFIG.get('gist_alias_url')
3348 |         if alias_url:
3349 |             return alias_url.replace('{gistid}', self.gist_access_id)
3350 |
3351 |         return url('gist', gist_id=self.gist_access_id, qualified=True)
3352 |
3353 |     @classmethod
3354 |     def base_path(cls):
3355 |         """
3356 |         Returns base path when all gists are stored
3357 |
3358 |         :param cls:
3359 |         """
3360 |         from rhodecode.model.gist import GIST_STORE_LOC
3361 |         q = Session().query(RhodeCodeUi)\
3362 |             .filter(RhodeCodeUi.ui_key == URL_SEP)
3363 |         q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3364 |         return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3365 |
3366 |     def get_api_data(self):
3367 |         """
3368 |         Common function for generating gist related data for API
3369 |         """
3370 |         gist = self
3371 |         data = {
3372 |             'gist_id': gist.gist_id,
3373 |             'type': gist.gist_type,
3374 |             'access_id': gist.gist_access_id,
3375 |             'description': gist.gist_description,
3376 |             'url': gist.gist_url(),
3377 |             'expires': gist.gist_expires,
3378 |             'created_on': gist.created_on,
3379 |             'modified_at': gist.modified_at,
3380 |             'content': None,
3381 |             'acl_level': gist.acl_level,
3382 |         }
3383 |         return data
3384 |
3385 |     def __json__(self):
3386 |         data = dict(
3387 |         )
3388 |         data.update(self.get_api_data())
3389 |         return data
3390 |     # SCM functions
3391 |
3392 |     def scm_instance(self, **kwargs):
3393 |         full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
3394 |         return get_vcs_instance(
3395 |             repo_path=safe_str(full_repo_path), create=False)
3396 |
3397 |
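Note (not part of the diff): a sketch of the gist read path. The access id is a hypothetical value; gist_url() prefers the optional gist_alias_url setting, and get_api_data() leaves 'content' as None so callers decide whether to load file data through scm_instance().

gist = Gist.get_by_access_id(u'abc123')   # hypothetical id; returns None if missing
if gist is not None and gist.gist_type == Gist.GIST_PUBLIC:
    link = gist.gist_url()                # alias URL or the routed 'gist' URL
    payload = gist.get_api_data()         # plain dict, 'content' stays None
    vcs_repo = gist.scm_instance()        # backing repository under base_path()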
3398 | class DbMigrateVersion(Base, BaseModel):
3399 |     __tablename__ = 'db_migrate_version'
3400 |     __table_args__ = (
3401 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
3402 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3403 |     )
3404 |     repository_id = Column('repository_id', String(250), primary_key=True)
3405 |     repository_path = Column('repository_path', Text)
3406 |     version = Column('version', Integer)
3407 |
3408 |
3409 | class ExternalIdentity(Base, BaseModel):
3410 |     __tablename__ = 'external_identities'
3411 |     __table_args__ = (
3412 |         Index('local_user_id_idx', 'local_user_id'),
3413 |         Index('external_id_idx', 'external_id'),
3414 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
3415 |          'mysql_charset': 'utf8'})
3416 |
3417 |     external_id = Column('external_id', Unicode(255), default=u'',
3418 |                          primary_key=True)
3419 |     external_username = Column('external_username', Unicode(1024), default=u'')
3420 |     local_user_id = Column('local_user_id', Integer(),
3421 |                            ForeignKey('users.user_id'), primary_key=True)
3422 |     provider_name = Column('provider_name', Unicode(255), default=u'',
3423 |                            primary_key=True)
3424 |     access_token = Column('access_token', String(1024), default=u'')
3425 |     alt_token = Column('alt_token', String(1024), default=u'')
3426 |     token_secret = Column('token_secret', String(1024), default=u'')
3427 |
3428 |     @classmethod
3429 |     def by_external_id_and_provider(cls, external_id, provider_name,
3430 |                                     local_user_id=None):
3431 |         """
3432 |         Returns ExternalIdentity instance based on search params
3433 |
3434 |         :param external_id:
3435 |         :param provider_name:
3436 |         :return: ExternalIdentity
3437 |         """
3438 |         query = cls.query()
3439 |         query = query.filter(cls.external_id == external_id)
3440 |         query = query.filter(cls.provider_name == provider_name)
3441 |         if local_user_id:
3442 |             query = query.filter(cls.local_user_id == local_user_id)
3443 |         return query.first()
3444 |
3445 |     @classmethod
3446 |     def user_by_external_id_and_provider(cls, external_id, provider_name):
3447 |         """
3448 |         Returns User instance based on search params
3449 |
3450 |         :param external_id:
3451 |         :param provider_name:
3452 |         :return: User
3453 |         """
3454 |         query = User.query()
3455 |         query = query.filter(cls.external_id == external_id)
3456 |         query = query.filter(cls.provider_name == provider_name)
3457 |         query = query.filter(User.user_id == cls.local_user_id)
3458 |         return query.first()
3459 |
3460 |     @classmethod
3461 |     def by_local_user_id(cls, local_user_id):
3462 |         """
3463 |         Returns all tokens for user
3464 |
3465 |         :param local_user_id:
3466 |         :return: ExternalIdentity
3467 |         """
3468 |         query = cls.query()
3469 |         query = query.filter(cls.local_user_id == local_user_id)
3470 |         return query
3471 |
3472 |
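Note (not part of the diff): the three classmethods cover the lookups an external auth plugin needs. A sketch; the provider name, external id and user id below are placeholders that depend on the configured plugin:

# the bound identity row (or None) for one provider account
identity = ExternalIdentity.by_external_id_and_provider(
    external_id=u'12345', provider_name=u'github')

# resolve straight to the local User via the join on local_user_id
user = ExternalIdentity.user_by_external_id_and_provider(
    external_id=u'12345', provider_name=u'github')

# all stored identities/tokens of one local account (a query, not a list)
tokens = ExternalIdentity.by_local_user_id(local_user_id=42).all()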
3473 | class Integration(Base, BaseModel):
3474 |     __tablename__ = 'integrations'
3475 |     __table_args__ = (
3476 |         {'extend_existing': True, 'mysql_engine': 'InnoDB',
3477 |          'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3478 |     )
3479 |
3480 |     integration_id = Column('integration_id', Integer(), primary_key=True)
3481 |     integration_type = Column('integration_type', String(255))
3482 |     enabled = Column('enabled', Boolean(), nullable=False)
3483 |     name = Column('name', String(255), nullable=False)
3484 |
3485 |     settings = Column(
3486 |         'settings_json', MutationObj.as_mutable(
3487 |             JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3488 |     repo_id = Column(
3489 |         'repo_id', Integer(), ForeignKey('repositories.repo_id'),
3490 |         nullable=True, unique=None, default=None)
3491 |     repo = relationship('Repository', lazy='joined')
3492 |
3493 |     repo_group_id = Column(
3494 |         'repo_group_id', Integer(), ForeignKey('groups.group_id'),
3495 |         nullable=True, unique=None, default=None)
3496 |     repo_group = relationship('RepoGroup', lazy='joined')
3497 |
3498 |     def __repr__(self):
3499 |         if self.repo:
3500 |             scope = 'repo=%r' % self.repo
3501 |         elif self.repo_group:
3502 |             scope = 'repo_group=%r' % self.repo_group
3503 |         else:
3504 |             scope = 'global'
3505 |
3506 |         return '<Integration(%r, %r)>' % (self.integration_type, scope)
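Note (not part of the diff): an Integration row is scoped by whichever of repo / repo_group is set, otherwise it is global, and __repr__ reports that scope. A sketch of a repository-scoped row; the integration_type value and the settings keys are hypothetical (real ones come from the integrations registry), and Repository.get_by_repo_name is assumed from earlier in this file:

integration = Integration()
integration.integration_type = 'slack'        # hypothetical type key
integration.name = 'notify #dev'
integration.enabled = True
integration.settings = {'webhook_url': 'https://hooks.example.invalid/x'}  # stored as mutable JSON
integration.repo = Repository.get_by_repo_name('group/repo')  # assumed helper
Session().add(integration)
Session().commit()
# repr(integration) now includes the integration type and the repo scope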
NO CONTENT: 11 more modified files (diff too big, content truncated)
NO CONTENT: file renamed from rhodecode/templates/admin/integrations/edit.html to rhodecode/templates/admin/integrations/form.html