@@ -0,0 +1,3529 @@ | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | """ | |
|
22 | Database Models for RhodeCode Enterprise | |
|
23 | """ | |
|
24 | ||
|
25 | import os | |
|
26 | import sys | |
|
27 | import time | |
|
28 | import hashlib | |
|
29 | import logging | |
|
30 | import datetime | |
|
31 | import warnings | |
|
32 | import ipaddress | |
|
33 | import functools | |
|
34 | import traceback | |
|
35 | import collections | |
|
36 | ||
|
37 | ||
|
38 | from sqlalchemy import * | |
|
39 | from sqlalchemy.exc import IntegrityError | |
|
40 | from sqlalchemy.ext.declarative import declared_attr | |
|
41 | from sqlalchemy.ext.hybrid import hybrid_property | |
|
42 | from sqlalchemy.orm import ( | |
|
43 | relationship, joinedload, class_mapper, validates, aliased) | |
|
44 | from sqlalchemy.sql.expression import true | |
|
45 | from beaker.cache import cache_region, region_invalidate | |
|
46 | from webob.exc import HTTPNotFound | |
|
47 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
|
48 | ||
|
49 | from pylons import url | |
|
50 | from pylons.i18n.translation import lazy_ugettext as _ | |
|
51 | ||
|
52 | from rhodecode.lib.vcs import get_backend, get_vcs_instance | |
|
53 | from rhodecode.lib.vcs.utils.helpers import get_scm | |
|
54 | from rhodecode.lib.vcs.exceptions import VCSError | |
|
55 | from rhodecode.lib.vcs.backends.base import ( | |
|
56 | EmptyCommit, Reference, MergeFailureReason) | |
|
57 | from rhodecode.lib.utils2 import ( | |
|
58 | str2bool, safe_str, get_commit_safe, safe_unicode, remove_prefix, md5_safe, | |
|
59 | time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict) | |
|
60 | from rhodecode.lib.jsonalchemy import MutationObj, JsonType, JSONDict | |
|
61 | from rhodecode.lib.ext_json import json | |
|
62 | from rhodecode.lib.caching_query import FromCache | |
|
63 | from rhodecode.lib.encrypt import AESCipher | |
|
64 | ||
|
65 | from rhodecode.model.meta import Base, Session | |
|
66 | ||
|
67 | URL_SEP = '/' | |
|
68 | log = logging.getLogger(__name__) | |
|
69 | ||
|
70 | # ============================================================================= | |
|
71 | # BASE CLASSES | |
|
72 | # ============================================================================= | |
|
73 | ||
|
74 | # this is propagated from the .ini file setting rhodecode.encrypted_values.secret, | |
|
75 | # or from beaker.session.secret if the former is not set, | |
|
76 | # and is initialized in environment.py | |
|
77 | ENCRYPTION_KEY = None | |
|
78 | ||
|
79 | # used to sort permissions by type. '#' is used here because it is not | |
|
80 | # allowed in usernames and sorts very early in the string.printable table. | |
|
81 | PERMISSION_TYPE_SORT = { | |
|
82 | 'admin': '####', | |
|
83 | 'write': '###', | |
|
84 | 'read': '##', | |
|
85 | 'none': '#', | |
|
86 | } | |
|
87 | ||
|
88 | ||
|
89 | def display_sort(obj): | |
|
90 | """ | |
|
91 | Sort function used to sort permissions in .permissions() function of | |
|
92 | Repository, RepoGroup, UserGroup. Also it puts the default user in front | |
|
93 | of all other resources | |
|
94 | """ | |
|
95 | ||
|
96 | if obj.username == User.DEFAULT_USER: | |
|
97 | return '#####' | |
|
98 | prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '') | |
|
99 | return prefix + obj.username | |
|
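| # A small illustration of the ordering display_sort produces (user names are | |
|
| # made up): given rows for 'default' (read), 'zoe' (admin) and 'adam' (write), | |
|
| # sorted(rows, key=display_sort) is expected to yield default, zoe, adam -- | |
|
| # the default user first, then admin before write before read, and | |
|
| # alphabetically within each permission group. | |
|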
100 | ||
|
101 | ||
|
102 | def _hash_key(k): | |
|
103 | return md5_safe(k) | |
|
104 | ||
|
105 | ||
|
106 | class EncryptedTextValue(TypeDecorator): | |
|
107 | """ | |
|
108 | Special column for encrypted long text data, use like:: | |
|
109 | ||
|
110 | value = Column("encrypted_value", EncryptedValue(), nullable=False) | |
|
111 | ||
|
112 | This column is smart: if the stored value is in unencrypted form it is | |
|
113 | returned as-is, but on save the value is always encrypted | |
|
114 | """ | |
|
115 | impl = Text | |
|
116 | ||
|
117 | def process_bind_param(self, value, dialect): | |
|
118 | if not value: | |
|
119 | return value | |
|
120 | if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'): | |
|
121 | # protect against double encrypting if someone manually starts | |
|
122 | # doing it | |
|
123 | raise ValueError('value needs to be in unencrypted format, ie. ' | |
|
124 | 'not starting with enc$aes') | |
|
125 | return 'enc$aes_hmac$%s' % AESCipher( | |
|
126 | ENCRYPTION_KEY, hmac=True).encrypt(value) | |
|
127 | ||
|
128 | def process_result_value(self, value, dialect): | |
|
129 | import rhodecode | |
|
130 | ||
|
131 | if not value: | |
|
132 | return value | |
|
133 | ||
|
134 | parts = value.split('$', 3) | |
|
135 | if not len(parts) == 3: | |
|
136 | # probably not encrypted values | |
|
137 | return value | |
|
138 | else: | |
|
139 | if parts[0] != 'enc': | |
|
140 | # parts ok but without our header ? | |
|
141 | return value | |
|
142 | enc_strict_mode = str2bool(rhodecode.CONFIG.get( | |
|
143 | 'rhodecode.encrypted_values.strict') or True) | |
|
144 | # at that stage we know it's our encryption | |
|
145 | if parts[1] == 'aes': | |
|
146 | decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2]) | |
|
147 | elif parts[1] == 'aes_hmac': | |
|
148 | decrypted_data = AESCipher( | |
|
149 | ENCRYPTION_KEY, hmac=True, | |
|
150 | strict_verification=enc_strict_mode).decrypt(parts[2]) | |
|
151 | else: | |
|
152 | raise ValueError( | |
|
153 | 'Encryption type part is wrong, must be `aes` ' | |
|
154 | 'or `aes_hmac`, got `%s` instead' % (parts[1])) | |
|
155 | return decrypted_data | |
|
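| # Illustrative round trip, assuming ENCRYPTION_KEY has been initialized: | |
|
| #   col = EncryptedTextValue() | |
|
| #   stored = col.process_bind_param(u'secret', None)   # -> 'enc$aes_hmac$<data>' | |
|
| #   col.process_result_value(stored, None)             # -> 'secret' | |
|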
156 | ||
|
157 | ||
|
158 | class BaseModel(object): | |
|
159 | """ | |
|
160 | Base Model for all classes | |
|
161 | """ | |
|
162 | ||
|
163 | @classmethod | |
|
164 | def _get_keys(cls): | |
|
165 | """return column names for this model """ | |
|
166 | return class_mapper(cls).c.keys() | |
|
167 | ||
|
168 | def get_dict(self): | |
|
169 | """ | |
|
170 | return dict with keys and values corresponding | |
|
171 | to this model data """ | |
|
172 | ||
|
173 | d = {} | |
|
174 | for k in self._get_keys(): | |
|
175 | d[k] = getattr(self, k) | |
|
176 | ||
|
177 | # also use __json__() if present to get additional fields | |
|
178 | _json_attr = getattr(self, '__json__', None) | |
|
179 | if _json_attr: | |
|
180 | # update with attributes from __json__ | |
|
181 | if callable(_json_attr): | |
|
182 | _json_attr = _json_attr() | |
|
183 | for k, val in _json_attr.iteritems(): | |
|
184 | d[k] = val | |
|
185 | return d | |
|
186 | ||
|
187 | def get_appstruct(self): | |
|
188 | """return list with keys and values tuples corresponding | |
|
189 | to this model data """ | |
|
190 | ||
|
191 | l = [] | |
|
192 | for k in self._get_keys(): | |
|
193 | l.append((k, getattr(self, k),)) | |
|
194 | return l | |
|
195 | ||
|
196 | def populate_obj(self, populate_dict): | |
|
197 | """populate model with data from given populate_dict""" | |
|
198 | ||
|
199 | for k in self._get_keys(): | |
|
200 | if k in populate_dict: | |
|
201 | setattr(self, k, populate_dict[k]) | |
|
202 | ||
|
203 | @classmethod | |
|
204 | def query(cls): | |
|
205 | return Session().query(cls) | |
|
206 | ||
|
207 | @classmethod | |
|
208 | def get(cls, id_): | |
|
209 | if id_: | |
|
210 | return cls.query().get(id_) | |
|
211 | ||
|
212 | @classmethod | |
|
213 | def get_or_404(cls, id_): | |
|
214 | try: | |
|
215 | id_ = int(id_) | |
|
216 | except (TypeError, ValueError): | |
|
217 | raise HTTPNotFound | |
|
218 | ||
|
219 | res = cls.query().get(id_) | |
|
220 | if not res: | |
|
221 | raise HTTPNotFound | |
|
222 | return res | |
|
223 | ||
|
224 | @classmethod | |
|
225 | def getAll(cls): | |
|
226 | # deprecated and left for backward compatibility | |
|
227 | return cls.get_all() | |
|
228 | ||
|
229 | @classmethod | |
|
230 | def get_all(cls): | |
|
231 | return cls.query().all() | |
|
232 | ||
|
233 | @classmethod | |
|
234 | def delete(cls, id_): | |
|
235 | obj = cls.query().get(id_) | |
|
236 | Session().delete(obj) | |
|
237 | ||
|
238 | @classmethod | |
|
239 | def identity_cache(cls, session, attr_name, value): | |
|
240 | exist_in_session = [] | |
|
241 | for (item_cls, pkey), instance in session.identity_map.items(): | |
|
242 | if cls == item_cls and getattr(instance, attr_name) == value: | |
|
243 | exist_in_session.append(instance) | |
|
244 | if exist_in_session: | |
|
245 | if len(exist_in_session) == 1: | |
|
246 | return exist_in_session[0] | |
|
247 | log.exception( | |
|
248 | 'multiple objects with attr %s and ' | |
|
249 | 'value %s found in the session: %r', | |
|
250 | attr_name, value, exist_in_session) | |
|
251 | ||
|
252 | def __repr__(self): | |
|
253 | if hasattr(self, '__unicode__'): | |
|
254 | # python repr needs to return str | |
|
255 | try: | |
|
256 | return safe_str(self.__unicode__()) | |
|
257 | except UnicodeDecodeError: | |
|
258 | pass | |
|
259 | return '<DB:%s>' % (self.__class__.__name__) | |
|
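| # Usage sketch (illustrative only) for the helpers above, e.g. on the User | |
|
| # model defined below: | |
|
| #   User.get(1)             # primary-key lookup, returns None for falsy ids | |
|
| #   User.get_or_404(1)      # same, but raises HTTPNotFound when missing | |
|
| #   User.get(1).get_dict()  # column values plus __json__() extras, if any | |
|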
260 | ||
|
261 | ||
|
262 | class RhodeCodeSetting(Base, BaseModel): | |
|
263 | __tablename__ = 'rhodecode_settings' | |
|
264 | __table_args__ = ( | |
|
265 | UniqueConstraint('app_settings_name'), | |
|
266 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
267 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
268 | ) | |
|
269 | ||
|
270 | SETTINGS_TYPES = { | |
|
271 | 'str': safe_str, | |
|
272 | 'int': safe_int, | |
|
273 | 'unicode': safe_unicode, | |
|
274 | 'bool': str2bool, | |
|
275 | 'list': functools.partial(aslist, sep=',') | |
|
276 | } | |
|
277 | DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions' | |
|
278 | GLOBAL_CONF_KEY = 'app_settings' | |
|
279 | ||
|
280 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
281 | app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None) | |
|
282 | _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None) | |
|
283 | _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None) | |
|
284 | ||
|
285 | def __init__(self, key='', val='', type='unicode'): | |
|
286 | self.app_settings_name = key | |
|
287 | self.app_settings_type = type | |
|
288 | self.app_settings_value = val | |
|
289 | ||
|
290 | @validates('_app_settings_value') | |
|
291 | def validate_settings_value(self, key, val): | |
|
292 | assert type(val) == unicode | |
|
293 | return val | |
|
294 | ||
|
295 | @hybrid_property | |
|
296 | def app_settings_value(self): | |
|
297 | v = self._app_settings_value | |
|
298 | _type = self.app_settings_type | |
|
299 | if _type: | |
|
300 | _type = self.app_settings_type.split('.')[0] | |
|
301 | # decode the encrypted value | |
|
302 | if 'encrypted' in self.app_settings_type: | |
|
303 | cipher = EncryptedTextValue() | |
|
304 | v = safe_unicode(cipher.process_result_value(v, None)) | |
|
305 | ||
|
306 | converter = self.SETTINGS_TYPES.get(_type) or \ | |
|
307 | self.SETTINGS_TYPES['unicode'] | |
|
308 | return converter(v) | |
|
309 | ||
|
310 | @app_settings_value.setter | |
|
311 | def app_settings_value(self, val): | |
|
312 | """ | |
|
313 | Setter that will always make sure we use unicode in app_settings_value | |
|
314 | ||
|
315 | :param val: | |
|
316 | """ | |
|
317 | val = safe_unicode(val) | |
|
318 | # encode the encrypted value | |
|
319 | if 'encrypted' in self.app_settings_type: | |
|
320 | cipher = EncryptedTextValue() | |
|
321 | val = safe_unicode(cipher.process_bind_param(val, None)) | |
|
322 | self._app_settings_value = val | |
|
323 | ||
|
324 | @hybrid_property | |
|
325 | def app_settings_type(self): | |
|
326 | return self._app_settings_type | |
|
327 | ||
|
328 | @app_settings_type.setter | |
|
329 | def app_settings_type(self, val): | |
|
330 | if val.split('.')[0] not in self.SETTINGS_TYPES: | |
|
331 | raise Exception('type must be one of %s got %s' | |
|
332 | % (self.SETTINGS_TYPES.keys(), val)) | |
|
333 | self._app_settings_type = val | |
|
334 | ||
|
335 | def __unicode__(self): | |
|
336 | return u"<%s('%s:%s[%s]')>" % ( | |
|
337 | self.__class__.__name__, | |
|
338 | self.app_settings_name, self.app_settings_value, | |
|
339 | self.app_settings_type | |
|
340 | ) | |
|
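| # Illustrative use of the typed value handling above (key and value are | |
|
| # made up): | |
|
| #   setting = RhodeCodeSetting('show_public_icon', 'True', 'bool') | |
|
| #   setting.app_settings_value          # -> True, via SETTINGS_TYPES['bool'] | |
|
| #   setting.app_settings_type = 'color' # -> raises, not in SETTINGS_TYPES | |
|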
341 | ||
|
342 | ||
|
343 | class RhodeCodeUi(Base, BaseModel): | |
|
344 | __tablename__ = 'rhodecode_ui' | |
|
345 | __table_args__ = ( | |
|
346 | UniqueConstraint('ui_key'), | |
|
347 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
348 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
349 | ) | |
|
350 | ||
|
351 | HOOK_REPO_SIZE = 'changegroup.repo_size' | |
|
352 | # HG | |
|
353 | HOOK_PRE_PULL = 'preoutgoing.pre_pull' | |
|
354 | HOOK_PULL = 'outgoing.pull_logger' | |
|
355 | HOOK_PRE_PUSH = 'prechangegroup.pre_push' | |
|
356 | HOOK_PUSH = 'changegroup.push_logger' | |
|
357 | ||
|
358 | # TODO: johbo: Unify way how hooks are configured for git and hg, | |
|
359 | # git part is currently hardcoded. | |
|
360 | ||
|
361 | # SVN PATTERNS | |
|
362 | SVN_BRANCH_ID = 'vcs_svn_branch' | |
|
363 | SVN_TAG_ID = 'vcs_svn_tag' | |
|
364 | ||
|
365 | ui_id = Column( | |
|
366 | "ui_id", Integer(), nullable=False, unique=True, default=None, | |
|
367 | primary_key=True) | |
|
368 | ui_section = Column( | |
|
369 | "ui_section", String(255), nullable=True, unique=None, default=None) | |
|
370 | ui_key = Column( | |
|
371 | "ui_key", String(255), nullable=True, unique=None, default=None) | |
|
372 | ui_value = Column( | |
|
373 | "ui_value", String(255), nullable=True, unique=None, default=None) | |
|
374 | ui_active = Column( | |
|
375 | "ui_active", Boolean(), nullable=True, unique=None, default=True) | |
|
376 | ||
|
377 | def __repr__(self): | |
|
378 | return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.ui_section, | |
|
379 | self.ui_key, self.ui_value) | |
|
380 | ||
|
381 | ||
|
382 | class RepoRhodeCodeSetting(Base, BaseModel): | |
|
383 | __tablename__ = 'repo_rhodecode_settings' | |
|
384 | __table_args__ = ( | |
|
385 | UniqueConstraint( | |
|
386 | 'app_settings_name', 'repository_id', | |
|
387 | name='uq_repo_rhodecode_setting_name_repo_id'), | |
|
388 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
389 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
390 | ) | |
|
391 | ||
|
392 | repository_id = Column( | |
|
393 | "repository_id", Integer(), ForeignKey('repositories.repo_id'), | |
|
394 | nullable=False) | |
|
395 | app_settings_id = Column( | |
|
396 | "app_settings_id", Integer(), nullable=False, unique=True, | |
|
397 | default=None, primary_key=True) | |
|
398 | app_settings_name = Column( | |
|
399 | "app_settings_name", String(255), nullable=True, unique=None, | |
|
400 | default=None) | |
|
401 | _app_settings_value = Column( | |
|
402 | "app_settings_value", String(4096), nullable=True, unique=None, | |
|
403 | default=None) | |
|
404 | _app_settings_type = Column( | |
|
405 | "app_settings_type", String(255), nullable=True, unique=None, | |
|
406 | default=None) | |
|
407 | ||
|
408 | repository = relationship('Repository') | |
|
409 | ||
|
410 | def __init__(self, repository_id, key='', val='', type='unicode'): | |
|
411 | self.repository_id = repository_id | |
|
412 | self.app_settings_name = key | |
|
413 | self.app_settings_type = type | |
|
414 | self.app_settings_value = val | |
|
415 | ||
|
416 | @validates('_app_settings_value') | |
|
417 | def validate_settings_value(self, key, val): | |
|
418 | assert type(val) == unicode | |
|
419 | return val | |
|
420 | ||
|
421 | @hybrid_property | |
|
422 | def app_settings_value(self): | |
|
423 | v = self._app_settings_value | |
|
424 | type_ = self.app_settings_type | |
|
425 | SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES | |
|
426 | converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode'] | |
|
427 | return converter(v) | |
|
428 | ||
|
429 | @app_settings_value.setter | |
|
430 | def app_settings_value(self, val): | |
|
431 | """ | |
|
432 | Setter that will always make sure we use unicode in app_settings_value | |
|
433 | ||
|
434 | :param val: | |
|
435 | """ | |
|
436 | self._app_settings_value = safe_unicode(val) | |
|
437 | ||
|
438 | @hybrid_property | |
|
439 | def app_settings_type(self): | |
|
440 | return self._app_settings_type | |
|
441 | ||
|
442 | @app_settings_type.setter | |
|
443 | def app_settings_type(self, val): | |
|
444 | SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES | |
|
445 | if val not in SETTINGS_TYPES: | |
|
446 | raise Exception('type must be one of %s got %s' | |
|
447 | % (SETTINGS_TYPES.keys(), val)) | |
|
448 | self._app_settings_type = val | |
|
449 | ||
|
450 | def __unicode__(self): | |
|
451 | return u"<%s('%s:%s:%s[%s]')>" % ( | |
|
452 | self.__class__.__name__, self.repository.repo_name, | |
|
453 | self.app_settings_name, self.app_settings_value, | |
|
454 | self.app_settings_type | |
|
455 | ) | |
|
456 | ||
|
457 | ||
|
458 | class RepoRhodeCodeUi(Base, BaseModel): | |
|
459 | __tablename__ = 'repo_rhodecode_ui' | |
|
460 | __table_args__ = ( | |
|
461 | UniqueConstraint( | |
|
462 | 'repository_id', 'ui_section', 'ui_key', | |
|
463 | name='uq_repo_rhodecode_ui_repository_id_section_key'), | |
|
464 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
465 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
466 | ) | |
|
467 | ||
|
468 | repository_id = Column( | |
|
469 | "repository_id", Integer(), ForeignKey('repositories.repo_id'), | |
|
470 | nullable=False) | |
|
471 | ui_id = Column( | |
|
472 | "ui_id", Integer(), nullable=False, unique=True, default=None, | |
|
473 | primary_key=True) | |
|
474 | ui_section = Column( | |
|
475 | "ui_section", String(255), nullable=True, unique=None, default=None) | |
|
476 | ui_key = Column( | |
|
477 | "ui_key", String(255), nullable=True, unique=None, default=None) | |
|
478 | ui_value = Column( | |
|
479 | "ui_value", String(255), nullable=True, unique=None, default=None) | |
|
480 | ui_active = Column( | |
|
481 | "ui_active", Boolean(), nullable=True, unique=None, default=True) | |
|
482 | ||
|
483 | repository = relationship('Repository') | |
|
484 | ||
|
485 | def __repr__(self): | |
|
486 | return '<%s[%s:%s]%s=>%s>' % ( | |
|
487 | self.__class__.__name__, self.repository.repo_name, | |
|
488 | self.ui_section, self.ui_key, self.ui_value) | |
|
489 | ||
|
490 | ||
|
491 | class User(Base, BaseModel): | |
|
492 | __tablename__ = 'users' | |
|
493 | __table_args__ = ( | |
|
494 | UniqueConstraint('username'), UniqueConstraint('email'), | |
|
495 | Index('u_username_idx', 'username'), | |
|
496 | Index('u_email_idx', 'email'), | |
|
497 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
498 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
499 | ) | |
|
500 | DEFAULT_USER = 'default' | |
|
501 | DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org' | |
|
502 | DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}' | |
|
503 | ||
|
504 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
505 | username = Column("username", String(255), nullable=True, unique=None, default=None) | |
|
506 | password = Column("password", String(255), nullable=True, unique=None, default=None) | |
|
507 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) | |
|
508 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) | |
|
509 | name = Column("firstname", String(255), nullable=True, unique=None, default=None) | |
|
510 | lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) | |
|
511 | _email = Column("email", String(255), nullable=True, unique=None, default=None) | |
|
512 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) | |
|
513 | extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None) | |
|
514 | extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None) | |
|
515 | api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) | |
|
516 | inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) | |
|
517 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
|
518 | _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data | |
|
519 | ||
|
520 | user_log = relationship('UserLog') | |
|
521 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') | |
|
522 | ||
|
523 | repositories = relationship('Repository') | |
|
524 | repository_groups = relationship('RepoGroup') | |
|
525 | user_groups = relationship('UserGroup') | |
|
526 | ||
|
527 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') | |
|
528 | followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') | |
|
529 | ||
|
530 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') | |
|
531 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all') | |
|
532 | user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all') | |
|
533 | ||
|
534 | group_member = relationship('UserGroupMember', cascade='all') | |
|
535 | ||
|
536 | notifications = relationship('UserNotification', cascade='all') | |
|
537 | # notifications created by this user | |
|
538 | user_created_notifications = relationship('Notification', cascade='all') | |
|
539 | # comments created by this user | |
|
540 | user_comments = relationship('ChangesetComment', cascade='all') | |
|
541 | # user profile extra info | |
|
542 | user_emails = relationship('UserEmailMap', cascade='all') | |
|
543 | user_ip_map = relationship('UserIpMap', cascade='all') | |
|
544 | user_auth_tokens = relationship('UserApiKeys', cascade='all') | |
|
545 | # gists | |
|
546 | user_gists = relationship('Gist', cascade='all') | |
|
547 | # user pull requests | |
|
548 | user_pull_requests = relationship('PullRequest', cascade='all') | |
|
549 | # external identities | |
|
550 | extenal_identities = relationship( | |
|
551 | 'ExternalIdentity', | |
|
552 | primaryjoin="User.user_id==ExternalIdentity.local_user_id", | |
|
553 | cascade='all') | |
|
554 | ||
|
555 | def __unicode__(self): | |
|
556 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, | |
|
557 | self.user_id, self.username) | |
|
558 | ||
|
559 | @hybrid_property | |
|
560 | def email(self): | |
|
561 | return self._email | |
|
562 | ||
|
563 | @email.setter | |
|
564 | def email(self, val): | |
|
565 | self._email = val.lower() if val else None | |
|
566 | ||
|
567 | @property | |
|
568 | def firstname(self): | |
|
569 | # alias for future | |
|
570 | return self.name | |
|
571 | ||
|
572 | @property | |
|
573 | def emails(self): | |
|
574 | other = UserEmailMap.query().filter(UserEmailMap.user==self).all() | |
|
575 | return [self.email] + [x.email for x in other] | |
|
576 | ||
|
577 | @property | |
|
578 | def auth_tokens(self): | |
|
579 | return [self.api_key] + [x.api_key for x in self.extra_auth_tokens] | |
|
580 | ||
|
581 | @property | |
|
582 | def extra_auth_tokens(self): | |
|
583 | return UserApiKeys.query().filter(UserApiKeys.user == self).all() | |
|
584 | ||
|
585 | @property | |
|
586 | def feed_token(self): | |
|
587 | feed_tokens = UserApiKeys.query()\ | |
|
588 | .filter(UserApiKeys.user == self)\ | |
|
589 | .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\ | |
|
590 | .all() | |
|
591 | if feed_tokens: | |
|
592 | return feed_tokens[0].api_key | |
|
593 | else: | |
|
594 | # use the main token so we don't end up with nothing... | |
|
595 | return self.api_key | |
|
596 | ||
|
597 | @classmethod | |
|
598 | def extra_valid_auth_tokens(cls, user, role=None): | |
|
599 | tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\ | |
|
600 | .filter(or_(UserApiKeys.expires == -1, | |
|
601 | UserApiKeys.expires >= time.time())) | |
|
602 | if role: | |
|
603 | tokens = tokens.filter(or_(UserApiKeys.role == role, | |
|
604 | UserApiKeys.role == UserApiKeys.ROLE_ALL)) | |
|
605 | return tokens.all() | |
|
606 | ||
|
607 | @property | |
|
608 | def ip_addresses(self): | |
|
609 | ret = UserIpMap.query().filter(UserIpMap.user == self).all() | |
|
610 | return [x.ip_addr for x in ret] | |
|
611 | ||
|
612 | @property | |
|
613 | def username_and_name(self): | |
|
614 | return '%s (%s %s)' % (self.username, self.firstname, self.lastname) | |
|
615 | ||
|
616 | @property | |
|
617 | def username_or_name_or_email(self): | |
|
618 | full_name = self.full_name if self.full_name != ' ' else None | |
|
619 | return self.username or full_name or self.email | |
|
620 | ||
|
621 | @property | |
|
622 | def full_name(self): | |
|
623 | return '%s %s' % (self.firstname, self.lastname) | |
|
624 | ||
|
625 | @property | |
|
626 | def full_name_or_username(self): | |
|
627 | return ('%s %s' % (self.firstname, self.lastname) | |
|
628 | if (self.firstname and self.lastname) else self.username) | |
|
629 | ||
|
630 | @property | |
|
631 | def full_contact(self): | |
|
632 | return '%s %s <%s>' % (self.firstname, self.lastname, self.email) | |
|
633 | ||
|
634 | @property | |
|
635 | def short_contact(self): | |
|
636 | return '%s %s' % (self.firstname, self.lastname) | |
|
637 | ||
|
638 | @property | |
|
639 | def is_admin(self): | |
|
640 | return self.admin | |
|
641 | ||
|
642 | @property | |
|
643 | def AuthUser(self): | |
|
644 | """ | |
|
645 | Returns instance of AuthUser for this user | |
|
646 | """ | |
|
647 | from rhodecode.lib.auth import AuthUser | |
|
648 | return AuthUser(user_id=self.user_id, api_key=self.api_key, | |
|
649 | username=self.username) | |
|
650 | ||
|
651 | @hybrid_property | |
|
652 | def user_data(self): | |
|
653 | if not self._user_data: | |
|
654 | return {} | |
|
655 | ||
|
656 | try: | |
|
657 | return json.loads(self._user_data) | |
|
658 | except TypeError: | |
|
659 | return {} | |
|
660 | ||
|
661 | @user_data.setter | |
|
662 | def user_data(self, val): | |
|
663 | if not isinstance(val, dict): | |
|
664 | raise Exception('user_data must be dict, got %s' % type(val)) | |
|
665 | try: | |
|
666 | self._user_data = json.dumps(val) | |
|
667 | except Exception: | |
|
668 | log.error(traceback.format_exc()) | |
|
669 | ||
|
670 | @classmethod | |
|
671 | def get_by_username(cls, username, case_insensitive=False, | |
|
672 | cache=False, identity_cache=False): | |
|
673 | session = Session() | |
|
674 | ||
|
675 | if case_insensitive: | |
|
676 | q = cls.query().filter( | |
|
677 | func.lower(cls.username) == func.lower(username)) | |
|
678 | else: | |
|
679 | q = cls.query().filter(cls.username == username) | |
|
680 | ||
|
681 | if cache: | |
|
682 | if identity_cache: | |
|
683 | val = cls.identity_cache(session, 'username', username) | |
|
684 | if val: | |
|
685 | return val | |
|
686 | else: | |
|
687 | q = q.options( | |
|
688 | FromCache("sql_cache_short", | |
|
689 | "get_user_by_name_%s" % _hash_key(username))) | |
|
690 | ||
|
691 | return q.scalar() | |
|
692 | ||
|
693 | @classmethod | |
|
694 | def get_by_auth_token(cls, auth_token, cache=False, fallback=True): | |
|
695 | q = cls.query().filter(cls.api_key == auth_token) | |
|
696 | ||
|
697 | if cache: | |
|
698 | q = q.options(FromCache("sql_cache_short", | |
|
699 | "get_auth_token_%s" % auth_token)) | |
|
700 | res = q.scalar() | |
|
701 | ||
|
702 | if fallback and not res: | |
|
703 | # fallback to additional keys | |
|
704 | _res = UserApiKeys.query()\ | |
|
705 | .filter(UserApiKeys.api_key == auth_token)\ | |
|
706 | .filter(or_(UserApiKeys.expires == -1, | |
|
707 | UserApiKeys.expires >= time.time()))\ | |
|
708 | .first() | |
|
709 | if _res: | |
|
710 | res = _res.user | |
|
711 | return res | |
|
712 | ||
|
713 | @classmethod | |
|
714 | def get_by_email(cls, email, case_insensitive=False, cache=False): | |
|
715 | ||
|
716 | if case_insensitive: | |
|
717 | q = cls.query().filter(func.lower(cls.email) == func.lower(email)) | |
|
718 | ||
|
719 | else: | |
|
720 | q = cls.query().filter(cls.email == email) | |
|
721 | ||
|
722 | if cache: | |
|
723 | q = q.options(FromCache("sql_cache_short", | |
|
724 | "get_email_key_%s" % _hash_key(email))) | |
|
725 | ||
|
726 | ret = q.scalar() | |
|
727 | if ret is None: | |
|
728 | q = UserEmailMap.query() | |
|
729 | # try fetching in alternate email map | |
|
730 | if case_insensitive: | |
|
731 | q = q.filter(func.lower(UserEmailMap.email) == func.lower(email)) | |
|
732 | else: | |
|
733 | q = q.filter(UserEmailMap.email == email) | |
|
734 | q = q.options(joinedload(UserEmailMap.user)) | |
|
735 | if cache: | |
|
736 | q = q.options(FromCache("sql_cache_short", | |
|
737 | "get_email_map_key_%s" % email)) | |
|
738 | ret = getattr(q.scalar(), 'user', None) | |
|
739 | ||
|
740 | return ret | |
|
741 | ||
|
742 | @classmethod | |
|
743 | def get_from_cs_author(cls, author): | |
|
744 | """ | |
|
745 | Tries to get User objects out of commit author string | |
|
746 | ||
|
747 | :param author: | |
|
748 | """ | |
|
749 | from rhodecode.lib.helpers import email, author_name | |
|
750 | # Valid email in the attribute passed, see if that user is in the system | |
|
751 | _email = email(author) | |
|
752 | if _email: | |
|
753 | user = cls.get_by_email(_email, case_insensitive=True) | |
|
754 | if user: | |
|
755 | return user | |
|
756 | # Maybe we can match by username? | |
|
757 | _author = author_name(author) | |
|
758 | user = cls.get_by_username(_author, case_insensitive=True) | |
|
759 | if user: | |
|
760 | return user | |
|
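| # Illustrative lookup, assuming a commit author string in the usual | |
|
| # "Full Name <address>" form: | |
|
| #   User.get_from_cs_author('Jane Doe <jane@example.com>') | |
|
| # tries an email match first, then falls back to matching the author name | |
|
| # against usernames, and returns None when neither matches. | |
|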
761 | ||
|
762 | def update_userdata(self, **kwargs): | |
|
763 | usr = self | |
|
764 | old = usr.user_data | |
|
765 | old.update(**kwargs) | |
|
766 | usr.user_data = old | |
|
767 | Session().add(usr) | |
|
768 | log.debug('updated userdata with %s', kwargs) | |
|
769 | ||
|
770 | def update_lastlogin(self): | |
|
771 | """Update user lastlogin""" | |
|
772 | self.last_login = datetime.datetime.now() | |
|
773 | Session().add(self) | |
|
774 | log.debug('updated user %s lastlogin', self.username) | |
|
775 | ||
|
776 | def update_lastactivity(self): | |
|
777 | """Update user lastactivity""" | |
|
778 | usr = self | |
|
779 | old = usr.user_data | |
|
780 | old.update({'last_activity': time.time()}) | |
|
781 | usr.user_data = old | |
|
782 | Session().add(usr) | |
|
783 | log.debug('updated user %s lastactivity', usr.username) | |
|
784 | ||
|
785 | def update_password(self, new_password, change_api_key=False): | |
|
786 | from rhodecode.lib.auth import get_crypt_password, generate_auth_token | |
|
787 | ||
|
788 | self.password = get_crypt_password(new_password) | |
|
789 | if change_api_key: | |
|
790 | self.api_key = generate_auth_token(self.username) | |
|
791 | Session().add(self) | |
|
792 | ||
|
793 | @classmethod | |
|
794 | def get_first_super_admin(cls): | |
|
795 | user = User.query().filter(User.admin == true()).first() | |
|
796 | if user is None: | |
|
797 | raise Exception('FATAL: Missing administrative account!') | |
|
798 | return user | |
|
799 | ||
|
800 | @classmethod | |
|
801 | def get_all_super_admins(cls): | |
|
802 | """ | |
|
803 | Returns all admin accounts sorted by username | |
|
804 | """ | |
|
805 | return User.query().filter(User.admin == true())\ | |
|
806 | .order_by(User.username.asc()).all() | |
|
807 | ||
|
808 | @classmethod | |
|
809 | def get_default_user(cls, cache=False): | |
|
810 | user = User.get_by_username(User.DEFAULT_USER, cache=cache) | |
|
811 | if user is None: | |
|
812 | raise Exception('FATAL: Missing default account!') | |
|
813 | return user | |
|
814 | ||
|
815 | def _get_default_perms(self, user, suffix=''): | |
|
816 | from rhodecode.model.permission import PermissionModel | |
|
817 | return PermissionModel().get_default_perms(user.user_perms, suffix) | |
|
818 | ||
|
819 | def get_default_perms(self, suffix=''): | |
|
820 | return self._get_default_perms(self, suffix) | |
|
821 | ||
|
822 | def get_api_data(self, include_secrets=False, details='full'): | |
|
823 | """ | |
|
824 | Common function for generating user related data for API | |
|
825 | ||
|
826 | :param include_secrets: By default secrets in the API data will be replaced | |
|
827 | by a placeholder value to prevent exposing this data by accident. In case | |
|
828 | this data shall be exposed, set this flag to ``True``. | |
|
829 | ||
|
830 | :param details: can be 'basic' or 'full'. 'basic' gives only a subset of | |
|
831 | the available user information: user_id, name and emails. | |
|
832 | """ | |
|
833 | user = self | |
|
834 | user_data = self.user_data | |
|
835 | data = { | |
|
836 | 'user_id': user.user_id, | |
|
837 | 'username': user.username, | |
|
838 | 'firstname': user.name, | |
|
839 | 'lastname': user.lastname, | |
|
840 | 'email': user.email, | |
|
841 | 'emails': user.emails, | |
|
842 | } | |
|
843 | if details == 'basic': | |
|
844 | return data | |
|
845 | ||
|
846 | api_key_length = 40 | |
|
847 | api_key_replacement = '*' * api_key_length | |
|
848 | ||
|
849 | extras = { | |
|
850 | 'api_key': api_key_replacement, | |
|
851 | 'api_keys': [api_key_replacement], | |
|
852 | 'active': user.active, | |
|
853 | 'admin': user.admin, | |
|
854 | 'extern_type': user.extern_type, | |
|
855 | 'extern_name': user.extern_name, | |
|
856 | 'last_login': user.last_login, | |
|
857 | 'ip_addresses': user.ip_addresses, | |
|
858 | 'language': user_data.get('language') | |
|
859 | } | |
|
860 | data.update(extras) | |
|
861 | ||
|
862 | if include_secrets: | |
|
863 | data['api_key'] = user.api_key | |
|
864 | data['api_keys'] = user.auth_tokens | |
|
865 | return data | |
|
866 | ||
|
867 | def __json__(self): | |
|
868 | data = { | |
|
869 | 'full_name': self.full_name, | |
|
870 | 'full_name_or_username': self.full_name_or_username, | |
|
871 | 'short_contact': self.short_contact, | |
|
872 | 'full_contact': self.full_contact, | |
|
873 | } | |
|
874 | data.update(self.get_api_data()) | |
|
875 | return data | |
|
876 | ||
|
877 | ||
|
878 | class UserApiKeys(Base, BaseModel): | |
|
879 | __tablename__ = 'user_api_keys' | |
|
880 | __table_args__ = ( | |
|
881 | Index('uak_api_key_idx', 'api_key'), | |
|
882 | Index('uak_api_key_expires_idx', 'api_key', 'expires'), | |
|
883 | UniqueConstraint('api_key'), | |
|
884 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
885 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
886 | ) | |
|
887 | __mapper_args__ = {} | |
|
888 | ||
|
889 | # ApiKey role | |
|
890 | ROLE_ALL = 'token_role_all' | |
|
891 | ROLE_HTTP = 'token_role_http' | |
|
892 | ROLE_VCS = 'token_role_vcs' | |
|
893 | ROLE_API = 'token_role_api' | |
|
894 | ROLE_FEED = 'token_role_feed' | |
|
895 | ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED] | |
|
896 | ||
|
897 | user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
898 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |
|
899 | api_key = Column("api_key", String(255), nullable=False, unique=True) | |
|
900 | description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) | |
|
901 | expires = Column('expires', Float(53), nullable=False) | |
|
902 | role = Column('role', String(255), nullable=True) | |
|
903 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
|
904 | ||
|
905 | user = relationship('User', lazy='joined') | |
|
906 | ||
|
907 | @classmethod | |
|
908 | def _get_role_name(cls, role): | |
|
909 | return { | |
|
910 | cls.ROLE_ALL: _('all'), | |
|
911 | cls.ROLE_HTTP: _('http/web interface'), | |
|
912 | cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'), | |
|
913 | cls.ROLE_API: _('api calls'), | |
|
914 | cls.ROLE_FEED: _('feed access'), | |
|
915 | }.get(role, role) | |
|
916 | ||
|
917 | @property | |
|
918 | def expired(self): | |
|
919 | if self.expires == -1: | |
|
920 | return False | |
|
921 | return time.time() > self.expires | |
|
922 | ||
|
923 | @property | |
|
924 | def role_humanized(self): | |
|
925 | return self._get_role_name(self.role) | |
|
926 | ||
|
927 | ||
|
928 | class UserEmailMap(Base, BaseModel): | |
|
929 | __tablename__ = 'user_email_map' | |
|
930 | __table_args__ = ( | |
|
931 | Index('uem_email_idx', 'email'), | |
|
932 | UniqueConstraint('email'), | |
|
933 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
934 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
935 | ) | |
|
936 | __mapper_args__ = {} | |
|
937 | ||
|
938 | email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
939 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |
|
940 | _email = Column("email", String(255), nullable=True, unique=False, default=None) | |
|
941 | user = relationship('User', lazy='joined') | |
|
942 | ||
|
943 | @validates('_email') | |
|
944 | def validate_email(self, key, email): | |
|
945 | # check that this email is not already used as a main one | |
|
946 | main_email = Session().query(User).filter(User.email == email).scalar() | |
|
947 | if main_email is not None: | |
|
948 | raise AttributeError('email %s is present in user table' % email) | |
|
949 | return email | |
|
950 | ||
|
951 | @hybrid_property | |
|
952 | def email(self): | |
|
953 | return self._email | |
|
954 | ||
|
955 | @email.setter | |
|
956 | def email(self, val): | |
|
957 | self._email = val.lower() if val else None | |
|
958 | ||
|
959 | ||
|
960 | class UserIpMap(Base, BaseModel): | |
|
961 | __tablename__ = 'user_ip_map' | |
|
962 | __table_args__ = ( | |
|
963 | UniqueConstraint('user_id', 'ip_addr'), | |
|
964 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
965 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
966 | ) | |
|
967 | __mapper_args__ = {} | |
|
968 | ||
|
969 | ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
970 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |
|
971 | ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None) | |
|
972 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) | |
|
973 | description = Column("description", String(10000), nullable=True, unique=None, default=None) | |
|
974 | user = relationship('User', lazy='joined') | |
|
975 | ||
|
976 | @classmethod | |
|
977 | def _get_ip_range(cls, ip_addr): | |
|
978 | net = ipaddress.ip_network(ip_addr, strict=False) | |
|
979 | return [str(net.network_address), str(net.broadcast_address)] | |
|
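| # For example, _get_ip_range('192.168.1.5/24') is expected to return | |
|
| # ['192.168.1.0', '192.168.1.255'] (network and broadcast address). | |
|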
980 | ||
|
981 | def __json__(self): | |
|
982 | return { | |
|
983 | 'ip_addr': self.ip_addr, | |
|
984 | 'ip_range': self._get_ip_range(self.ip_addr), | |
|
985 | } | |
|
986 | ||
|
987 | def __unicode__(self): | |
|
988 | return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__, | |
|
989 | self.user_id, self.ip_addr) | |
|
990 | ||
|
991 | class UserLog(Base, BaseModel): | |
|
992 | __tablename__ = 'user_logs' | |
|
993 | __table_args__ = ( | |
|
994 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
995 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
996 | ) | |
|
997 | user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
998 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |
|
999 | username = Column("username", String(255), nullable=True, unique=None, default=None) | |
|
1000 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True) | |
|
1001 | repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None) | |
|
1002 | user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None) | |
|
1003 | action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None) | |
|
1004 | action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None) | |
|
1005 | ||
|
1006 | def __unicode__(self): | |
|
1007 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, | |
|
1008 | self.repository_name, | |
|
1009 | self.action) | |
|
1010 | ||
|
1011 | @property | |
|
1012 | def action_as_day(self): | |
|
1013 | return datetime.date(*self.action_date.timetuple()[:3]) | |
|
1014 | ||
|
1015 | user = relationship('User') | |
|
1016 | repository = relationship('Repository', cascade='') | |
|
1017 | ||
|
1018 | ||
|
1019 | class UserGroup(Base, BaseModel): | |
|
1020 | __tablename__ = 'users_groups' | |
|
1021 | __table_args__ = ( | |
|
1022 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
1023 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
1024 | ) | |
|
1025 | ||
|
1026 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
1027 | users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None) | |
|
1028 | user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None) | |
|
1029 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) | |
|
1030 | inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) | |
|
1031 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) | |
|
1032 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
|
1033 | _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data | |
|
1034 | ||
|
1035 | members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined") | |
|
1036 | users_group_to_perm = relationship('UserGroupToPerm', cascade='all') | |
|
1037 | users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all') | |
|
1038 | users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') | |
|
1039 | user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all') | |
|
1040 | user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all') | |
|
1041 | ||
|
1042 | user = relationship('User') | |
|
1043 | ||
|
1044 | @hybrid_property | |
|
1045 | def group_data(self): | |
|
1046 | if not self._group_data: | |
|
1047 | return {} | |
|
1048 | ||
|
1049 | try: | |
|
1050 | return json.loads(self._group_data) | |
|
1051 | except TypeError: | |
|
1052 | return {} | |
|
1053 | ||
|
1054 | @group_data.setter | |
|
1055 | def group_data(self, val): | |
|
1056 | try: | |
|
1057 | self._group_data = json.dumps(val) | |
|
1058 | except Exception: | |
|
1059 | log.error(traceback.format_exc()) | |
|
1060 | ||
|
1061 | def __unicode__(self): | |
|
1062 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, | |
|
1063 | self.users_group_id, | |
|
1064 | self.users_group_name) | |
|
1065 | ||
|
1066 | @classmethod | |
|
1067 | def get_by_group_name(cls, group_name, cache=False, | |
|
1068 | case_insensitive=False): | |
|
1069 | if case_insensitive: | |
|
1070 | q = cls.query().filter(func.lower(cls.users_group_name) == | |
|
1071 | func.lower(group_name)) | |
|
1072 | ||
|
1073 | else: | |
|
1074 | q = cls.query().filter(cls.users_group_name == group_name) | |
|
1075 | if cache: | |
|
1076 | q = q.options(FromCache( | |
|
1077 | "sql_cache_short", | |
|
1078 | "get_group_%s" % _hash_key(group_name))) | |
|
1079 | return q.scalar() | |
|
1080 | ||
|
1081 | @classmethod | |
|
1082 | def get(cls, user_group_id, cache=False): | |
|
1083 | user_group = cls.query() | |
|
1084 | if cache: | |
|
1085 | user_group = user_group.options(FromCache("sql_cache_short", | |
|
1086 | "get_users_group_%s" % user_group_id)) | |
|
1087 | return user_group.get(user_group_id) | |
|
1088 | ||
|
1089 | def permissions(self, with_admins=True, with_owner=True): | |
|
1090 | q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self) | |
|
1091 | q = q.options(joinedload(UserUserGroupToPerm.user_group), | |
|
1092 | joinedload(UserUserGroupToPerm.user), | |
|
1093 | joinedload(UserUserGroupToPerm.permission),) | |
|
1094 | ||
|
1095 | # get owners, admins and their permissions. We rewrite the sqlalchemy | |
|
1096 | # rows into plain AttributeDict objects because the sqlalchemy session | |
|
1097 | # holds a global reference, so changing one mapped object propagates to | |
|
1098 | # all others. Without this, if an admin is also the owner, a change to | |
|
1099 | # admin_row would propagate to both objects. | |
|
1100 | perm_rows = [] | |
|
1101 | for _usr in q.all(): | |
|
1102 | usr = AttributeDict(_usr.user.get_dict()) | |
|
1103 | usr.permission = _usr.permission.permission_name | |
|
1104 | perm_rows.append(usr) | |
|
1105 | ||
|
1106 | # sort the perm rows so the 'default' user comes first, then by | |
|
1107 | # admin, write, read, none permission, and alphabetically within | |
|
1108 | # each group | |
|
1109 | perm_rows = sorted(perm_rows, key=display_sort) | |
|
1110 | ||
|
1111 | _admin_perm = 'usergroup.admin' | |
|
1112 | owner_row = [] | |
|
1113 | if with_owner: | |
|
1114 | usr = AttributeDict(self.user.get_dict()) | |
|
1115 | usr.owner_row = True | |
|
1116 | usr.permission = _admin_perm | |
|
1117 | owner_row.append(usr) | |
|
1118 | ||
|
1119 | super_admin_rows = [] | |
|
1120 | if with_admins: | |
|
1121 | for usr in User.get_all_super_admins(): | |
|
1122 | # if this admin is also owner, don't double the record | |
|
1123 | if usr.user_id == owner_row[0].user_id: | |
|
1124 | owner_row[0].admin_row = True | |
|
1125 | else: | |
|
1126 | usr = AttributeDict(usr.get_dict()) | |
|
1127 | usr.admin_row = True | |
|
1128 | usr.permission = _admin_perm | |
|
1129 | super_admin_rows.append(usr) | |
|
1130 | ||
|
1131 | return super_admin_rows + owner_row + perm_rows | |
|
1132 | ||
|
1133 | def permission_user_groups(self): | |
|
1134 | q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self) | |
|
1135 | q = q.options(joinedload(UserGroupUserGroupToPerm.user_group), | |
|
1136 | joinedload(UserGroupUserGroupToPerm.target_user_group), | |
|
1137 | joinedload(UserGroupUserGroupToPerm.permission),) | |
|
1138 | ||
|
1139 | perm_rows = [] | |
|
1140 | for _user_group in q.all(): | |
|
1141 | usr = AttributeDict(_user_group.user_group.get_dict()) | |
|
1142 | usr.permission = _user_group.permission.permission_name | |
|
1143 | perm_rows.append(usr) | |
|
1144 | ||
|
1145 | return perm_rows | |
|
1146 | ||
|
1147 | def _get_default_perms(self, user_group, suffix=''): | |
|
1148 | from rhodecode.model.permission import PermissionModel | |
|
1149 | return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix) | |
|
1150 | ||
|
1151 | def get_default_perms(self, suffix=''): | |
|
1152 | return self._get_default_perms(self, suffix) | |
|
1153 | ||
|
1154 | def get_api_data(self, with_group_members=True, include_secrets=False): | |
|
1155 | """ | |
|
1156 | :param include_secrets: See :meth:`User.get_api_data`, this parameter is | |
|
1157 | basically forwarded. | |
|
1158 | ||
|
1159 | """ | |
|
1160 | user_group = self | |
|
1161 | ||
|
1162 | data = { | |
|
1163 | 'users_group_id': user_group.users_group_id, | |
|
1164 | 'group_name': user_group.users_group_name, | |
|
1165 | 'group_description': user_group.user_group_description, | |
|
1166 | 'active': user_group.users_group_active, | |
|
1167 | 'owner': user_group.user.username, | |
|
1168 | } | |
|
1169 | if with_group_members: | |
|
1170 | users = [] | |
|
1171 | for user in user_group.members: | |
|
1172 | user = user.user | |
|
1173 | users.append(user.get_api_data(include_secrets=include_secrets)) | |
|
1174 | data['users'] = users | |
|
1175 | ||
|
1176 | return data | |
|
1177 | ||
|
1178 | ||
|
1179 | class UserGroupMember(Base, BaseModel): | |
|
1180 | __tablename__ = 'users_groups_members' | |
|
1181 | __table_args__ = ( | |
|
1182 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
1183 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
1184 | ) | |
|
1185 | ||
|
1186 | users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
1187 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
|
1188 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
|
1189 | ||
|
1190 | user = relationship('User', lazy='joined') | |
|
1191 | users_group = relationship('UserGroup') | |
|
1192 | ||
|
1193 | def __init__(self, gr_id='', u_id=''): | |
|
1194 | self.users_group_id = gr_id | |
|
1195 | self.user_id = u_id | |
|
1196 | ||
|
1197 | ||
|
1198 | class RepositoryField(Base, BaseModel): | |
|
1199 | __tablename__ = 'repositories_fields' | |
|
1200 | __table_args__ = ( | |
|
1201 | UniqueConstraint('repository_id', 'field_key'), # no-multi field | |
|
1202 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
1203 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
1204 | ) | |
|
1205 | PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields | |
|
1206 | ||
|
1207 | repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
1208 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |
|
1209 | field_key = Column("field_key", String(250)) | |
|
1210 | field_label = Column("field_label", String(1024), nullable=False) | |
|
1211 | field_value = Column("field_value", String(10000), nullable=False) | |
|
1212 | field_desc = Column("field_desc", String(1024), nullable=False) | |
|
1213 | field_type = Column("field_type", String(255), nullable=False, unique=None) | |
|
1214 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
|
1215 | ||
|
1216 | repository = relationship('Repository') | |
|
1217 | ||
|
1218 | @property | |
|
1219 | def field_key_prefixed(self): | |
|
1220 | return 'ex_%s' % self.field_key | |
|
1221 | ||
|
1222 | @classmethod | |
|
1223 | def un_prefix_key(cls, key): | |
|
1224 | if key.startswith(cls.PREFIX): | |
|
1225 | return key[len(cls.PREFIX):] | |
|
1226 | return key | |
|
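| # For example, with PREFIX = 'ex_', un_prefix_key('ex_ticket_id') returns | |
|
| # 'ticket_id', while keys without the prefix are returned unchanged. | |
|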
1227 | ||
|
1228 | @classmethod | |
|
1229 | def get_by_key_name(cls, key, repo): | |
|
1230 | row = cls.query()\ | |
|
1231 | .filter(cls.repository == repo)\ | |
|
1232 | .filter(cls.field_key == key).scalar() | |
|
1233 | return row | |
|
1234 | ||
|
1235 | ||
|
1236 | class Repository(Base, BaseModel): | |
|
1237 | __tablename__ = 'repositories' | |
|
1238 | __table_args__ = ( | |
|
1239 | Index('r_repo_name_idx', 'repo_name', mysql_length=255), | |
|
1240 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
1241 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
1242 | ) | |
|
1243 | DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}' | |
|
1244 | DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}' | |
|
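| # Illustrative expansion of the templates above (host, user and repo names | |
|
| # are made up): | |
|
| #   '{scheme}://{user}@{netloc}/{repo}'    -> 'https://jane@code.example.com/group/repo' | |
|
| #   '{scheme}://{user}@{netloc}/_{repoid}' -> 'https://jane@code.example.com/_42' | |
|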
1245 | ||
|
1246 | STATE_CREATED = 'repo_state_created' | |
|
1247 | STATE_PENDING = 'repo_state_pending' | |
|
1248 | STATE_ERROR = 'repo_state_error' | |
|
1249 | ||
|
1250 | LOCK_AUTOMATIC = 'lock_auto' | |
|
1251 | LOCK_API = 'lock_api' | |
|
1252 | LOCK_WEB = 'lock_web' | |
|
1253 | LOCK_PULL = 'lock_pull' | |
|
1254 | ||
|
1255 | NAME_SEP = URL_SEP | |
|
1256 | ||
|
1257 | repo_id = Column( | |
|
1258 | "repo_id", Integer(), nullable=False, unique=True, default=None, | |
|
1259 | primary_key=True) | |
|
1260 | _repo_name = Column( | |
|
1261 | "repo_name", Text(), nullable=False, default=None) | |
|
1262 | _repo_name_hash = Column( | |
|
1263 | "repo_name_hash", String(255), nullable=False, unique=True) | |
|
1264 | repo_state = Column("repo_state", String(255), nullable=True) | |
|
1265 | ||
|
1266 | clone_uri = Column( | |
|
1267 | "clone_uri", EncryptedTextValue(), nullable=True, unique=False, | |
|
1268 | default=None) | |
|
1269 | repo_type = Column( | |
|
1270 | "repo_type", String(255), nullable=False, unique=False, default=None) | |
|
1271 | user_id = Column( | |
|
1272 | "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, | |
|
1273 | unique=False, default=None) | |
|
1274 | private = Column( | |
|
1275 | "private", Boolean(), nullable=True, unique=None, default=None) | |
|
1276 | enable_statistics = Column( | |
|
1277 | "statistics", Boolean(), nullable=True, unique=None, default=True) | |
|
1278 | enable_downloads = Column( | |
|
1279 | "downloads", Boolean(), nullable=True, unique=None, default=True) | |
|
1280 | description = Column( | |
|
1281 | "description", String(10000), nullable=True, unique=None, default=None) | |
|
1282 | created_on = Column( | |
|
1283 | 'created_on', DateTime(timezone=False), nullable=True, unique=None, | |
|
1284 | default=datetime.datetime.now) | |
|
1285 | updated_on = Column( | |
|
1286 | 'updated_on', DateTime(timezone=False), nullable=True, unique=None, | |
|
1287 | default=datetime.datetime.now) | |
|
1288 | _landing_revision = Column( | |
|
1289 | "landing_revision", String(255), nullable=False, unique=False, | |
|
1290 | default=None) | |
|
1291 | enable_locking = Column( | |
|
1292 | "enable_locking", Boolean(), nullable=False, unique=None, | |
|
1293 | default=False) | |
|
1294 | _locked = Column( | |
|
1295 | "locked", String(255), nullable=True, unique=False, default=None) | |
|
1296 | _changeset_cache = Column( | |
|
1297 | "changeset_cache", LargeBinary(), nullable=True) # JSON data | |
|
1298 | ||
|
1299 | fork_id = Column( | |
|
1300 | "fork_id", Integer(), ForeignKey('repositories.repo_id'), | |
|
1301 | nullable=True, unique=False, default=None) | |
|
1302 | group_id = Column( | |
|
1303 | "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, | |
|
1304 | unique=False, default=None) | |
|
1305 | ||
|
1306 | user = relationship('User', lazy='joined') | |
|
1307 | fork = relationship('Repository', remote_side=repo_id, lazy='joined') | |
|
1308 | group = relationship('RepoGroup', lazy='joined') | |
|
1309 | repo_to_perm = relationship( | |
|
1310 | 'UserRepoToPerm', cascade='all', | |
|
1311 | order_by='UserRepoToPerm.repo_to_perm_id') | |
|
1312 | users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all') | |
|
1313 | stats = relationship('Statistics', cascade='all', uselist=False) | |
|
1314 | ||
|
1315 | followers = relationship( | |
|
1316 | 'UserFollowing', | |
|
1317 | primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', | |
|
1318 | cascade='all') | |
|
1319 | extra_fields = relationship( | |
|
1320 | 'RepositoryField', cascade="all, delete, delete-orphan") | |
|
1321 | logs = relationship('UserLog') | |
|
1322 | comments = relationship( | |
|
1323 | 'ChangesetComment', cascade="all, delete, delete-orphan") | |
|
1324 | pull_requests_source = relationship( | |
|
1325 | 'PullRequest', | |
|
1326 | primaryjoin='PullRequest.source_repo_id==Repository.repo_id', | |
|
1327 | cascade="all, delete, delete-orphan") | |
|
1328 | pull_requests_target = relationship( | |
|
1329 | 'PullRequest', | |
|
1330 | primaryjoin='PullRequest.target_repo_id==Repository.repo_id', | |
|
1331 | cascade="all, delete, delete-orphan") | |
|
1332 | ui = relationship('RepoRhodeCodeUi', cascade="all") | |
|
1333 | settings = relationship('RepoRhodeCodeSetting', cascade="all") | |
|
1334 | integrations = relationship('Integration', | |
|
1335 | cascade="all, delete, delete-orphan") | |
|
1336 | ||
|
1337 | def __unicode__(self): | |
|
1338 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, | |
|
1339 | safe_unicode(self.repo_name)) | |
|
1340 | ||
|
1341 | @hybrid_property | |
|
1342 | def landing_rev(self): | |
|
1343 | # always should return [rev_type, rev] | |
|
1344 | if self._landing_revision: | |
|
1345 | _rev_info = self._landing_revision.split(':') | |
|
1346 | if len(_rev_info) < 2: | |
|
1347 | _rev_info.insert(0, 'rev') | |
|
1348 | return [_rev_info[0], _rev_info[1]] | |
|
1349 | return [None, None] | |
|
1350 | ||
|
1351 | @landing_rev.setter | |
|
1352 | def landing_rev(self, val): | |
|
1353 | if ':' not in val: | |
|
1354 | raise ValueError('value must be delimited with `:` and consist ' | |
|
1355 | 'of <rev_type>:<rev>, got %s instead' % val) | |
|
1356 | self._landing_revision = val | |
|
1357 | ||
|
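The _landing_revision column stores the landing revision as a single '<rev_type>:<rev>' string (for example 'branch:default'); the getter above splits it back into a two-element list and the setter enforces the delimiter. A minimal standalone sketch of that round-trip, with made-up values:

    # Sketch of the '<rev_type>:<rev>' encoding used by landing_rev.
    # 'branch:default' and 'abc123' are illustrative values only.
    def parse_landing_rev(raw):
        if not raw:
            return [None, None]
        parts = raw.split(':')
        if len(parts) < 2:
            # a bare revision without a type defaults to 'rev'
            parts.insert(0, 'rev')
        return [parts[0], parts[1]]

    assert parse_landing_rev('branch:default') == ['branch', 'default']
    assert parse_landing_rev('abc123') == ['rev', 'abc123']
    assert parse_landing_rev(None) == [None, None]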
1358 | @hybrid_property | |
|
1359 | def locked(self): | |
|
1360 | if self._locked: | |
|
1361 | user_id, timelocked, reason = self._locked.split(':') | |
|
1362 | lock_values = int(user_id), timelocked, reason | |
|
1363 | else: | |
|
1364 | lock_values = [None, None, None] | |
|
1365 | return lock_values | |
|
1366 | ||
|
1367 | @locked.setter | |
|
1368 | def locked(self, val): | |
|
1369 | if val and isinstance(val, (list, tuple)): | |
|
1370 | self._locked = ':'.join(map(str, val)) | |
|
1371 | else: | |
|
1372 | self._locked = None | |
|
1373 | ||
|
1374 | @hybrid_property | |
|
1375 | def changeset_cache(self): | |
|
1376 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
|
1377 | dummy = EmptyCommit().__json__() | |
|
1378 | if not self._changeset_cache: | |
|
1379 | return dummy | |
|
1380 | try: | |
|
1381 | return json.loads(self._changeset_cache) | |
|
1382 | except TypeError: | |
|
1383 | return dummy | |
|
1384 | except Exception: | |
|
1385 | log.error(traceback.format_exc()) | |
|
1386 | return dummy | |
|
1387 | ||
|
1388 | @changeset_cache.setter | |
|
1389 | def changeset_cache(self, val): | |
|
1390 | try: | |
|
1391 | self._changeset_cache = json.dumps(val) | |
|
1392 | except Exception: | |
|
1393 | log.error(traceback.format_exc()) | |
|
1394 | ||
|
1395 | @hybrid_property | |
|
1396 | def repo_name(self): | |
|
1397 | return self._repo_name | |
|
1398 | ||
|
1399 | @repo_name.setter | |
|
1400 | def repo_name(self, value): | |
|
1401 | self._repo_name = value | |
|
1402 | self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest() | |
|
1403 | ||
|
1404 | @classmethod | |
|
1405 | def normalize_repo_name(cls, repo_name): | |
|
1406 | """ | |
|
1407 | Normalizes an os-specific repo_name to the format stored internally in the | |
|
1408 | database, using URL_SEP | |
|
1409 | ||
|
1410 | :param cls: | |
|
1411 | :param repo_name: | |
|
1412 | """ | |
|
1413 | return cls.NAME_SEP.join(repo_name.split(os.sep)) | |
|
1414 | ||
|
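normalize_repo_name only does something on platforms where os.sep differs from URL_SEP ('/'). A standalone sketch of the same transformation, using ntpath to simulate a Windows separator (the path below is made up):

    import ntpath  # simulate a Windows os.sep without being on Windows

    def normalize(repo_name, sep=ntpath.sep):
        # same idea as Repository.normalize_repo_name: re-join on '/'
        return '/'.join(repo_name.split(sep))

    assert normalize(r'group\subgroup\repo') == 'group/subgroup/repo'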
1415 | @classmethod | |
|
1416 | def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False): | |
|
1417 | session = Session() | |
|
1418 | q = session.query(cls).filter(cls.repo_name == repo_name) | |
|
1419 | ||
|
1420 | if cache: | |
|
1421 | if identity_cache: | |
|
1422 | val = cls.identity_cache(session, 'repo_name', repo_name) | |
|
1423 | if val: | |
|
1424 | return val | |
|
1425 | else: | |
|
1426 | q = q.options( | |
|
1427 | FromCache("sql_cache_short", | |
|
1428 | "get_repo_by_name_%s" % _hash_key(repo_name))) | |
|
1429 | ||
|
1430 | return q.scalar() | |
|
1431 | ||
|
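A usage sketch for the lookup above, assuming an initialized RhodeCode environment and a repository named 'my-group/my-repo' (both assumptions, not part of this file):

    from rhodecode.model.db import Repository

    # plain query, no caching
    repo = Repository.get_by_repo_name('my-group/my-repo')

    # cached via the beaker-backed 'sql_cache_short' region
    repo = Repository.get_by_repo_name('my-group/my-repo', cache=True)

    # cached via the session identity map instead of beaker
    repo = Repository.get_by_repo_name(
        'my-group/my-repo', cache=True, identity_cache=True)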
1432 | @classmethod | |
|
1433 | def get_by_full_path(cls, repo_full_path): | |
|
1434 | repo_name = repo_full_path.split(cls.base_path(), 1)[-1] | |
|
1435 | repo_name = cls.normalize_repo_name(repo_name) | |
|
1436 | return cls.get_by_repo_name(repo_name.strip(URL_SEP)) | |
|
1437 | ||
|
1438 | @classmethod | |
|
1439 | def get_repo_forks(cls, repo_id): | |
|
1440 | return cls.query().filter(Repository.fork_id == repo_id) | |
|
1441 | ||
|
1442 | @classmethod | |
|
1443 | def base_path(cls): | |
|
1444 | """ | |
|
1445 | Returns the base path where all repos are stored | |
|
1446 | ||
|
1447 | :param cls: | |
|
1448 | """ | |
|
1449 | q = Session().query(RhodeCodeUi)\ | |
|
1450 | .filter(RhodeCodeUi.ui_key == cls.NAME_SEP) | |
|
1451 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) | |
|
1452 | return q.one().ui_value | |
|
1453 | ||
|
1454 | @classmethod | |
|
1455 | def is_valid(cls, repo_name): | |
|
1456 | """ | |
|
1457 | returns True if given repo name is a valid filesystem repository | |
|
1458 | ||
|
1459 | :param cls: | |
|
1460 | :param repo_name: | |
|
1461 | """ | |
|
1462 | from rhodecode.lib.utils import is_valid_repo | |
|
1463 | ||
|
1464 | return is_valid_repo(repo_name, cls.base_path()) | |
|
1465 | ||
|
1466 | @classmethod | |
|
1467 | def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None), | |
|
1468 | case_insensitive=True): | |
|
1469 | q = Repository.query() | |
|
1470 | ||
|
1471 | if not isinstance(user_id, Optional): | |
|
1472 | q = q.filter(Repository.user_id == user_id) | |
|
1473 | ||
|
1474 | if not isinstance(group_id, Optional): | |
|
1475 | q = q.filter(Repository.group_id == group_id) | |
|
1476 | ||
|
1477 | if case_insensitive: | |
|
1478 | q = q.order_by(func.lower(Repository.repo_name)) | |
|
1479 | else: | |
|
1480 | q = q.order_by(Repository.repo_name) | |
|
1481 | return q.all() | |
|
1482 | ||
|
1483 | @property | |
|
1484 | def forks(self): | |
|
1485 | """ | |
|
1486 | Return forks of this repo | |
|
1487 | """ | |
|
1488 | return Repository.get_repo_forks(self.repo_id) | |
|
1489 | ||
|
1490 | @property | |
|
1491 | def parent(self): | |
|
1492 | """ | |
|
1493 | Returns fork parent | |
|
1494 | """ | |
|
1495 | return self.fork | |
|
1496 | ||
|
1497 | @property | |
|
1498 | def just_name(self): | |
|
1499 | return self.repo_name.split(self.NAME_SEP)[-1] | |
|
1500 | ||
|
1501 | @property | |
|
1502 | def groups_with_parents(self): | |
|
1503 | groups = [] | |
|
1504 | if self.group is None: | |
|
1505 | return groups | |
|
1506 | ||
|
1507 | cur_gr = self.group | |
|
1508 | groups.insert(0, cur_gr) | |
|
1509 | while 1: | |
|
1510 | gr = getattr(cur_gr, 'parent_group', None) | |
|
1511 | cur_gr = cur_gr.parent_group | |
|
1512 | if gr is None: | |
|
1513 | break | |
|
1514 | groups.insert(0, gr) | |
|
1515 | ||
|
1516 | return groups | |
|
1517 | ||
|
1518 | @property | |
|
1519 | def groups_and_repo(self): | |
|
1520 | return self.groups_with_parents, self | |
|
1521 | ||
|
1522 | @LazyProperty | |
|
1523 | def repo_path(self): | |
|
1524 | """ | |
|
1525 | Returns the full base path for this repository, i.e. where it actually | |
|
1526 | exists on the filesystem | |
|
1527 | """ | |
|
1528 | q = Session().query(RhodeCodeUi).filter( | |
|
1529 | RhodeCodeUi.ui_key == self.NAME_SEP) | |
|
1530 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) | |
|
1531 | return q.one().ui_value | |
|
1532 | ||
|
1533 | @property | |
|
1534 | def repo_full_path(self): | |
|
1535 | p = [self.repo_path] | |
|
1536 | # we need to split the name by / since this is how we store the | |
|
1537 | # names in the database, but that eventually needs to be converted | |
|
1538 | # into a valid system path | |
|
1539 | p += self.repo_name.split(self.NAME_SEP) | |
|
1540 | return os.path.join(*map(safe_unicode, p)) | |
|
1541 | ||
|
1542 | @property | |
|
1543 | def cache_keys(self): | |
|
1544 | """ | |
|
1545 | Returns associated cache keys for that repo | |
|
1546 | """ | |
|
1547 | return CacheKey.query()\ | |
|
1548 | .filter(CacheKey.cache_args == self.repo_name)\ | |
|
1549 | .order_by(CacheKey.cache_key)\ | |
|
1550 | .all() | |
|
1551 | ||
|
1552 | def get_new_name(self, repo_name): | |
|
1553 | """ | |
|
1554 | returns new full repository name based on assigned group and new name | |
|
1555 | ||
|
1556 | :param repo_name: | |
|
1557 | """ | |
|
1558 | path_prefix = self.group.full_path_splitted if self.group else [] | |
|
1559 | return self.NAME_SEP.join(path_prefix + [repo_name]) | |
|
1560 | ||
|
1561 | @property | |
|
1562 | def _config(self): | |
|
1563 | """ | |
|
1564 | Returns db based config object. | |
|
1565 | """ | |
|
1566 | from rhodecode.lib.utils import make_db_config | |
|
1567 | return make_db_config(clear_session=False, repo=self) | |
|
1568 | ||
|
1569 | def permissions(self, with_admins=True, with_owner=True): | |
|
1570 | q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self) | |
|
1571 | q = q.options(joinedload(UserRepoToPerm.repository), | |
|
1572 | joinedload(UserRepoToPerm.user), | |
|
1573 | joinedload(UserRepoToPerm.permission),) | |
|
1574 | ||
|
1575 | # get owners, admins and permissions. We rewrite the objects returned | |
|
1576 | # by sqlalchemy into plain AttributeDicts, because the sqlalchemy | |
|
1577 | # session keeps a global reference and changing one object propagates | |
|
1578 | # to all others. Without the copy, if an admin is also the owner, | |
|
1579 | # setting admin_row on one row would change both objects. | |
|
1580 | perm_rows = [] | |
|
1581 | for _usr in q.all(): | |
|
1582 | usr = AttributeDict(_usr.user.get_dict()) | |
|
1583 | usr.permission = _usr.permission.permission_name | |
|
1584 | perm_rows.append(usr) | |
|
1585 | ||
|
1586 | # filter the perm rows by 'default' first and then sort them by | |
|
1587 | # admin,write,read,none permissions sorted again alphabetically in | |
|
1588 | # each group | |
|
1589 | perm_rows = sorted(perm_rows, key=display_sort) | |
|
1590 | ||
|
1591 | _admin_perm = 'repository.admin' | |
|
1592 | owner_row = [] | |
|
1593 | if with_owner: | |
|
1594 | usr = AttributeDict(self.user.get_dict()) | |
|
1595 | usr.owner_row = True | |
|
1596 | usr.permission = _admin_perm | |
|
1597 | owner_row.append(usr) | |
|
1598 | ||
|
1599 | super_admin_rows = [] | |
|
1600 | if with_admins: | |
|
1601 | for usr in User.get_all_super_admins(): | |
|
1602 | # if this admin is also owner, don't double the record | |
|
1603 | if usr.user_id == owner_row[0].user_id: | |
|
1604 | owner_row[0].admin_row = True | |
|
1605 | else: | |
|
1606 | usr = AttributeDict(usr.get_dict()) | |
|
1607 | usr.admin_row = True | |
|
1608 | usr.permission = _admin_perm | |
|
1609 | super_admin_rows.append(usr) | |
|
1610 | ||
|
1611 | return super_admin_rows + owner_row + perm_rows | |
|
1612 | ||
|
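The copy-before-annotate pattern used above (AttributeDict copies instead of the session-bound user objects) can be shown in isolation. The snippet below is a standalone illustration with a tiny stand-in class, not RhodeCode API:

    class AttrDict(dict):
        # tiny stand-in for rhodecode.lib.utils2.AttributeDict
        __getattr__ = dict.get
        __setattr__ = dict.__setitem__

    shared_user = {'username': 'admin', 'user_id': 2}  # shared, session-like object

    # copy first, then annotate the copy; the shared object stays untouched
    row = AttrDict(shared_user)
    row.permission = 'repository.admin'
    row.admin_row = True

    assert 'permission' not in shared_user
    assert row.permission == 'repository.admin'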
1613 | def permission_user_groups(self): | |
|
1614 | q = UserGroupRepoToPerm.query().filter( | |
|
1615 | UserGroupRepoToPerm.repository == self) | |
|
1616 | q = q.options(joinedload(UserGroupRepoToPerm.repository), | |
|
1617 | joinedload(UserGroupRepoToPerm.users_group), | |
|
1618 | joinedload(UserGroupRepoToPerm.permission),) | |
|
1619 | ||
|
1620 | perm_rows = [] | |
|
1621 | for _user_group in q.all(): | |
|
1622 | usr = AttributeDict(_user_group.users_group.get_dict()) | |
|
1623 | usr.permission = _user_group.permission.permission_name | |
|
1624 | perm_rows.append(usr) | |
|
1625 | ||
|
1626 | return perm_rows | |
|
1627 | ||
|
1628 | def get_api_data(self, include_secrets=False): | |
|
1629 | """ | |
|
1630 | Common function for generating repo api data | |
|
1631 | ||
|
1632 | :param include_secrets: See :meth:`User.get_api_data`. | |
|
1633 | ||
|
1634 | """ | |
|
1635 | # TODO: mikhail: there is an anti-pattern here; we probably need to | |
|
1636 | # move these methods to the models level. | |
|
1637 | from rhodecode.model.settings import SettingsModel | |
|
1638 | ||
|
1639 | repo = self | |
|
1640 | _user_id, _time, _reason = self.locked | |
|
1641 | ||
|
1642 | data = { | |
|
1643 | 'repo_id': repo.repo_id, | |
|
1644 | 'repo_name': repo.repo_name, | |
|
1645 | 'repo_type': repo.repo_type, | |
|
1646 | 'clone_uri': repo.clone_uri or '', | |
|
1647 | 'url': url('summary_home', repo_name=self.repo_name, qualified=True), | |
|
1648 | 'private': repo.private, | |
|
1649 | 'created_on': repo.created_on, | |
|
1650 | 'description': repo.description, | |
|
1651 | 'landing_rev': repo.landing_rev, | |
|
1652 | 'owner': repo.user.username, | |
|
1653 | 'fork_of': repo.fork.repo_name if repo.fork else None, | |
|
1654 | 'enable_statistics': repo.enable_statistics, | |
|
1655 | 'enable_locking': repo.enable_locking, | |
|
1656 | 'enable_downloads': repo.enable_downloads, | |
|
1657 | 'last_changeset': repo.changeset_cache, | |
|
1658 | 'locked_by': User.get(_user_id).get_api_data( | |
|
1659 | include_secrets=include_secrets) if _user_id else None, | |
|
1660 | 'locked_date': time_to_datetime(_time) if _time else None, | |
|
1661 | 'lock_reason': _reason if _reason else None, | |
|
1662 | } | |
|
1663 | ||
|
1664 | # TODO: mikhail: should be per-repo settings here | |
|
1665 | rc_config = SettingsModel().get_all_settings() | |
|
1666 | repository_fields = str2bool( | |
|
1667 | rc_config.get('rhodecode_repository_fields')) | |
|
1668 | if repository_fields: | |
|
1669 | for f in self.extra_fields: | |
|
1670 | data[f.field_key_prefixed] = f.field_value | |
|
1671 | ||
|
1672 | return data | |
|
1673 | ||
|
1674 | @classmethod | |
|
1675 | def lock(cls, repo, user_id, lock_time=None, lock_reason=None): | |
|
1676 | if not lock_time: | |
|
1677 | lock_time = time.time() | |
|
1678 | if not lock_reason: | |
|
1679 | lock_reason = cls.LOCK_AUTOMATIC | |
|
1680 | repo.locked = [user_id, lock_time, lock_reason] | |
|
1681 | Session().add(repo) | |
|
1682 | Session().commit() | |
|
1683 | ||
|
1684 | @classmethod | |
|
1685 | def unlock(cls, repo): | |
|
1686 | repo.locked = None | |
|
1687 | Session().add(repo) | |
|
1688 | Session().commit() | |
|
1689 | ||
|
1690 | @classmethod | |
|
1691 | def getlock(cls, repo): | |
|
1692 | return repo.locked | |
|
1693 | ||
|
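The lock value itself is stored by the locked setter further up as a single 'user_id:timestamp:reason' string; lock(), unlock() and getlock() are thin wrappers that also commit the session. A usage sketch, assuming a configured RhodeCode environment and an existing repository (both assumptions):

    import time
    from rhodecode.model.db import Repository

    repo = Repository.get_by_repo_name('my-group/my-repo')  # assumed to exist

    # lock on behalf of user id 2; the reason defaults to LOCK_AUTOMATIC
    Repository.lock(repo, user_id=2, lock_time=time.time(),
                    lock_reason=Repository.LOCK_API)

    user_id, locked_at, reason = repo.locked  # e.g. (2, '<timestamp>', 'lock_api')

    Repository.unlock(repo)
    assert repo.locked == [None, None, None]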
1694 | def is_user_lock(self, user_id): | |
|
1695 | if self.locked[0]: | |
|
1696 | lock_user_id = safe_int(self.locked[0]) | |
|
1697 | user_id = safe_int(user_id) | |
|
1698 | # both are ints, and they are equal | |
|
1699 | return all([lock_user_id, user_id]) and lock_user_id == user_id | |
|
1700 | ||
|
1701 | return False | |
|
1702 | ||
|
1703 | def get_locking_state(self, action, user_id, only_when_enabled=True): | |
|
1704 | """ | |
|
1705 | Checks locking on this repository. If locking is enabled and a lock is | |
|
1706 | present, returns a tuple of (make_lock, locked, locked_by). | |
|
1707 | make_lock can have 3 states: None (do nothing), True (make a lock) and | |
|
1708 | False (release a lock). This value is later propagated to hooks, which | |
|
1709 | do the locking. Think of it as a signal telling hooks what to do. | |
|
1710 | ||
|
1711 | """ | |
|
1712 | # TODO: johbo: This is part of the business logic and should be moved | |
|
1713 | # into the RepositoryModel. | |
|
1714 | ||
|
1715 | if action not in ('push', 'pull'): | |
|
1716 | raise ValueError("Invalid action value: %s" % repr(action)) | |
|
1717 | ||
|
1718 | # defines if locked error should be thrown to user | |
|
1719 | currently_locked = False | |
|
1720 | # defines if new lock should be made, tri-state | |
|
1721 | make_lock = None | |
|
1722 | repo = self | |
|
1723 | user = User.get(user_id) | |
|
1724 | ||
|
1725 | lock_info = repo.locked | |
|
1726 | ||
|
1727 | if repo and (repo.enable_locking or not only_when_enabled): | |
|
1728 | if action == 'push': | |
|
1729 | # check if it's already locked; if it is, compare users | |
|
1730 | locked_by_user_id = lock_info[0] | |
|
1731 | if user.user_id == locked_by_user_id: | |
|
1732 | log.debug( | |
|
1733 | 'Got `push` action from user %s, now unlocking', user) | |
|
1734 | # unlock if we have push from user who locked | |
|
1735 | make_lock = False | |
|
1736 | else: | |
|
1737 | # we're not the same user who locked it, so reject with the status | |
|
1738 | # code defined in settings (default is HTTP 423 Locked) | |
|
1739 | log.debug('Repo %s is currently locked by %s', repo, user) | |
|
1740 | currently_locked = True | |
|
1741 | elif action == 'pull': | |
|
1742 | # [0] user [1] date | |
|
1743 | if lock_info[0] and lock_info[1]: | |
|
1744 | log.debug('Repo %s is currently locked by %s', repo, user) | |
|
1745 | currently_locked = True | |
|
1746 | else: | |
|
1747 | log.debug('Setting lock on repo %s by %s', repo, user) | |
|
1748 | make_lock = True | |
|
1749 | ||
|
1750 | else: | |
|
1751 | log.debug('Repository %s does not have locking enabled', repo) | |
|
1752 | ||
|
1753 | log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s', | |
|
1754 | make_lock, currently_locked, lock_info) | |
|
1755 | ||
|
1756 | from rhodecode.lib.auth import HasRepoPermissionAny | |
|
1757 | perm_check = HasRepoPermissionAny('repository.write', 'repository.admin') | |
|
1758 | if make_lock and not perm_check(repo_name=repo.repo_name, user=user): | |
|
1759 | # if we don't have at least write permission we cannot make a lock | |
|
1760 | log.debug('lock state reset back to FALSE due to lack ' | |
|
1761 | 'of at least write permission') | |
|
1762 | make_lock = False | |
|
1763 | ||
|
1764 | return make_lock, currently_locked, lock_info | |
|
1765 | ||
|
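A sketch of how a caller (typically a VCS hook) might consume the tri-state returned above; repo, Repository and the user id are assumed to exist as in the earlier sketch and are not part of this file:

    # make_lock is tri-state: True -> set a lock, False -> release it,
    # None -> leave the lock state alone.
    make_lock, currently_locked, lock_info = repo.get_locking_state(
        'pull', user_id=2, only_when_enabled=True)

    if currently_locked:
        # the calling hook would typically answer with HTTP 423 Locked
        raise Exception('repository locked by user id %s' % lock_info[0])
    elif make_lock is True:
        Repository.lock(repo, 2)
    elif make_lock is False:
        Repository.unlock(repo)
    # make_lock is None -> leave the lock state unchanged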
1766 | @property | |
|
1767 | def last_db_change(self): | |
|
1768 | return self.updated_on | |
|
1769 | ||
|
1770 | @property | |
|
1771 | def clone_uri_hidden(self): | |
|
1772 | clone_uri = self.clone_uri | |
|
1773 | if clone_uri: | |
|
1774 | import urlobject | |
|
1775 | url_obj = urlobject.URLObject(clone_uri) | |
|
1776 | if url_obj.password: | |
|
1777 | clone_uri = url_obj.with_password('*****') | |
|
1778 | return clone_uri | |
|
1779 | ||
|
1780 | def clone_url(self, **override): | |
|
1781 | qualified_home_url = url('home', qualified=True) | |
|
1782 | ||
|
1783 | uri_tmpl = None | |
|
1784 | if 'with_id' in override: | |
|
1785 | uri_tmpl = self.DEFAULT_CLONE_URI_ID | |
|
1786 | del override['with_id'] | |
|
1787 | ||
|
1788 | if 'uri_tmpl' in override: | |
|
1789 | uri_tmpl = override['uri_tmpl'] | |
|
1790 | del override['uri_tmpl'] | |
|
1791 | ||
|
1792 | # we didn't override our tmpl from **overrides | |
|
1793 | if not uri_tmpl: | |
|
1794 | uri_tmpl = self.DEFAULT_CLONE_URI | |
|
1795 | try: | |
|
1796 | from pylons import tmpl_context as c | |
|
1797 | uri_tmpl = c.clone_uri_tmpl | |
|
1798 | except Exception: | |
|
1799 | # in any case if we call this outside of request context, | |
|
1800 | # ie, not having tmpl_context set up | |
|
1801 | pass | |
|
1802 | ||
|
1803 | return get_clone_url(uri_tmpl=uri_tmpl, | |
|
1804 | qualifed_home_url=qualified_home_url, | |
|
1805 | repo_name=self.repo_name, | |
|
1806 | repo_id=self.repo_id, **override) | |
|
1807 | ||
|
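clone_url() resolves its template in three steps: an explicit uri_tmpl override wins, with_id=True switches to the id-based template, and otherwise the template from tmpl_context (falling back to DEFAULT_CLONE_URI) is used. A hedged usage sketch; the custom template string is only an example, not a RhodeCode default:

    # `repo` assumed to be a Repository instance, e.g. from get_by_repo_name()
    default_url = repo.clone_url()            # c.clone_uri_tmpl or DEFAULT_CLONE_URI
    by_id_url = repo.clone_url(with_id=True)  # DEFAULT_CLONE_URI_ID (URL by repo_id)

    # an explicit template always wins; placeholder names are illustrative
    custom_url = repo.clone_url(uri_tmpl='https://code.example.com/{repo}')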
1808 | def set_state(self, state): | |
|
1809 | self.repo_state = state | |
|
1810 | Session().add(self) | |
|
1811 | #========================================================================== | |
|
1812 | # SCM PROPERTIES | |
|
1813 | #========================================================================== | |
|
1814 | ||
|
1815 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None): | |
|
1816 | return get_commit_safe( | |
|
1817 | self.scm_instance(), commit_id, commit_idx, pre_load=pre_load) | |
|
1818 | ||
|
1819 | def get_changeset(self, rev=None, pre_load=None): | |
|
1820 | warnings.warn("Use get_commit", DeprecationWarning) | |
|
1821 | commit_id = None | |
|
1822 | commit_idx = None | |
|
1823 | if isinstance(rev, basestring): | |
|
1824 | commit_id = rev | |
|
1825 | else: | |
|
1826 | commit_idx = rev | |
|
1827 | return self.get_commit(commit_id=commit_id, commit_idx=commit_idx, | |
|
1828 | pre_load=pre_load) | |
|
1829 | ||
|
1830 | def get_landing_commit(self): | |
|
1831 | """ | |
|
1832 | Returns landing commit, or if that doesn't exist returns the tip | |
|
1833 | """ | |
|
1834 | _rev_type, _rev = self.landing_rev | |
|
1835 | commit = self.get_commit(_rev) | |
|
1836 | if isinstance(commit, EmptyCommit): | |
|
1837 | return self.get_commit() | |
|
1838 | return commit | |
|
1839 | ||
|
1840 | def update_commit_cache(self, cs_cache=None, config=None): | |
|
1841 | """ | |
|
1842 | Update cache of last changeset for repository, keys should be:: | |
|
1843 | ||
|
1844 | short_id | |
|
1845 | raw_id | |
|
1846 | revision | |
|
1847 | parents | |
|
1848 | message | |
|
1849 | date | |
|
1850 | author | |
|
1851 | ||
|
1852 | :param cs_cache: | |
|
1853 | """ | |
|
1854 | from rhodecode.lib.vcs.backends.base import BaseChangeset | |
|
1855 | if cs_cache is None: | |
|
1856 | # use no-cache version here | |
|
1857 | scm_repo = self.scm_instance(cache=False, config=config) | |
|
1858 | if scm_repo: | |
|
1859 | cs_cache = scm_repo.get_commit( | |
|
1860 | pre_load=["author", "date", "message", "parents"]) | |
|
1861 | else: | |
|
1862 | cs_cache = EmptyCommit() | |
|
1863 | ||
|
1864 | if isinstance(cs_cache, BaseChangeset): | |
|
1865 | cs_cache = cs_cache.__json__() | |
|
1866 | ||
|
1867 | def is_outdated(new_cs_cache): | |
|
1868 | if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or | |
|
1869 | new_cs_cache['revision'] != self.changeset_cache['revision']): | |
|
1870 | return True | |
|
1871 | return False | |
|
1872 | ||
|
1873 | # check if we maybe already have the latest cached revision | |
|
1874 | if is_outdated(cs_cache) or not self.changeset_cache: | |
|
1875 | _default = datetime.datetime.fromtimestamp(0) | |
|
1876 | last_change = cs_cache.get('date') or _default | |
|
1877 | log.debug('updated repo %s with new cs cache %s', | |
|
1878 | self.repo_name, cs_cache) | |
|
1879 | self.updated_on = last_change | |
|
1880 | self.changeset_cache = cs_cache | |
|
1881 | Session().add(self) | |
|
1882 | Session().commit() | |
|
1883 | else: | |
|
1884 | log.debug('Skipping update_commit_cache for repo:`%s` ' | |
|
1885 | 'commit already with latest changes', self.repo_name) | |
|
1886 | ||
|
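When cs_cache is passed explicitly it should carry the keys listed in the docstring above, since is_outdated() compares raw_id and revision and the date feeds updated_on. An illustrative call with made-up values (repo assumed to be a Repository instance):

    import datetime

    cs_cache = {
        'short_id': 'abcdef123456',
        'raw_id': 'abcdef1234567890abcdef1234567890abcdef12',
        'revision': 42,
        'parents': [{'raw_id': '1111111111111111111111111111111111111111'}],
        'message': 'example commit message',
        'date': datetime.datetime(2016, 1, 1, 12, 0, 0),
        'author': 'Jane Doe <jane@example.com>',
    }
    # only persists when raw_id/revision differ from the cached values
    repo.update_commit_cache(cs_cache=cs_cache)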
1887 | @property | |
|
1888 | def tip(self): | |
|
1889 | return self.get_commit('tip') | |
|
1890 | ||
|
1891 | @property | |
|
1892 | def author(self): | |
|
1893 | return self.tip.author | |
|
1894 | ||
|
1895 | @property | |
|
1896 | def last_change(self): | |
|
1897 | return self.scm_instance().last_change | |
|
1898 | ||
|
1899 | def get_comments(self, revisions=None): | |
|
1900 | """ | |
|
1901 | Returns comments for this repository grouped by revisions | |
|
1902 | ||
|
1903 | :param revisions: filter query by revisions only | |
|
1904 | """ | |
|
1905 | cmts = ChangesetComment.query()\ | |
|
1906 | .filter(ChangesetComment.repo == self) | |
|
1907 | if revisions: | |
|
1908 | cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) | |
|
1909 | grouped = collections.defaultdict(list) | |
|
1910 | for cmt in cmts.all(): | |
|
1911 | grouped[cmt.revision].append(cmt) | |
|
1912 | return grouped | |
|
1913 | ||
|
1914 | def statuses(self, revisions=None): | |
|
1915 | """ | |
|
1916 | Returns statuses for this repository | |
|
1917 | ||
|
1918 | :param revisions: list of revisions to get statuses for | |
|
1919 | """ | |
|
1920 | statuses = ChangesetStatus.query()\ | |
|
1921 | .filter(ChangesetStatus.repo == self)\ | |
|
1922 | .filter(ChangesetStatus.version == 0) | |
|
1923 | ||
|
1924 | if revisions: | |
|
1925 | # Try doing the filtering in chunks to avoid hitting limits | |
|
1926 | size = 500 | |
|
1927 | status_results = [] | |
|
1928 | for chunk in xrange(0, len(revisions), size): | |
|
1929 | status_results += statuses.filter( | |
|
1930 | ChangesetStatus.revision.in_( | |
|
1931 | revisions[chunk: chunk+size]) | |
|
1932 | ).all() | |
|
1933 | else: | |
|
1934 | status_results = statuses.all() | |
|
1935 | ||
|
1936 | grouped = {} | |
|
1937 | ||
|
1938 | # maybe we have an open pull request without a status yet? | |
|
1939 | stat = ChangesetStatus.STATUS_UNDER_REVIEW | |
|
1940 | status_lbl = ChangesetStatus.get_status_lbl(stat) | |
|
1941 | for pr in PullRequest.query().filter(PullRequest.source_repo == self).all(): | |
|
1942 | for rev in pr.revisions: | |
|
1943 | pr_id = pr.pull_request_id | |
|
1944 | pr_repo = pr.target_repo.repo_name | |
|
1945 | grouped[rev] = [stat, status_lbl, pr_id, pr_repo] | |
|
1946 | ||
|
1947 | for stat in status_results: | |
|
1948 | pr_id = pr_repo = None | |
|
1949 | if stat.pull_request: | |
|
1950 | pr_id = stat.pull_request.pull_request_id | |
|
1951 | pr_repo = stat.pull_request.target_repo.repo_name | |
|
1952 | grouped[stat.revision] = [str(stat.status), stat.status_lbl, | |
|
1953 | pr_id, pr_repo] | |
|
1954 | return grouped | |
|
1955 | ||
|
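The chunking in statuses() exists because some databases limit how many bind parameters an IN clause may carry. The same idiom in isolation, pure Python with no database involved:

    def chunked(items, size=500):
        # yield successive slices of at most `size` elements
        for start in xrange(0, len(items), size):
            yield items[start:start + size]

    revisions = ['rev%d' % i for i in xrange(1200)]
    batches = list(chunked(revisions))
    assert [len(b) for b in batches] == [500, 500, 200]
    # each batch would feed one ChangesetStatus.revision.in_(batch) filter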
1956 | # ========================================================================== | |
|
1957 | # SCM CACHE INSTANCE | |
|
1958 | # ========================================================================== | |
|
1959 | ||
|
1960 | def scm_instance(self, **kwargs): | |
|
1961 | import rhodecode | |
|
1962 | ||
|
1963 | # Passing a config will not hit the cache; currently this is only | |
|
1964 | # used for repo2dbmapper | |
|
1965 | config = kwargs.pop('config', None) | |
|
1966 | cache = kwargs.pop('cache', None) | |
|
1967 | full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache')) | |
|
1968 | # if cache is NOT defined, use the global default; otherwise we have | |
|
1969 | # full control over the cache behaviour | |
|
1970 | if cache is None and full_cache and not config: | |
|
1971 | return self._get_instance_cached() | |
|
1972 | return self._get_instance(cache=bool(cache), config=config) | |
|
1973 | ||
|
1974 | def _get_instance_cached(self): | |
|
1975 | @cache_region('long_term') | |
|
1976 | def _get_repo(cache_key): | |
|
1977 | return self._get_instance() | |
|
1978 | ||
|
1979 | invalidator_context = CacheKey.repo_context_cache( | |
|
1980 | _get_repo, self.repo_name, None, thread_scoped=True) | |
|
1981 | ||
|
1982 | with invalidator_context as context: | |
|
1983 | context.invalidate() | |
|
1984 | repo = context.compute() | |
|
1985 | ||
|
1986 | return repo | |
|
1987 | ||
|
1988 | def _get_instance(self, cache=True, config=None): | |
|
1989 | config = config or self._config | |
|
1990 | custom_wire = { | |
|
1991 | 'cache': cache # controls the vcs.remote cache | |
|
1992 | } | |
|
1993 | ||
|
1994 | repo = get_vcs_instance( | |
|
1995 | repo_path=safe_str(self.repo_full_path), | |
|
1996 | config=config, | |
|
1997 | with_wire=custom_wire, | |
|
1998 | create=False) | |
|
1999 | ||
|
2000 | return repo | |
|
2001 | ||
|
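How the cache flags above interact, as a short usage sketch (a configured environment and an existing repo object are assumed):

    # `repo` assumed to be a Repository instance
    scm = repo.scm_instance()                   # honours the global vcs_full_cache setting
    scm_fresh = repo.scm_instance(cache=False)  # force a non-cached backend instance

    # passing a config always takes the non-cached path (used by repo2dbmapper)
    scm_cfg = repo.scm_instance(config=repo._config, cache=False)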
2002 | def __json__(self): | |
|
2003 | return {'landing_rev': self.landing_rev} | |
|
2004 | ||
|
2005 | def get_dict(self): | |
|
2006 | ||
|
2007 | # Since we transformed `repo_name` to a hybrid property, we need to | |
|
2008 | # keep compatibility with the code which uses `repo_name` field. | |
|
2009 | ||
|
2010 | result = super(Repository, self).get_dict() | |
|
2011 | result['repo_name'] = result.pop('_repo_name', None) | |
|
2012 | return result | |
|
2013 | ||
|
2014 | ||
|
2015 | class RepoGroup(Base, BaseModel): | |
|
2016 | __tablename__ = 'groups' | |
|
2017 | __table_args__ = ( | |
|
2018 | UniqueConstraint('group_name', 'group_parent_id'), | |
|
2019 | CheckConstraint('group_id != group_parent_id'), | |
|
2020 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2021 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
2022 | ) | |
|
2023 | __mapper_args__ = {'order_by': 'group_name'} | |
|
2024 | ||
|
2025 | CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups | |
|
2026 | ||
|
2027 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2028 | group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) | |
|
2029 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) | |
|
2030 | group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) | |
|
2031 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) | |
|
2032 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) | |
|
2033 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
|
2034 | ||
|
2035 | repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id') | |
|
2036 | users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') | |
|
2037 | parent_group = relationship('RepoGroup', remote_side=group_id) | |
|
2038 | user = relationship('User') | |
|
2039 | ||
|
2040 | def __init__(self, group_name='', parent_group=None): | |
|
2041 | self.group_name = group_name | |
|
2042 | self.parent_group = parent_group | |
|
2043 | ||
|
2044 | def __unicode__(self): | |
|
2045 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id, | |
|
2046 | self.group_name) | |
|
2047 | ||
|
2048 | @classmethod | |
|
2049 | def _generate_choice(cls, repo_group): | |
|
2050 | from webhelpers.html import literal as _literal | |
|
2051 | _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) | |
|
2052 | return repo_group.group_id, _name(repo_group.full_path_splitted) | |
|
2053 | ||
|
2054 | @classmethod | |
|
2055 | def groups_choices(cls, groups=None, show_empty_group=True): | |
|
2056 | if not groups: | |
|
2057 | groups = cls.query().all() | |
|
2058 | ||
|
2059 | repo_groups = [] | |
|
2060 | if show_empty_group: | |
|
2061 | repo_groups = [('-1', u'-- %s --' % _('No parent'))] | |
|
2062 | ||
|
2063 | repo_groups.extend([cls._generate_choice(x) for x in groups]) | |
|
2064 | ||
|
2065 | repo_groups = sorted( | |
|
2066 | repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0]) | |
|
2067 | return repo_groups | |
|
2068 | ||
|
2069 | @classmethod | |
|
2070 | def url_sep(cls): | |
|
2071 | return URL_SEP | |
|
2072 | ||
|
2073 | @classmethod | |
|
2074 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): | |
|
2075 | if case_insensitive: | |
|
2076 | gr = cls.query().filter(func.lower(cls.group_name) | |
|
2077 | == func.lower(group_name)) | |
|
2078 | else: | |
|
2079 | gr = cls.query().filter(cls.group_name == group_name) | |
|
2080 | if cache: | |
|
2081 | gr = gr.options(FromCache( | |
|
2082 | "sql_cache_short", | |
|
2083 | "get_group_%s" % _hash_key(group_name))) | |
|
2084 | return gr.scalar() | |
|
2085 | ||
|
2086 | @classmethod | |
|
2087 | def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None), | |
|
2088 | case_insensitive=True): | |
|
2089 | q = RepoGroup.query() | |
|
2090 | ||
|
2091 | if not isinstance(user_id, Optional): | |
|
2092 | q = q.filter(RepoGroup.user_id == user_id) | |
|
2093 | ||
|
2094 | if not isinstance(group_id, Optional): | |
|
2095 | q = q.filter(RepoGroup.group_parent_id == group_id) | |
|
2096 | ||
|
2097 | if case_insensitive: | |
|
2098 | q = q.order_by(func.lower(RepoGroup.group_name)) | |
|
2099 | else: | |
|
2100 | q = q.order_by(RepoGroup.group_name) | |
|
2101 | return q.all() | |
|
2102 | ||
|
2103 | @property | |
|
2104 | def parents(self): | |
|
2105 | parents_recursion_limit = 10 | |
|
2106 | groups = [] | |
|
2107 | if self.parent_group is None: | |
|
2108 | return groups | |
|
2109 | cur_gr = self.parent_group | |
|
2110 | groups.insert(0, cur_gr) | |
|
2111 | cnt = 0 | |
|
2112 | while 1: | |
|
2113 | cnt += 1 | |
|
2114 | gr = getattr(cur_gr, 'parent_group', None) | |
|
2115 | cur_gr = cur_gr.parent_group | |
|
2116 | if gr is None: | |
|
2117 | break | |
|
2118 | if cnt == parents_recursion_limit: | |
|
2119 | # this will prevent accidental infinite loops | |
|
2120 | log.error(('more than %s parents found for group %s, stopping ' | |
|
2121 | 'recursive parent fetching' % (parents_recursion_limit, self))) | |
|
2122 | break | |
|
2123 | ||
|
2124 | groups.insert(0, gr) | |
|
2125 | return groups | |
|
2126 | ||
|
2127 | @property | |
|
2128 | def children(self): | |
|
2129 | return RepoGroup.query().filter(RepoGroup.parent_group == self) | |
|
2130 | ||
|
2131 | @property | |
|
2132 | def name(self): | |
|
2133 | return self.group_name.split(RepoGroup.url_sep())[-1] | |
|
2134 | ||
|
2135 | @property | |
|
2136 | def full_path(self): | |
|
2137 | return self.group_name | |
|
2138 | ||
|
2139 | @property | |
|
2140 | def full_path_splitted(self): | |
|
2141 | return self.group_name.split(RepoGroup.url_sep()) | |
|
2142 | ||
|
2143 | @property | |
|
2144 | def repositories(self): | |
|
2145 | return Repository.query()\ | |
|
2146 | .filter(Repository.group == self)\ | |
|
2147 | .order_by(Repository.repo_name) | |
|
2148 | ||
|
2149 | @property | |
|
2150 | def repositories_recursive_count(self): | |
|
2151 | cnt = self.repositories.count() | |
|
2152 | ||
|
2153 | def children_count(group): | |
|
2154 | cnt = 0 | |
|
2155 | for child in group.children: | |
|
2156 | cnt += child.repositories.count() | |
|
2157 | cnt += children_count(child) | |
|
2158 | return cnt | |
|
2159 | ||
|
2160 | return cnt + children_count(self) | |
|
2161 | ||
|
2162 | def _recursive_objects(self, include_repos=True): | |
|
2163 | all_ = [] | |
|
2164 | ||
|
2165 | def _get_members(root_gr): | |
|
2166 | if include_repos: | |
|
2167 | for r in root_gr.repositories: | |
|
2168 | all_.append(r) | |
|
2169 | childs = root_gr.children.all() | |
|
2170 | if childs: | |
|
2171 | for gr in childs: | |
|
2172 | all_.append(gr) | |
|
2173 | _get_members(gr) | |
|
2174 | ||
|
2175 | _get_members(self) | |
|
2176 | return [self] + all_ | |
|
2177 | ||
|
2178 | def recursive_groups_and_repos(self): | |
|
2179 | """ | |
|
2180 | Recursively returns all groups, with the repositories in those groups | |
|
2181 | """ | |
|
2182 | return self._recursive_objects() | |
|
2183 | ||
|
2184 | def recursive_groups(self): | |
|
2185 | """ | |
|
2186 | Returns all child groups of this group, including children of children | |
|
2187 | """ | |
|
2188 | return self._recursive_objects(include_repos=False) | |
|
2189 | ||
|
2190 | def get_new_name(self, group_name): | |
|
2191 | """ | |
|
2192 | returns new full group name based on parent and new name | |
|
2193 | ||
|
2194 | :param group_name: | |
|
2195 | """ | |
|
2196 | path_prefix = (self.parent_group.full_path_splitted if | |
|
2197 | self.parent_group else []) | |
|
2198 | return RepoGroup.url_sep().join(path_prefix + [group_name]) | |
|
2199 | ||
|
2200 | def permissions(self, with_admins=True, with_owner=True): | |
|
2201 | q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self) | |
|
2202 | q = q.options(joinedload(UserRepoGroupToPerm.group), | |
|
2203 | joinedload(UserRepoGroupToPerm.user), | |
|
2204 | joinedload(UserRepoGroupToPerm.permission),) | |
|
2205 | ||
|
2206 | # get owners, admins and permissions. We rewrite the objects returned | |
|
2207 | # by sqlalchemy into plain AttributeDicts, because the sqlalchemy | |
|
2208 | # session keeps a global reference and changing one object propagates | |
|
2209 | # to all others. Without the copy, if an admin is also the owner, | |
|
2210 | # setting admin_row on one row would change both objects. | |
|
2211 | perm_rows = [] | |
|
2212 | for _usr in q.all(): | |
|
2213 | usr = AttributeDict(_usr.user.get_dict()) | |
|
2214 | usr.permission = _usr.permission.permission_name | |
|
2215 | perm_rows.append(usr) | |
|
2216 | ||
|
2217 | # filter the perm rows by 'default' first and then sort them by | |
|
2218 | # admin,write,read,none permissions sorted again alphabetically in | |
|
2219 | # each group | |
|
2220 | perm_rows = sorted(perm_rows, key=display_sort) | |
|
2221 | ||
|
2222 | _admin_perm = 'group.admin' | |
|
2223 | owner_row = [] | |
|
2224 | if with_owner: | |
|
2225 | usr = AttributeDict(self.user.get_dict()) | |
|
2226 | usr.owner_row = True | |
|
2227 | usr.permission = _admin_perm | |
|
2228 | owner_row.append(usr) | |
|
2229 | ||
|
2230 | super_admin_rows = [] | |
|
2231 | if with_admins: | |
|
2232 | for usr in User.get_all_super_admins(): | |
|
2233 | # if this admin is also owner, don't double the record | |
|
2234 | if usr.user_id == owner_row[0].user_id: | |
|
2235 | owner_row[0].admin_row = True | |
|
2236 | else: | |
|
2237 | usr = AttributeDict(usr.get_dict()) | |
|
2238 | usr.admin_row = True | |
|
2239 | usr.permission = _admin_perm | |
|
2240 | super_admin_rows.append(usr) | |
|
2241 | ||
|
2242 | return super_admin_rows + owner_row + perm_rows | |
|
2243 | ||
|
2244 | def permission_user_groups(self): | |
|
2245 | q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self) | |
|
2246 | q = q.options(joinedload(UserGroupRepoGroupToPerm.group), | |
|
2247 | joinedload(UserGroupRepoGroupToPerm.users_group), | |
|
2248 | joinedload(UserGroupRepoGroupToPerm.permission),) | |
|
2249 | ||
|
2250 | perm_rows = [] | |
|
2251 | for _user_group in q.all(): | |
|
2252 | usr = AttributeDict(_user_group.users_group.get_dict()) | |
|
2253 | usr.permission = _user_group.permission.permission_name | |
|
2254 | perm_rows.append(usr) | |
|
2255 | ||
|
2256 | return perm_rows | |
|
2257 | ||
|
2258 | def get_api_data(self): | |
|
2259 | """ | |
|
2260 | Common function for generating api data | |
|
2261 | ||
|
2262 | """ | |
|
2263 | group = self | |
|
2264 | data = { | |
|
2265 | 'group_id': group.group_id, | |
|
2266 | 'group_name': group.group_name, | |
|
2267 | 'group_description': group.group_description, | |
|
2268 | 'parent_group': group.parent_group.group_name if group.parent_group else None, | |
|
2269 | 'repositories': [x.repo_name for x in group.repositories], | |
|
2270 | 'owner': group.user.username, | |
|
2271 | } | |
|
2272 | return data | |
|
2273 | ||
|
2274 | ||
|
2275 | class Permission(Base, BaseModel): | |
|
2276 | __tablename__ = 'permissions' | |
|
2277 | __table_args__ = ( | |
|
2278 | Index('p_perm_name_idx', 'permission_name'), | |
|
2279 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2280 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
2281 | ) | |
|
2282 | PERMS = [ | |
|
2283 | ('hg.admin', _('RhodeCode Super Administrator')), | |
|
2284 | ||
|
2285 | ('repository.none', _('Repository no access')), | |
|
2286 | ('repository.read', _('Repository read access')), | |
|
2287 | ('repository.write', _('Repository write access')), | |
|
2288 | ('repository.admin', _('Repository admin access')), | |
|
2289 | ||
|
2290 | ('group.none', _('Repository group no access')), | |
|
2291 | ('group.read', _('Repository group read access')), | |
|
2292 | ('group.write', _('Repository group write access')), | |
|
2293 | ('group.admin', _('Repository group admin access')), | |
|
2294 | ||
|
2295 | ('usergroup.none', _('User group no access')), | |
|
2296 | ('usergroup.read', _('User group read access')), | |
|
2297 | ('usergroup.write', _('User group write access')), | |
|
2298 | ('usergroup.admin', _('User group admin access')), | |
|
2299 | ||
|
2300 | ('hg.repogroup.create.false', _('Repository Group creation disabled')), | |
|
2301 | ('hg.repogroup.create.true', _('Repository Group creation enabled')), | |
|
2302 | ||
|
2303 | ('hg.usergroup.create.false', _('User Group creation disabled')), | |
|
2304 | ('hg.usergroup.create.true', _('User Group creation enabled')), | |
|
2305 | ||
|
2306 | ('hg.create.none', _('Repository creation disabled')), | |
|
2307 | ('hg.create.repository', _('Repository creation enabled')), | |
|
2308 | ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')), | |
|
2309 | ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')), | |
|
2310 | ||
|
2311 | ('hg.fork.none', _('Repository forking disabled')), | |
|
2312 | ('hg.fork.repository', _('Repository forking enabled')), | |
|
2313 | ||
|
2314 | ('hg.register.none', _('Registration disabled')), | |
|
2315 | ('hg.register.manual_activate', _('User Registration with manual account activation')), | |
|
2316 | ('hg.register.auto_activate', _('User Registration with automatic account activation')), | |
|
2317 | ||
|
2318 | ('hg.extern_activate.manual', _('Manual activation of external account')), | |
|
2319 | ('hg.extern_activate.auto', _('Automatic activation of external account')), | |
|
2320 | ||
|
2321 | ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')), | |
|
2322 | ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')), | |
|
2323 | ] | |
|
2324 | ||
|
2325 | # definition of system default permissions for DEFAULT user | |
|
2326 | DEFAULT_USER_PERMISSIONS = [ | |
|
2327 | 'repository.read', | |
|
2328 | 'group.read', | |
|
2329 | 'usergroup.read', | |
|
2330 | 'hg.create.repository', | |
|
2331 | 'hg.repogroup.create.false', | |
|
2332 | 'hg.usergroup.create.false', | |
|
2333 | 'hg.create.write_on_repogroup.true', | |
|
2334 | 'hg.fork.repository', | |
|
2335 | 'hg.register.manual_activate', | |
|
2336 | 'hg.extern_activate.auto', | |
|
2337 | 'hg.inherit_default_perms.true', | |
|
2338 | ] | |
|
2339 | ||
|
2340 | # Weight defines which permissions are more important: | |
|
2341 | # the higher the number, the more important the permission. | |
|
2342 | # The `.none` permissions carry the lowest weight (0). | |
|
2343 | PERM_WEIGHTS = { | |
|
2344 | 'repository.none': 0, | |
|
2345 | 'repository.read': 1, | |
|
2346 | 'repository.write': 3, | |
|
2347 | 'repository.admin': 4, | |
|
2348 | ||
|
2349 | 'group.none': 0, | |
|
2350 | 'group.read': 1, | |
|
2351 | 'group.write': 3, | |
|
2352 | 'group.admin': 4, | |
|
2353 | ||
|
2354 | 'usergroup.none': 0, | |
|
2355 | 'usergroup.read': 1, | |
|
2356 | 'usergroup.write': 3, | |
|
2357 | 'usergroup.admin': 4, | |
|
2358 | ||
|
2359 | 'hg.repogroup.create.false': 0, | |
|
2360 | 'hg.repogroup.create.true': 1, | |
|
2361 | ||
|
2362 | 'hg.usergroup.create.false': 0, | |
|
2363 | 'hg.usergroup.create.true': 1, | |
|
2364 | ||
|
2365 | 'hg.fork.none': 0, | |
|
2366 | 'hg.fork.repository': 1, | |
|
2367 | 'hg.create.none': 0, | |
|
2368 | 'hg.create.repository': 1 | |
|
2369 | } | |
|
2370 | ||
|
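A sketch of how such weights can be used to pick the strongest of several permissions; the strongest() helper below is illustrative only, not a RhodeCode function:

    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,
    }

    def strongest(perms, weights=PERM_WEIGHTS):
        # pick the permission with the highest weight
        return max(perms, key=lambda p: weights.get(p, -1))

    assert strongest(['repository.read', 'repository.write']) == 'repository.write'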
2371 | permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2372 | permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None) | |
|
2373 | permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None) | |
|
2374 | ||
|
2375 | def __unicode__(self): | |
|
2376 | return u"<%s('%s:%s')>" % ( | |
|
2377 | self.__class__.__name__, self.permission_id, self.permission_name | |
|
2378 | ) | |
|
2379 | ||
|
2380 | @classmethod | |
|
2381 | def get_by_key(cls, key): | |
|
2382 | return cls.query().filter(cls.permission_name == key).scalar() | |
|
2383 | ||
|
2384 | @classmethod | |
|
2385 | def get_default_repo_perms(cls, user_id, repo_id=None): | |
|
2386 | q = Session().query(UserRepoToPerm, Repository, Permission)\ | |
|
2387 | .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\ | |
|
2388 | .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ | |
|
2389 | .filter(UserRepoToPerm.user_id == user_id) | |
|
2390 | if repo_id: | |
|
2391 | q = q.filter(UserRepoToPerm.repository_id == repo_id) | |
|
2392 | return q.all() | |
|
2393 | ||
|
2394 | @classmethod | |
|
2395 | def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None): | |
|
2396 | q = Session().query(UserGroupRepoToPerm, Repository, Permission)\ | |
|
2397 | .join( | |
|
2398 | Permission, | |
|
2399 | UserGroupRepoToPerm.permission_id == Permission.permission_id)\ | |
|
2400 | .join( | |
|
2401 | Repository, | |
|
2402 | UserGroupRepoToPerm.repository_id == Repository.repo_id)\ | |
|
2403 | .join( | |
|
2404 | UserGroup, | |
|
2405 | UserGroupRepoToPerm.users_group_id == | |
|
2406 | UserGroup.users_group_id)\ | |
|
2407 | .join( | |
|
2408 | UserGroupMember, | |
|
2409 | UserGroupRepoToPerm.users_group_id == | |
|
2410 | UserGroupMember.users_group_id)\ | |
|
2411 | .filter( | |
|
2412 | UserGroupMember.user_id == user_id, | |
|
2413 | UserGroup.users_group_active == true()) | |
|
2414 | if repo_id: | |
|
2415 | q = q.filter(UserGroupRepoToPerm.repository_id == repo_id) | |
|
2416 | return q.all() | |
|
2417 | ||
|
2418 | @classmethod | |
|
2419 | def get_default_group_perms(cls, user_id, repo_group_id=None): | |
|
2420 | q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\ | |
|
2421 | .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\ | |
|
2422 | .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\ | |
|
2423 | .filter(UserRepoGroupToPerm.user_id == user_id) | |
|
2424 | if repo_group_id: | |
|
2425 | q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id) | |
|
2426 | return q.all() | |
|
2427 | ||
|
2428 | @classmethod | |
|
2429 | def get_default_group_perms_from_user_group( | |
|
2430 | cls, user_id, repo_group_id=None): | |
|
2431 | q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\ | |
|
2432 | .join( | |
|
2433 | Permission, | |
|
2434 | UserGroupRepoGroupToPerm.permission_id == | |
|
2435 | Permission.permission_id)\ | |
|
2436 | .join( | |
|
2437 | RepoGroup, | |
|
2438 | UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\ | |
|
2439 | .join( | |
|
2440 | UserGroup, | |
|
2441 | UserGroupRepoGroupToPerm.users_group_id == | |
|
2442 | UserGroup.users_group_id)\ | |
|
2443 | .join( | |
|
2444 | UserGroupMember, | |
|
2445 | UserGroupRepoGroupToPerm.users_group_id == | |
|
2446 | UserGroupMember.users_group_id)\ | |
|
2447 | .filter( | |
|
2448 | UserGroupMember.user_id == user_id, | |
|
2449 | UserGroup.users_group_active == true()) | |
|
2450 | if repo_group_id: | |
|
2451 | q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id) | |
|
2452 | return q.all() | |
|
2453 | ||
|
2454 | @classmethod | |
|
2455 | def get_default_user_group_perms(cls, user_id, user_group_id=None): | |
|
2456 | q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\ | |
|
2457 | .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\ | |
|
2458 | .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\ | |
|
2459 | .filter(UserUserGroupToPerm.user_id == user_id) | |
|
2460 | if user_group_id: | |
|
2461 | q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id) | |
|
2462 | return q.all() | |
|
2463 | ||
|
2464 | @classmethod | |
|
2465 | def get_default_user_group_perms_from_user_group( | |
|
2466 | cls, user_id, user_group_id=None): | |
|
2467 | TargetUserGroup = aliased(UserGroup, name='target_user_group') | |
|
2468 | q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\ | |
|
2469 | .join( | |
|
2470 | Permission, | |
|
2471 | UserGroupUserGroupToPerm.permission_id == | |
|
2472 | Permission.permission_id)\ | |
|
2473 | .join( | |
|
2474 | TargetUserGroup, | |
|
2475 | UserGroupUserGroupToPerm.target_user_group_id == | |
|
2476 | TargetUserGroup.users_group_id)\ | |
|
2477 | .join( | |
|
2478 | UserGroup, | |
|
2479 | UserGroupUserGroupToPerm.user_group_id == | |
|
2480 | UserGroup.users_group_id)\ | |
|
2481 | .join( | |
|
2482 | UserGroupMember, | |
|
2483 | UserGroupUserGroupToPerm.user_group_id == | |
|
2484 | UserGroupMember.users_group_id)\ | |
|
2485 | .filter( | |
|
2486 | UserGroupMember.user_id == user_id, | |
|
2487 | UserGroup.users_group_active == true()) | |
|
2488 | if user_group_id: | |
|
2489 | q = q.filter( | |
|
2490 | UserGroupUserGroupToPerm.user_group_id == user_group_id) | |
|
2491 | ||
|
2492 | return q.all() | |
|
2493 | ||
|
2494 | ||
|
2495 | class UserRepoToPerm(Base, BaseModel): | |
|
2496 | __tablename__ = 'repo_to_perm' | |
|
2497 | __table_args__ = ( | |
|
2498 | UniqueConstraint('user_id', 'repository_id', 'permission_id'), | |
|
2499 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2500 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
2501 | ) | |
|
2502 | repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2503 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
|
2504 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
|
2505 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |
|
2506 | ||
|
2507 | user = relationship('User') | |
|
2508 | repository = relationship('Repository') | |
|
2509 | permission = relationship('Permission') | |
|
2510 | ||
|
2511 | @classmethod | |
|
2512 | def create(cls, user, repository, permission): | |
|
2513 | n = cls() | |
|
2514 | n.user = user | |
|
2515 | n.repository = repository | |
|
2516 | n.permission = permission | |
|
2517 | Session().add(n) | |
|
2518 | return n | |
|
2519 | ||
|
2520 | def __unicode__(self): | |
|
2521 | return u'<%s => %s >' % (self.user, self.repository) | |
|
2522 | ||
|
2523 | ||
|
2524 | class UserUserGroupToPerm(Base, BaseModel): | |
|
2525 | __tablename__ = 'user_user_group_to_perm' | |
|
2526 | __table_args__ = ( | |
|
2527 | UniqueConstraint('user_id', 'user_group_id', 'permission_id'), | |
|
2528 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2529 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
2530 | ) | |
|
2531 | user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2532 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
|
2533 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
|
2534 | user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
|
2535 | ||
|
2536 | user = relationship('User') | |
|
2537 | user_group = relationship('UserGroup') | |
|
2538 | permission = relationship('Permission') | |
|
2539 | ||
|
2540 | @classmethod | |
|
2541 | def create(cls, user, user_group, permission): | |
|
2542 | n = cls() | |
|
2543 | n.user = user | |
|
2544 | n.user_group = user_group | |
|
2545 | n.permission = permission | |
|
2546 | Session().add(n) | |
|
2547 | return n | |
|
2548 | ||
|
2549 | def __unicode__(self): | |
|
2550 | return u'<%s => %s >' % (self.user, self.user_group) | |
|
2551 | ||
|
2552 | ||
|
2553 | class UserToPerm(Base, BaseModel): | |
|
2554 | __tablename__ = 'user_to_perm' | |
|
2555 | __table_args__ = ( | |
|
2556 | UniqueConstraint('user_id', 'permission_id'), | |
|
2557 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2558 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
2559 | ) | |
|
2560 | user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2561 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
|
2562 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
|
2563 | ||
|
2564 | user = relationship('User') | |
|
2565 | permission = relationship('Permission', lazy='joined') | |
|
2566 | ||
|
2567 | def __unicode__(self): | |
|
2568 | return u'<%s => %s >' % (self.user, self.permission) | |
|
2569 | ||
|
2570 | ||
|
2571 | class UserGroupRepoToPerm(Base, BaseModel): | |
|
2572 | __tablename__ = 'users_group_repo_to_perm' | |
|
2573 | __table_args__ = ( | |
|
2574 | UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), | |
|
2575 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2576 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
2577 | ) | |
|
2578 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2579 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
|
2580 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
|
2581 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |
|
2582 | ||
|
2583 | users_group = relationship('UserGroup') | |
|
2584 | permission = relationship('Permission') | |
|
2585 | repository = relationship('Repository') | |
|
2586 | ||
|
2587 | @classmethod | |
|
2588 | def create(cls, users_group, repository, permission): | |
|
2589 | n = cls() | |
|
2590 | n.users_group = users_group | |
|
2591 | n.repository = repository | |
|
2592 | n.permission = permission | |
|
2593 | Session().add(n) | |
|
2594 | return n | |
|
2595 | ||
|
2596 | def __unicode__(self): | |
|
2597 | return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository) | |
|
2598 | ||
|
2599 | ||
|
2600 | class UserGroupUserGroupToPerm(Base, BaseModel): | |
|
2601 | __tablename__ = 'user_group_user_group_to_perm' | |
|
2602 | __table_args__ = ( | |
|
2603 | UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'), | |
|
2604 | CheckConstraint('target_user_group_id != user_group_id'), | |
|
2605 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2606 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
2607 | ) | |
|
2608 | user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2609 | target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
|
2610 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
|
2611 | user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
|
2612 | ||
|
2613 | target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id') | |
|
2614 | user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id') | |
|
2615 | permission = relationship('Permission') | |
|
2616 | ||
|
2617 | @classmethod | |
|
2618 | def create(cls, target_user_group, user_group, permission): | |
|
2619 | n = cls() | |
|
2620 | n.target_user_group = target_user_group | |
|
2621 | n.user_group = user_group | |
|
2622 | n.permission = permission | |
|
2623 | Session().add(n) | |
|
2624 | return n | |
|
2625 | ||
|
2626 | def __unicode__(self): | |
|
2627 | return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group) | |
|
2628 | ||
|
2629 | ||
|
2630 | class UserGroupToPerm(Base, BaseModel): | |
|
2631 | __tablename__ = 'users_group_to_perm' | |
|
2632 | __table_args__ = ( | |
|
2633 | UniqueConstraint('users_group_id', 'permission_id',), | |
|
2634 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2635 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
2636 | ) | |
|
2637 | users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2638 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
|
2639 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
|
2640 | ||
|
2641 | users_group = relationship('UserGroup') | |
|
2642 | permission = relationship('Permission') | |
|
2643 | ||
|
2644 | ||
|
2645 | class UserRepoGroupToPerm(Base, BaseModel): | |
|
2646 | __tablename__ = 'user_repo_group_to_perm' | |
|
2647 | __table_args__ = ( | |
|
2648 | UniqueConstraint('user_id', 'group_id', 'permission_id'), | |
|
2649 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2650 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
2651 | ) | |
|
2652 | ||
|
2653 | group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2654 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
|
2655 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) | |
|
2656 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
|
2657 | ||
|
2658 | user = relationship('User') | |
|
2659 | group = relationship('RepoGroup') | |
|
2660 | permission = relationship('Permission') | |
|
2661 | ||
|
2662 | @classmethod | |
|
2663 | def create(cls, user, repository_group, permission): | |
|
2664 | n = cls() | |
|
2665 | n.user = user | |
|
2666 | n.group = repository_group | |
|
2667 | n.permission = permission | |
|
2668 | Session().add(n) | |
|
2669 | return n | |
|
2670 | ||
|
2671 | ||
|
2672 | class UserGroupRepoGroupToPerm(Base, BaseModel): | |
|
2673 | __tablename__ = 'users_group_repo_group_to_perm' | |
|
2674 | __table_args__ = ( | |
|
2675 | UniqueConstraint('users_group_id', 'group_id'), | |
|
2676 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2677 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
2678 | ) | |
|
2679 | ||
|
2680 | users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2681 | users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None) | |
|
2682 | group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None) | |
|
2683 | permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |
|
2684 | ||
|
2685 | users_group = relationship('UserGroup') | |
|
2686 | permission = relationship('Permission') | |
|
2687 | group = relationship('RepoGroup') | |
|
2688 | ||
|
2689 | @classmethod | |
|
2690 | def create(cls, user_group, repository_group, permission): | |
|
2691 | n = cls() | |
|
2692 | n.users_group = user_group | |
|
2693 | n.group = repository_group | |
|
2694 | n.permission = permission | |
|
2695 | Session().add(n) | |
|
2696 | return n | |
|
2697 | ||
|
2698 | def __unicode__(self): | |
|
2699 | return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group) | |
|
2700 | ||
|
2701 | ||
|
2702 | class Statistics(Base, BaseModel): | |
|
2703 | __tablename__ = 'statistics' | |
|
2704 | __table_args__ = ( | |
|
2705 | UniqueConstraint('repository_id'), | |
|
2706 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2707 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
2708 | ) | |
|
2709 | stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2710 | repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None) | |
|
2711 | stat_on_revision = Column("stat_on_revision", Integer(), nullable=False) | |
|
2712 | commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data |

2713 | commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data |

2714 | languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data |
|
2715 | ||
|
2716 | repository = relationship('Repository', single_parent=True) | |
|
2717 | ||
|
2718 | ||
|
2719 | class UserFollowing(Base, BaseModel): | |
|
2720 | __tablename__ = 'user_followings' | |
|
2721 | __table_args__ = ( | |
|
2722 | UniqueConstraint('user_id', 'follows_repository_id'), | |
|
2723 | UniqueConstraint('user_id', 'follows_user_id'), | |
|
2724 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2725 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
2726 | ) | |
|
2727 | ||
|
2728 | user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2729 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |
|
2730 | follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None) | |
|
2731 | follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) | |
|
2732 | follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now) | |
|
2733 | ||
|
2734 | user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id') | |
|
2735 | ||
|
2736 | follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id') | |
|
2737 | follows_repository = relationship('Repository', order_by='Repository.repo_name') | |
|
2738 | ||
|
2739 | @classmethod | |
|
2740 | def get_repo_followers(cls, repo_id): | |
|
2741 | return cls.query().filter(cls.follows_repo_id == repo_id) | |
|
2742 | ||
|
2743 | ||
|
2744 | class CacheKey(Base, BaseModel): | |
|
2745 | __tablename__ = 'cache_invalidation' | |
|
2746 | __table_args__ = ( | |
|
2747 | UniqueConstraint('cache_key'), | |
|
2748 | Index('key_idx', 'cache_key'), | |
|
2749 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2750 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
2751 | ) | |
|
2752 | CACHE_TYPE_ATOM = 'ATOM' | |
|
2753 | CACHE_TYPE_RSS = 'RSS' | |
|
2754 | CACHE_TYPE_README = 'README' | |
|
2755 | ||
|
2756 | cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
|
2757 | cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) | |
|
2758 | cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None) | |
|
2759 | cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False) | |
|
2760 | ||
|
2761 | def __init__(self, cache_key, cache_args=''): | |
|
2762 | self.cache_key = cache_key | |
|
2763 | self.cache_args = cache_args | |
|
2764 | self.cache_active = False | |
|
2765 | ||
|
2766 | def __unicode__(self): | |
|
2767 | return u"<%s('%s:%s[%s]')>" % ( | |
|
2768 | self.__class__.__name__, | |
|
2769 | self.cache_id, self.cache_key, self.cache_active) | |
|
2770 | ||
|
2771 | def _cache_key_partition(self): | |
|
2772 | prefix, repo_name, suffix = self.cache_key.partition(self.cache_args) | |
|
2773 | return prefix, repo_name, suffix | |
|
2774 | ||
|
2775 | def get_prefix(self): | |
|
2776 | """ | |
|
2777 | Try to extract the prefix from an existing cache key. The key could consist |

2778 | of prefix, repo_name and suffix. |
|
2779 | """ | |
|
2780 | # this returns prefix, repo_name, suffix | |
|
2781 | return self._cache_key_partition()[0] | |
|
2782 | ||
|
2783 | def get_suffix(self): | |
|
2784 | """ | |
|
2785 | Get the suffix that might have been used in _get_cache_key to |
|
2786 | generate self.cache_key. Only used for informational purposes | |
|
2787 | in repo_edit.html. | |
|
2788 | """ | |
|
2789 | # prefix, repo_name, suffix | |
|
2790 | return self._cache_key_partition()[2] | |
|
2791 | ||
|
2792 | @classmethod | |
|
2793 | def delete_all_cache(cls): | |
|
2794 | """ | |
|
2795 | Delete all cache keys from database. | |
|
2796 | Should only be run when all instances are down and all entries | |
|
2797 | thus stale. | |
|
2798 | """ | |
|
2799 | cls.query().delete() | |
|
2800 | Session().commit() | |
|
2801 | ||
|
2802 | @classmethod | |
|
2803 | def get_cache_key(cls, repo_name, cache_type): | |
|
2804 | """ | |
|
2805 | ||
|
2806 | Generate a cache key for this RhodeCode instance. The prefix will |

2807 | most likely be the process id, or an explicitly set instance_id |

2808 | from the .ini file. |
|
2809 | """ | |
|
2810 | import rhodecode | |
|
2811 | prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '') | |
|
2812 | ||
|
2813 | repo_as_unicode = safe_unicode(repo_name) | |
|
2814 | key = u'{}_{}'.format(repo_as_unicode, cache_type) \ | |
|
2815 | if cache_type else repo_as_unicode | |
|
2816 | ||
|
2817 | return u'{}{}'.format(prefix, key) | |
|
2818 | ||
|
2819 | @classmethod | |
|
2820 | def set_invalidate(cls, repo_name, delete=False): | |
|
2821 | """ | |
|
2822 | Mark all caches of a repo as invalid in the database. | |
|
2823 | """ | |
|
2824 | ||
|
2825 | try: | |
|
2826 | qry = Session().query(cls).filter(cls.cache_args == repo_name) | |
|
2827 | if delete: | |
|
2828 | log.debug('cache objects deleted for repo %s', | |
|
2829 | safe_str(repo_name)) | |
|
2830 | qry.delete() | |
|
2831 | else: | |
|
2832 | log.debug('cache objects marked as invalid for repo %s', | |
|
2833 | safe_str(repo_name)) | |
|
2834 | qry.update({"cache_active": False}) | |
|
2835 | ||
|
2836 | Session().commit() | |
|
2837 | except Exception: | |
|
2838 | log.exception( | |
|
2839 | 'Cache key invalidation failed for repository %s', | |
|
2840 | safe_str(repo_name)) | |
|
2841 | Session().rollback() | |
|
2842 | ||
|
2843 | @classmethod | |
|
2844 | def get_active_cache(cls, cache_key): | |
|
2845 | inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar() | |
|
2846 | if inv_obj: | |
|
2847 | return inv_obj | |
|
2848 | return None | |
|
2849 | ||
|
2850 | @classmethod | |
|
2851 | def repo_context_cache(cls, compute_func, repo_name, cache_type, | |
|
2852 | thread_scoped=False): | |
|
2853 | """ | |
|
2854 | @cache_region('long_term') | |
|
2855 | def _heavy_calculation(cache_key): | |
|
2856 | return 'result' | |
|
2857 | ||
|
2858 | cache_context = CacheKey.repo_context_cache( | |
|
2859 | _heavy_calculation, repo_name, cache_type) | |
|
2860 | ||
|
2861 | with cache_context as context: | |
|
2862 | context.invalidate() | |
|
2863 | computed = context.compute() | |
|
2864 | ||
|
2865 | assert computed == 'result' | |
|
2866 | """ | |
|
2867 | from rhodecode.lib import caches | |
|
2868 | return caches.InvalidationContext( | |
|
2869 | compute_func, repo_name, cache_type, thread_scoped=thread_scoped) | |
|
2870 | ||
|
2871 | ||
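A quick illustration of how the CacheKey helpers above fit together (a sketch only; the repository name is made up):

    from rhodecode.model.db import CacheKey

    # after a push, mark every cached entry of the repo as stale
    CacheKey.set_invalidate(u'some/repo')

    # later, a request builds its key and checks whether it is still active
    key = CacheKey.get_cache_key(u'some/repo', CacheKey.CACHE_TYPE_README)
    entry = CacheKey.get_active_cache(key)
    if entry is None or not entry.cache_active:
        pass  # recompute the README here, then re-activate the key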
|
2872 | class ChangesetComment(Base, BaseModel): | |
|
2873 | __tablename__ = 'changeset_comments' | |
|
2874 | __table_args__ = ( | |
|
2875 | Index('cc_revision_idx', 'revision'), | |
|
2876 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2877 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
2878 | ) | |
|
2879 | ||
|
2880 | COMMENT_OUTDATED = u'comment_outdated' | |
|
2881 | ||
|
2882 | comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True) | |
|
2883 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) | |
|
2884 | revision = Column('revision', String(40), nullable=True) | |
|
2885 | pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) | |
|
2886 | pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True) | |
|
2887 | line_no = Column('line_no', Unicode(10), nullable=True) | |
|
2888 | hl_lines = Column('hl_lines', Unicode(512), nullable=True) | |
|
2889 | f_path = Column('f_path', Unicode(1000), nullable=True) | |
|
2890 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) | |
|
2891 | text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False) | |
|
2892 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
|
2893 | modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
|
2894 | renderer = Column('renderer', Unicode(64), nullable=True) | |
|
2895 | display_state = Column('display_state', Unicode(128), nullable=True) | |
|
2896 | ||
|
2897 | author = relationship('User', lazy='joined') | |
|
2898 | repo = relationship('Repository') | |
|
2899 | status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan") | |
|
2900 | pull_request = relationship('PullRequest', lazy='joined') | |
|
2901 | pull_request_version = relationship('PullRequestVersion') | |
|
2902 | ||
|
2903 | @classmethod | |
|
2904 | def get_users(cls, revision=None, pull_request_id=None): | |
|
2905 | """ | |
|
2906 | Returns the users associated with this ChangesetComment, i.e. those |

2907 | who actually commented. |
|
2908 | ||
|
2909 | :param cls: | |
|
2910 | :param revision: | |
|
2911 | """ | |
|
2912 | q = Session().query(User)\ | |
|
2913 | .join(ChangesetComment.author) | |
|
2914 | if revision: | |
|
2915 | q = q.filter(cls.revision == revision) | |
|
2916 | elif pull_request_id: | |
|
2917 | q = q.filter(cls.pull_request_id == pull_request_id) | |
|
2918 | return q.all() | |
|
2919 | ||
|
2920 | def render(self, mentions=False): | |
|
2921 | from rhodecode.lib import helpers as h | |
|
2922 | return h.render(self.text, renderer=self.renderer, mentions=mentions) | |
|
2923 | ||
|
2924 | def __repr__(self): | |
|
2925 | if self.comment_id: | |
|
2926 | return '<DB:ChangesetComment #%s>' % self.comment_id | |
|
2927 | else: | |
|
2928 | return '<DB:ChangesetComment at %#x>' % id(self) | |
|
2929 | ||
|
2930 | ||
|
2931 | class ChangesetStatus(Base, BaseModel): | |
|
2932 | __tablename__ = 'changeset_statuses' | |
|
2933 | __table_args__ = ( | |
|
2934 | Index('cs_revision_idx', 'revision'), | |
|
2935 | Index('cs_version_idx', 'version'), | |
|
2936 | UniqueConstraint('repo_id', 'revision', 'version'), | |
|
2937 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
2938 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
2939 | ) | |
|
2940 | STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed' | |
|
2941 | STATUS_APPROVED = 'approved' | |
|
2942 | STATUS_REJECTED = 'rejected' | |
|
2943 | STATUS_UNDER_REVIEW = 'under_review' | |
|
2944 | ||
|
2945 | STATUSES = [ | |
|
2946 | (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default | |
|
2947 | (STATUS_APPROVED, _("Approved")), | |
|
2948 | (STATUS_REJECTED, _("Rejected")), | |
|
2949 | (STATUS_UNDER_REVIEW, _("Under Review")), | |
|
2950 | ] | |
|
2951 | ||
|
2952 | changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True) | |
|
2953 | repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) | |
|
2954 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None) | |
|
2955 | revision = Column('revision', String(40), nullable=False) | |
|
2956 | status = Column('status', String(128), nullable=False, default=DEFAULT) | |
|
2957 | changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id')) | |
|
2958 | modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now) | |
|
2959 | version = Column('version', Integer(), nullable=False, default=0) | |
|
2960 | pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True) | |
|
2961 | ||
|
2962 | author = relationship('User', lazy='joined') | |
|
2963 | repo = relationship('Repository') | |
|
2964 | comment = relationship('ChangesetComment', lazy='joined') | |
|
2965 | pull_request = relationship('PullRequest', lazy='joined') | |
|
2966 | ||
|
2967 | def __unicode__(self): | |
|
2968 | return u"<%s('%s[%s]:%s')>" % ( | |
|
2969 | self.__class__.__name__, | |
|
2970 | self.status, self.version, self.author | |
|
2971 | ) | |
|
2972 | ||
|
2973 | @classmethod | |
|
2974 | def get_status_lbl(cls, value): | |
|
2975 | return dict(cls.STATUSES).get(value) | |
|
2976 | ||
|
2977 | @property | |
|
2978 | def status_lbl(self): | |
|
2979 | return ChangesetStatus.get_status_lbl(self.status) | |
|
2980 | ||
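The STATUSES list above doubles as the lookup table for human-readable labels, e.g. (a sketch using the constants defined above):

    ChangesetStatus.get_status_lbl(ChangesetStatus.STATUS_APPROVED)
    # -> u'Approved' (a lazy translation string in practice)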
|
2981 | ||
|
2982 | class _PullRequestBase(BaseModel): | |
|
2983 | """ | |
|
2984 | Common attributes of pull request and version entries. | |
|
2985 | """ | |
|
2986 | ||
|
2987 | # .status values | |
|
2988 | STATUS_NEW = u'new' | |
|
2989 | STATUS_OPEN = u'open' | |
|
2990 | STATUS_CLOSED = u'closed' | |
|
2991 | ||
|
2992 | title = Column('title', Unicode(255), nullable=True) | |
|
2993 | description = Column( | |
|
2994 | 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), | |
|
2995 | nullable=True) | |
|
2996 | # new/open/closed status of pull request (not approve/reject/etc) | |
|
2997 | status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW) | |
|
2998 | created_on = Column( | |
|
2999 | 'created_on', DateTime(timezone=False), nullable=False, | |
|
3000 | default=datetime.datetime.now) | |
|
3001 | updated_on = Column( | |
|
3002 | 'updated_on', DateTime(timezone=False), nullable=False, | |
|
3003 | default=datetime.datetime.now) | |
|
3004 | ||
|
3005 | @declared_attr | |
|
3006 | def user_id(cls): | |
|
3007 | return Column( | |
|
3008 | "user_id", Integer(), ForeignKey('users.user_id'), nullable=False, | |
|
3009 | unique=None) | |
|
3010 | ||
|
3011 | # 500 revisions max | |
|
3012 | _revisions = Column( | |
|
3013 | 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql')) | |
|
3014 | ||
|
3015 | @declared_attr | |
|
3016 | def source_repo_id(cls): | |
|
3017 | # TODO: dan: rename column to source_repo_id | |
|
3018 | return Column( | |
|
3019 | 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'), | |
|
3020 | nullable=False) | |
|
3021 | ||
|
3022 | source_ref = Column('org_ref', Unicode(255), nullable=False) | |
|
3023 | ||
|
3024 | @declared_attr | |
|
3025 | def target_repo_id(cls): | |
|
3026 | # TODO: dan: rename column to target_repo_id | |
|
3027 | return Column( | |
|
3028 | 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'), | |
|
3029 | nullable=False) | |
|
3030 | ||
|
3031 | target_ref = Column('other_ref', Unicode(255), nullable=False) | |
|
3032 | ||
|
3033 | # TODO: dan: rename column to last_merge_source_rev | |
|
3034 | _last_merge_source_rev = Column( | |
|
3035 | 'last_merge_org_rev', String(40), nullable=True) | |
|
3036 | # TODO: dan: rename column to last_merge_target_rev | |
|
3037 | _last_merge_target_rev = Column( | |
|
3038 | 'last_merge_other_rev', String(40), nullable=True) | |
|
3039 | _last_merge_status = Column('merge_status', Integer(), nullable=True) | |
|
3040 | merge_rev = Column('merge_rev', String(40), nullable=True) | |
|
3041 | ||
|
3042 | @hybrid_property | |
|
3043 | def revisions(self): | |
|
3044 | return self._revisions.split(':') if self._revisions else [] | |
|
3045 | ||
|
3046 | @revisions.setter | |
|
3047 | def revisions(self, val): | |
|
3048 | self._revisions = ':'.join(val) | |
|
3049 | ||
|
3050 | @declared_attr | |
|
3051 | def author(cls): | |
|
3052 | return relationship('User', lazy='joined') | |
|
3053 | ||
|
3054 | @declared_attr | |
|
3055 | def source_repo(cls): | |
|
3056 | return relationship( | |
|
3057 | 'Repository', | |
|
3058 | primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__) | |
|
3059 | ||
|
3060 | @property | |
|
3061 | def source_ref_parts(self): | |
|
3062 | refs = self.source_ref.split(':') | |
|
3063 | return Reference(refs[0], refs[1], refs[2]) | |
|
3064 | ||
|
3065 | @declared_attr | |
|
3066 | def target_repo(cls): | |
|
3067 | return relationship( | |
|
3068 | 'Repository', | |
|
3069 | primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__) | |
|
3070 | ||
|
3071 | @property | |
|
3072 | def target_ref_parts(self): | |
|
3073 | refs = self.target_ref.split(':') | |
|
3074 | return Reference(refs[0], refs[1], refs[2]) | |
|
3075 | ||
|
3076 | ||
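The *_ref columns above are stored as flat 'type:name:commit_id' strings and unpacked into Reference tuples, while revisions round-trips a list through a ':'-joined string. A small sketch with made-up values, assuming Reference is the (type, name, commit_id) namedtuple imported at the top of this module:

    pr = PullRequest()
    pr.source_ref = u'branch:default:1e4bfa4b8f3d'
    ref = pr.source_ref_parts
    # ref.type == u'branch', ref.name == u'default', ref.commit_id == u'1e4bfa4b8f3d'

    pr.revisions = ['1e4bfa4b8f3d', '9f3e2a1c0b7d']
    assert pr._revisions == '1e4bfa4b8f3d:9f3e2a1c0b7d'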
|
3077 | class PullRequest(Base, _PullRequestBase): | |
|
3078 | __tablename__ = 'pull_requests' | |
|
3079 | __table_args__ = ( | |
|
3080 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
3081 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
3082 | ) | |
|
3083 | ||
|
3084 | pull_request_id = Column( | |
|
3085 | 'pull_request_id', Integer(), nullable=False, primary_key=True) | |
|
3086 | ||
|
3087 | def __repr__(self): | |
|
3088 | if self.pull_request_id: | |
|
3089 | return '<DB:PullRequest #%s>' % self.pull_request_id | |
|
3090 | else: | |
|
3091 | return '<DB:PullRequest at %#x>' % id(self) | |
|
3092 | ||
|
3093 | reviewers = relationship('PullRequestReviewers', | |
|
3094 | cascade="all, delete, delete-orphan") | |
|
3095 | statuses = relationship('ChangesetStatus') | |
|
3096 | comments = relationship('ChangesetComment', | |
|
3097 | cascade="all, delete, delete-orphan") | |
|
3098 | versions = relationship('PullRequestVersion', | |
|
3099 | cascade="all, delete, delete-orphan") | |
|
3100 | ||
|
3101 | def is_closed(self): | |
|
3102 | return self.status == self.STATUS_CLOSED | |
|
3103 | ||
|
3104 | def get_api_data(self): | |
|
3105 | from rhodecode.model.pull_request import PullRequestModel | |
|
3106 | pull_request = self | |
|
3107 | merge_status = PullRequestModel().merge_status(pull_request) | |
|
3108 | data = { | |
|
3109 | 'pull_request_id': pull_request.pull_request_id, | |
|
3110 | 'url': url('pullrequest_show', repo_name=self.target_repo.repo_name, | |
|
3111 | pull_request_id=self.pull_request_id, | |
|
3112 | qualified=True), | |
|
3113 | 'title': pull_request.title, | |
|
3114 | 'description': pull_request.description, | |
|
3115 | 'status': pull_request.status, | |
|
3116 | 'created_on': pull_request.created_on, | |
|
3117 | 'updated_on': pull_request.updated_on, | |
|
3118 | 'commit_ids': pull_request.revisions, | |
|
3119 | 'review_status': pull_request.calculated_review_status(), | |
|
3120 | 'mergeable': { | |
|
3121 | 'status': merge_status[0], | |
|
3122 | 'message': unicode(merge_status[1]), | |
|
3123 | }, | |
|
3124 | 'source': { | |
|
3125 | 'clone_url': pull_request.source_repo.clone_url(), | |
|
3126 | 'repository': pull_request.source_repo.repo_name, | |
|
3127 | 'reference': { | |
|
3128 | 'name': pull_request.source_ref_parts.name, | |
|
3129 | 'type': pull_request.source_ref_parts.type, | |
|
3130 | 'commit_id': pull_request.source_ref_parts.commit_id, | |
|
3131 | }, | |
|
3132 | }, | |
|
3133 | 'target': { | |
|
3134 | 'clone_url': pull_request.target_repo.clone_url(), | |
|
3135 | 'repository': pull_request.target_repo.repo_name, | |
|
3136 | 'reference': { | |
|
3137 | 'name': pull_request.target_ref_parts.name, | |
|
3138 | 'type': pull_request.target_ref_parts.type, | |
|
3139 | 'commit_id': pull_request.target_ref_parts.commit_id, | |
|
3140 | }, | |
|
3141 | }, | |
|
3142 | 'author': pull_request.author.get_api_data(include_secrets=False, | |
|
3143 | details='basic'), | |
|
3144 | 'reviewers': [ | |
|
3145 | { | |
|
3146 | 'user': reviewer.get_api_data(include_secrets=False, | |
|
3147 | details='basic'), | |
|
3148 | 'review_status': st[0][1].status if st else 'not_reviewed', | |
|
3149 | } | |
|
3150 | for reviewer, st in pull_request.reviewers_statuses() | |
|
3151 | ] | |
|
3152 | } | |
|
3153 | ||
|
3154 | return data | |
|
3155 | ||
|
3156 | def __json__(self): | |
|
3157 | return { | |
|
3158 | 'revisions': self.revisions, | |
|
3159 | } | |
|
3160 | ||
|
3161 | def calculated_review_status(self): | |
|
3162 | # TODO: anderson: 13.05.15 Used only on templates/my_account_pullrequests.html | |
|
3163 | # because it's tricky to use ChangesetStatusModel from there |
|
3164 | warnings.warn("Use calculated_review_status from ChangesetStatusModel", DeprecationWarning) | |
|
3165 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
|
3166 | return ChangesetStatusModel().calculated_review_status(self) | |
|
3167 | ||
|
3168 | def reviewers_statuses(self): | |
|
3169 | warnings.warn("Use reviewers_statuses from ChangesetStatusModel", DeprecationWarning) | |
|
3170 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
|
3171 | return ChangesetStatusModel().reviewers_statuses(self) | |
|
3172 | ||
|
3173 | ||
|
3174 | class PullRequestVersion(Base, _PullRequestBase): | |
|
3175 | __tablename__ = 'pull_request_versions' | |
|
3176 | __table_args__ = ( | |
|
3177 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
3178 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
3179 | ) | |
|
3180 | ||
|
3181 | pull_request_version_id = Column( | |
|
3182 | 'pull_request_version_id', Integer(), nullable=False, primary_key=True) | |
|
3183 | pull_request_id = Column( | |
|
3184 | 'pull_request_id', Integer(), | |
|
3185 | ForeignKey('pull_requests.pull_request_id'), nullable=False) | |
|
3186 | pull_request = relationship('PullRequest') | |
|
3187 | ||
|
3188 | def __repr__(self): | |
|
3189 | if self.pull_request_version_id: | |
|
3190 | return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id | |
|
3191 | else: | |
|
3192 | return '<DB:PullRequestVersion at %#x>' % id(self) | |
|
3193 | ||
|
3194 | ||
|
3195 | class PullRequestReviewers(Base, BaseModel): | |
|
3196 | __tablename__ = 'pull_request_reviewers' | |
|
3197 | __table_args__ = ( | |
|
3198 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
3199 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
3200 | ) | |
|
3201 | ||
|
3202 | def __init__(self, user=None, pull_request=None): | |
|
3203 | self.user = user | |
|
3204 | self.pull_request = pull_request | |
|
3205 | ||
|
3206 | pull_requests_reviewers_id = Column( | |
|
3207 | 'pull_requests_reviewers_id', Integer(), nullable=False, | |
|
3208 | primary_key=True) | |
|
3209 | pull_request_id = Column( | |
|
3210 | "pull_request_id", Integer(), | |
|
3211 | ForeignKey('pull_requests.pull_request_id'), nullable=False) | |
|
3212 | user_id = Column( | |
|
3213 | "user_id", Integer(), ForeignKey('users.user_id'), nullable=True) | |
|
3214 | ||
|
3215 | user = relationship('User') | |
|
3216 | pull_request = relationship('PullRequest') | |
|
3217 | ||
|
3218 | ||
|
3219 | class Notification(Base, BaseModel): | |
|
3220 | __tablename__ = 'notifications' | |
|
3221 | __table_args__ = ( | |
|
3222 | Index('notification_type_idx', 'type'), | |
|
3223 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
3224 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
3225 | ) | |
|
3226 | ||
|
3227 | TYPE_CHANGESET_COMMENT = u'cs_comment' | |
|
3228 | TYPE_MESSAGE = u'message' | |
|
3229 | TYPE_MENTION = u'mention' | |
|
3230 | TYPE_REGISTRATION = u'registration' | |
|
3231 | TYPE_PULL_REQUEST = u'pull_request' | |
|
3232 | TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment' | |
|
3233 | ||
|
3234 | notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True) | |
|
3235 | subject = Column('subject', Unicode(512), nullable=True) | |
|
3236 | body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True) | |
|
3237 | created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True) | |
|
3238 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
|
3239 | type_ = Column('type', Unicode(255)) | |
|
3240 | ||
|
3241 | created_by_user = relationship('User') | |
|
3242 | notifications_to_users = relationship('UserNotification', lazy='joined', | |
|
3243 | cascade="all, delete, delete-orphan") | |
|
3244 | ||
|
3245 | @property | |
|
3246 | def recipients(self): | |
|
3247 | return [x.user for x in UserNotification.query()\ | |
|
3248 | .filter(UserNotification.notification == self)\ | |
|
3249 | .order_by(UserNotification.user_id.asc()).all()] | |
|
3250 | ||
|
3251 | @classmethod | |
|
3252 | def create(cls, created_by, subject, body, recipients, type_=None): | |
|
3253 | if type_ is None: | |
|
3254 | type_ = Notification.TYPE_MESSAGE | |
|
3255 | ||
|
3256 | notification = cls() | |
|
3257 | notification.created_by_user = created_by | |
|
3258 | notification.subject = subject | |
|
3259 | notification.body = body | |
|
3260 | notification.type_ = type_ | |
|
3261 | notification.created_on = datetime.datetime.now() | |
|
3262 | ||
|
3263 | for u in recipients: | |
|
3264 | assoc = UserNotification() | |
|
3265 | assoc.notification = notification | |
|
3266 | ||
|
3267 | # if created_by is inside recipients mark his notification | |
|
3268 | # as read | |
|
3269 | if u.user_id == created_by.user_id: | |
|
3270 | assoc.read = True | |
|
3271 | ||
|
3272 | u.notifications.append(assoc) | |
|
3273 | Session().add(notification) | |
|
3274 | ||
|
3275 | return notification | |
|
3276 | ||
|
3277 | @property | |
|
3278 | def description(self): | |
|
3279 | from rhodecode.model.notification import NotificationModel | |
|
3280 | return NotificationModel().make_description(self) | |
|
3281 | ||
|
3282 | ||
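A short sketch of how Notification.create above is typically driven (the user objects are hypothetical; committing the session stays with the caller):

    notification = Notification.create(
        created_by=admin_user, subject=u'Build finished',
        body=u'All tests passed.', recipients=[admin_user, other_user],
        type_=Notification.TYPE_MESSAGE)
    Session().commit()
    # the creator's own copy is pre-marked as read inside create()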
|
3283 | class UserNotification(Base, BaseModel): | |
|
3284 | __tablename__ = 'user_to_notification' | |
|
3285 | __table_args__ = ( | |
|
3286 | UniqueConstraint('user_id', 'notification_id'), | |
|
3287 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
3288 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
3289 | ) | |
|
3290 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True) | |
|
3291 | notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True) | |
|
3292 | read = Column('read', Boolean, default=False) | |
|
3293 | sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None) | |
|
3294 | ||
|
3295 | user = relationship('User', lazy="joined") | |
|
3296 | notification = relationship('Notification', lazy="joined", | |
|
3297 | order_by=lambda: Notification.created_on.desc(),) | |
|
3298 | ||
|
3299 | def mark_as_read(self): | |
|
3300 | self.read = True | |
|
3301 | Session().add(self) | |
|
3302 | ||
|
3303 | ||
|
3304 | class Gist(Base, BaseModel): | |
|
3305 | __tablename__ = 'gists' | |
|
3306 | __table_args__ = ( | |
|
3307 | Index('g_gist_access_id_idx', 'gist_access_id'), | |
|
3308 | Index('g_created_on_idx', 'created_on'), | |
|
3309 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
3310 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
3311 | ) | |
|
3312 | GIST_PUBLIC = u'public' | |
|
3313 | GIST_PRIVATE = u'private' | |
|
3314 | DEFAULT_FILENAME = u'gistfile1.txt' | |
|
3315 | ||
|
3316 | ACL_LEVEL_PUBLIC = u'acl_public' | |
|
3317 | ACL_LEVEL_PRIVATE = u'acl_private' | |
|
3318 | ||
|
3319 | gist_id = Column('gist_id', Integer(), primary_key=True) | |
|
3320 | gist_access_id = Column('gist_access_id', Unicode(250)) | |
|
3321 | gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) | |
|
3322 | gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True) | |
|
3323 | gist_expires = Column('gist_expires', Float(53), nullable=False) | |
|
3324 | gist_type = Column('gist_type', Unicode(128), nullable=False) | |
|
3325 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
|
3326 | modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |
|
3327 | acl_level = Column('acl_level', Unicode(128), nullable=True) | |
|
3328 | ||
|
3329 | owner = relationship('User') | |
|
3330 | ||
|
3331 | def __repr__(self): | |
|
3332 | return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id) | |
|
3333 | ||
|
3334 | @classmethod | |
|
3335 | def get_or_404(cls, id_): | |
|
3336 | res = cls.query().filter(cls.gist_access_id == id_).scalar() | |
|
3337 | if not res: | |
|
3338 | raise HTTPNotFound | |
|
3339 | return res | |
|
3340 | ||
|
3341 | @classmethod | |
|
3342 | def get_by_access_id(cls, gist_access_id): | |
|
3343 | return cls.query().filter(cls.gist_access_id == gist_access_id).scalar() | |
|
3344 | ||
|
3345 | def gist_url(self): | |
|
3346 | import rhodecode | |
|
3347 | alias_url = rhodecode.CONFIG.get('gist_alias_url') | |
|
3348 | if alias_url: | |
|
3349 | return alias_url.replace('{gistid}', self.gist_access_id) | |
|
3350 | ||
|
3351 | return url('gist', gist_id=self.gist_access_id, qualified=True) | |
|
3352 | ||
|
3353 | @classmethod | |
|
3354 | def base_path(cls): | |
|
3355 | """ | |
|
3356 | Returns the base path where all gists are stored |
|
3357 | ||
|
3358 | :param cls: | |
|
3359 | """ | |
|
3360 | from rhodecode.model.gist import GIST_STORE_LOC | |
|
3361 | q = Session().query(RhodeCodeUi)\ | |
|
3362 | .filter(RhodeCodeUi.ui_key == URL_SEP) | |
|
3363 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) | |
|
3364 | return os.path.join(q.one().ui_value, GIST_STORE_LOC) | |
|
3365 | ||
|
3366 | def get_api_data(self): | |
|
3367 | """ | |
|
3368 | Common function for generating gist related data for API | |
|
3369 | """ | |
|
3370 | gist = self | |
|
3371 | data = { | |
|
3372 | 'gist_id': gist.gist_id, | |
|
3373 | 'type': gist.gist_type, | |
|
3374 | 'access_id': gist.gist_access_id, | |
|
3375 | 'description': gist.gist_description, | |
|
3376 | 'url': gist.gist_url(), | |
|
3377 | 'expires': gist.gist_expires, | |
|
3378 | 'created_on': gist.created_on, | |
|
3379 | 'modified_at': gist.modified_at, | |
|
3380 | 'content': None, | |
|
3381 | 'acl_level': gist.acl_level, | |
|
3382 | } | |
|
3383 | return data | |
|
3384 | ||
|
3385 | def __json__(self): | |
|
3386 | data = dict( | |
|
3387 | ) | |
|
3388 | data.update(self.get_api_data()) | |
|
3389 | return data | |
|
3390 | # SCM functions | |
|
3391 | ||
|
3392 | def scm_instance(self, **kwargs): | |
|
3393 | full_repo_path = os.path.join(self.base_path(), self.gist_access_id) | |
|
3394 | return get_vcs_instance( | |
|
3395 | repo_path=safe_str(full_repo_path), create=False) | |
|
3396 | ||
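For orientation, gist_url above honours an optional gist_alias_url setting; a sketch with a made-up alias and access id:

    # assuming rhodecode.CONFIG['gist_alias_url'] = 'https://gist.example.com/{gistid}'
    gist = Gist.get_by_access_id(u'6a5b3cd1e2f4')
    gist.gist_url()  # -> 'https://gist.example.com/6a5b3cd1e2f4'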
|
3397 | ||
|
3398 | class DbMigrateVersion(Base, BaseModel): | |
|
3399 | __tablename__ = 'db_migrate_version' | |
|
3400 | __table_args__ = ( | |
|
3401 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
3402 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}, | |
|
3403 | ) | |
|
3404 | repository_id = Column('repository_id', String(250), primary_key=True) | |
|
3405 | repository_path = Column('repository_path', Text) | |
|
3406 | version = Column('version', Integer) | |
|
3407 | ||
|
3408 | ||
|
3409 | class ExternalIdentity(Base, BaseModel): | |
|
3410 | __tablename__ = 'external_identities' | |
|
3411 | __table_args__ = ( | |
|
3412 | Index('local_user_id_idx', 'local_user_id'), | |
|
3413 | Index('external_id_idx', 'external_id'), | |
|
3414 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
3415 | 'mysql_charset': 'utf8'}) | |
|
3416 | ||
|
3417 | external_id = Column('external_id', Unicode(255), default=u'', | |
|
3418 | primary_key=True) | |
|
3419 | external_username = Column('external_username', Unicode(1024), default=u'') | |
|
3420 | local_user_id = Column('local_user_id', Integer(), | |
|
3421 | ForeignKey('users.user_id'), primary_key=True) | |
|
3422 | provider_name = Column('provider_name', Unicode(255), default=u'', | |
|
3423 | primary_key=True) | |
|
3424 | access_token = Column('access_token', String(1024), default=u'') | |
|
3425 | alt_token = Column('alt_token', String(1024), default=u'') | |
|
3426 | token_secret = Column('token_secret', String(1024), default=u'') | |
|
3427 | ||
|
3428 | @classmethod | |
|
3429 | def by_external_id_and_provider(cls, external_id, provider_name, | |
|
3430 | local_user_id=None): | |
|
3431 | """ | |
|
3432 | Returns ExternalIdentity instance based on search params | |
|
3433 | ||
|
3434 | :param external_id: | |
|
3435 | :param provider_name: | |
|
3436 | :return: ExternalIdentity | |
|
3437 | """ | |
|
3438 | query = cls.query() | |
|
3439 | query = query.filter(cls.external_id == external_id) | |
|
3440 | query = query.filter(cls.provider_name == provider_name) | |
|
3441 | if local_user_id: | |
|
3442 | query = query.filter(cls.local_user_id == local_user_id) | |
|
3443 | return query.first() | |
|
3444 | ||
|
3445 | @classmethod | |
|
3446 | def user_by_external_id_and_provider(cls, external_id, provider_name): | |
|
3447 | """ | |
|
3448 | Returns User instance based on search params | |
|
3449 | ||
|
3450 | :param external_id: | |
|
3451 | :param provider_name: | |
|
3452 | :return: User | |
|
3453 | """ | |
|
3454 | query = User.query() | |
|
3455 | query = query.filter(cls.external_id == external_id) | |
|
3456 | query = query.filter(cls.provider_name == provider_name) | |
|
3457 | query = query.filter(User.user_id == cls.local_user_id) | |
|
3458 | return query.first() | |
|
3459 | ||
|
3460 | @classmethod | |
|
3461 | def by_local_user_id(cls, local_user_id): | |
|
3462 | """ | |
|
3463 | Returns all tokens for user | |
|
3464 | ||
|
3465 | :param local_user_id: | |
|
3466 | :return: ExternalIdentity | |
|
3467 | """ | |
|
3468 | query = cls.query() | |
|
3469 | query = query.filter(cls.local_user_id == local_user_id) | |
|
3470 | return query | |
|
3471 | ||
|
3472 | ||
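A small usage sketch for the ExternalIdentity lookup helpers above (provider name and external id are made up):

    user = ExternalIdentity.user_by_external_id_and_provider(
        external_id=u'12345', provider_name=u'github')
    if user:
        tokens = ExternalIdentity.by_local_user_id(user.user_id).all()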
|
3473 | class Integration(Base, BaseModel): | |
|
3474 | __tablename__ = 'integrations' | |
|
3475 | __table_args__ = ( | |
|
3476 | {'extend_existing': True, 'mysql_engine': 'InnoDB', | |
|
3477 | 'mysql_charset': 'utf8', 'sqlite_autoincrement': True} | |
|
3478 | ) | |
|
3479 | ||
|
3480 | integration_id = Column('integration_id', Integer(), primary_key=True) | |
|
3481 | integration_type = Column('integration_type', String(255)) | |
|
3482 | enabled = Column('enabled', Boolean(), nullable=False) | |
|
3483 | name = Column('name', String(255), nullable=False) | |
|
3484 | child_repos_only = Column('child_repos_only', Boolean(), nullable=True) | |
|
3485 | ||
|
3486 | settings = Column( | |
|
3487 | 'settings_json', MutationObj.as_mutable( | |
|
3488 | JsonType(dialect_map=dict(mysql=UnicodeText(16384))))) | |
|
3489 | repo_id = Column( | |
|
3490 | 'repo_id', Integer(), ForeignKey('repositories.repo_id'), | |
|
3491 | nullable=True, unique=None, default=None) | |
|
3492 | repo = relationship('Repository', lazy='joined') | |
|
3493 | ||
|
3494 | repo_group_id = Column( | |
|
3495 | 'repo_group_id', Integer(), ForeignKey('groups.group_id'), | |
|
3496 | nullable=True, unique=None, default=None) | |
|
3497 | repo_group = relationship('RepoGroup', lazy='joined') | |
|
3498 | ||
|
3499 | @hybrid_property | |
|
3500 | def scope(self): | |
|
3501 | if self.repo: | |
|
3502 | return self.repo | |
|
3503 | if self.repo_group: | |
|
3504 | return self.repo_group | |
|
3505 | if self.child_repos_only: | |
|
3506 | return 'root_repos' | |
|
3507 | return 'global' | |
|
3508 | ||
|
3509 | @scope.setter | |
|
3510 | def scope(self, value): | |
|
3511 | self.repo = None | |
|
3512 | self.repo_id = None | |
|
3513 | self.repo_group_id = None | |
|
3514 | self.repo_group = None | |
|
3515 | self.child_repos_only = None | |
|
3516 | if isinstance(value, Repository): | |
|
3517 | self.repo = value | |
|
3518 | elif isinstance(value, RepoGroup): | |
|
3519 | self.repo_group = value | |
|
3520 | elif value == 'root_repos': | |
|
3521 | self.child_repos_only = True | |
|
3522 | elif value == 'global': | |
|
3523 | pass | |
|
3524 | else: | |
|
3525 | raise Exception("invalid scope: %s, must be one of " | |
|
3526 | "['global', 'root_repos', <RepoGroup>. <Repository>]" % value) | |
|
3527 | ||
|
3528 | def __repr__(self): | |
|
3529 | return '<Integration(%r, %r)>' % (self.integration_type, self.scope) |
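The scope setter above accepts either a sentinel string or a bound object; a rough sketch (values and the repository variable are hypothetical):

    integration = Integration(
        integration_type='webhook', enabled=True, name=u'notify builds')
    integration.scope = 'global'        # no repo, repo group or child_repos_only set
    integration.scope = 'root_repos'    # only child_repos_only=True
    integration.scope = some_repository  # bind the integration to one Repository
    Session().add(integration)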
@@ -0,0 +1,35 b'' | |||
|
1 | import logging | |
|
2 | import datetime | |
|
3 | ||
|
4 | from sqlalchemy import * | |
|
5 | from sqlalchemy.exc import DatabaseError | |
|
6 | from sqlalchemy.orm import relation, backref, class_mapper, joinedload | |
|
7 | from sqlalchemy.orm.session import Session | |
|
8 | from sqlalchemy.ext.declarative import declarative_base | |
|
9 | ||
|
10 | from rhodecode.lib.dbmigrate.migrate import * | |
|
11 | from rhodecode.lib.dbmigrate.migrate.changeset import * | |
|
12 | from rhodecode.lib.utils2 import str2bool | |
|
13 | ||
|
14 | from rhodecode.model.meta import Base | |
|
15 | from rhodecode.model import meta | |
|
16 | from rhodecode.lib.dbmigrate.versions import _reset_base, notify | |
|
17 | ||
|
18 | log = logging.getLogger(__name__) | |
|
19 | ||
|
20 | ||
|
21 | def upgrade(migrate_engine): | |
|
22 | """ | |
|
23 | Upgrade operations go here. | |
|
24 | Don't create your own engine; bind migrate_engine to your metadata | |
|
25 | """ | |
|
26 | _reset_base(migrate_engine) | |
|
27 | from rhodecode.lib.dbmigrate.schema import db_4_4_0_1 | |
|
28 | ||
|
29 | tbl = db_4_4_0_1.Integration.__table__ | |
|
30 | child_repos_only = db_4_4_0_1.Integration.child_repos_only | |
|
31 | child_repos_only.create(table=tbl) | |
|
32 | ||
|
33 | def downgrade(migrate_engine): | |
|
34 | meta = MetaData() | |
|
35 | meta.bind = migrate_engine |
@@ -0,0 +1,187 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2016-2016 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | import os | |
|
22 | ||
|
23 | import deform | |
|
24 | import colander | |
|
25 | ||
|
26 | from rhodecode.translation import _ | |
|
27 | from rhodecode.model.db import Repository, RepoGroup | |
|
28 | from rhodecode.model.validation_schema import validators, preparers | |
|
29 | ||
|
30 | ||
|
31 | def integration_scope_choices(permissions): | |
|
32 | """ | |
|
33 | Return list of (value, label) choices for integration scopes depending on | |
|
34 | the permissions | |
|
35 | """ | |
|
36 | result = [('', _('Pick a scope:'))] | |
|
37 | if 'hg.admin' in permissions['global']: | |
|
38 | result.extend([ | |
|
39 | ('global', _('Global (all repositories)')), | |
|
40 | ('root_repos', _('Top level repositories only')), | |
|
41 | ]) | |
|
42 | ||
|
43 | repo_choices = [ | |
|
44 | ('repo:%s' % repo_name, '/' + repo_name) | |
|
45 | for repo_name, repo_perm | |
|
46 | in permissions['repositories'].items() | |
|
47 | if repo_perm == 'repository.admin' | |
|
48 | ] | |
|
49 | repogroup_choices = [ | |
|
50 | ('repogroup:%s' % repo_group_name, '/' + repo_group_name + ' (group)') | |
|
51 | for repo_group_name, repo_group_perm | |
|
52 | in permissions['repositories_groups'].items() | |
|
53 | if repo_group_perm == 'group.admin' | |
|
54 | ] | |
|
55 | result.extend( | |
|
56 | sorted(repogroup_choices + repo_choices, | |
|
57 | key=lambda (choice, label): choice.split(':', 1)[1] | |
|
58 | ) | |
|
59 | ) | |
|
60 | return result | |
|
61 | ||
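With a hypothetical permission set, the helper above yields plain (value, label) tuples ready for a select widget, roughly:

    permissions = {
        'global': ['hg.admin'],
        'repositories': {'project/repo': 'repository.admin'},
        'repositories_groups': {'project': 'group.admin'},
    }
    integration_scope_choices(permissions)
    # [('', u'Pick a scope:'), ('global', ...), ('root_repos', ...),
    #  ('repogroup:project', '/project (group)'), ('repo:project/repo', '/project/repo')]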
|
62 | ||
|
63 | @colander.deferred | |
|
64 | def deferred_integration_scopes_validator(node, kw): | |
|
65 | perms = kw.get('permissions') | |
|
66 | def _scope_validator(_node, scope): | |
|
67 | is_super_admin = 'hg.admin' in perms['global'] | |
|
68 | ||
|
69 | if scope in ('global', 'root_repos'): | |
|
70 | if is_super_admin: | |
|
71 | return True | |
|
72 | msg = _('Only superadmins can create global integrations') | |
|
73 | raise colander.Invalid(_node, msg) | |
|
74 | elif isinstance(scope, Repository): | |
|
75 | if (is_super_admin or perms['repositories'].get( | |
|
76 | scope.repo_name) == 'repository.admin'): | |
|
77 | return True | |
|
78 | msg = _('Only repo admins can create integrations') | |
|
79 | raise colander.Invalid(_node, msg) | |
|
80 | elif isinstance(scope, RepoGroup): | |
|
81 | if (is_super_admin or perms['repositories_groups'].get( | |
|
82 | scope.group_name) == 'group.admin'): | |
|
83 | return True | |
|
84 | ||
|
85 | msg = _('Only repogroup admins can create integrations') | |
|
86 | raise colander.Invalid(_node, msg) | |
|
87 | ||
|
88 | msg = _('Invalid integration scope: %s') % scope |
|
89 | raise colander.Invalid(node, msg) | |
|
90 | ||
|
91 | return _scope_validator | |
|
92 | ||
|
93 | ||
|
94 | @colander.deferred | |
|
95 | def deferred_integration_scopes_widget(node, kw): | |
|
96 | if kw.get('no_scope'): | |
|
97 | return deform.widget.TextInputWidget(readonly=True) | |
|
98 | ||
|
99 | choices = integration_scope_choices(kw.get('permissions')) | |
|
100 | widget = deform.widget.Select2Widget(values=choices) | |
|
101 | return widget | |
|
102 | ||
|
103 | class IntegrationScope(colander.SchemaType): | |
|
104 | def serialize(self, node, appstruct): | |
|
105 | if appstruct is colander.null: | |
|
106 | return colander.null | |
|
107 | ||
|
108 | if isinstance(appstruct, Repository): | |
|
109 | return 'repo:%s' % appstruct.repo_name | |
|
110 | elif isinstance(appstruct, RepoGroup): | |
|
111 | return 'repogroup:%s' % appstruct.group_name | |
|
112 | elif appstruct in ('global', 'root_repos'): | |
|
113 | return appstruct | |
|
114 | raise colander.Invalid(node, '%r is not a valid scope' % appstruct) | |
|
115 | ||
|
116 | def deserialize(self, node, cstruct): | |
|
117 | if cstruct is colander.null: | |
|
118 | return colander.null | |
|
119 | ||
|
120 | if cstruct.startswith('repo:'): | |
|
121 | repo = Repository.get_by_repo_name(cstruct.split(':')[1]) | |
|
122 | if repo: | |
|
123 | return repo | |
|
124 | elif cstruct.startswith('repogroup:'): | |
|
125 | repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1]) | |
|
126 | if repo_group: | |
|
127 | return repo_group | |
|
128 | elif cstruct in ('global', 'root_repos'): | |
|
129 | return cstruct | |
|
130 | ||
|
131 | raise colander.Invalid(node, '%r is not a valid scope' % cstruct) | |
|
132 | ||
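IntegrationScope above translates between model objects and the flat strings the form works with; a rough round-trip sketch (the repo name is made up):

    node = colander.SchemaNode(IntegrationScope())
    scope_type = IntegrationScope()
    scope_type.serialize(node, 'root_repos')  # -> 'root_repos'
    repo = scope_type.deserialize(node, 'repo:project/repo')
    # returns the Repository instance, or raises colander.Invalid if it does not exist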
|
133 | class IntegrationOptionsSchemaBase(colander.MappingSchema): | |
|
134 | ||
|
135 | name = colander.SchemaNode( | |
|
136 | colander.String(), | |
|
137 | description=_('Short name for this integration.'), | |
|
138 | missing=colander.required, | |
|
139 | title=_('Integration name'), | |
|
140 | ) | |
|
141 | ||
|
142 | scope = colander.SchemaNode( | |
|
143 | IntegrationScope(), | |
|
144 | description=_( | |
|
145 | 'Scope of the integration. Group scope means the integration ' | |
|
146 | 'runs on all child repos of that group.'), |
|
147 | title=_('Integration scope'), | |
|
148 | validator=deferred_integration_scopes_validator, | |
|
149 | widget=deferred_integration_scopes_widget, | |
|
150 | missing=colander.required, | |
|
151 | ) | |
|
152 | ||
|
153 | enabled = colander.SchemaNode( | |
|
154 | colander.Bool(), | |
|
155 | default=True, | |
|
156 | description=_('Enable or disable this integration.'), | |
|
157 | missing=False, | |
|
158 | title=_('Enabled'), | |
|
159 | ) | |
|
160 | ||
|
161 | ||
|
162 | ||
|
163 | def make_integration_schema(IntegrationType, settings=None): | |
|
164 | """ | |
|
165 | Return a colander schema for an integration type | |
|
166 | ||
|
167 | :param IntegrationType: the integration type class | |
|
168 | :param settings: existing integration settings dict (optional) | |
|
169 | """ | |
|
170 | ||
|
171 | settings = settings or {} | |
|
172 | settings_schema = IntegrationType(settings=settings).settings_schema() | |
|
173 | ||
|
174 | class IntegrationSchema(colander.Schema): | |
|
175 | options = IntegrationOptionsSchemaBase() | |
|
176 | ||
|
177 | schema = IntegrationSchema() | |
|
178 | schema['options'].title = _('General integration options') | |
|
179 | ||
|
180 | settings_schema.name = 'settings' | |
|
181 | settings_schema.title = _('{integration_type} settings').format( | |
|
182 | integration_type=IntegrationType.display_name) | |
|
183 | schema.add(settings_schema) | |
|
184 | ||
|
185 | return schema | |
|
186 | ||
|
187 |
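To show how make_integration_schema is meant to be consumed (a sketch; the integration type class and the keyword names passed to bind() are assumptions based on the deferred nodes above):

    schema = make_integration_schema(IntegrationType=SomeIntegrationType)
    schema = schema.bind(permissions=current_permissions, no_scope=False)
    form = deform.Form(schema, buttons=('submit',))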
@@ -0,0 +1,66 b'' | |||
|
1 | ## -*- coding: utf-8 -*- | |
|
2 | <%inherit file="base.html"/> | |
|
3 | <%namespace name="widgets" file="/widgets.html"/> | |
|
4 | ||
|
5 | <%def name="breadcrumbs_links()"> | |
|
6 | %if c.repo: | |
|
7 | ${h.link_to('Settings',h.url('edit_repo', repo_name=c.repo.repo_name))} | |
|
8 | » | |
|
9 | ${h.link_to(_('Integrations'),request.route_url(route_name='repo_integrations_home', repo_name=c.repo.repo_name))} | |
|
10 | %elif c.repo_group: | |
|
11 | ${h.link_to(_('Admin'),h.url('admin_home'))} | |
|
12 | » | |
|
13 | ${h.link_to(_('Repository Groups'),h.url('repo_groups'))} | |
|
14 | » | |
|
15 | ${h.link_to(c.repo_group.group_name,h.url('edit_repo_group', group_name=c.repo_group.group_name))} | |
|
16 | » | |
|
17 | ${h.link_to(_('Integrations'),request.route_url(route_name='repo_group_integrations_home', repo_group_name=c.repo_group.group_name))} | |
|
18 | %else: | |
|
19 | ${h.link_to(_('Admin'),h.url('admin_home'))} | |
|
20 | » | |
|
21 | ${h.link_to(_('Settings'),h.url('admin_settings'))} | |
|
22 | » | |
|
23 | ${h.link_to(_('Integrations'),request.route_url(route_name='global_integrations_home'))} | |
|
24 | %endif | |
|
25 | » | |
|
26 | ${_('Create new integration')} | |
|
27 | </%def> | |
|
28 | <%widgets:panel class_='integrations'> | |
|
29 | <%def name="title()"> | |
|
30 | %if c.repo: | |
|
31 | ${_('Create New Integration for repository: {repo_name}').format(repo_name=c.repo.repo_name)} | |
|
32 | %elif c.repo_group: | |
|
33 | ${_('Create New Integration for repository group: {repo_group_name}').format(repo_group_name=c.repo_group.group_name)} | |
|
34 | %else: | |
|
35 | ${_('Create New Global Integration')} | |
|
36 | %endif | |
|
37 | </%def> | |
|
38 | ||
|
39 | %for integration, IntegrationType in available_integrations.items(): | |
|
40 | <% | |
|
41 | if c.repo: | |
|
42 | create_url = request.route_path('repo_integrations_create', | |
|
43 | repo_name=c.repo.repo_name, | |
|
44 | integration=integration) | |
|
45 | elif c.repo_group: | |
|
46 | create_url = request.route_path('repo_group_integrations_create', | |
|
47 | repo_group_name=c.repo_group.group_name, | |
|
48 | integration=integration) | |
|
49 | else: | |
|
50 | create_url = request.route_path('global_integrations_create', | |
|
51 | integration=integration) | |
|
52 | %> | |
|
53 | <a href="${create_url}" class="integration-box"> | |
|
54 | <%widgets:panel> | |
|
55 | <h2> | |
|
56 | <div class="integration-icon"> | |
|
57 | ${IntegrationType.icon|n} | |
|
58 | </div> | |
|
59 | ${IntegrationType.display_name} | |
|
60 | </h2> | |
|
61 | ${IntegrationType.description or _('No description available')} | |
|
62 | </%widgets:panel> | |
|
63 | </a> | |
|
64 | %endfor | |
|
65 | <div style="clear:both"></div> | |
|
66 | </%widgets:panel> |
@@ -0,0 +1,4 b'' | |||
|
1 | <div class="form-control readonly" | |
|
2 | id="${oid|field.oid}"> | |
|
3 | ${cstruct} | |
|
4 | </div> |
@@ -0,0 +1,262 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | import mock | |
|
22 | import pytest | |
|
23 | from webob.exc import HTTPNotFound | |
|
24 | ||
|
25 | import rhodecode | |
|
26 | from rhodecode.model.db import Integration | |
|
27 | from rhodecode.model.meta import Session | |
|
28 | from rhodecode.tests import assert_session_flash, url, TEST_USER_ADMIN_LOGIN | |
|
29 | from rhodecode.tests.utils import AssertResponse | |
|
30 | from rhodecode.integrations import integration_type_registry | |
|
31 | from rhodecode.config.routing import ADMIN_PREFIX | |
|
32 | ||
|
33 | ||
|
34 | @pytest.mark.usefixtures('app', 'autologin_user') | |
|
35 | class TestIntegrationsView(object): | |
|
36 | pass | |
|
37 | ||
|
38 | ||
|
39 | class TestGlobalIntegrationsView(TestIntegrationsView): | |
|
40 | def test_index_no_integrations(self, app): | |
|
41 | url = ADMIN_PREFIX + '/integrations' | |
|
42 | response = app.get(url) | |
|
43 | ||
|
44 | assert response.status_code == 200 | |
|
45 | assert 'exist yet' in response.body | |
|
46 | ||
|
47 | def test_index_with_integrations(self, app, global_integration_stub): | |
|
48 | url = ADMIN_PREFIX + '/integrations' | |
|
49 | response = app.get(url) | |
|
50 | ||
|
51 | assert response.status_code == 200 | |
|
52 | assert 'exist yet' not in response.body | |
|
53 | assert global_integration_stub.name in response.body | |
|
54 | ||
|
55 | def test_new_integration_page(self, app): | |
|
56 | url = ADMIN_PREFIX + '/integrations/new' | |
|
57 | ||
|
58 | response = app.get(url) | |
|
59 | ||
|
60 | assert response.status_code == 200 | |
|
61 | ||
|
62 | for integration_key in integration_type_registry: | |
|
63 | nurl = (ADMIN_PREFIX + '/integrations/{integration}/new').format( | |
|
64 | integration=integration_key) | |
|
65 | assert nurl in response.body | |
|
66 | ||
|
67 | @pytest.mark.parametrize( | |
|
68 | 'IntegrationType', integration_type_registry.values()) | |
|
69 | def test_get_create_integration_page(self, app, IntegrationType): | |
|
70 | url = ADMIN_PREFIX + '/integrations/{integration_key}/new'.format( | |
|
71 | integration_key=IntegrationType.key) | |
|
72 | ||
|
73 | response = app.get(url) | |
|
74 | ||
|
75 | assert response.status_code == 200 | |
|
76 | assert IntegrationType.display_name in response.body | |
|
77 | ||
|
78 | def test_post_integration_page(self, app, StubIntegrationType, csrf_token, | |
|
79 | test_repo_group, backend_random): | |
|
80 | url = ADMIN_PREFIX + '/integrations/{integration_key}/new'.format( | |
|
81 | integration_key=StubIntegrationType.key) | |
|
82 | ||
|
83 | _post_integration_test_helper(app, url, csrf_token, admin_view=True, | |
|
84 | repo=backend_random.repo, repo_group=test_repo_group) | |
|
85 | ||
|
86 | ||
|
87 | class TestRepoGroupIntegrationsView(TestIntegrationsView): | |
|
88 | def test_index_no_integrations(self, app, test_repo_group): | |
|
89 | url = '/{repo_group_name}/settings/integrations'.format( | |
|
90 | repo_group_name=test_repo_group.group_name) | |
|
91 | response = app.get(url) | |
|
92 | ||
|
93 | assert response.status_code == 200 | |
|
94 | assert 'exist yet' in response.body | |
|
95 | ||
|
96 | def test_index_with_integrations(self, app, test_repo_group, | |
|
97 | repogroup_integration_stub): | |
|
98 | url = '/{repo_group_name}/settings/integrations'.format( | |
|
99 | repo_group_name=test_repo_group.group_name) | |
|
100 | ||
|
101 | stub_name = repogroup_integration_stub.name | |
|
102 | response = app.get(url) | |
|
103 | ||
|
104 | assert response.status_code == 200 | |
|
105 | assert 'exist yet' not in response.body | |
|
106 | assert stub_name in response.body | |
|
107 | ||
|
108 | def test_new_integration_page(self, app, test_repo_group): | |
|
109 | repo_group_name = test_repo_group.group_name | |
|
110 | url = '/{repo_group_name}/settings/integrations/new'.format( | |
|
111 | repo_group_name=test_repo_group.group_name) | |
|
112 | ||
|
113 | response = app.get(url) | |
|
114 | ||
|
115 | assert response.status_code == 200 | |
|
116 | ||
|
117 | for integration_key in integration_type_registry: | |
|
118 | nurl = ('/{repo_group_name}/settings/integrations' | |
|
119 | '/{integration}/new').format( | |
|
120 | repo_group_name=repo_group_name, | |
|
121 | integration=integration_key) | |
|
122 | ||
|
123 | assert nurl in response.body | |
|
124 | ||
|
125 | @pytest.mark.parametrize( | |
|
126 | 'IntegrationType', integration_type_registry.values()) | |
|
127 | def test_get_create_integration_page(self, app, test_repo_group, | |
|
128 | IntegrationType): | |
|
129 | repo_group_name = test_repo_group.group_name | |
|
130 | url = ('/{repo_group_name}/settings/integrations/{integration_key}/new' | |
|
131 | ).format(repo_group_name=repo_group_name, | |
|
132 | integration_key=IntegrationType.key) | |
|
133 | ||
|
134 | response = app.get(url) | |
|
135 | ||
|
136 | assert response.status_code == 200 | |
|
137 | assert IntegrationType.display_name in response.body | |
|
138 | ||
|
139 | def test_post_integration_page(self, app, test_repo_group, backend_random, | |
|
140 | StubIntegrationType, csrf_token): | |
|
141 | repo_group_name = test_repo_group.group_name | |
|
142 | url = ('/{repo_group_name}/settings/integrations/{integration_key}/new' | |
|
143 | ).format(repo_group_name=repo_group_name, | |
|
144 | integration_key=StubIntegrationType.key) | |
|
145 | ||
|
146 | _post_integration_test_helper(app, url, csrf_token, admin_view=False, | |
|
147 | repo=backend_random.repo, repo_group=test_repo_group) | |
|
148 | ||
|
149 | ||
|
150 | class TestRepoIntegrationsView(TestIntegrationsView): | |
|
151 | def test_index_no_integrations(self, app, backend_random): | |
|
152 | url = '/{repo_name}/settings/integrations'.format( | |
|
153 | repo_name=backend_random.repo.repo_name) | |
|
154 | response = app.get(url) | |
|
155 | ||
|
156 | assert response.status_code == 200 | |
|
157 | assert 'exist yet' in response.body | |
|
158 | ||
|
159 | def test_index_with_integrations(self, app, repo_integration_stub): | |
|
160 | url = '/{repo_name}/settings/integrations'.format( | |
|
161 | repo_name=repo_integration_stub.repo.repo_name) | |
|
162 | stub_name = repo_integration_stub.name | |
|
163 | ||
|
164 | response = app.get(url) | |
|
165 | ||
|
166 | assert response.status_code == 200 | |
|
167 | assert stub_name in response.body | |
|
168 | assert 'exist yet' not in response.body | |
|
169 | ||
|
170 | def test_new_integration_page(self, app, backend_random): | |
|
171 | repo_name = backend_random.repo.repo_name | |
|
172 | url = '/{repo_name}/settings/integrations/new'.format( | |
|
173 | repo_name=repo_name) | |
|
174 | ||
|
175 | response = app.get(url) | |
|
176 | ||
|
177 | assert response.status_code == 200 | |
|
178 | ||
|
179 | for integration_key in integration_type_registry: | |
|
180 | nurl = ('/{repo_name}/settings/integrations' | |
|
181 | '/{integration}/new').format( | |
|
182 | repo_name=repo_name, | |
|
183 | integration=integration_key) | |
|
184 | ||
|
185 | assert nurl in response.body | |
|
186 | ||
|
187 | @pytest.mark.parametrize( | |
|
188 | 'IntegrationType', integration_type_registry.values()) | |
|
189 | def test_get_create_integration_page(self, app, backend_random, | |
|
190 | IntegrationType): | |
|
191 | repo_name = backend_random.repo.repo_name | |
|
192 | url = '/{repo_name}/settings/integrations/{integration_key}/new'.format( | |
|
193 | repo_name=repo_name, integration_key=IntegrationType.key) | |
|
194 | ||
|
195 | response = app.get(url) | |
|
196 | ||
|
197 | assert response.status_code == 200 | |
|
198 | assert IntegrationType.display_name in response.body | |
|
199 | ||
|
200 | def test_post_integration_page(self, app, backend_random, test_repo_group, | |
|
201 | StubIntegrationType, csrf_token): | |
|
202 | repo_name = backend_random.repo.repo_name | |
|
203 | url = '/{repo_name}/settings/integrations/{integration_key}/new'.format( | |
|
204 | repo_name=repo_name, integration_key=StubIntegrationType.key) | |
|
205 | ||
|
206 | _post_integration_test_helper(app, url, csrf_token, admin_view=False, | |
|
207 | repo=backend_random.repo, repo_group=test_repo_group) | |
|
208 | ||
|
209 | ||
|
210 | def _post_integration_test_helper(app, url, csrf_token, repo, repo_group, | |
|
211 | admin_view): | |
|
212 | """ | |
|
213 | Posts form data to create an integration at the given url, then deletes it | |
|
214 | and checks that the redirect url is correct. | |
|
215 | """ | |
|
216 | ||
|
217 | app.post(url, params={}, status=403) # missing csrf check | |
|
218 | response = app.post(url, params={'csrf_token': csrf_token}) | |
|
219 | assert response.status_code == 200 | |
|
220 | assert 'Errors exist' in response.body | |
|
221 | ||
|
222 | scopes_destinations = [ | |
|
223 | ('global', | |
|
224 | ADMIN_PREFIX + '/integrations'), | |
|
225 | ('root_repos', | |
|
226 | ADMIN_PREFIX + '/integrations'), | |
|
227 | ('repo:%s' % repo.repo_name, | |
|
228 | '/%s/settings/integrations' % repo.repo_name), | |
|
229 | ('repogroup:%s' % repo_group.group_name, | |
|
230 | '/%s/settings/integrations' % repo_group.group_name), | |
|
231 | ] | |
|
232 | ||
|
233 | for scope, destination in scopes_destinations: | |
|
234 | if admin_view: | |
|
235 | destination = ADMIN_PREFIX + '/integrations' | |
|
236 | ||
|
237 | form_data = [ | |
|
238 | ('csrf_token', csrf_token), | |
|
239 | ('__start__', 'options:mapping'), | |
|
240 | ('name', 'test integration'), | |
|
241 | ('scope', scope), | |
|
242 | ('enabled', 'true'), | |
|
243 | ('__end__', 'options:mapping'), | |
|
244 | ('__start__', 'settings:mapping'), | |
|
245 | ('test_int_field', '34'), | |
|
246 | ('test_string_field', ''),  # left empty on purpose: the field is required, so this must fail validation | |
|
247 | ('__end__', 'settings:mapping'), | |
|
248 | ] | |
|
249 | errors_response = app.post(url, form_data) | |
|
250 | assert 'Errors exist' in errors_response.body | |
|
251 | ||
|
252 | form_data[-2] = ('test_string_field', 'data!') | |
|
253 | assert Session().query(Integration).count() == 0 | |
|
254 | created_response = app.post(url, form_data) | |
|
255 | assert Session().query(Integration).count() == 1 | |
|
256 | ||
|
257 | delete_response = app.post( | |
|
258 | created_response.location, | |
|
259 | params={'csrf_token': csrf_token, 'delete': 'delete'}) | |
|
260 | ||
|
261 | assert Session().query(Integration).count() == 0 | |
|
262 | assert delete_response.location.endswith(destination) |
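Note on the `__start__`/`__end__` pairs in `form_data` above: they are peppercorn grouping markers, and the view (shown later in this diff) runs the flat POST pairs through `peppercorn.parse()` to obtain the nested `options`/`settings` mapping the integration schema expects. A minimal sketch of that step using the same field names as the test data (the `csrf_token` pair is left out; this is an illustration, not part of the changeset):

```python
import peppercorn

# flat (name, value) pairs, as the test form above submits them
controls = [
    ('__start__', 'options:mapping'),
    ('name', 'test integration'),
    ('scope', 'global'),
    ('enabled', 'true'),
    ('__end__', 'options:mapping'),
    ('__start__', 'settings:mapping'),
    ('test_int_field', '34'),
    ('test_string_field', 'data!'),
    ('__end__', 'settings:mapping'),
]

pstruct = peppercorn.parse(controls)
# pstruct == {
#     'options': {'name': 'test integration', 'scope': 'global', 'enabled': 'true'},
#     'settings': {'test_int_field': '34', 'test_string_field': 'data!'},
# }
```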
@@ -0,0 +1,120 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2016-2016 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | import colander | |
|
22 | import pytest | |
|
23 | ||
|
24 | from rhodecode.model import validation_schema | |
|
25 | ||
|
26 | from rhodecode.integrations import integration_type_registry | |
|
27 | from rhodecode.integrations.types.base import IntegrationTypeBase | |
|
28 | from rhodecode.model.validation_schema.schemas.integration_schema import ( | |
|
29 | make_integration_schema | |
|
30 | ) | |
|
31 | ||
|
32 | ||
|
33 | @pytest.mark.usefixtures('app', 'autologin_user') | |
|
34 | class TestIntegrationSchema(object): | |
|
35 | ||
|
36 | def test_deserialize_integration_schema_perms(self, backend_random, | |
|
37 | test_repo_group, | |
|
38 | StubIntegrationType): | |
|
39 | ||
|
40 | repo = backend_random.repo | |
|
41 | repo_group = test_repo_group | |
|
42 | ||
|
43 | ||
|
44 | empty_perms_dict = { | |
|
45 | 'global': [], | |
|
46 | 'repositories': {}, | |
|
47 | 'repositories_groups': {}, | |
|
48 | } | |
|
49 | ||
|
50 | perms_tests = { | |
|
51 | ('repo:%s' % repo.repo_name, repo): [ | |
|
52 | ({}, False), | |
|
53 | ({'global': ['hg.admin']}, True), | |
|
54 | ({'global': []}, False), | |
|
55 | ({'repositories': {repo.repo_name: 'repository.admin'}}, True), | |
|
56 | ({'repositories': {repo.repo_name: 'repository.read'}}, False), | |
|
57 | ({'repositories': {repo.repo_name: 'repository.write'}}, False), | |
|
58 | ({'repositories': {repo.repo_name: 'repository.none'}}, False), | |
|
59 | ], | |
|
60 | ('repogroup:%s' % repo_group.group_name, repo_group): [ | |
|
61 | ({}, False), | |
|
62 | ({'global': ['hg.admin']}, True), | |
|
63 | ({'global': []}, False), | |
|
64 | ({'repositories_groups': | |
|
65 | {repo_group.group_name: 'group.admin'}}, True), | |
|
66 | ({'repositories_groups': | |
|
67 | {repo_group.group_name: 'group.read'}}, False), | |
|
68 | ({'repositories_groups': | |
|
69 | {repo_group.group_name: 'group.write'}}, False), | |
|
70 | ({'repositories_groups': | |
|
71 | {repo_group.group_name: 'group.none'}}, False), | |
|
72 | ], | |
|
73 | ('global', 'global'): [ | |
|
74 | ({}, False), | |
|
75 | ({'global': ['hg.admin']}, True), | |
|
76 | ({'global': []}, False), | |
|
77 | ], | |
|
78 | ('root_repos', 'root_repos'): [ | |
|
79 | ({}, False), | |
|
80 | ({'global': ['hg.admin']}, True), | |
|
81 | ({'global': []}, False), | |
|
82 | ], | |
|
83 | } | |
|
84 | ||
|
85 | for (scope_input, scope_output), perms_allowed in perms_tests.items(): | |
|
86 | for perms_update, allowed in perms_allowed: | |
|
87 | perms = dict(empty_perms_dict, **perms_update) | |
|
88 | ||
|
89 | schema = make_integration_schema( | |
|
90 | IntegrationType=StubIntegrationType | |
|
91 | ).bind(permissions=perms) | |
|
92 | ||
|
93 | input_data = { | |
|
94 | 'options': { | |
|
95 | 'enabled': 'true', | |
|
96 | 'scope': scope_input, | |
|
97 | 'name': 'test integration', | |
|
98 | }, | |
|
99 | 'settings': { | |
|
100 | 'test_string_field': 'stringy', | |
|
101 | 'test_int_field': '100', | |
|
102 | } | |
|
103 | } | |
|
104 | ||
|
105 | if not allowed: | |
|
106 | with pytest.raises(colander.Invalid): | |
|
107 | schema.deserialize(input_data) | |
|
108 | else: | |
|
109 | assert schema.deserialize(input_data) == { | |
|
110 | 'options': { | |
|
111 | 'enabled': True, | |
|
112 | 'scope': scope_output, | |
|
113 | 'name': 'test integration', | |
|
114 | }, | |
|
115 | 'settings': { | |
|
116 | 'test_string_field': 'stringy', | |
|
117 | 'test_int_field': 100, | |
|
118 | } | |
|
119 | } | |
|
120 |
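The `bind(permissions=perms)` call in this test works because colander resolves deferred values at bind time; the scope node in `make_integration_schema` presumably carries a deferred validator that inspects the bound permissions, which is why the same input deserializes or raises depending on `perms`. A rough sketch of that pattern (illustrative only; the real schema internals are not part of this diff):

```python
import colander


@colander.deferred
def deferred_scope_validator(node, kw):
    # ``kw`` holds whatever keyword arguments were passed to schema.bind()
    permissions = kw.get('permissions', {})

    def validator(node, value):
        if 'hg.admin' not in permissions.get('global', []):
            raise colander.Invalid(node, 'not allowed to set scope %r' % value)

    return validator


class OptionsSchema(colander.MappingSchema):
    scope = colander.SchemaNode(
        colander.String(), validator=deferred_scope_validator)


bound = OptionsSchema().bind(permissions={'global': ['hg.admin']})
assert bound.deserialize({'scope': 'global'}) == {'scope': 'global'}
```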
@@ -51,7 +51,7 b' PYRAMID_SETTINGS = {}' | |||
|
51 | 51 | EXTENSIONS = {} |
|
52 | 52 | |
|
53 | 53 | __version__ = ('.'.join((str(each) for each in VERSION[:3]))) |
|
54 | __dbversion__ = 5 |
|
54 | __dbversion__ = 57 # defines current db version for migrations | |
|
55 | 55 | __platform__ = platform.system() |
|
56 | 56 | __license__ = 'AGPLv3, and Commercial License' |
|
57 | 57 | __author__ = 'RhodeCode GmbH' |
@@ -42,6 +42,7 b" STATIC_FILE_PREFIX = '/_static'" | |||
|
42 | 42 | URL_NAME_REQUIREMENTS = { |
|
43 | 43 | # group name can have a slash in them, but they must not end with a slash |
|
44 | 44 | 'group_name': r'.*?[^/]', |
|
45 | 'repo_group_name': r'.*?[^/]', | |
|
45 | 46 | # repo names can have a slash in them, but they must not end with a slash |
|
46 | 47 | 'repo_name': r'.*?[^/]', |
|
47 | 48 | # file path eats up everything at the end |
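The new `repo_group_name` requirement reuses the same regex as `group_name`: lazily match anything as long as the final character is not a slash, so nested group paths are allowed but trailing slashes are rejected. The behaviour of the requirement pattern on a whole path segment can be checked in isolation:

```python
import re

requirement = r'.*?[^/]'  # same pattern as 'group_name' / 'repo_group_name' above

assert re.match('^%s$' % requirement, 'docs/nested/group')            # no trailing slash: ok
assert re.match('^%s$' % requirement, 'docs/nested/group/') is None   # trailing slash: rejected
```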
@@ -31,6 +31,15 b' log = logging.getLogger(__name__)' | |||
|
31 | 31 | def includeme(config): |
|
32 | 32 | |
|
33 | 33 | # global integrations |
|
34 | ||
|
35 | config.add_route('global_integrations_new', | |
|
36 | ADMIN_PREFIX + '/integrations/new') | |
|
37 | config.add_view('rhodecode.integrations.views.GlobalIntegrationsView', | |
|
38 | attr='new_integration', | |
|
39 | renderer='rhodecode:templates/admin/integrations/new.html', | |
|
40 | request_method='GET', | |
|
41 | route_name='global_integrations_new') | |
|
42 | ||
|
34 | 43 | config.add_route('global_integrations_home', |
|
35 | 44 | ADMIN_PREFIX + '/integrations') |
|
36 | 45 | config.add_route('global_integrations_list', |
@@ -48,15 +57,75 b' def includeme(config):' | |||
|
48 | 57 | config.add_route('global_integrations_edit', |
|
49 | 58 | ADMIN_PREFIX + '/integrations/{integration}/{integration_id}', |
|
50 | 59 | custom_predicates=(valid_integration,)) |
|
60 | ||
|
61 | ||
|
51 | 62 | for route_name in ['global_integrations_create', 'global_integrations_edit']: |
|
52 | 63 | config.add_view('rhodecode.integrations.views.GlobalIntegrationsView', |
|
53 | 64 | attr='settings_get', |
|
54 | renderer='rhodecode:templates/admin/integrations/ |
|
65 | renderer='rhodecode:templates/admin/integrations/form.html', | |
|
55 | 66 | request_method='GET', |
|
56 | 67 | route_name=route_name) |
|
57 | 68 | config.add_view('rhodecode.integrations.views.GlobalIntegrationsView', |
|
58 | 69 | attr='settings_post', |
|
59 | renderer='rhodecode:templates/admin/integrations/ |
|
70 | renderer='rhodecode:templates/admin/integrations/form.html', | |
|
71 | request_method='POST', | |
|
72 | route_name=route_name) | |
|
73 | ||
|
74 | ||
|
75 | # repo group integrations | |
|
76 | config.add_route('repo_group_integrations_home', | |
|
77 | add_route_requirements( | |
|
78 | '{repo_group_name}/settings/integrations', | |
|
79 | URL_NAME_REQUIREMENTS | |
|
80 | ), | |
|
81 | custom_predicates=(valid_repo_group,) | |
|
82 | ) | |
|
83 | config.add_route('repo_group_integrations_list', | |
|
84 | add_route_requirements( | |
|
85 | '{repo_group_name}/settings/integrations/{integration}', | |
|
86 | URL_NAME_REQUIREMENTS | |
|
87 | ), | |
|
88 | custom_predicates=(valid_repo_group, valid_integration)) | |
|
89 | for route_name in ['repo_group_integrations_home', 'repo_group_integrations_list']: | |
|
90 | config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView', | |
|
91 | attr='index', | |
|
92 | renderer='rhodecode:templates/admin/integrations/list.html', | |
|
93 | request_method='GET', | |
|
94 | route_name=route_name) | |
|
95 | ||
|
96 | config.add_route('repo_group_integrations_new', | |
|
97 | add_route_requirements( | |
|
98 | '{repo_group_name}/settings/integrations/new', | |
|
99 | URL_NAME_REQUIREMENTS | |
|
100 | ), | |
|
101 | custom_predicates=(valid_repo_group,)) | |
|
102 | config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView', | |
|
103 | attr='new_integration', | |
|
104 | renderer='rhodecode:templates/admin/integrations/new.html', | |
|
105 | request_method='GET', | |
|
106 | route_name='repo_group_integrations_new') | |
|
107 | ||
|
108 | config.add_route('repo_group_integrations_create', | |
|
109 | add_route_requirements( | |
|
110 | '{repo_group_name}/settings/integrations/{integration}/new', | |
|
111 | URL_NAME_REQUIREMENTS | |
|
112 | ), | |
|
113 | custom_predicates=(valid_repo_group, valid_integration)) | |
|
114 | config.add_route('repo_group_integrations_edit', | |
|
115 | add_route_requirements( | |
|
116 | '{repo_group_name}/settings/integrations/{integration}/{integration_id}', | |
|
117 | URL_NAME_REQUIREMENTS | |
|
118 | ), | |
|
119 | custom_predicates=(valid_repo_group, valid_integration)) | |
|
120 | for route_name in ['repo_group_integrations_edit', 'repo_group_integrations_create']: | |
|
121 | config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView', | |
|
122 | attr='settings_get', | |
|
123 | renderer='rhodecode:templates/admin/integrations/form.html', | |
|
124 | request_method='GET', | |
|
125 | route_name=route_name) | |
|
126 | config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView', | |
|
127 | attr='settings_post', | |
|
128 | renderer='rhodecode:templates/admin/integrations/form.html', | |
|
60 | 129 | request_method='POST', |
|
61 | 130 | route_name=route_name) |
|
62 | 131 | |
@@ -78,8 +147,21 b' def includeme(config):' | |||
|
78 | 147 | config.add_view('rhodecode.integrations.views.RepoIntegrationsView', |
|
79 | 148 | attr='index', |
|
80 | 149 | request_method='GET', |
|
150 | renderer='rhodecode:templates/admin/integrations/list.html', | |
|
81 | 151 | route_name=route_name) |
|
82 | 152 | |
|
153 | config.add_route('repo_integrations_new', | |
|
154 | add_route_requirements( | |
|
155 | '{repo_name}/settings/integrations/new', | |
|
156 | URL_NAME_REQUIREMENTS | |
|
157 | ), | |
|
158 | custom_predicates=(valid_repo,)) | |
|
159 | config.add_view('rhodecode.integrations.views.RepoIntegrationsView', | |
|
160 | attr='new_integration', | |
|
161 | renderer='rhodecode:templates/admin/integrations/new.html', | |
|
162 | request_method='GET', | |
|
163 | route_name='repo_integrations_new') | |
|
164 | ||
|
83 | 165 | config.add_route('repo_integrations_create', |
|
84 | 166 | add_route_requirements( |
|
85 | 167 | '{repo_name}/settings/integrations/{integration}/new', |
@@ -95,56 +177,12 b' def includeme(config):' | |||
|
95 | 177 | for route_name in ['repo_integrations_edit', 'repo_integrations_create']: |
|
96 | 178 | config.add_view('rhodecode.integrations.views.RepoIntegrationsView', |
|
97 | 179 | attr='settings_get', |
|
98 | renderer='rhodecode:templates/admin/integrations/ |
|
180 | renderer='rhodecode:templates/admin/integrations/form.html', | |
|
99 | 181 | request_method='GET', |
|
100 | 182 | route_name=route_name) |
|
101 | 183 | config.add_view('rhodecode.integrations.views.RepoIntegrationsView', |
|
102 | 184 | attr='settings_post', |
|
103 | renderer='rhodecode:templates/admin/integrations/ |
|
104 | request_method='POST', | |
|
105 | route_name=route_name) | |
|
106 | ||
|
107 | ||
|
108 | # repo group integrations | |
|
109 | config.add_route('repo_group_integrations_home', | |
|
110 | add_route_requirements( | |
|
111 | '{repo_group_name}/settings/integrations', | |
|
112 | URL_NAME_REQUIREMENTS | |
|
113 | ), | |
|
114 | custom_predicates=(valid_repo_group,)) | |
|
115 | config.add_route('repo_group_integrations_list', | |
|
116 | add_route_requirements( | |
|
117 | '{repo_group_name}/settings/integrations/{integration}', | |
|
118 | URL_NAME_REQUIREMENTS | |
|
119 | ), | |
|
120 | custom_predicates=(valid_repo_group, valid_integration)) | |
|
121 | for route_name in ['repo_group_integrations_home', 'repo_group_integrations_list']: | |
|
122 | config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView', | |
|
123 | attr='index', | |
|
124 | request_method='GET', | |
|
125 | route_name=route_name) | |
|
126 | ||
|
127 | config.add_route('repo_group_integrations_create', | |
|
128 | add_route_requirements( | |
|
129 | '{repo_group_name}/settings/integrations/{integration}/new', | |
|
130 | URL_NAME_REQUIREMENTS | |
|
131 | ), | |
|
132 | custom_predicates=(valid_repo_group, valid_integration)) | |
|
133 | config.add_route('repo_group_integrations_edit', | |
|
134 | add_route_requirements( | |
|
135 | '{repo_group_name}/settings/integrations/{integration}/{integration_id}', | |
|
136 | URL_NAME_REQUIREMENTS | |
|
137 | ), | |
|
138 | custom_predicates=(valid_repo_group, valid_integration)) | |
|
139 | for route_name in ['repo_group_integrations_edit', 'repo_group_integrations_create']: | |
|
140 | config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView', | |
|
141 | attr='settings_get', | |
|
142 | renderer='rhodecode:templates/admin/integrations/edit.html', | |
|
143 | request_method='GET', | |
|
144 | route_name=route_name) | |
|
145 | config.add_view('rhodecode.integrations.views.RepoGroupIntegrationsView', | |
|
146 | attr='settings_post', | |
|
147 | renderer='rhodecode:templates/admin/integrations/edit.html', | |
|
185 | renderer='rhodecode:templates/admin/integrations/form.html', | |
|
148 | 186 | request_method='POST', |
|
149 | 187 | route_name=route_name) |
|
150 | 188 | |
@@ -194,7 +232,7 b' def valid_integration(info, request):' | |||
|
194 | 232 | return False |
|
195 | 233 | if repo and repo.repo_id != integration.repo_id: |
|
196 | 234 | return False |
|
197 | if repo_group and repo_group. |
|
235 | if repo_group and repo_group.group_id != integration.repo_group_id: | |
|
198 | 236 | return False |
|
199 | 237 | |
|
200 | 238 | return True |
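The routes registered above all rely on `custom_predicates`; in Pyramid these are plain callables receiving `(info, request)` (the same signature `valid_integration` has in the hunk header above) that must all return `True` for the route to match, otherwise the router falls through to the next route or a 404. A sketch of the shape such a predicate takes (the real `valid_repo_group` in this module may differ in detail):

```python
def valid_repo_group(info, request):
    # ``info['match']`` holds the placeholders captured from the URL pattern
    from rhodecode.model.db import RepoGroup

    repo_group_name = info['match'].get('repo_group_name', '')
    # route only matches when the repo group actually exists
    return RepoGroup.get_by_group_name(repo_group_name) is not None
```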
@@ -20,26 +20,52 b'' | |||
|
20 | 20 | |
|
21 | 21 | import colander |
|
22 | 22 | |
|
23 | from rhodecode.translation import |
|
23 | from rhodecode.translation import _ | |
|
24 | 24 | |
|
25 | 25 | |
|
26 | class Integration |
|
27 | """ | |
|
28 | This base schema is intended for use in integrations. | |
|
29 | It adds a few default settings (e.g., "enabled"), so that integration | |
|
30 | authors don't have to maintain a bunch of boilerplate. | |
|
31 | """ | |
|
26 | class IntegrationOptionsSchemaBase(colander.MappingSchema): | |
|
32 | 27 | enabled = colander.SchemaNode( |
|
33 | 28 | colander.Bool(), |
|
34 | 29 | default=True, |
|
35 | description= |
|
30 | description=_('Enable or disable this integration.'), | |
|
36 | 31 | missing=False, |
|
37 | title= |
|
32 | title=_('Enabled'), | |
|
38 | 33 | ) |
|
39 | 34 | |
|
40 | 35 | name = colander.SchemaNode( |
|
41 | 36 | colander.String(), |
|
42 | description= |
|
37 | description=_('Short name for this integration.'), | |
|
43 | 38 | missing=colander.required, |
|
44 | title= |
|
39 | title=_('Integration name'), | |
|
45 | 40 | ) |
|
41 | ||
|
42 | ||
|
43 | class RepoIntegrationOptionsSchema(IntegrationOptionsSchemaBase): | |
|
44 | pass | |
|
45 | ||
|
46 | ||
|
47 | class RepoGroupIntegrationOptionsSchema(IntegrationOptionsSchemaBase): | |
|
48 | child_repos_only = colander.SchemaNode( | |
|
49 | colander.Bool(), | |
|
50 | default=True, | |
|
51 | description=_( | |
|
52 | 'Limit integrations to work only on the direct children ' | |
|
53 | 'repositories of this repository group (no subgroups)'), | |
|
54 | missing=False, | |
|
55 | title=_('Limit to children repos only'), | |
|
56 | ) | |
|
57 | ||
|
58 | ||
|
59 | class GlobalIntegrationOptionsSchema(IntegrationOptionsSchemaBase): | |
|
60 | child_repos_only = colander.SchemaNode( | |
|
61 | colander.Bool(), | |
|
62 | default=False, | |
|
63 | description=_( | |
|
64 | 'Limit integrations to work only on root level repositories'), | |
|
65 | missing=False, | |
|
66 | title=_('Root repositories only'), | |
|
67 | ) | |
|
68 | ||
|
69 | ||
|
70 | class IntegrationSettingsSchemaBase(colander.MappingSchema): | |
|
71 | pass |
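The boolean option nodes above (`enabled`, `child_repos_only`) lean on colander's `Bool` type plus `missing=False`: the checkbox value a browser posts ('true'/'false') is coerced to a Python bool, and an unchecked box, which simply does not appear in the POST, falls back to `False` instead of raising. A quick standalone illustration of that behaviour:

```python
import colander

enabled = colander.SchemaNode(
    colander.Bool(), name='enabled', default=True, missing=False)

assert enabled.deserialize('true') is True
assert enabled.deserialize('false') is False
assert enabled.deserialize(colander.null) is False   # field absent from the POST
```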
@@ -18,25 +18,84 b'' | |||
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | from rhodecode.integrations.schema import IntegrationSettingsSchemaBase | |
|
21 | import colander | |
|
22 | from rhodecode.translation import _ | |
|
22 | 23 | |
|
23 | 24 | |
|
24 | 25 | class IntegrationTypeBase(object): |
|
25 | 26 | """ Base class for IntegrationType plugins """ |
|
26 | 27 | |
|
28 | description = '' | |
|
29 | icon = ''' | |
|
30 | <?xml version="1.0" encoding="UTF-8" standalone="no"?> | |
|
31 | <svg | |
|
32 | xmlns:dc="http://purl.org/dc/elements/1.1/" | |
|
33 | xmlns:cc="http://creativecommons.org/ns#" | |
|
34 | xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" | |
|
35 | xmlns:svg="http://www.w3.org/2000/svg" | |
|
36 | xmlns="http://www.w3.org/2000/svg" | |
|
37 | xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | |
|
38 | xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | |
|
39 | viewBox="0 -256 1792 1792" | |
|
40 | id="svg3025" | |
|
41 | version="1.1" | |
|
42 | inkscape:version="0.48.3.1 r9886" | |
|
43 | width="100%" | |
|
44 | height="100%" | |
|
45 | sodipodi:docname="cog_font_awesome.svg"> | |
|
46 | <metadata | |
|
47 | id="metadata3035"> | |
|
48 | <rdf:RDF> | |
|
49 | <cc:Work | |
|
50 | rdf:about=""> | |
|
51 | <dc:format>image/svg+xml</dc:format> | |
|
52 | <dc:type | |
|
53 | rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> | |
|
54 | </cc:Work> | |
|
55 | </rdf:RDF> | |
|
56 | </metadata> | |
|
57 | <defs | |
|
58 | id="defs3033" /> | |
|
59 | <sodipodi:namedview | |
|
60 | pagecolor="#ffffff" | |
|
61 | bordercolor="#666666" | |
|
62 | borderopacity="1" | |
|
63 | objecttolerance="10" | |
|
64 | gridtolerance="10" | |
|
65 | guidetolerance="10" | |
|
66 | inkscape:pageopacity="0" | |
|
67 | inkscape:pageshadow="2" | |
|
68 | inkscape:window-width="640" | |
|
69 | inkscape:window-height="480" | |
|
70 | id="namedview3031" | |
|
71 | showgrid="false" | |
|
72 | inkscape:zoom="0.13169643" | |
|
73 | inkscape:cx="896" | |
|
74 | inkscape:cy="896" | |
|
75 | inkscape:window-x="0" | |
|
76 | inkscape:window-y="25" | |
|
77 | inkscape:window-maximized="0" | |
|
78 | inkscape:current-layer="svg3025" /> | |
|
79 | <g | |
|
80 | transform="matrix(1,0,0,-1,121.49153,1285.4237)" | |
|
81 | id="g3027"> | |
|
82 | <path | |
|
83 | d="m 1024,640 q 0,106 -75,181 -75,75 -181,75 -106,0 -181,-75 -75,-75 -75,-181 0,-106 75,-181 75,-75 181,-75 106,0 181,75 75,75 75,181 z m 512,109 V 527 q 0,-12 -8,-23 -8,-11 -20,-13 l -185,-28 q -19,-54 -39,-91 35,-50 107,-138 10,-12 10,-25 0,-13 -9,-23 -27,-37 -99,-108 -72,-71 -94,-71 -12,0 -26,9 l -138,108 q -44,-23 -91,-38 -16,-136 -29,-186 -7,-28 -36,-28 H 657 q -14,0 -24.5,8.5 Q 622,-111 621,-98 L 593,86 q -49,16 -90,37 L 362,16 Q 352,7 337,7 323,7 312,18 186,132 147,186 q -7,10 -7,23 0,12 8,23 15,21 51,66.5 36,45.5 54,70.5 -27,50 -41,99 L 29,495 Q 16,497 8,507.5 0,518 0,531 v 222 q 0,12 8,23 8,11 19,13 l 186,28 q 14,46 39,92 -40,57 -107,138 -10,12 -10,24 0,10 9,23 26,36 98.5,107.5 72.5,71.5 94.5,71.5 13,0 26,-10 l 138,-107 q 44,23 91,38 16,136 29,186 7,28 36,28 h 222 q 14,0 24.5,-8.5 Q 914,1391 915,1378 l 28,-184 q 49,-16 90,-37 l 142,107 q 9,9 24,9 13,0 25,-10 129,-119 165,-170 7,-8 7,-22 0,-12 -8,-23 -15,-21 -51,-66.5 -36,-45.5 -54,-70.5 26,-50 41,-98 l 183,-28 q 13,-2 21,-12.5 8,-10.5 8,-23.5 z" | |
|
84 | id="path3029" | |
|
85 | inkscape:connector-curvature="0" | |
|
86 | style="fill:currentColor" /> | |
|
87 | </g> | |
|
88 | </svg> | |
|
89 | ''' | |
|
90 | ||
|
27 | 91 | def __init__(self, settings): |
|
28 | 92 | """ |
|
29 | 93 | :param settings: dict of settings to be used for the integration |
|
30 | 94 | """ |
|
31 | 95 | self.settings = settings |
|
32 | 96 | |
|
33 | ||
|
34 | 97 | def settings_schema(self): |
|
35 | 98 | """ |
|
36 | 99 | A colander schema of settings for the integration type |
|
37 | ||
|
38 | Subclasses can return their own schema but should always | |
|
39 | inherit from IntegrationSettingsSchemaBase | |
|
40 | 100 | """ |
|
41 | return |
|
42 | ||
|
101 | return colander.Schema() |
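With the old schema base class gone, `settings_schema()` now simply returns an empty `colander.Schema()` and each concrete type overrides it, as the Email/Hipchat/Slack/Webhook classes later in this diff do. A minimal custom type would look roughly like the following (the `key`, `display_name` and `settings_schema` attributes are the ones the registry and views rely on; everything else here is purely illustrative):

```python
import colander

from rhodecode.integrations.types.base import IntegrationTypeBase


class PingSettingsSchema(colander.Schema):
    # single hypothetical setting, just for the example
    target_url = colander.SchemaNode(colander.String(), missing='')


class PingIntegrationType(IntegrationTypeBase):
    key = 'ping'                       # unique key used by the registry and routes
    display_name = 'Ping'              # label shown on the "new integration" page
    description = 'Illustrative integration type'

    def settings_schema(self):
        return PingSettingsSchema()
```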
@@ -26,11 +26,10 b' import colander' | |||
|
26 | 26 | from mako.template import Template |
|
27 | 27 | |
|
28 | 28 | from rhodecode import events |
|
29 | from rhodecode.translation import _ |
|
29 | from rhodecode.translation import _ | |
|
30 | 30 | from rhodecode.lib.celerylib import run_task |
|
31 | 31 | from rhodecode.lib.celerylib import tasks |
|
32 | 32 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
33 | from rhodecode.integrations.schema import IntegrationSettingsSchemaBase | |
|
34 | 33 | |
|
35 | 34 | |
|
36 | 35 | log = logging.getLogger(__name__) |
@@ -147,18 +146,79 b" repo_push_template_html = Template('''" | |||
|
147 | 146 | </html> |
|
148 | 147 | ''') |
|
149 | 148 | |
|
149 | email_icon = ''' | |
|
150 | <?xml version="1.0" encoding="UTF-8" standalone="no"?> | |
|
151 | <svg | |
|
152 | xmlns:dc="http://purl.org/dc/elements/1.1/" | |
|
153 | xmlns:cc="http://creativecommons.org/ns#" | |
|
154 | xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" | |
|
155 | xmlns:svg="http://www.w3.org/2000/svg" | |
|
156 | xmlns="http://www.w3.org/2000/svg" | |
|
157 | xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | |
|
158 | xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | |
|
159 | viewBox="0 -256 1850 1850" | |
|
160 | id="svg2989" | |
|
161 | version="1.1" | |
|
162 | inkscape:version="0.48.3.1 r9886" | |
|
163 | width="100%" | |
|
164 | height="100%" | |
|
165 | sodipodi:docname="envelope_font_awesome.svg"> | |
|
166 | <metadata | |
|
167 | id="metadata2999"> | |
|
168 | <rdf:RDF> | |
|
169 | <cc:Work | |
|
170 | rdf:about=""> | |
|
171 | <dc:format>image/svg+xml</dc:format> | |
|
172 | <dc:type | |
|
173 | rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> | |
|
174 | </cc:Work> | |
|
175 | </rdf:RDF> | |
|
176 | </metadata> | |
|
177 | <defs | |
|
178 | id="defs2997" /> | |
|
179 | <sodipodi:namedview | |
|
180 | pagecolor="#ffffff" | |
|
181 | bordercolor="#666666" | |
|
182 | borderopacity="1" | |
|
183 | objecttolerance="10" | |
|
184 | gridtolerance="10" | |
|
185 | guidetolerance="10" | |
|
186 | inkscape:pageopacity="0" | |
|
187 | inkscape:pageshadow="2" | |
|
188 | inkscape:window-width="640" | |
|
189 | inkscape:window-height="480" | |
|
190 | id="namedview2995" | |
|
191 | showgrid="false" | |
|
192 | inkscape:zoom="0.13169643" | |
|
193 | inkscape:cx="896" | |
|
194 | inkscape:cy="896" | |
|
195 | inkscape:window-x="0" | |
|
196 | inkscape:window-y="25" | |
|
197 | inkscape:window-maximized="0" | |
|
198 | inkscape:current-layer="svg2989" /> | |
|
199 | <g | |
|
200 | transform="matrix(1,0,0,-1,37.966102,1282.678)" | |
|
201 | id="g2991"> | |
|
202 | <path | |
|
203 | d="m 1664,32 v 768 q -32,-36 -69,-66 -268,-206 -426,-338 -51,-43 -83,-67 -32,-24 -86.5,-48.5 Q 945,256 897,256 h -1 -1 Q 847,256 792.5,280.5 738,305 706,329 674,353 623,396 465,528 197,734 160,764 128,800 V 32 Q 128,19 137.5,9.5 147,0 160,0 h 1472 q 13,0 22.5,9.5 9.5,9.5 9.5,22.5 z m 0,1051 v 11 13.5 q 0,0 -0.5,13 -0.5,13 -3,12.5 -2.5,-0.5 -5.5,9 -3,9.5 -9,7.5 -6,-2 -14,2.5 H 160 q -13,0 -22.5,-9.5 Q 128,1133 128,1120 128,952 275,836 468,684 676,519 682,514 711,489.5 740,465 757,452 774,439 801.5,420.5 829,402 852,393 q 23,-9 43,-9 h 1 1 q 20,0 43,9 23,9 50.5,27.5 27.5,18.5 44.5,31.5 17,13 46,37.5 29,24.5 35,29.5 208,165 401,317 54,43 100.5,115.5 46.5,72.5 46.5,131.5 z m 128,37 V 32 q 0,-66 -47,-113 -47,-47 -113,-47 H 160 Q 94,-128 47,-81 0,-34 0,32 v 1088 q 0,66 47,113 47,47 113,47 h 1472 q 66,0 113,-47 47,-47 47,-113 z" | |
|
204 | id="path2993" | |
|
205 | inkscape:connector-curvature="0" | |
|
206 | style="fill:currentColor" /> | |
|
207 | </g> | |
|
208 | </svg> | |
|
209 | ''' | |
|
150 | 210 | |
|
151 | class EmailSettingsSchema( |
|
211 | class EmailSettingsSchema(colander.Schema): | |
|
152 | 212 | @colander.instantiate(validator=colander.Length(min=1)) |
|
153 | 213 | class recipients(colander.SequenceSchema): |
|
154 | title = |
|
155 | description = |
|
214 | title = _('Recipients') | |
|
215 | description = _('Email addresses to send push events to') | |
|
156 | 216 | widget = deform.widget.SequenceWidget(min_len=1) |
|
157 | 217 | |
|
158 | 218 | recipient = colander.SchemaNode( |
|
159 | 219 | colander.String(), |
|
160 | title= |
|
161 | description= |
|
220 | title=_('Email address'), | |
|
221 | description=_('Email address'), | |
|
162 | 222 | default='', |
|
163 | 223 | validator=colander.Email(), |
|
164 | 224 | widget=deform.widget.TextInputWidget( |
@@ -169,8 +229,9 b' class EmailSettingsSchema(IntegrationSet' | |||
|
169 | 229 | |
|
170 | 230 | class EmailIntegrationType(IntegrationTypeBase): |
|
171 | 231 | key = 'email' |
|
172 | display_name = |
|
173 | SettingsSchema = EmailSettingsSchema | |
|
232 | display_name = _('Email') | |
|
233 | description = _('Send repo push summaries to a list of recipients via email') | |
|
234 | icon = email_icon | |
|
174 | 235 | |
|
175 | 236 | def settings_schema(self): |
|
176 | 237 | schema = EmailSettingsSchema() |
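The `recipients` node above combines `colander.instantiate` with a `SequenceSchema`, so the deserialized settings carry a plain list of validated addresses and `colander.Length(min=1)` rejects an empty list. A stripped-down version of that node, with the deform widgets and translations left out (assumed equivalent for illustration):

```python
import colander


class ExampleEmailSettings(colander.Schema):
    @colander.instantiate(validator=colander.Length(min=1))
    class recipients(colander.SequenceSchema):
        recipient = colander.SchemaNode(
            colander.String(), validator=colander.Email())


schema = ExampleEmailSettings()
assert schema.deserialize({'recipients': ['dev@example.com']}) == \
    {'recipients': ['dev@example.com']}
# schema.deserialize({'recipients': []}) raises colander.Invalid (Length(min=1))
# schema.deserialize({'recipients': ['not-an-email']}) raises colander.Invalid too
```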
@@ -29,29 +29,28 b' from celery.task import task' | |||
|
29 | 29 | from mako.template import Template |
|
30 | 30 | |
|
31 | 31 | from rhodecode import events |
|
32 | from rhodecode.translation import |
|
32 | from rhodecode.translation import _ | |
|
33 | 33 | from rhodecode.lib import helpers as h |
|
34 | 34 | from rhodecode.lib.celerylib import run_task |
|
35 | 35 | from rhodecode.lib.colander_utils import strip_whitespace |
|
36 | 36 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
37 | from rhodecode.integrations.schema import IntegrationSettingsSchemaBase | |
|
38 | 37 | |
|
39 | 38 | log = logging.getLogger(__name__) |
|
40 | 39 | |
|
41 | 40 | |
|
42 | class HipchatSettingsSchema( |
|
41 | class HipchatSettingsSchema(colander.Schema): | |
|
43 | 42 | color_choices = [ |
|
44 | ('yellow', |
|
45 | ('red', |
|
46 | ('green', |
|
47 | ('purple', |
|
48 | ('gray', |
|
43 | ('yellow', _('Yellow')), | |
|
44 | ('red', _('Red')), | |
|
45 | ('green', _('Green')), | |
|
46 | ('purple', _('Purple')), | |
|
47 | ('gray', _('Gray')), | |
|
49 | 48 | ] |
|
50 | 49 | |
|
51 | 50 | server_url = colander.SchemaNode( |
|
52 | 51 | colander.String(), |
|
53 | title= |
|
54 | description= |
|
52 | title=_('Hipchat server URL'), | |
|
53 | description=_('Hipchat integration url.'), | |
|
55 | 54 | default='', |
|
56 | 55 | preparer=strip_whitespace, |
|
57 | 56 | validator=colander.url, |
@@ -61,15 +60,15 b' class HipchatSettingsSchema(IntegrationS' | |||
|
61 | 60 | ) |
|
62 | 61 | notify = colander.SchemaNode( |
|
63 | 62 | colander.Bool(), |
|
64 | title= |
|
65 | description= |
|
63 | title=_('Notify'), | |
|
64 | description=_('Make a notification to the users in room.'), | |
|
66 | 65 | missing=False, |
|
67 | 66 | default=False, |
|
68 | 67 | ) |
|
69 | 68 | color = colander.SchemaNode( |
|
70 | 69 | colander.String(), |
|
71 | title= |
|
72 | description= |
|
70 | title=_('Color'), | |
|
71 | description=_('Background color of message.'), | |
|
73 | 72 | missing='', |
|
74 | 73 | validator=colander.OneOf([x[0] for x in color_choices]), |
|
75 | 74 | widget=deform.widget.Select2Widget( |
@@ -98,10 +97,12 b' in <a href="${data[\'repo\'][\'url\']}">${da' | |||
|
98 | 97 | ''') |
|
99 | 98 | |
|
100 | 99 | |
|
101 | ||
|
102 | 100 | class HipchatIntegrationType(IntegrationTypeBase): |
|
103 | 101 | key = 'hipchat' |
|
104 | display_name = |
|
102 | display_name = _('Hipchat') | |
|
103 | description = _('Send events such as repo pushes and pull requests to ' | |
|
104 | 'your hipchat channel.') | |
|
105 | icon = '''<?xml version="1.0" encoding="utf-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1000 1000" enable-background="new 0 0 1000 1000" xml:space="preserve"><g><g transform="translate(0.000000,511.000000) scale(0.100000,-0.100000)"><path fill="#205281" d="M4197.1,4662.4c-1661.5-260.4-3018-1171.6-3682.6-2473.3C219.9,1613.6,100,1120.3,100,462.6c0-1014,376.8-1918.4,1127-2699.4C2326.7-3377.6,3878.5-3898.3,5701-3730.5l486.5,44.5l208.9-123.3c637.2-373.4,1551.8-640.6,2240.4-650.9c304.9-6.9,335.7,0,417.9,75.4c185,174.7,147.3,411.1-89.1,548.1c-315.2,181.6-620,544.7-733.1,870.1l-51.4,157.6l472.7,472.7c349.4,349.4,520.7,551.5,657.7,774.2c784.5,1281.2,784.5,2788.5,0,4052.6c-236.4,376.8-794.8,966-1178.4,1236.7c-572.1,407.7-1264.1,709.1-1993.7,870.1c-267.2,58.2-479.6,75.4-1038,82.2C4714.4,4686.4,4310.2,4679.6,4197.1,4662.4z M5947.6,3740.9c1856.7-380.3,3127.6-1709.4,3127.6-3275c0-1000.3-534.4-1949.2-1466.2-2600.1c-188.4-133.6-287.8-226.1-301.5-284.4c-41.1-157.6,263.8-938.6,397.4-1020.8c20.5-10.3,34.3-44.5,34.3-75.4c0-167.8-811.9,195.3-1363.4,609.8l-181.6,137l-332.3-58.2c-445.3-78.8-1281.2-78.8-1702.6,0C2796-2569.2,1734.1-1832.6,1220.2-801.5C983.8-318.5,905,51.5,929,613.3c27.4,640.6,243.2,1192.1,685.1,1740.3c620,770.8,1661.5,1305.2,2822.8,1452.5C4806.9,3854,5553.7,3819.7,5947.6,3740.9z"/><path fill="#205281" d="M2381.5-345.9c-75.4-106.2-68.5-167.8,34.3-322c332.3-500.2,1010.6-928.4,1760.8-1120.2c417.9-106.2,1226.4-106.2,1644.3,0c712.5,181.6,1270.9,517.3,1685.4,1014C7681-561.7,7715.3-424.7,7616-325.4c-89.1,89.1-167.9,65.1-431.7-133.6c-835.8-630.3-2028-856.4-3086.5-585.8C3683.3-938.6,3142-685,2830.3-448.7C2576.8-253.4,2463.7-229.4,2381.5-345.9z"/></g></g><!-- Svg Vector Icons : http://www.onlinewebfonts.com/icon --></svg>''' | |
|
105 | 106 | valid_events = [ |
|
106 | 107 | events.PullRequestCloseEvent, |
|
107 | 108 | events.PullRequestMergeEvent, |
@@ -29,21 +29,20 b' from celery.task import task' | |||
|
29 | 29 | from mako.template import Template |
|
30 | 30 | |
|
31 | 31 | from rhodecode import events |
|
32 | from rhodecode.translation import |
|
32 | from rhodecode.translation import _ | |
|
33 | 33 | from rhodecode.lib import helpers as h |
|
34 | 34 | from rhodecode.lib.celerylib import run_task |
|
35 | 35 | from rhodecode.lib.colander_utils import strip_whitespace |
|
36 | 36 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
37 | from rhodecode.integrations.schema import IntegrationSettingsSchemaBase | |
|
38 | 37 | |
|
39 | 38 | log = logging.getLogger(__name__) |
|
40 | 39 | |
|
41 | 40 | |
|
42 | class SlackSettingsSchema( |
|
41 | class SlackSettingsSchema(colander.Schema): | |
|
43 | 42 | service = colander.SchemaNode( |
|
44 | 43 | colander.String(), |
|
45 | title= |
|
46 | description=h.literal( |
|
44 | title=_('Slack service URL'), | |
|
45 | description=h.literal(_( | |
|
47 | 46 | 'This can be setup at the ' |
|
48 | 47 | '<a href="https://my.slack.com/services/new/incoming-webhook/">' |
|
49 | 48 | 'slack app manager</a>')), |
@@ -56,8 +55,8 b' class SlackSettingsSchema(IntegrationSet' | |||
|
56 | 55 | ) |
|
57 | 56 | username = colander.SchemaNode( |
|
58 | 57 | colander.String(), |
|
59 | title= |
|
60 | description= |
|
58 | title=_('Username'), | |
|
59 | description=_('Username to show notifications coming from.'), | |
|
61 | 60 | missing='Rhodecode', |
|
62 | 61 | preparer=strip_whitespace, |
|
63 | 62 | widget=deform.widget.TextInputWidget( |
@@ -66,8 +65,8 b' class SlackSettingsSchema(IntegrationSet' | |||
|
66 | 65 | ) |
|
67 | 66 | channel = colander.SchemaNode( |
|
68 | 67 | colander.String(), |
|
69 | title= |
|
70 | description= |
|
68 | title=_('Channel'), | |
|
69 | description=_('Channel to send notifications to.'), | |
|
71 | 70 | missing='', |
|
72 | 71 | preparer=strip_whitespace, |
|
73 | 72 | widget=deform.widget.TextInputWidget( |
@@ -76,8 +75,8 b' class SlackSettingsSchema(IntegrationSet' | |||
|
76 | 75 | ) |
|
77 | 76 | icon_emoji = colander.SchemaNode( |
|
78 | 77 | colander.String(), |
|
79 | title= |
|
80 | description= |
|
78 | title=_('Emoji'), | |
|
79 | description=_('Emoji to use, e.g. :studio_microphone:'), | |
|
81 | 80 | missing='', |
|
82 | 81 | preparer=strip_whitespace, |
|
83 | 82 | widget=deform.widget.TextInputWidget( |
@@ -102,10 +101,14 b" in <${data['repo']['url']}|${data['repo'" | |||
|
102 | 101 | ''') |
|
103 | 102 | |
|
104 | 103 | |
|
104 | ||
|
105 | ||
|
105 | 106 | class SlackIntegrationType(IntegrationTypeBase): |
|
106 | 107 | key = 'slack' |
|
107 |
display_name = |
|
|
108 | SettingsSchema = SlackSettingsSchema | |
|
108 | display_name = _('Slack') | |
|
109 | description = _('Send events such as repo pushes and pull requests to ' | |
|
110 | 'your slack channel.') | |
|
111 | icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M165.963541,15.8384262 C162.07318,3.86308197 149.212328,-2.69009836 137.239082,1.20236066 C125.263738,5.09272131 118.710557,17.9535738 122.603016,29.9268197 L181.550164,211.292328 C185.597902,222.478689 197.682361,228.765377 209.282098,225.426885 C221.381246,221.943607 228.756984,209.093246 224.896,197.21023 C224.749115,196.756984 165.963541,15.8384262 165.963541,15.8384262" fill="#DFA22F"></path><path d="M74.6260984,45.515541 C70.7336393,33.5422951 57.8727869,26.9891148 45.899541,30.8794754 C33.9241967,34.7698361 27.3710164,47.6306885 31.2634754,59.6060328 L90.210623,240.971541 C94.2583607,252.157902 106.34282,258.44459 117.942557,255.104 C130.041705,251.62282 137.417443,238.772459 133.556459,226.887344 C133.409574,226.436197 74.6260984,45.515541 74.6260984,45.515541" fill="#3CB187"></path><path d="M240.161574,166.045377 C252.136918,162.155016 258.688,149.294164 254.797639,137.31882 C250.907279,125.345574 238.046426,118.792393 226.07318,122.682754 L44.7076721,181.632 C33.5213115,185.677639 27.234623,197.762098 30.5731148,209.361836 C34.0563934,221.460984 46.9067541,228.836721 58.7897705,224.975738 C59.2430164,224.828852 240.161574,166.045377 240.161574,166.045377" fill="#CE1E5B"></path><path d="M82.507541,217.270557 C94.312918,213.434754 109.528131,208.491016 125.855475,203.186361 C122.019672,191.380984 117.075934,176.163672 111.76918,159.83423 L68.4191475,173.924721 L82.507541,217.270557" fill="#392538"></path><path d="M173.847082,187.591344 C190.235279,182.267803 205.467279,177.31777 217.195016,173.507148 C213.359213,161.70177 208.413377,146.480262 203.106623,130.146623 L159.75659,144.237115 L173.847082,187.591344" fill="#BB242A"></path><path d="M210.484459,74.7058361 C222.457705,70.8154754 229.010885,57.954623 225.120525,45.9792787 C221.230164,34.0060328 208.369311,27.4528525 196.393967,31.3432131 L15.028459,90.292459 C3.84209836,94.3380984 -2.44459016,106.422557 0.896,118.022295 C4.37718033,130.121443 17.227541,137.49718 29.1126557,133.636197 C29.5638033,133.489311 210.484459,74.7058361 210.484459,74.7058361" fill="#72C5CD"></path><path d="M52.8220328,125.933115 C64.6274098,122.097311 79.8468197,117.151475 96.1762623,111.84682 C90.8527213,95.4565246 85.9026885,80.2245246 82.0920656,68.4946885 L38.731541,82.5872787 L52.8220328,125.933115" fill="#248C73"></path><path d="M144.159475,96.256 C160.551869,90.9303607 175.785967,85.9803279 187.515803,82.1676066 C182.190164,65.7752131 177.240131,50.5390164 173.42741,38.807082 L130.068984,52.8996721 L144.159475,96.256" fill="#62803A"></path></g></svg>''' | |
|
109 | 112 | valid_events = [ |
|
110 | 113 | events.PullRequestCloseEvent, |
|
111 | 114 | events.PullRequestMergeEvent, |
@@ -28,19 +28,19 b' from celery.task import task' | |||
|
28 | 28 | from mako.template import Template |
|
29 | 29 | |
|
30 | 30 | from rhodecode import events |
|
31 | from rhodecode.translation import |
|
31 | from rhodecode.translation import _ | |
|
32 | 32 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
33 | from rhodecode.integrations.schema import IntegrationSettingsSchemaBase | |
|
34 | 33 | |
|
35 | 34 | log = logging.getLogger(__name__) |
|
36 | 35 | |
|
37 | 36 | |
|
38 | class WebhookSettingsSchema( |
|
37 | class WebhookSettingsSchema(colander.Schema): | |
|
39 | 38 | url = colander.SchemaNode( |
|
40 | 39 | colander.String(), |
|
41 | title= |
|
42 | description= |
|
43 | default='', | |
|
40 | title=_('Webhook URL'), | |
|
41 | description=_('URL of the webhook to receive POST event.'), | |
|
42 | missing=colander.required, | |
|
43 | required=True, | |
|
44 | 44 | validator=colander.url, |
|
45 | 45 | widget=deform.widget.TextInputWidget( |
|
46 | 46 | placeholder='https://www.example.com/webhook' |
@@ -48,18 +48,24 b' class WebhookSettingsSchema(IntegrationS' | |||
|
48 | 48 | ) |
|
49 | 49 | secret_token = colander.SchemaNode( |
|
50 | 50 | colander.String(), |
|
51 | title= |
|
52 | description= |
|
51 | title=_('Secret Token'), | |
|
52 | description=_('String used to validate received payloads.'), | |
|
53 | 53 | default='', |
|
54 | missing='', | |
|
54 | 55 | widget=deform.widget.TextInputWidget( |
|
55 | 56 | placeholder='secret_token' |
|
56 | 57 | ), |
|
57 | 58 | ) |
|
58 | 59 | |
|
59 | 60 | |
|
61 | ||
|
62 | ||
|
60 | 63 | class WebhookIntegrationType(IntegrationTypeBase): |
|
61 | 64 | key = 'webhook' |
|
62 |
display_name = |
|
|
65 | display_name = _('Webhook') | |
|
66 | description = _('Post json events to a webhook endpoint') | |
|
67 | icon = '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>''' | |
|
68 | ||
|
63 | 69 | valid_events = [ |
|
64 | 70 | events.PullRequestCloseEvent, |
|
65 | 71 | events.PullRequestMergeEvent, |
@@ -18,23 +18,29 b'' | |||
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | import colander | |
|
22 | import logging | |
|
23 | 21 | import pylons |
|
24 | 22 | import deform |
|
23 | import logging | |
|
24 | import colander | |
|
25 | import peppercorn | |
|
26 | import webhelpers.paginate | |
|
25 | 27 | |
|
26 | from pyramid.httpexceptions import HTTPFound, HTTPForbidden | |
|
28 | from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest | |
|
27 | 29 | from pyramid.renderers import render |
|
28 | 30 | from pyramid.response import Response |
|
29 | 31 | |
|
30 | 32 | from rhodecode.lib import auth |
|
31 | 33 | from rhodecode.lib.auth import LoginRequired, HasPermissionAllDecorator |
|
34 | from rhodecode.lib.utils2 import safe_int | |
|
35 | from rhodecode.lib.helpers import Page | |
|
32 | 36 | from rhodecode.model.db import Repository, RepoGroup, Session, Integration |
|
33 | 37 | from rhodecode.model.scm import ScmModel |
|
34 | 38 | from rhodecode.model.integration import IntegrationModel |
|
35 | 39 | from rhodecode.admin.navigation import navigation_list |
|
36 | 40 | from rhodecode.translation import _ |
|
37 | 41 | from rhodecode.integrations import integration_type_registry |
|
42 | from rhodecode.model.validation_schema.schemas.integration_schema import ( | |
|
43 | make_integration_schema) | |
|
38 | 44 | |
|
39 | 45 | log = logging.getLogger(__name__) |
|
40 | 46 | |
@@ -65,30 +71,45 b' class IntegrationSettingsViewBase(object' | |||
|
65 | 71 | |
|
66 | 72 | request = self.request |
|
67 | 73 | |
|
68 | if 'repo_name' in request.matchdict: # |
|
74 | if 'repo_name' in request.matchdict: # in repo settings context | |
|
69 | 75 | repo_name = request.matchdict['repo_name'] |
|
70 | 76 | self.repo = Repository.get_by_repo_name(repo_name) |
|
71 | 77 | |
|
72 | if 'repo_group_name' in request.matchdict: # |
|
78 | if 'repo_group_name' in request.matchdict: # in group settings context | |
|
73 | 79 | repo_group_name = request.matchdict['repo_group_name'] |
|
74 | 80 | self.repo_group = RepoGroup.get_by_group_name(repo_group_name) |
|
75 | 81 | |
|
76 | if 'integration' in request.matchdict: # we're in integration context | |
|
82 | ||
|
83 | if 'integration' in request.matchdict: # integration type context | |
|
77 | 84 | integration_type = request.matchdict['integration'] |
|
78 | 85 | self.IntegrationType = integration_type_registry[integration_type] |
|
79 | 86 | |
|
80 | 87 | if 'integration_id' in request.matchdict: # single integration context |
|
81 | 88 | integration_id = request.matchdict['integration_id'] |
|
82 | 89 | self.integration = Integration.get(integration_id) |
|
83 | else: # list integrations context | |
|
84 | integrations = IntegrationModel().get_integrations( | |
|
85 | repo=self.repo, repo_group=self.repo_group) | |
|
86 | 90 | |
|
87 | for integration in integrations: | |
|
88 | self.integrations.setdefault(integration.integration_type, [] | |
|
89 | ).append(integration) | |
|
91 | # extra perms check just in case | |
|
92 | if not self._has_perms_for_integration(self.integration): | |
|
93 | raise HTTPForbidden() | |
|
90 | 94 | |
|
91 | 95 | self.settings = self.integration and self.integration.settings or {} |
|
96 | self.admin_view = not (self.repo or self.repo_group) | |
|
97 | ||
|
98 | def _has_perms_for_integration(self, integration): | |
|
99 | perms = self.request.user.permissions | |
|
100 | ||
|
101 | if 'hg.admin' in perms['global']: | |
|
102 | return True | |
|
103 | ||
|
104 | if integration.repo: | |
|
105 | return perms['repositories'].get( | |
|
106 | integration.repo.repo_name) == 'repository.admin' | |
|
107 | ||
|
108 | if integration.repo_group: | |
|
109 | return perms['repositories_groups'].get( | |
|
110 | integration.repo_group.group_name) == 'group.admin' | |
|
111 | ||
|
112 | return False | |
|
92 | 113 | |
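The permission checks in `_has_perms_for_integration` read the same structure that the schema tests earlier in this diff build by hand, so the expected shape of `request.user.permissions` looks roughly like this (repo and group names are placeholders):

```python
# illustrative shape of request.user.permissions, matching the checks above
permissions = {
    'global': ['hg.admin'],                                # admin short-circuits to True
    'repositories': {'some/repo': 'repository.admin'},     # checked for repo-scoped integrations
    'repositories_groups': {'some/group': 'group.admin'},  # checked for group-scoped integrations
}
```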
|
93 | 114 | def _template_c_context(self): |
|
94 | 115 | # TODO: dan: this is a stopgap in order to inherit from current pylons |
@@ -102,6 +123,7 b' class IntegrationSettingsViewBase(object' | |||
|
102 | 123 | c.repo_group = self.repo_group |
|
103 | 124 | c.repo_name = self.repo and self.repo.repo_name or None |
|
104 | 125 | c.repo_group_name = self.repo_group and self.repo_group.group_name or None |
|
126 | ||
|
105 | 127 | if self.repo: |
|
106 | 128 | c.repo_info = self.repo |
|
107 | 129 | c.rhodecode_db_repo = self.repo |
@@ -112,23 +134,25 b' class IntegrationSettingsViewBase(object' | |||
|
112 | 134 | return c |
|
113 | 135 | |
|
114 | 136 | def _form_schema(self): |
|
115 | if self.integration: | |
|
116 | settings |
|
117 | else: | |
|
118 | settings = {} | |
|
119 | return self.IntegrationType(settings=settings).settings_schema() | |
|
137 | schema = make_integration_schema(IntegrationType=self.IntegrationType, | |
|
138 | settings=self.settings) | |
|
120 | 139 | |
|
121 | def settings_get(self, defaults=None, errors=None, form=None): | |
|
122 | """ | |
|
123 | View that displays the plugin settings as a form. | |
|
124 | """ | |
|
125 | defaults = defaults or {} | |
|
126 | errors = errors or {} | |
|
140 | # returns a clone, important if mutating the schema later | |
|
141 | return schema.bind( | |
|
142 | permissions=self.request.user.permissions, | |
|
143 | no_scope=not self.admin_view) | |
|
144 | ||
|
145 | ||
|
146 | def _form_defaults(self): | |
|
147 | defaults = {} | |
|
127 | 148 | |
|
128 | 149 | if self.integration: |
|
129 | defaults = self.integration.settings or {} | |
|
130 | defaults[' |
|
131 |
|
|
|
150 | defaults['settings'] = self.integration.settings or {} | |
|
151 | defaults['options'] = { | |
|
152 | 'name': self.integration.name, | |
|
153 | 'enabled': self.integration.enabled, | |
|
154 | 'scope': self.integration.scope, | |
|
155 | } | |
|
132 | 156 | else: |
|
133 | 157 | if self.repo: |
|
134 | 158 | scope = _('{repo_name} repository').format( |
@@ -139,11 +163,44 b' class IntegrationSettingsViewBase(object' | |||
|
139 | 163 | else: |
|
140 | 164 | scope = _('Global') |
|
141 | 165 | |
|
142 | defaults['name'] = '{} {} integration'.format(scope, | |
|
143 | self.IntegrationType.display_name) | |
|
144 | defaults['enabled'] = True | |
|
166 | defaults['options'] = { | |
|
167 | 'enabled': True, | |
|
168 | 'name': _('{name} integration').format( | |
|
169 | name=self.IntegrationType.display_name), | |
|
170 | } | |
|
171 | if self.repo: | |
|
172 | defaults['options']['scope'] = self.repo | |
|
173 | elif self.repo_group: | |
|
174 | defaults['options']['scope'] = self.repo_group | |
|
175 | ||
|
176 | return defaults | |
|
145 | 177 | |
|
146 | schema = self._form_schema().bind(request=self.request) | |
|
178 | def _delete_integration(self, integration): | |
|
179 | Session().delete(self.integration) | |
|
180 | Session().commit() | |
|
181 | self.request.session.flash( | |
|
182 | _('Integration {integration_name} deleted successfully.').format( | |
|
183 | integration_name=self.integration.name), | |
|
184 | queue='success') | |
|
185 | ||
|
186 | if self.repo: | |
|
187 | redirect_to = self.request.route_url( | |
|
188 | 'repo_integrations_home', repo_name=self.repo.repo_name) | |
|
189 | elif self.repo_group: | |
|
190 | redirect_to = self.request.route_url( | |
|
191 | 'repo_group_integrations_home', | |
|
192 | repo_group_name=self.repo_group.group_name) | |
|
193 | else: | |
|
194 | redirect_to = self.request.route_url('global_integrations_home') | |
|
195 | raise HTTPFound(redirect_to) | |
|
196 | ||
|
197 | def settings_get(self, defaults=None, form=None): | |
|
198 | """ | |
|
199 | View that displays the integration settings as a form. | |
|
200 | """ | |
|
201 | ||
|
202 | defaults = defaults or self._form_defaults() | |
|
203 | schema = self._form_schema() | |
|
147 | 204 | |
|
148 | 205 | if self.integration: |
|
149 | 206 | buttons = ('submit', 'delete') |
@@ -152,23 +209,10 b' class IntegrationSettingsViewBase(object' | |||
|
152 | 209 | |
|
153 | 210 | form = form or deform.Form(schema, appstruct=defaults, buttons=buttons) |
|
154 | 211 | |
|
155 | for node in schema: | |
|
156 | setting = self.settings.get(node.name) | |
|
157 | if setting is not None: | |
|
158 | defaults.setdefault(node.name, setting) | |
|
159 | else: | |
|
160 | if node.default: | |
|
161 | defaults.setdefault(node.name, node.default) | |
|
162 | ||
|
163 | 212 | template_context = { |
|
164 | 213 | 'form': form, |
|
165 | 'defaults': defaults, | |
|
166 | 'errors': errors, | |
|
167 | 'schema': schema, | |
|
168 | 214 | 'current_IntegrationType': self.IntegrationType, |
|
169 | 215 | 'integration': self.integration, |
|
170 | 'settings': self.settings, | |
|
171 | 'resource': self.context, | |
|
172 | 216 | 'c': self._template_c_context(), |
|
173 | 217 | } |
|
174 | 218 | |
@@ -177,79 +221,90 b' class IntegrationSettingsViewBase(object' | |||
|
177 | 221 | @auth.CSRFRequired() |
|
178 | 222 | def settings_post(self): |
|
179 | 223 | """ |
|
180 | View that validates and stores the | |
|
224 | View that validates and stores the integration settings. | |
|
181 | 225 | """ |
|
182 | if self.request.params.get('delete'): | |
|
183 | Session().delete(self.integration) | |
|
184 | Session().commit() | |
|
185 | self.request.session.flash( | |
|
186 | _('Integration {integration_name} deleted successfully.').format( | |
|
187 | integration_name=self.integration.name), | |
|
188 | queue='success') | |
|
189 | if self.repo: | |
|
190 | redirect_to = self.request.route_url( | |
|
191 | 'repo_integrations_home', repo_name=self.repo.repo_name) | |
|
192 | else: | |
|
193 | redirect_to = self.request.route_url('global_integrations_home') | |
|
194 | raise HTTPFound(redirect_to) | |
|
226 | controls = self.request.POST.items() | |
|
227 | pstruct = peppercorn.parse(controls) | |
|
228 | ||
|
229 | if self.integration and pstruct.get('delete'): | |
|
230 | return self._delete_integration(self.integration) | |
|
231 | ||
|
232 | schema = self._form_schema() | |
|
233 | ||
|
234 | skip_settings_validation = False | |
|
235 | if self.integration and 'enabled' not in pstruct.get('options', {}): | |
|
236 | skip_settings_validation = True | |
|
237 | schema['settings'].validator = None | |
|
238 | for field in schema['settings'].children: | |
|
239 | field.validator = None | |
|
240 | field.missing = '' | |
|
195 | 241 | |
|
196 | schema = self._form_schema().bind(request=self.request) | |
|
242 | if self.integration: | |
|
243 | buttons = ('submit', 'delete') | |
|
244 | else: | |
|
245 | buttons = ('submit',) | |
|
197 | 246 | |
|
198 | form = deform.Form(schema, buttons= | |
|
247 | form = deform.Form(schema, buttons=buttons) | |
|
199 | 248 | |
|
200 | params = {} | |
|
201 | for node in schema.children: | |
|
202 | if type(node.typ) in (colander.Set, colander.List): | |
|
203 | val = self.request.params.getall(node.name) | |
|
204 | else: | |
|
205 | val = self.request.params.get(node.name) | |
|
206 | if | |
|
207 | params[node.name] = val | |
|
249 | if not self.admin_view: | |
|
250 | # scope is a read-only field in these cases, and has to be added | |
|
251 | options = pstruct.setdefault('options', {}) | |
|
252 | if 'scope' not in options: | |
|
253 | if self.repo: | |
|
254 | options['scope'] = 'repo:{}'.format(self.repo.repo_name) | |
|
255 | elif self.repo_group: | |
|
256 | options['scope'] = 'repogroup:{}'.format( | |
|
257 | self.repo_group.group_name) | |
|
208 | 258 | |
|
209 | controls = self.request.POST.items() | |
|
210 | 259 | try: |
|
211 | valid_data = form.validate(c | |
|
260 | valid_data = form.validate_pstruct(pstruct) | |
|
212 | 261 | except deform.ValidationFailure as e: |
|
213 | 262 | self.request.session.flash( |
|
214 | 263 | _('Errors exist when saving integration settings. ' |
|
215 | 264 | 'Please check the form inputs.'), |
|
216 | 265 | queue='error') |
|
217 | return self.settings_get( | |
|
266 | return self.settings_get(form=e) | |
|
218 | 267 | |
|
219 | 268 | if not self.integration: |
|
220 | 269 | self.integration = Integration() |
|
221 | 270 | self.integration.integration_type = self.IntegrationType.key |
|
222 | if self.repo: | |
|
223 | self.integration.repo = self.repo | |
|
224 | elif self.repo_group: | |
|
225 | self.integration.repo_group = self.repo_group | |
|
226 | 271 | Session().add(self.integration) |
|
227 | 272 | |
|
228 | self.integration.enabled = valid_data.pop('enabled', False) | |
|
229 | self.integration.name = valid_data.pop('name') | |
|
230 | self.integration.settings = valid_data | |
|
273 | scope = valid_data['options']['scope'] | |
|
231 | 274 | |
|
275 | IntegrationModel().update_integration(self.integration, | |
|
276 | name=valid_data['options']['name'], | |
|
277 | enabled=valid_data['options']['enabled'], | |
|
278 | settings=valid_data['settings'], | |
|
279 | scope=scope) | |
|
280 | ||
|
281 | self.integration.settings = valid_data['settings'] | |
|
232 | 282 | Session().commit() |
|
233 | ||
|
234 | 283 | # Display success message and redirect. |
|
235 | 284 | self.request.session.flash( |
|
236 | 285 | _('Integration {integration_name} updated successfully.').format( |
|
237 | 286 | integration_name=self.IntegrationType.display_name), |
|
238 | 287 | queue='success') |
|
239 | 288 | |
|
240 | if self.repo: | |
|
241 | redirect_to = self.request.route_url( | |
|
242 | 'repo_integrations_edit', repo_name=self.repo.repo_name, | |
|
289 | ||
|
290 | # if integration scope changes, we must redirect to the right place | |
|
291 | # keeping in mind if the original view was for /repo/ or /_admin/ | |
|
292 | admin_view = not (self.repo or self.repo_group) | |
|
293 | ||
|
294 | if isinstance(self.integration.scope, Repository) and not admin_view: | |
|
295 | redirect_to = self.request.route_path( | |
|
296 | 'repo_integrations_edit', | |
|
297 | repo_name=self.integration.scope.repo_name, | |
|
243 | 298 | integration=self.integration.integration_type, |
|
244 | 299 | integration_id=self.integration.integration_id) |
|
245 | elif self.repo: | |
|
246 | redirect_to = self.request.route_ | |
|
300 | elif isinstance(self.integration.scope, RepoGroup) and not admin_view: | |
|
301 | redirect_to = self.request.route_path( | |
|
247 | 302 | 'repo_group_integrations_edit', |
|
248 | repo_group_name=self. | |
|
303 | repo_group_name=self.integration.scope.group_name, | |
|
249 | 304 | integration=self.integration.integration_type, |
|
250 | 305 | integration_id=self.integration.integration_id) |
|
251 | 306 | else: |
|
252 | redirect_to = self.request.route_ | |
|
307 | redirect_to = self.request.route_path( | |
|
253 | 308 | 'global_integrations_edit', |
|
254 | 309 | integration=self.integration.integration_type, |
|
255 | 310 | integration_id=self.integration.integration_id) |
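
The rewritten settings_post() above stops reading request.params field by field: it runs the raw POST pairs through peppercorn.parse() and validates the resulting pstruct with form.validate_pstruct(). A minimal standalone sketch of that parsing step follows; only the 'options'/'settings' mapping names and the 'repo:{repo_name}' scope string are taken from this diff, every field value is made up.

import peppercorn

# Flat (name, value) pairs as they arrive in request.POST; the
# '__start__'/'__end__' markers are the hidden fields deform's mapping
# widgets emit around each nested mapping.
controls = [
    ('csrf_token', 'abc123'),  # hypothetical extra field, passed through as-is
    ('__start__', 'options:mapping'),
    ('name', 'my slack integration'),
    ('enabled', 'true'),
    ('scope', 'repo:some/repo'),
    ('__end__', 'options:mapping'),
    ('__start__', 'settings:mapping'),
    ('test_string_field', 'hello'),
    ('__end__', 'settings:mapping'),
]

pstruct = peppercorn.parse(controls)
# pstruct == {
#     'csrf_token': 'abc123',
#     'options': {'name': 'my slack integration', 'enabled': 'true',
#                 'scope': 'repo:some/repo'},
#     'settings': {'test_string_field': 'hello'},
# }
print(pstruct['options']['scope'])
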
@@ -257,31 +312,60 b' class IntegrationSettingsViewBase(object' | |||
|
257 | 312 | return HTTPFound(redirect_to) |
|
258 | 313 | |
|
259 | 314 | def index(self): |
|
260 | current_integrations = self.integrations | |
|
261 | if self. | |
|
262 | current_integrations = { | |
|
263 | self.IntegrationType.key: self.integrations.get( | |
|
264 | self.IntegrationType.key, []) | |
|
265 | } | |
|
315 | """ List integrations """ | |
|
316 | if self.repo: | |
|
317 | scope = self.repo | |
|
318 | elif self.repo_group: | |
|
319 | scope = self.repo_group | |
|
320 | else: | |
|
321 | scope = 'all' | |
|
322 | ||
|
323 | integrations = [] | |
|
324 | ||
|
325 | for integration in IntegrationModel().get_integrations( | |
|
326 | scope=scope, IntegrationType=self.IntegrationType): | |
|
327 | ||
|
328 | # extra permissions check *just in case* | |
|
329 | if not self._has_perms_for_integration(integration): | |
|
330 | continue | |
|
331 | integrations.append(integration) | |
|
332 | ||
|
333 | sort_arg = self.request.GET.get('sort', 'name:asc') | |
|
334 | if ':' in sort_arg: | |
|
335 | sort_field, sort_dir = sort_arg.split(':') | |
|
336 | else: | |
|
337 | sort_field, sort_dir = sort_arg, 'asc' | |
|
338 | ||
|
339 | assert sort_field in ('name', 'integration_type', 'enabled', 'scope') | |
|
340 | ||
|
341 | integrations.sort( | |
|
342 | key=lambda x: getattr(x[1], sort_field), reverse=(sort_dir=='desc')) | |
|
343 | ||
|
344 | ||
|
345 | page_url = webhelpers.paginate.PageURL( | |
|
346 | self.request.path, self.request.GET) | |
|
347 | page = safe_int(self.request.GET.get('page', 1), 1) | |
|
348 | ||
|
349 | integrations = Page(integrations, page=page, items_per_page=10, | |
|
350 | url=page_url) | |
|
266 | 351 | |
|
267 | 352 | template_context = { |
|
353 | 'sort_field': sort_field, | |
|
354 | 'rev_sort_dir': sort_dir != 'desc' and 'desc' or 'asc', | |
|
268 | 355 | 'current_IntegrationType': self.IntegrationType, |
|
269 | ' | |
|
356 | 'integrations_list': integrations, | |
|
270 | 357 | 'available_integrations': integration_type_registry, |
|
271 | 'c': self._template_c_context() | |
|
358 | 'c': self._template_c_context(), | |
|
359 | 'request': self.request, | |
|
272 | 360 | } |
|
361 | return template_context | |
|
273 | 362 | |
|
274 | if self.repo: | |
|
275 | html = render('rhodecode:templates/admin/integrations/list.html', | |
|
276 | template_context, | |
|
277 | request=self.request) | |
|
278 |
|
279 | html = render('rhodecode:templates/admin/integrations/list.html', | |
|
280 | template_context, | |
|
281 | request=self.request) | |
|
282 | ||
|
283 | return Response(html) | |
|
284 | ||
|
363 | def new_integration(self): | |
|
364 | template_context = { | |
|
365 | 'available_integrations': integration_type_registry, | |
|
366 | 'c': self._template_c_context(), | |
|
367 | } | |
|
368 | return template_context | |
|
285 | 369 | |
|
286 | 370 | class GlobalIntegrationsView(IntegrationSettingsViewBase): |
|
287 | 371 | def perm_check(self, user): |
@@ -293,7 +377,9 b' class RepoIntegrationsView(IntegrationSe' | |||
|
293 | 377 | return auth.HasRepoPermissionAll('repository.admin' |
|
294 | 378 | )(repo_name=self.repo.repo_name, user=user) |
|
295 | 379 | |
|
380 | ||
|
296 | 381 | class RepoGroupIntegrationsView(IntegrationSettingsViewBase): |
|
297 | 382 | def perm_check(self, user): |
|
298 | 383 | return auth.HasRepoGroupPermissionAll('group.admin' |
|
299 | 384 | )(group_name=self.repo_group.group_name, user=user) |
|
385 |
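
For the new index() listing, get_integrations() returns (IntegrationType, integration) pairs, so the sort key has to read the requested attribute from the second element of each pair; the ?sort=field:direction convention matches the sortable table headers added in list.html further down. A small stand-in sketch with namedtuples instead of real Integration rows:

import collections

FakeIntegration = collections.namedtuple(
    'FakeIntegration', ['name', 'integration_type', 'enabled', 'scope'])

rows = [
    (None, FakeIntegration('webhook b', 'webhook', True, 'global')),
    (None, FakeIntegration('slack a', 'slack', False, 'global')),
]

sort_arg = 'name:desc'  # same ?sort=field:direction convention as the view
if ':' in sort_arg:
    sort_field, sort_dir = sort_arg.split(':')
else:
    sort_field, sort_dir = sort_arg, 'asc'

# sort by the chosen attribute of the integration (second tuple element)
rows.sort(key=lambda x: getattr(x[1], sort_field), reverse=(sort_dir == 'desc'))
print([integration.name for _, integration in rows])  # ['webhook b', 'slack a']
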
@@ -3481,7 +3481,6 b' class Integration(Base, BaseModel):' | |||
|
3481 | 3481 | integration_type = Column('integration_type', String(255)) |
|
3482 | 3482 | enabled = Column('enabled', Boolean(), nullable=False) |
|
3483 | 3483 | name = Column('name', String(255), nullable=False) |
|
3484 | ||
|
3485 | 3484 | settings = Column( |
|
3486 | 3485 | 'settings_json', MutationObj.as_mutable( |
|
3487 | 3486 | JsonType(dialect_map=dict(mysql=UnicodeText(16384))))) |
@@ -2036,6 +2036,8 b' class RepoGroup(Base, BaseModel):' | |||
|
2036 | 2036 | users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') |
|
2037 | 2037 | parent_group = relationship('RepoGroup', remote_side=group_id) |
|
2038 | 2038 | user = relationship('User') |
|
2039 | integrations = relationship('Integration', | |
|
2040 | cascade="all, delete, delete-orphan") | |
|
2039 | 2041 | |
|
2040 | 2042 | def __init__(self, group_name='', parent_group=None): |
|
2041 | 2043 | self.group_name = group_name |
@@ -3481,6 +3483,8 b' class Integration(Base, BaseModel):' | |||
|
3481 | 3483 | integration_type = Column('integration_type', String(255)) |
|
3482 | 3484 | enabled = Column('enabled', Boolean(), nullable=False) |
|
3483 | 3485 | name = Column('name', String(255), nullable=False) |
|
3486 | child_repos_only = Column('child_repos_only', Boolean(), nullable=False, | |
|
3487 | default=False) | |
|
3484 | 3488 | |
|
3485 | 3489 | settings = Column( |
|
3486 | 3490 | 'settings_json', MutationObj.as_mutable( |
@@ -3495,12 +3499,36 b' class Integration(Base, BaseModel):' | |||
|
3495 | 3499 | nullable=True, unique=None, default=None) |
|
3496 | 3500 | repo_group = relationship('RepoGroup', lazy='joined') |
|
3497 | 3501 | |
|
3498 | def __repr__(self): | |
|
3502 | @hybrid_property | |
|
3503 | def scope(self): | |
|
3499 | 3504 | if self.repo: |
|
3500 |
|
3501 |
|
3502 |
|
3505 | return self.repo | |
|
3506 | if self.repo_group: | |
|
3507 | return self.repo_group | |
|
3508 | if self.child_repos_only: | |
|
3509 | return 'root_repos' | |
|
3510 | return 'global' | |
|
3511 | ||
|
3512 | @scope.setter | |
|
3513 | def scope(self, value): | |
|
3514 | self.repo = None | |
|
3515 | self.repo_id = None | |
|
3516 | self.repo_group_id = None | |
|
3517 | self.repo_group = None | |
|
3518 | self.child_repos_only = False | |
|
3519 | if isinstance(value, Repository): | |
|
3520 | self.repo_id = value.repo_id | |
|
3521 | self.repo = value | |
|
3522 | elif isinstance(value, RepoGroup): | |
|
3523 | self.repo_group_id = value.group_id | |
|
3524 | self.repo_group = value | |
|
3525 | elif value == 'root_repos': | |
|
3526 | self.child_repos_only = True | |
|
3527 | elif value == 'global': | |
|
3528 | pass | |
|
3503 | 3529 | else: |
|
3504 | scope = 'global' | |
|
3505 | ||
|
3506 | return '<Integration(%r, %r)>' % (self.integration_type, scope) | |
|
3530 | raise Exception("invalid scope: %s, must be one of " | |
|
3531 | "['global', 'root_repos', <RepoGroup>, <Repository>]" % value) | |
|
3532 | ||
|
3533 | def __repr__(self): | |
|
3534 | return '<Integration(%r, %r)>' % (self.integration_type, self.scope) |
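
The new scope hybrid property collapses repo, repo_group and child_repos_only into one writable attribute. A plain-Python sketch of that getter/setter contract follows; it is an illustration only, with stand-in classes rather than the SQLAlchemy model, and the repo-group branch is left out for brevity.

class _Repo(object):
    def __init__(self, repo_name):
        self.repo_name = repo_name


class _IntegrationLike(object):
    def __init__(self):
        self.repo = None
        self.repo_group = None
        self.child_repos_only = False

    @property
    def scope(self):
        if self.repo:
            return self.repo
        if self.repo_group:
            return self.repo_group
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    @scope.setter
    def scope(self, value):
        # reset everything first, mirroring the setter in the diff
        self.repo = None
        self.repo_group = None
        self.child_repos_only = False
        if isinstance(value, _Repo):
            self.repo = value
        elif value == 'root_repos':
            self.child_repos_only = True
        elif value != 'global':
            raise ValueError('invalid scope: %r' % (value,))


integration = _IntegrationLike()
integration.scope = 'root_repos'
assert integration.scope == 'root_repos'
integration.scope = _Repo('some/repo')
assert integration.scope.repo_name == 'some/repo'
integration.scope = 'global'
assert integration.scope == 'global'
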
@@ -29,7 +29,7 b' import traceback' | |||
|
29 | 29 | |
|
30 | 30 | from pylons import tmpl_context as c |
|
31 | 31 | from pylons.i18n.translation import _, ungettext |
|
32 | from sqlalchemy import or_ | |
|
32 | from sqlalchemy import or_, and_ | |
|
33 | 33 | from sqlalchemy.sql.expression import false, true |
|
34 | 34 | from mako import exceptions |
|
35 | 35 | |
@@ -39,7 +39,7 b' from rhodecode.lib import helpers as h' | |||
|
39 | 39 | from rhodecode.lib.caching_query import FromCache |
|
40 | 40 | from rhodecode.lib.utils import PartialRenderer |
|
41 | 41 | from rhodecode.model import BaseModel |
|
42 | from rhodecode.model.db import Integration, User | |
|
42 | from rhodecode.model.db import Integration, User, Repository, RepoGroup | |
|
43 | 43 | from rhodecode.model.meta import Session |
|
44 | 44 | from rhodecode.integrations import integration_type_registry |
|
45 | 45 | from rhodecode.integrations.types.base import IntegrationTypeBase |
@@ -61,28 +61,34 b' class IntegrationModel(BaseModel):' | |||
|
61 | 61 | raise Exception('integration must be int, long or Instance' |
|
62 | 62 | ' of Integration got %s' % type(integration)) |
|
63 | 63 | |
|
64 | def create(self, IntegrationType, enabled, | |
|
64 | def create(self, IntegrationType, name, enabled, scope, settings): | |
|
65 | 65 | """ Create an IntegrationType integration """ |
|
66 | 66 | integration = Integration() |
|
67 | 67 | integration.integration_type = IntegrationType.key |
|
68 | integration.settings = {} | |
|
69 | integration.repo = repo | |
|
70 | integration.enabled = enabled | |
|
71 | integration.name = name | |
|
72 | ||
|
73 | 68 | self.sa.add(integration) |
|
69 | self.update_integration(integration, name, enabled, scope, settings) | |
|
74 | 70 | self.sa.commit() |
|
75 | 71 | return integration |
|
76 | 72 | |
|
73 | def update_integration(self, integration, name, enabled, scope, settings): | |
|
74 | """ | |
|
75 | :param scope: one of ['global', 'root_repos', <RepoGroup>, <Repository>] | |
|
76 | """ | |
|
77 | ||
|
78 | integration = self.__get_integration(integration) | |
|
79 | ||
|
80 | integration.scope = scope | |
|
81 | integration.name = name | |
|
82 | integration.enabled = enabled | |
|
83 | integration.settings = settings | |
|
84 | ||
|
85 | return integration | |
|
86 | ||
|
77 | 87 | def delete(self, integration): |
|
78 | try: | |
|
79 | integration = self.__get_integration(integration) | |
|
80 |
|
81 | self.sa.delete(integration) | |
|
82 | return True | |
|
83 | except Exception: | |
|
84 | log.error(traceback.format_exc()) | |
|
85 | raise | |
|
88 | integration = self.__get_integration(integration) | |
|
89 | if integration: | |
|
90 | self.sa.delete(integration) | |
|
91 | return True | |
|
86 | 92 | return False |
|
87 | 93 | |
|
88 | 94 | def get_integration_handler(self, integration): |
@@ -100,41 +106,108 b' class IntegrationModel(BaseModel):' | |||
|
100 | 106 | if handler: |
|
101 | 107 | handler.send_event(event) |
|
102 | 108 | |
|
103 | def get_integrations(self, | |
|
104 | if repo: | |
|
105 | return self.sa.query(Integration).filter( | |
|
106 | Integration.repo_id==repo.repo_id).all() | |
|
107 | elif repo_group: | |
|
108 | return self.sa.query(Integration).filter( | |
|
109 | Integration.repo_group_id==repo_group.group_id).all() | |
|
109 | def get_integrations(self, scope, IntegrationType=None): | |
|
110 | """ | |
|
111 | Return integrations for a scope, which must be one of: | |
|
112 | ||
|
113 | 'all' - every integration, global/repogroup/repo | |
|
114 | 'global' - global integrations only | |
|
115 | <Repository> instance - integrations for this repo only | |
|
116 | <RepoGroup> instance - integrations for this repogroup only | |
|
117 | """ | |
|
110 | 118 | |
|
111 | # global integrations | |
|
112 |
|
113 | Integration.repo | |
|
119 | if isinstance(scope, Repository): | |
|
120 | query = self.sa.query(Integration).filter( | |
|
121 | Integration.repo==scope) | |
|
122 | elif isinstance(scope, RepoGroup): | |
|
123 | query = self.sa.query(Integration).filter( | |
|
124 | Integration.repo_group==scope) | |
|
125 | elif scope == 'global': | |
|
126 | # global integrations | |
|
127 | query = self.sa.query(Integration).filter( | |
|
128 | and_(Integration.repo_id==None, Integration.repo_group_id==None) | |
|
129 | ) | |
|
130 | elif scope == 'root_repos': | |
|
131 | query = self.sa.query(Integration).filter( | |
|
132 | and_(Integration.repo_id==None, | |
|
133 | Integration.repo_group_id==None, | |
|
134 | Integration.child_repos_only==True) | |
|
135 | ) | |
|
136 | elif scope == 'all': | |
|
137 | query = self.sa.query(Integration) | |
|
138 | else: | |
|
139 | raise Exception( | |
|
140 | "invalid `scope`, must be one of: " | |
|
141 | "['global', 'all', <Repository>, <RepoGroup>]") | |
|
142 | ||
|
143 | if IntegrationType is not None: | |
|
144 | query = query.filter( | |
|
145 | Integration.integration_type==IntegrationType.key) | |
|
146 | ||
|
147 | result = [] | |
|
148 | for integration in query.all(): | |
|
149 | IntType = integration_type_registry.get(integration.integration_type) | |
|
150 | result.append((IntType, integration)) | |
|
151 | return result | |
|
114 | 152 | |
|
115 | 153 | def get_for_event(self, event, cache=False): |
|
116 | 154 | """ |
|
117 | 155 | Get integrations that match an event |
|
118 | 156 | """ |
|
119 | query = self.sa.query(Integration).filter(Integration.enabled==True) | |
|
157 | query = self.sa.query( | |
|
158 | Integration | |
|
159 | ).filter( | |
|
160 | Integration.enabled==True | |
|
161 | ) | |
|
162 | ||
|
163 | global_integrations_filter = and_( | |
|
164 | Integration.repo_id==None, | |
|
165 | Integration.repo_group_id==None, | |
|
166 | Integration.child_repos_only==False, | |
|
167 | ) | |
|
168 | ||
|
169 | if isinstance(event, events.RepoEvent): | |
|
170 | root_repos_integrations_filter = and_( | |
|
171 | Integration.repo_id==None, | |
|
172 | Integration.repo_group_id==None, | |
|
173 | Integration.child_repos_only==True, | |
|
174 | ) | |
|
175 | ||
|
176 | clauses = [ | |
|
177 | global_integrations_filter, | |
|
178 | ] | |
|
120 | 179 | |
|
121 | if isinstance(event, events.RepoEvent): # global + repo integrations | |
|
122 | # + repo_group integrations | |
|
123 | parent_groups = event.repo.groups_with_parents | |
|
124 | query = query.filter( | |
|
125 | or_(Integration.repo_id==None, | |
|
126 | Integration.repo_id==event.repo.repo_id, | |
|
127 | Integration.repo_group_id.in_( | |
|
128 | [group.group_id for group in parent_groups] | |
|
129 | ))) | |
|
180 | # repo integrations | |
|
181 | if event.repo.repo_id: # pre-create events don't have a repo_id yet | |
|
182 | clauses.append( | |
|
183 | Integration.repo_id==event.repo.repo_id | |
|
184 | ) | |
|
185 | ||
|
186 | if event.repo.group: | |
|
187 | clauses.append( | |
|
188 | Integration.repo_group_id == event.repo.group.group_id | |
|
189 | ) | |
|
190 | # repo group cascade to kids (maybe implement this sometime?) | |
|
191 | # clauses.append(Integration.repo_group_id.in_( | |
|
192 | # [group.group_id for group in | |
|
193 | # event.repo.groups_with_parents] | |
|
194 | # )) | |
|
195 | ||
|
196 | ||
|
197 | if not event.repo.group: # root repo | |
|
198 | clauses.append(root_repos_integrations_filter) | |
|
199 | ||
|
200 | query = query.filter(or_(*clauses)) | |
|
201 | ||
|
130 | 202 | if cache: |
|
131 | 203 | query = query.options(FromCache( |
|
132 | 204 | "sql_cache_short", |
|
133 | 205 | "get_enabled_repo_integrations_%i" % event.repo.repo_id)) |
|
134 | 206 | else: # only global integrations |
|
135 | query = query.filter( | |
|
207 | query = query.filter(global_integrations_filter) | |
|
136 | 208 | if cache: |
|
137 | 209 | query = query.options(FromCache( |
|
138 | 210 | "sql_cache_short", "get_enabled_global_integrations")) |
|
139 | 211 | |
|
140 | re | |
|
212 | result = query.all() | |
|
213 | return result No newline at end of file |
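
get_for_event() now OR-s together explicit clauses: global integrations always match, a repo event additionally matches its own repository's integrations and its parent group's integrations, and 'root_repos' integrations match only when the repository has no parent group. Those matching rules are sketched below in plain Python with made-up data; the real implementation is the SQLAlchemy query above.

def matching_integrations(integrations, repo_name, group_name=None):
    # integrations: list of (scope, name); scope is 'global', 'root_repos',
    # ('repo', <repo name>) or ('repogroup', <group name>)
    matched = []
    for scope, name in integrations:
        if scope == 'global':
            matched.append(name)
        elif scope == 'root_repos' and group_name is None:
            matched.append(name)
        elif scope == ('repo', repo_name):
            matched.append(name)
        elif group_name is not None and scope == ('repogroup', group_name):
            matched.append(name)
    return matched

integrations = [
    ('global', 'global-slack'),
    ('root_repos', 'root-repos-webhook'),
    (('repo', 'docs'), 'docs-ci'),
    (('repogroup', 'backend'), 'backend-email'),
]

# top-level repo: global + root_repos + its own integration
print(matching_integrations(integrations, 'docs'))
# ['global-slack', 'root-repos-webhook', 'docs-ci']

# repo inside a group: global + its parent group's integration
print(matching_integrations(integrations, 'api', group_name='backend'))
# ['global-slack', 'backend-email']
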
@@ -469,6 +469,8 b' class RepoGroupModel(BaseModel):' | |||
|
469 | 469 | |
|
470 | 470 | def delete(self, repo_group, force_delete=False, fs_remove=True): |
|
471 | 471 | repo_group = self._get_repo_group(repo_group) |
|
472 | if not repo_group: | |
|
473 | return False | |
|
472 | 474 | try: |
|
473 | 475 | self.sa.delete(repo_group) |
|
474 | 476 | if fs_remove: |
@@ -478,6 +480,7 b' class RepoGroupModel(BaseModel):' | |||
|
478 | 480 | |
|
479 | 481 | # Trigger delete event. |
|
480 | 482 | events.trigger(events.RepoGroupDeleteEvent(repo_group)) |
|
483 | return True | |
|
481 | 484 | |
|
482 | 485 | except Exception: |
|
483 | 486 | log.error('Error removing repo_group %s', repo_group) |
@@ -38,6 +38,17 b'' | |||
|
38 | 38 | |
|
39 | 39 | .form-control { |
|
40 | 40 | width: 100%; |
|
41 | padding: 0.9em; | |
|
42 | border: 1px solid #979797; | |
|
43 | border-radius: 2px; | |
|
44 | } | |
|
45 | .form-control.select2-container { | |
|
46 | padding: 0; /* padding already applied in .drop-menu a */ | |
|
47 | } | |
|
48 | ||
|
49 | .form-control.readonly { | |
|
50 | background: #eeeeee; | |
|
51 | cursor: not-allowed; | |
|
41 | 52 | } |
|
42 | 53 | |
|
43 | 54 | .error-block { |
@@ -1100,6 +1100,44 b' table.issuetracker {' | |||
|
1100 | 1100 | } |
|
1101 | 1101 | } |
|
1102 | 1102 | |
|
1103 | table.integrations { | |
|
1104 | .td-icon { | |
|
1105 | width: 20px; | |
|
1106 | .integration-icon { | |
|
1107 | height: 20px; | |
|
1108 | width: 20px; | |
|
1109 | } | |
|
1110 | } | |
|
1111 | } | |
|
1112 | ||
|
1113 | .integrations { | |
|
1114 | a.integration-box { | |
|
1115 | color: @text-color; | |
|
1116 | &:hover { | |
|
1117 | .panel { | |
|
1118 | background: #fbfbfb; | |
|
1119 | } | |
|
1120 | } | |
|
1121 | .integration-icon { | |
|
1122 | width: 30px; | |
|
1123 | height: 30px; | |
|
1124 | margin-right: 20px; | |
|
1125 | float: left; | |
|
1126 | } | |
|
1127 | ||
|
1128 | .panel-body { | |
|
1129 | padding: 10px; | |
|
1130 | } | |
|
1131 | .panel { | |
|
1132 | margin-bottom: 10px; | |
|
1133 | } | |
|
1134 | h2 { | |
|
1135 | display: inline-block; | |
|
1136 | margin: 0; | |
|
1137 | min-width: 140px; | |
|
1138 | } | |
|
1139 | } | |
|
1140 | } | |
|
1103 | 1141 | |
|
1104 | 1142 | //Permissions Settings |
|
1105 | 1143 | #add_perm { |
@@ -261,7 +261,7 b' mark,' | |||
|
261 | 261 | margin-bottom: 0; |
|
262 | 262 | } |
|
263 | 263 | |
|
264 | .links{ | |
|
264 | .links { | |
|
265 | 265 | float: right; |
|
266 | 266 | display: inline; |
|
267 | 267 | margin: 0; |
@@ -270,7 +270,7 b' mark,' | |||
|
270 | 270 | text-align: right; |
|
271 | 271 | |
|
272 | 272 | li:before { content: none; } |
|
273 | ||
|
273 | li { float: right; } | |
|
274 | 274 | a { |
|
275 | 275 | display: inline-block; |
|
276 | 276 | margin-left: @textmargin/2; |
@@ -11,6 +11,19 b'' | |||
|
11 | 11 | request.route_url(route_name='repo_integrations_list', |
|
12 | 12 | repo_name=c.repo.repo_name, |
|
13 | 13 | integration=current_IntegrationType.key))} |
|
14 | %elif c.repo_group: | |
|
15 | ${h.link_to(_('Admin'),h.url('admin_home'))} | |
|
16 | » | |
|
17 | ${h.link_to(_('Repository Groups'),h.url('repo_groups'))} | |
|
18 | » | |
|
19 | ${h.link_to(c.repo_group.group_name,h.url('edit_repo_group', group_name=c.repo_group.group_name))} | |
|
20 | » | |
|
21 | ${h.link_to(_('Integrations'),request.route_url(route_name='repo_group_integrations_home', repo_group_name=c.repo_group.group_name))} | |
|
22 | » | |
|
23 | ${h.link_to(current_IntegrationType.display_name, | |
|
24 | request.route_url(route_name='repo_group_integrations_list', | |
|
25 | repo_group_name=c.repo_group.group_name, | |
|
26 | integration=current_IntegrationType.key))} | |
|
14 | 27 | %else: |
|
15 | 28 | ${h.link_to(_('Admin'),h.url('admin_home'))} |
|
16 | 29 | » |
@@ -22,18 +35,31 b'' | |||
|
22 | 35 | request.route_url(route_name='global_integrations_list', |
|
23 | 36 | integration=current_IntegrationType.key))} |
|
24 | 37 | %endif |
|
38 | ||
|
25 | 39 | %if integration: |
|
26 | 40 | » |
|
27 | 41 | ${integration.name} |
|
42 | %elif current_IntegrationType: | |
|
43 | » | |
|
44 | ${current_IntegrationType.display_name} | |
|
28 | 45 | %endif |
|
29 | 46 | </%def> |
|
47 | ||
|
48 | <style> | |
|
49 | .control-inputs.item-options, .control-inputs.item-settings { | |
|
50 | float: left; | |
|
51 | width: 100%; | |
|
52 | } | |
|
53 | </style> | |
|
30 | 54 | <div class="panel panel-default"> |
|
31 | 55 | <div class="panel-heading"> |
|
32 | 56 | <h2 class="panel-title"> |
|
33 | 57 | %if integration: |
|
34 | 58 | ${current_IntegrationType.display_name} - ${integration.name} |
|
35 | 59 | %else: |
|
36 | ${_('Create New %(integration_type)s Integration') % { | |
|
60 | ${_('Create New %(integration_type)s Integration') % { | |
|
61 | 'integration_type': current_IntegrationType.display_name | |
|
62 | }} | |
|
37 | 63 | %endif |
|
38 | 64 | </h2> |
|
39 | 65 | </div> |
@@ -4,6 +4,12 b'' | |||
|
4 | 4 | <%def name="breadcrumbs_links()"> |
|
5 | 5 | %if c.repo: |
|
6 | 6 | ${h.link_to('Settings',h.url('edit_repo', repo_name=c.repo.repo_name))} |
|
7 | %elif c.repo_group: | |
|
8 | ${h.link_to(_('Admin'),h.url('admin_home'))} | |
|
9 | » | |
|
10 | ${h.link_to(_('Repository Groups'),h.url('repo_groups'))} | |
|
11 | » | |
|
12 | ${h.link_to(c.repo_group.group_name,h.url('edit_repo_group', group_name=c.repo_group.group_name))} | |
|
7 | 13 | %else: |
|
8 | 14 | ${h.link_to(_('Admin'),h.url('admin_home'))} |
|
9 | 15 | » |
@@ -15,6 +21,10 b'' | |||
|
15 | 21 | ${h.link_to(_('Integrations'), |
|
16 | 22 | request.route_url(route_name='repo_integrations_home', |
|
17 | 23 | repo_name=c.repo.repo_name))} |
|
24 | %elif c.repo_group: | |
|
25 | ${h.link_to(_('Integrations'), | |
|
26 | request.route_url(route_name='repo_group_integrations_home', | |
|
27 | repo_group_name=c.repo_group.group_name))} | |
|
18 | 28 | %else: |
|
19 | 29 | ${h.link_to(_('Integrations'), |
|
20 | 30 | request.route_url(route_name='global_integrations_home'))} |
@@ -26,54 +36,105 b'' | |||
|
26 | 36 | ${_('Integrations')} |
|
27 | 37 | %endif |
|
28 | 38 | </%def> |
|
39 | ||
|
29 | 40 | <div class="panel panel-default"> |
|
30 | 41 | <div class="panel-heading"> |
|
31 | <h3 class="panel-title"> | |
|
42 | <h3 class="panel-title"> | |
|
43 | %if c.repo: | |
|
44 | ${_('Current Integrations for Repository: {repo_name}').format(repo_name=c.repo.repo_name)} | |
|
45 | %elif c.repo_group: | |
|
46 | ${_('Current Integrations for repository group: {repo_group_name}').format(repo_group_name=c.repo_group.group_name)} | |
|
47 | %else: | |
|
48 | ${_('Current Integrations')} | |
|
49 | %endif | |
|
50 | </h3> | |
|
32 | 51 | </div> |
|
33 | 52 | <div class="panel-body"> |
|
34 | %if not available_integrations: | |
|
35 | ${_('No integrations available.')} | |
|
36 | %else: | |
|
37 | %for integration in available_integrations: | |
|
38 | <% | |
|
39 | if c.repo: | |
|
40 | create_url = request.route_path('repo_integrations_create', | |
|
53 | <% | |
|
54 | if c.repo: | |
|
55 | home_url = request.route_path('repo_integrations_home', | |
|
56 | repo_name=c.repo.repo_name) | |
|
57 | elif c.repo_group: | |
|
58 | home_url = request.route_path('repo_group_integrations_home', | |
|
59 | repo_group_name=c.repo_group.group_name) | |
|
60 | else: | |
|
61 | home_url = request.route_path('global_integrations_home') | |
|
62 | %> | |
|
63 | ||
|
64 | <a href="${home_url}" class="btn ${not current_IntegrationType and 'btn-primary' or ''}">${_('All')}</a> | |
|
65 | ||
|
66 | %for integration_key, IntegrationType in available_integrations.items(): | |
|
67 | <% | |
|
68 | if c.repo: | |
|
69 | list_url = request.route_path('repo_integrations_list', | |
|
41 | 70 | repo_name=c.repo.repo_name, |
|
42 | integration=integration) | |
|
43 | elif c.repo_group: | |
|
44 |
|
71 | integration=integration_key) | |
|
72 | elif c.repo_group: | |
|
73 | list_url = request.route_path('repo_group_integrations_list', | |
|
45 | 74 | repo_group_name=c.repo_group.group_name, |
|
46 | integration=integration) | |
|
47 | else: | |
|
48 |
|
49 |
|
50 | %> | |
|
51 | <a href="${ | |
|
52 | ${integration} | |
|
75 | integration=integration_key) | |
|
76 | else: | |
|
77 | list_url = request.route_path('global_integrations_list', | |
|
78 | integration=integration_key) | |
|
79 | %> | |
|
80 | <a href="${list_url}" | |
|
81 | class="btn ${current_IntegrationType and integration_key == current_IntegrationType.key and 'btn-primary' or ''}"> | |
|
82 | ${IntegrationType.display_name} | |
|
53 | 83 | </a> |
|
54 | 84 | %endfor |
|
55 | %endif | |
|
56 | </div> | |
|
57 | </div> | |
|
58 | <div class="panel panel-default"> | |
|
59 | <div class="panel-heading"> | |
|
60 | <h3 class="panel-title">${_('Current Integrations')}</h3> | |
|
61 | </div> | |
|
62 | <div class="panel-body"> | |
|
63 | <table class="rctable issuetracker"> | |
|
85 | ||
|
86 | <% | |
|
87 | if c.repo: | |
|
88 | create_url = h.route_path('repo_integrations_new', repo_name=c.repo.repo_name) | |
|
89 | elif c.repo_group: | |
|
90 | create_url = h.route_path('repo_group_integrations_new', repo_group_name=c.repo_group.group_name) | |
|
91 | else: | |
|
92 | create_url = h.route_path('global_integrations_new') | |
|
93 | %> | |
|
94 | <p class="pull-right"> | |
|
95 | <a href="${create_url}" class="btn btn-small btn-success">${_(u'Create new integration')}</a> | |
|
96 | </p> | |
|
97 | ||
|
98 | <table class="rctable integrations"> | |
|
64 | 99 | <thead> |
|
65 | 100 | <tr> |
|
66 | <th>${_('Enabled')}</th> | |
|
67 | <th>${_('Description')}</th> | |
|
68 | <th>${_('Type')}</th> | |
|
101 | <th><a href="?sort=enabled:${rev_sort_dir}">${_('Enabled')}</a></th> | |
|
102 | <th><a href="?sort=name:${rev_sort_dir}">${_('Name')}</a></th> | |
|
103 | <th colspan="2"><a href="?sort=integration_type:${rev_sort_dir}">${_('Type')}</a></th> | |
|
104 | <th><a href="?sort=scope:${rev_sort_dir}">${_('Scope')}</a></th> | |
|
69 | 105 | <th>${_('Actions')}</th> |
|
70 | 106 | <th></th> |
|
71 | 107 | </tr> |
|
72 | 108 | </thead> |
|
73 | 109 | <tbody> |
|
110 | %if not integrations_list: | |
|
111 | <tr> | |
|
112 | <td colspan="7"> | |
|
113 | <% integration_type = current_IntegrationType and current_IntegrationType.display_name or '' %> | |
|
114 | %if c.repo: | |
|
115 | ${_('No {type} integrations for repo {repo} exist yet.').format(type=integration_type, repo=c.repo.repo_name)} | |
|
116 | %elif c.repo_group: | |
|
117 | ${_('No {type} integrations for repogroup {repogroup} exist yet.').format(type=integration_type, repogroup=c.repo_group.group_name)} | |
|
118 | %else: | |
|
119 | ${_('No {type} integrations exist yet.').format(type=integration_type)} | |
|
120 | %endif | |
|
74 | 121 | |
|
75 | %for integration_type, integrations in sorted(current_integrations.items()): | |
|
76 | %for integration in sorted(integrations, key=lambda x: x.name): | |
|
122 | %if current_IntegrationType: | |
|
123 | <% | |
|
124 | if c.repo: | |
|
125 | create_url = h.route_path('repo_integrations_create', repo_name=c.repo.repo_name, integration=current_IntegrationType.key) | |
|
126 | elif c.repo_group: | |
|
127 | create_url = h.route_path('repo_group_integrations_create', repo_group_name=c.repo_group.group_name, integration=current_IntegrationType.key) | |
|
128 | else: | |
|
129 | create_url = h.route_path('global_integrations_create', integration=current_IntegrationType.key) | |
|
130 | %> | |
|
131 | %endif | |
|
132 | ||
|
133 | <a href="${create_url}">${_(u'Create one')}</a> | |
|
134 | </td> | |
|
135 | </tr> | |
|
136 | %endif | |
|
137 | %for IntegrationType, integration in integrations_list: | |
|
77 | 138 | <tr id="integration_${integration.integration_id}"> |
|
78 | 139 | <td class="td-enabled"> |
|
79 | 140 | %if integration.enabled: |
@@ -85,11 +146,39 b'' | |||
|
85 | 146 | <td class="td-description"> |
|
86 | 147 | ${integration.name} |
|
87 | 148 | </td> |
|
88 | <td class="td- | |
|
149 | <td class="td-icon"> | |
|
150 | %if integration.integration_type in available_integrations: | |
|
151 | <div class="integration-icon"> | |
|
152 | ${available_integrations[integration.integration_type].icon|n} | |
|
153 | </div> | |
|
154 | %else: | |
|
155 | ? | |
|
156 | %endif | |
|
157 | </td> | |
|
158 | <td class="td-type"> | |
|
89 | 159 | ${integration.integration_type} |
|
90 | 160 | </td> |
|
161 | <td class="td-scope"> | |
|
162 | %if integration.repo: | |
|
163 | <a href="${h.url('summary_home', repo_name=integration.repo.repo_name)}"> | |
|
164 | ${_('repo')}:${integration.repo.repo_name} | |
|
165 | </a> | |
|
166 | %elif integration.repo_group: | |
|
167 | <a href="${h.url('repo_group_home', group_name=integration.repo_group.group_name)}"> | |
|
168 | ${_('repogroup')}:${integration.repo_group.group_name} | |
|
169 | </a> | |
|
170 | %else: | |
|
171 | %if integration.scope == 'root_repos': | |
|
172 | ${_('top level repos only')} | |
|
173 | %elif integration.scope == 'global': | |
|
174 | ${_('global')} | |
|
175 | %else: | |
|
176 | ${_('unknown scope')}: ${integration.scope} | |
|
177 | %endif | |
|
178 | %endif | |
|
179 | </td> | |
|
91 | 180 | <td class="td-action"> |
|
92 | %if integration_type not in available_integrations: | |
|
181 | %if not IntegrationType: | |
|
93 | 182 | ${_('unknown integration')} |
|
94 | 183 | %else: |
|
95 | 184 | <% |
@@ -122,11 +211,15 b'' | |||
|
122 | 211 | %endif |
|
123 | 212 | </td> |
|
124 | 213 | </tr> |
|
125 | %endfor | |
|
126 | 214 | %endfor |
|
127 | 215 | <tr id="last-row"></tr> |
|
128 | 216 | </tbody> |
|
129 | 217 | </table> |
|
218 | <div class="integrations-paginator"> | |
|
219 | <div class="pagination-wh pagination-left"> | |
|
220 | ${integrations_list.pager('$link_previous ~2~ $link_next')} | |
|
221 | </div> | |
|
222 | </div> | |
|
130 | 223 | </div> |
|
131 | 224 | </div> |
|
132 | 225 | <script type="text/javascript"> |
@@ -10,7 +10,6 b'' | |||
|
10 | 10 | id="item-${oid}" |
|
11 | 11 | tal:omit-tag="structural" |
|
12 | 12 | i18n:domain="deform"> |
|
13 | ||
|
14 | 13 | <label for="${oid}" |
|
15 | 14 | class="control-label ${required and 'required' or ''}" |
|
16 | 15 | tal:condition="not structural" |
@@ -18,7 +17,7 b'' | |||
|
18 | 17 | > |
|
19 | 18 | ${title} |
|
20 | 19 | </label> |
|
21 | <div class="control-inputs"> | |
|
20 | <div class="control-inputs ${field.widget.item_css_class or ''}"> | |
|
22 | 21 | <div tal:define="input_prepend field.widget.input_prepend | None; |
|
23 | 22 | input_append field.widget.input_append | None" |
|
24 | 23 | tal:omit-tag="not (input_prepend or input_append)" |
@@ -1,8 +1,16 b'' | |||
|
1 | <%def name="panel(title | |
|
2 | <div class="panel panel-${class_}"> | |
|
1 | <%def name="panel(title='', category='default', class_='')"> | |
|
2 | <div class="panel panel-${category} ${class_}"> | |
|
3 | %if title or hasattr(caller, 'title'): | |
|
3 | 4 | <div class="panel-heading"> |
|
4 | <h3 class="panel-title"> | |
|
5 | <h3 class="panel-title"> | |
|
6 | %if title: | |
|
7 | ${title} | |
|
8 | %else: | |
|
9 | ${caller.title()} | |
|
10 | %endif | |
|
11 | </h3> | |
|
5 | 12 | </div> |
|
13 | %endif | |
|
6 | 14 | <div class="panel-body"> |
|
7 | 15 | ${caller.body()} |
|
8 | 16 | </div> |
@@ -18,61 +18,175 b'' | |||
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | import time | |
|
21 | 22 | import pytest |
|
22 | import requests | |
|
23 | from mock import Mock, patch | |
|
24 | 23 | |
|
25 | 24 | from rhodecode import events |
|
25 | from rhodecode.tests.fixture import Fixture | |
|
26 | 26 | from rhodecode.model.db import Session, Integration |
|
27 | 27 | from rhodecode.model.integration import IntegrationModel |
|
28 | 28 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
29 | 29 | |
|
30 | 30 | |
|
31 | class TestIntegrationType(IntegrationTypeBase): | |
|
32 | """ Test integration type class """ | |
|
33 | ||
|
34 | key = 'test-integration' | |
|
35 | display_name = 'Test integration type' | |
|
31 | class TestDeleteScopesDeletesIntegrations(object): | |
|
32 | def test_delete_repo_with_integration_deletes_integration(self, | |
|
33 | repo_integration_stub): | |
|
34 | Session().delete(repo_integration_stub.repo) | |
|
35 | Session().commit() | |
|
36 | Session().expire_all() | |
|
37 | integration = Integration.get(repo_integration_stub.integration_id) | |
|
38 | assert integration is None | |
|
36 | 39 | |
|
37 | def __init__(self, settings): | |
|
38 | super(IntegrationTypeBase, self).__init__(settings) | |
|
39 | self.sent_events = [] # for testing | |
|
40 | 40 | |
|
41 | def send_event(self, event): | |
|
42 | self.sent_events.append(event) | |
|
41 | def test_delete_repo_group_with_integration_deletes_integration(self, | |
|
42 | repogroup_integration_stub): | |
|
43 | Session().delete(repogroup_integration_stub.repo_group) | |
|
44 | Session().commit() | |
|
45 | Session().expire_all() | |
|
46 | integration = Integration.get(repogroup_integration_stub.integration_id) | |
|
47 | assert integration is None | |
|
43 | 48 | |
|
44 | 49 | |
|
45 | 50 | @pytest.fixture |
|
46 | def repo_integration_stub(request, repo_stub): | |
|
47 | settings = {'test_key': 'test_value'} | |
|
48 | integration = IntegrationModel().create( | |
|
49 | TestIntegrationType, settings=settings, repo=repo_stub, enabled=True, | |
|
50 | name='test repo integration') | |
|
51 | def integration_repos(request, StubIntegrationType, stub_integration_settings): | |
|
52 | """ | |
|
53 | Create repositories and integrations for testing, and destroy them after | |
|
54 | """ | |
|
55 | fixture = Fixture() | |
|
56 | ||
|
57 | repo_group_1_id = 'int_test_repo_group_1_%s' % time.time() | |
|
58 | repo_group_1 = fixture.create_repo_group(repo_group_1_id) | |
|
59 | repo_group_2_id = 'int_test_repo_group_2_%s' % time.time() | |
|
60 | repo_group_2 = fixture.create_repo_group(repo_group_2_id) | |
|
61 | ||
|
62 | repo_1_id = 'int_test_repo_1_%s' % time.time() | |
|
63 | repo_1 = fixture.create_repo(repo_1_id, repo_group=repo_group_1) | |
|
64 | repo_2_id = 'int_test_repo_2_%s' % time.time() | |
|
65 | repo_2 = fixture.create_repo(repo_2_id, repo_group=repo_group_2) | |
|
66 | ||
|
67 | root_repo_id = 'int_test_repo_root_%s' % time.time() | |
|
68 | root_repo = fixture.create_repo(root_repo_id) | |
|
51 | 69 | |
|
52 | @request.addfinalizer | |
|
53 | def cleanup(): | |
|
54 | IntegrationModel().delete(integration) | |
|
70 | integration_global = IntegrationModel().create( | |
|
71 | StubIntegrationType, settings=stub_integration_settings, | |
|
72 | enabled=True, name='test global integration', scope='global') | |
|
73 | integration_root_repos = IntegrationModel().create( | |
|
74 | StubIntegrationType, settings=stub_integration_settings, | |
|
75 | enabled=True, name='test root repos integration', scope='root_repos') | |
|
76 | integration_repo_1 = IntegrationModel().create( | |
|
77 | StubIntegrationType, settings=stub_integration_settings, | |
|
78 | enabled=True, name='test repo 1 integration', scope=repo_1) | |
|
79 | integration_repo_group_1 = IntegrationModel().create( | |
|
80 | StubIntegrationType, settings=stub_integration_settings, | |
|
81 | enabled=True, name='test repo group 1 integration', scope=repo_group_1) | |
|
82 | integration_repo_2 = IntegrationModel().create( | |
|
83 | StubIntegrationType, settings=stub_integration_settings, | |
|
84 | enabled=True, name='test repo 2 integration', scope=repo_2) | |
|
85 | integration_repo_group_2 = IntegrationModel().create( | |
|
86 | StubIntegrationType, settings=stub_integration_settings, | |
|
87 | enabled=True, name='test repo group 2 integration', scope=repo_group_2) | |
|
88 | ||
|
89 | Session().commit() | |
|
55 | 90 | |
|
56 | return integration | |
|
91 | def _cleanup(): | |
|
92 | Session().delete(integration_global) | |
|
93 | Session().delete(integration_root_repos) | |
|
94 | Session().delete(integration_repo_1) | |
|
95 | Session().delete(integration_repo_group_1) | |
|
96 | Session().delete(integration_repo_2) | |
|
97 | Session().delete(integration_repo_group_2) | |
|
98 | fixture.destroy_repo(root_repo) | |
|
99 | fixture.destroy_repo(repo_1) | |
|
100 | fixture.destroy_repo(repo_2) | |
|
101 | fixture.destroy_repo_group(repo_group_1) | |
|
102 | fixture.destroy_repo_group(repo_group_2) | |
|
103 | ||
|
104 | request.addfinalizer(_cleanup) | |
|
105 | ||
|
106 | return { | |
|
107 | 'repos': { | |
|
108 | 'repo_1': repo_1, | |
|
109 | 'repo_2': repo_2, | |
|
110 | 'root_repo': root_repo, | |
|
111 | }, | |
|
112 | 'repo_groups': { | |
|
113 | 'repo_group_1': repo_group_1, | |
|
114 | 'repo_group_2': repo_group_2, | |
|
115 | }, | |
|
116 | 'integrations': { | |
|
117 | 'global': integration_global, | |
|
118 | 'root_repos': integration_root_repos, | |
|
119 | 'repo_1': integration_repo_1, | |
|
120 | 'repo_2': integration_repo_2, | |
|
121 | 'repo_group_1': integration_repo_group_1, | |
|
122 | 'repo_group_2': integration_repo_group_2, | |
|
123 | } | |
|
124 | } | |
|
57 | 125 | |
|
58 | 126 | |
|
59 | @pytest.fixture | |
|
60 | def global_integration_stub(request): | |
|
61 | settings = {'test_key': 'test_value'} | |
|
62 | integration = IntegrationModel().create( | |
|
63 | TestIntegrationType, settings=settings, enabled=True, | |
|
64 | name='test global integration') | |
|
127 | def test_enabled_integration_repo_scopes(integration_repos): | |
|
128 | integrations = integration_repos['integrations'] | |
|
129 | repos = integration_repos['repos'] | |
|
130 | ||
|
131 | triggered_integrations = IntegrationModel().get_for_event( | |
|
132 | events.RepoEvent(repos['root_repo'])) | |
|
133 | ||
|
134 | assert triggered_integrations == [ | |
|
135 | integrations['global'], | |
|
136 | integrations['root_repos'] | |
|
137 | ] | |
|
138 | ||
|
139 | ||
|
140 | triggered_integrations = IntegrationModel().get_for_event( | |
|
141 | events.RepoEvent(repos['repo_1'])) | |
|
65 | 142 | |
|
66 | @request.addfinalizer | |
|
67 | def cleanup(): | |
|
68 | IntegrationModel().delete(integration) | |
|
143 | assert triggered_integrations == [ | |
|
144 | integrations['global'], | |
|
145 | integrations['repo_1'], | |
|
146 | integrations['repo_group_1'] | |
|
147 | ] | |
|
148 | ||
|
69 | 149 | |
|
70 | return integration | |
|
150 | triggered_integrations = IntegrationModel().get_for_event( | |
|
151 | events.RepoEvent(repos['repo_2'])) | |
|
152 | ||
|
153 | assert triggered_integrations == [ | |
|
154 | integrations['global'], | |
|
155 | integrations['repo_2'], | |
|
156 | integrations['repo_group_2'], | |
|
157 | ] | |
|
71 | 158 | |
|
72 | 159 | |
|
73 | def test_d | |
|
74 | Session().delete(repo_integration_stub.repo) | |
|
160 | def test_disabled_integration_repo_scopes(integration_repos): | |
|
161 | integrations = integration_repos['integrations'] | |
|
162 | repos = integration_repos['repos'] | |
|
163 | ||
|
164 | for integration in integrations.values(): | |
|
165 | integration.enabled = False | |
|
75 | 166 | Session().commit() |
|
76 | Session().expire_all() | |
|
77 | assert Integration.get(repo_integration_stub.integration_id) is None | |
|
167 | ||
|
168 | triggered_integrations = IntegrationModel().get_for_event( | |
|
169 | events.RepoEvent(repos['root_repo'])) | |
|
170 | ||
|
171 | assert triggered_integrations == [] | |
|
172 | ||
|
173 | ||
|
174 | triggered_integrations = IntegrationModel().get_for_event( | |
|
175 | events.RepoEvent(repos['repo_1'])) | |
|
176 | ||
|
177 | assert triggered_integrations == [] | |
|
178 | ||
|
78 | 179 | |
|
180 | triggered_integrations = IntegrationModel().get_for_event( | |
|
181 | events.RepoEvent(repos['repo_2'])) | |
|
182 | ||
|
183 | assert triggered_integrations == [] | |
|
184 | ||
|
185 | ||
|
186 | def test_enabled_non_repo_integrations(integration_repos): | |
|
187 | integrations = integration_repos['integrations'] | |
|
188 | ||
|
189 | triggered_integrations = IntegrationModel().get_for_event( | |
|
190 | events.UserPreCreate({})) | |
|
191 | ||
|
192 | assert triggered_integrations == [integrations['global']] |
@@ -33,6 +33,7 b' import uuid' | |||
|
33 | 33 | import mock |
|
34 | 34 | import pyramid.testing |
|
35 | 35 | import pytest |
|
36 | import colander | |
|
36 | 37 | import requests |
|
37 | 38 | from webtest.app import TestApp |
|
38 | 39 | |
@@ -41,7 +42,7 b' from rhodecode.model.changeset_status im' | |||
|
41 | 42 | from rhodecode.model.comment import ChangesetCommentsModel |
|
42 | 43 | from rhodecode.model.db import ( |
|
43 | 44 | PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup, |
|
44 | UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi) | |
|
45 | UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, Integration) | |
|
45 | 46 | from rhodecode.model.meta import Session |
|
46 | 47 | from rhodecode.model.pull_request import PullRequestModel |
|
47 | 48 | from rhodecode.model.repo import RepoModel |
@@ -49,6 +50,9 b' from rhodecode.model.repo_group import R' | |||
|
49 | 50 | from rhodecode.model.user import UserModel |
|
50 | 51 | from rhodecode.model.settings import VcsSettingsModel |
|
51 | 52 | from rhodecode.model.user_group import UserGroupModel |
|
53 | from rhodecode.model.integration import IntegrationModel | |
|
54 | from rhodecode.integrations import integration_type_registry | |
|
55 | from rhodecode.integrations.types.base import IntegrationTypeBase | |
|
52 | 56 | from rhodecode.lib.utils import repo2db_mapper |
|
53 | 57 | from rhodecode.lib.vcs import create_vcsserver_proxy |
|
54 | 58 | from rhodecode.lib.vcs.backends import get_backend |
@@ -1636,3 +1640,101 b' def config_stub(request, request_stub):' | |||
|
1636 | 1640 | pyramid.testing.tearDown() |
|
1637 | 1641 | |
|
1638 | 1642 | return config |
|
1643 | ||
|
1644 | ||
|
1645 | @pytest.fixture | |
|
1646 | def StubIntegrationType(): | |
|
1647 | class _StubIntegrationType(IntegrationTypeBase): | |
|
1648 | """ Test integration type class """ | |
|
1649 | ||
|
1650 | key = 'test' | |
|
1651 | display_name = 'Test integration type' | |
|
1652 | description = 'A test integration type for testing' | |
|
1653 | icon = 'test_icon_html_image' | |
|
1654 | ||
|
1655 | def __init__(self, settings): | |
|
1656 | super(_StubIntegrationType, self).__init__(settings) | |
|
1657 | self.sent_events = [] # for testing | |
|
1658 | ||
|
1659 | def send_event(self, event): | |
|
1660 | self.sent_events.append(event) | |
|
1661 | ||
|
1662 | def settings_schema(self): | |
|
1663 | class SettingsSchema(colander.Schema): | |
|
1664 | test_string_field = colander.SchemaNode( | |
|
1665 | colander.String(), | |
|
1666 | missing=colander.required, | |
|
1667 | title='test string field', | |
|
1668 | ) | |
|
1669 | test_int_field = colander.SchemaNode( | |
|
1670 | colander.Int(), | |
|
1671 | title='some integer setting', | |
|
1672 | ) | |
|
1673 | return SettingsSchema() | |
|
1674 | ||
|
1675 | ||
|
1676 | integration_type_registry.register_integration_type(_StubIntegrationType) | |
|
1677 | return _StubIntegrationType | |
|
1678 | ||
|
1679 | @pytest.fixture | |
|
1680 | def stub_integration_settings(): | |
|
1681 | return { | |
|
1682 | 'test_string_field': 'some data', | |
|
1683 | 'test_int_field': 100, | |
|
1684 | } | |
|
1685 | ||
|
1686 | ||
|
1687 | @pytest.fixture | |
|
1688 | def repo_integration_stub(request, repo_stub, StubIntegrationType, | |
|
1689 | stub_integration_settings): | |
|
1690 | integration = IntegrationModel().create( | |
|
1691 | StubIntegrationType, settings=stub_integration_settings, enabled=True, | |
|
1692 | name='test repo integration', scope=repo_stub) | |
|
1693 | ||
|
1694 | @request.addfinalizer | |
|
1695 | def cleanup(): | |
|
1696 | IntegrationModel().delete(integration) | |
|
1697 | ||
|
1698 | return integration | |
|
1699 | ||
|
1700 | ||
|
1701 | @pytest.fixture | |
|
1702 | def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, | |
|
1703 | stub_integration_settings): | |
|
1704 | integration = IntegrationModel().create( | |
|
1705 | StubIntegrationType, settings=stub_integration_settings, enabled=True, | |
|
1706 | name='test repogroup integration', scope=test_repo_group) | |
|
1707 | ||
|
1708 | @request.addfinalizer | |
|
1709 | def cleanup(): | |
|
1710 | IntegrationModel().delete(integration) | |
|
1711 | ||
|
1712 | return integration | |
|
1713 | ||
|
1714 | ||
|
1715 | @pytest.fixture | |
|
1716 | def global_integration_stub(request, StubIntegrationType, | |
|
1717 | stub_integration_settings): | |
|
1718 | integration = IntegrationModel().create( | |
|
1719 | StubIntegrationType, settings=stub_integration_settings, enabled=True, | |
|
1720 | name='test global integration', scope='global') | |
|
1721 | ||
|
1722 | @request.addfinalizer | |
|
1723 | def cleanup(): | |
|
1724 | IntegrationModel().delete(integration) | |
|
1725 | ||
|
1726 | return integration | |
|
1727 | ||
|
1728 | ||
|
1729 | @pytest.fixture | |
|
1730 | def root_repos_integration_stub(request, StubIntegrationType, | |
|
1731 | stub_integration_settings): | |
|
1732 | integration = IntegrationModel().create( | |
|
1733 | StubIntegrationType, settings=stub_integration_settings, enabled=True, | |
|
1734 | name='test global integration', scope='root_repos') | |
|
1735 | ||
|
1736 | @request.addfinalizer | |
|
1737 | def cleanup(): | |
|
1738 | IntegrationModel().delete(integration) | |
|
1739 | ||
|
1740 | return integration |