Show More
The requested changes are too big and content was truncated. Show full diff
This diff has been collapsed as it changes many lines, (5625 lines changed) Show them Hide them | |||||
@@ -0,0 +1,5625 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | """ | |||
|
22 | Database Models for RhodeCode Enterprise | |||
|
23 | """ | |||
|
24 | ||||
|
25 | import re | |||
|
26 | import os | |||
|
27 | import time | |||
|
28 | import string | |||
|
29 | import hashlib | |||
|
30 | import logging | |||
|
31 | import datetime | |||
|
32 | import uuid | |||
|
33 | import warnings | |||
|
34 | import ipaddress | |||
|
35 | import functools | |||
|
36 | import traceback | |||
|
37 | import collections | |||
|
38 | ||||
|
39 | from sqlalchemy import ( | |||
|
40 | or_, and_, not_, func, cast, TypeDecorator, event, | |||
|
41 | Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column, | |||
|
42 | Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary, | |||
|
43 | Text, Float, PickleType, BigInteger) | |||
|
44 | from sqlalchemy.sql.expression import true, false, case | |||
|
45 | from sqlalchemy.sql.functions import coalesce, count # pragma: no cover | |||
|
46 | from sqlalchemy.orm import ( | |||
|
47 | relationship, joinedload, class_mapper, validates, aliased) | |||
|
48 | from sqlalchemy.ext.declarative import declared_attr | |||
|
49 | from sqlalchemy.ext.hybrid import hybrid_property | |||
|
50 | from sqlalchemy.exc import IntegrityError # pragma: no cover | |||
|
51 | from sqlalchemy.dialects.mysql import LONGTEXT | |||
|
52 | from zope.cachedescriptors.property import Lazy as LazyProperty | |||
|
53 | from pyramid import compat | |||
|
54 | from pyramid.threadlocal import get_current_request | |||
|
55 | from webhelpers2.text import remove_formatting | |||
|
56 | ||||
|
57 | from rhodecode.translation import _ | |||
|
58 | from rhodecode.lib.vcs import get_vcs_instance, VCSError | |||
|
59 | from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference | |||
|
60 | from rhodecode.lib.utils2 import ( | |||
|
61 | str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe, | |||
|
62 | time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict, | |||
|
63 | glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict) | |||
|
64 | from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \ | |||
|
65 | JsonRaw | |||
|
66 | from rhodecode.lib.ext_json import json | |||
|
67 | from rhodecode.lib.caching_query import FromCache | |||
|
68 | from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data | |||
|
69 | from rhodecode.lib.encrypt2 import Encryptor | |||
|
70 | from rhodecode.lib.exceptions import ( | |||
|
71 | ArtifactMetadataDuplicate, ArtifactMetadataBadValueType) | |||
|
72 | from rhodecode.model.meta import Base, Session | |||
|
73 | ||||
|
74 | URL_SEP = '/' | |||
|
75 | log = logging.getLogger(__name__) | |||
|
76 | ||||
|
77 | # ============================================================================= | |||
|
78 | # BASE CLASSES | |||
|
79 | # ============================================================================= | |||
|
80 | ||||
|
81 | # this is propagated from .ini file rhodecode.encrypted_values.secret or | |||
|
82 | # beaker.session.secret if first is not set. | |||
|
83 | # and initialized at environment.py | |||
|
84 | ENCRYPTION_KEY = None | |||
|
85 | ||||
|
86 | # used to sort permissions by types, '#' used here is not allowed to be in | |||
|
87 | # usernames, and it's very early in sorted string.printable table. | |||
|
88 | PERMISSION_TYPE_SORT = { | |||
|
89 | 'admin': '####', | |||
|
90 | 'write': '###', | |||
|
91 | 'read': '##', | |||
|
92 | 'none': '#', | |||
|
93 | } | |||
|
94 | ||||
|
95 | ||||
|
def display_user_sort(obj):
    """
    Sort key used by the .permissions() helpers on Repository, RepoGroup
    and UserGroup. The default user always sorts to the very front; all
    other entries sort by permission strength, then by username.
    """

    if obj.username == User.DEFAULT_USER:
        # default user outranks every other permission entry
        return '#####'
    perm_level = obj.permission.split('.')[-1]
    sort_prefix = PERMISSION_TYPE_SORT.get(perm_level, '')
    return '%s%s' % (sort_prefix, obj.username)
|
107 | ||||
|
108 | ||||
|
def display_user_group_sort(obj):
    """
    Sort key used by the .permissions() helpers on Repository, RepoGroup
    and UserGroup for user-group entries: permission strength first, then
    the user-group name.
    """

    perm_level = obj.permission.split('.')[-1]
    sort_prefix = PERMISSION_TYPE_SORT.get(perm_level, '')
    return '%s%s' % (sort_prefix, obj.users_group_name)
|
118 | ||||
|
119 | ||||
|
def _hash_key(k):
    """Return the sha1 digest of the given key, via ``sha1_safe``."""
    return sha1_safe(k)
|
122 | ||||
|
123 | ||||
|
def in_filter_generator(qry, items, limit=500):
    """
    Splits a large IN() clause into multiple smaller IN() clauses meant to
    be combined with OR, working around database limits on IN() list size.

    e.g.::
        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()

    :param qry: column/attribute exposing an ``.in_()`` clause builder
    :param items: sequence of values to filter by (must support len/slicing)
    :param limit: maximum number of values per single IN() clause
    :returns: list of ``qry.in_(...)`` clause elements
    """
    if not items:
        # empty list will cause empty query which might cause security issues
        # this can lead to hidden unpleasant results
        items = [-1]

    # NOTE: ``range`` replaces the Python2-only ``xrange``; for the chunk
    # offsets generated here the two behave identically, and ``range`` keeps
    # this helper portable.
    return [
        qry.in_(items[chunk: chunk + limit])
        for chunk in range(0, len(items), limit)
    ]
|
145 | ||||
|
146 | ||||
|
# common table arguments merged into every model's __table_args__:
# keep tables re-definable, force InnoDB + utf8 on MySQL, and enable
# AUTOINCREMENT primary keys on SQLite
base_table_args = {
    'extend_existing': True,
    'mysql_engine': 'InnoDB',
    'mysql_charset': 'utf8',
    'sqlite_autoincrement': True
}
|
153 | ||||
|
154 | ||||
|
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        """
        Setter for storing value

        Encrypts the plain-text value with the configured algorithm
        ('aes' by default, or 'fernet') before it hits the database.
        Raises ValueError for already-encrypted input or an unknown algorithm.
        """
        import rhodecode
        if not value:
            return value

        # protect against double encrypting if values is already encrypted
        if value.startswith('enc$aes$') \
                or value.startswith('enc$aes_hmac$') \
                or value.startswith('enc2$'):
            raise ValueError('value needs to be in unencrypted format, '
                             'ie. not starting with enc$ or enc2$')

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        if algo == 'aes':
            return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).encrypt(value)
        else:
            # BUG FIX: the ValueError was previously constructed but never
            # raised, so an unknown algorithm silently stored None
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))

    def process_result_value(self, value, dialect):
        """
        Getter for retrieving value

        Decrypts the stored value with the configured algorithm; honours
        the 'rhodecode.encrypted_values.strict' flag for AES validation.
        Raises ValueError for an unknown algorithm.
        """

        import rhodecode
        if not value:
            return value

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
        if algo == 'aes':
            decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).decrypt(value)
        else:
            # BUG FIX: the ValueError was previously constructed but never
            # raised, which then led to a NameError on `decrypted_data` below
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
        return decrypted_data
|
207 | ||||
|
208 | ||||
|
class BaseModel(object):
    """
    Base Model for all classes

    Mixin providing generic query helpers and dict/list serialization
    shared by every mapped model in this module.
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        # only columns present in populate_dict are overwritten
        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        """Return a fresh session-bound query for this model."""
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        """Fetch an instance by primary key; returns None for a falsy id_."""
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch an instance by primary key or raise pyramid HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            # a non-integer primary key is treated as a missing resource
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        """Return all rows of this model."""
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        """Mark the row with the given primary key as deleted in the session."""
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """
        Look up an already-loaded instance of this class in the session
        identity map by attribute value, avoiding a database round-trip.
        Returns None when no match — or more than one match — is loaded.
        """
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            # ambiguous: log loudly and fall through to returning None
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
|
313 | ||||
|
314 | ||||
|
class RhodeCodeSetting(Base, BaseModel):
    """
    Application-wide settings stored as typed name/value rows. Values are
    converted on access according to ``app_settings_type``; types carrying
    an ``.encrypted`` suffix are encrypted at rest via EncryptedTextValue.
    """
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # converters applied when reading a stored value back, keyed by type name
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        # type must be set before value: the value setter consults it
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values are always stored as unicode (see app_settings_value setter)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # strip optional modifier, e.g. 'unicode.encrypted' -> 'unicode'
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # only the base part (before an optional '.encrypted') must be known
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        """Return all settings whose name starts with the given prefix."""
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
|
399 | ||||
|
400 | ||||
|
class RhodeCodeUi(Base, BaseModel):
    """
    Global VCS ('ui') configuration entries stored as section/key/value
    rows: built-in hook names and SVN branch/tag pattern identifiers.
    """
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    # hooks shipped with the application (as opposed to user-defined ones)
    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
|
448 | ||||
|
449 | ||||
|
class RepoRhodeCodeSetting(Base, BaseModel):
    """
    Per-repository override of a RhodeCodeSetting: same typed
    name/value layout, scoped to a single repository.
    """
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        # type must be set before value: conversions depend on it
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values are always stored as unicode (see app_settings_value setter)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        # convert the stored unicode value back to its declared type;
        # NOTE: unlike RhodeCodeSetting, there is no 'encrypted' handling here
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
|
523 | ||||
|
524 | ||||
|
class RepoRhodeCodeUi(Base, BaseModel):
    """
    Per-repository override of a RhodeCodeUi entry: same
    section/key/value layout, scoped to a single repository.
    """
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
|
555 | ||||
|
556 | ||||
|
557 | class User(Base, BaseModel): | |||
|
558 | __tablename__ = 'users' | |||
|
559 | __table_args__ = ( | |||
|
560 | UniqueConstraint('username'), UniqueConstraint('email'), | |||
|
561 | Index('u_username_idx', 'username'), | |||
|
562 | Index('u_email_idx', 'email'), | |||
|
563 | base_table_args | |||
|
564 | ) | |||
|
565 | ||||
|
566 | DEFAULT_USER = 'default' | |||
|
567 | DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org' | |||
|
568 | DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}' | |||
|
569 | ||||
|
570 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
571 | username = Column("username", String(255), nullable=True, unique=None, default=None) | |||
|
572 | password = Column("password", String(255), nullable=True, unique=None, default=None) | |||
|
573 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) | |||
|
574 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) | |||
|
575 | name = Column("firstname", String(255), nullable=True, unique=None, default=None) | |||
|
576 | lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) | |||
|
577 | _email = Column("email", String(255), nullable=True, unique=None, default=None) | |||
|
578 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) | |||
|
579 | last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None) | |||
|
580 | description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) | |||
|
581 | ||||
|
582 | extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None) | |||
|
583 | extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None) | |||
|
584 | _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) | |||
|
585 | inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) | |||
|
586 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
587 | _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data | |||
|
588 | ||||
|
589 | user_log = relationship('UserLog') | |||
|
590 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan') | |||
|
591 | ||||
|
592 | repositories = relationship('Repository') | |||
|
593 | repository_groups = relationship('RepoGroup') | |||
|
594 | user_groups = relationship('UserGroup') | |||
|
595 | ||||
|
596 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') | |||
|
597 | followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all') | |||
|
598 | ||||
|
599 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan') | |||
|
600 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan') | |||
|
601 | user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan') | |||
|
602 | ||||
|
603 | group_member = relationship('UserGroupMember', cascade='all') | |||
|
604 | ||||
|
605 | notifications = relationship('UserNotification', cascade='all') | |||
|
606 | # notifications assigned to this user | |||
|
607 | user_created_notifications = relationship('Notification', cascade='all') | |||
|
608 | # comments created by this user | |||
|
609 | user_comments = relationship('ChangesetComment', cascade='all') | |||
|
610 | # user profile extra info | |||
|
611 | user_emails = relationship('UserEmailMap', cascade='all') | |||
|
612 | user_ip_map = relationship('UserIpMap', cascade='all') | |||
|
613 | user_auth_tokens = relationship('UserApiKeys', cascade='all') | |||
|
614 | user_ssh_keys = relationship('UserSshKeys', cascade='all') | |||
|
615 | ||||
|
616 | # gists | |||
|
617 | user_gists = relationship('Gist', cascade='all') | |||
|
618 | # user pull requests | |||
|
619 | user_pull_requests = relationship('PullRequest', cascade='all') | |||
|
620 | ||||
|
621 | # external identities | |||
|
622 | external_identities = relationship( | |||
|
623 | 'ExternalIdentity', | |||
|
624 | primaryjoin="User.user_id==ExternalIdentity.local_user_id", | |||
|
625 | cascade='all') | |||
|
626 | # review rules | |||
|
627 | user_review_rules = relationship('RepoReviewRuleUser', cascade='all') | |||
|
628 | ||||
|
629 | # artifacts owned | |||
|
630 | artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id') | |||
|
631 | ||||
|
632 | # no cascade, set NULL | |||
|
633 | scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id') | |||
|
634 | ||||
|
635 | def __unicode__(self): | |||
|
636 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, | |||
|
637 | self.user_id, self.username) | |||
|
638 | ||||
|
639 | @hybrid_property | |||
|
640 | def email(self): | |||
|
641 | return self._email | |||
|
642 | ||||
|
643 | @email.setter | |||
|
644 | def email(self, val): | |||
|
645 | self._email = val.lower() if val else None | |||
|
646 | ||||
|
647 | @hybrid_property | |||
|
648 | def first_name(self): | |||
|
649 | from rhodecode.lib import helpers as h | |||
|
650 | if self.name: | |||
|
651 | return h.escape(self.name) | |||
|
652 | return self.name | |||
|
653 | ||||
|
654 | @hybrid_property | |||
|
655 | def last_name(self): | |||
|
656 | from rhodecode.lib import helpers as h | |||
|
657 | if self.lastname: | |||
|
658 | return h.escape(self.lastname) | |||
|
659 | return self.lastname | |||
|
660 | ||||
|
661 | @hybrid_property | |||
|
662 | def api_key(self): | |||
|
663 | """ | |||
|
664 | Fetch if exist an auth-token with role ALL connected to this user | |||
|
665 | """ | |||
|
666 | user_auth_token = UserApiKeys.query()\ | |||
|
667 | .filter(UserApiKeys.user_id == self.user_id)\ | |||
|
668 | .filter(or_(UserApiKeys.expires == -1, | |||
|
669 | UserApiKeys.expires >= time.time()))\ | |||
|
670 | .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first() | |||
|
671 | if user_auth_token: | |||
|
672 | user_auth_token = user_auth_token.api_key | |||
|
673 | ||||
|
674 | return user_auth_token | |||
|
675 | ||||
|
676 | @api_key.setter | |||
|
677 | def api_key(self, val): | |||
|
678 | # don't allow to set API key this is deprecated for now | |||
|
679 | self._api_key = None | |||
|
680 | ||||
|
681 | @property | |||
|
682 | def reviewer_pull_requests(self): | |||
|
683 | return PullRequestReviewers.query() \ | |||
|
684 | .options(joinedload(PullRequestReviewers.pull_request)) \ | |||
|
685 | .filter(PullRequestReviewers.user_id == self.user_id) \ | |||
|
686 | .all() | |||
|
687 | ||||
|
688 | @property | |||
|
689 | def firstname(self): | |||
|
690 | # alias for future | |||
|
691 | return self.name | |||
|
692 | ||||
|
693 | @property | |||
|
694 | def emails(self): | |||
|
695 | other = UserEmailMap.query()\ | |||
|
696 | .filter(UserEmailMap.user == self) \ | |||
|
697 | .order_by(UserEmailMap.email_id.asc()) \ | |||
|
698 | .all() | |||
|
699 | return [self.email] + [x.email for x in other] | |||
|
700 | ||||
|
701 | def emails_cached(self): | |||
|
702 | emails = UserEmailMap.query()\ | |||
|
703 | .filter(UserEmailMap.user == self) \ | |||
|
704 | .order_by(UserEmailMap.email_id.asc()) | |||
|
705 | ||||
|
706 | emails = emails.options( | |||
|
707 | FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id)) | |||
|
708 | ) | |||
|
709 | ||||
|
710 | return [self.email] + [x.email for x in emails] | |||
|
711 | ||||
|
712 | @property | |||
|
713 | def auth_tokens(self): | |||
|
714 | auth_tokens = self.get_auth_tokens() | |||
|
715 | return [x.api_key for x in auth_tokens] | |||
|
716 | ||||
|
717 | def get_auth_tokens(self): | |||
|
718 | return UserApiKeys.query()\ | |||
|
719 | .filter(UserApiKeys.user == self)\ | |||
|
720 | .order_by(UserApiKeys.user_api_key_id.asc())\ | |||
|
721 | .all() | |||
|
722 | ||||
|
723 | @LazyProperty | |||
|
724 | def feed_token(self): | |||
|
725 | return self.get_feed_token() | |||
|
726 | ||||
|
    def get_feed_token(self, cache=True):
        """First ROLE_FEED token of this user, or a sentinel string.

        :param cache: use the short SQL cache for the lookup
        """
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        # sentinel keeps callers from building URLs with None
        return 'NO_FEED_TOKEN_AVAILABLE'

    @LazyProperty
    def artifact_token(self):
        # computed once per instance, then cached by LazyProperty
        return self.get_artifact_token()

    def get_artifact_token(self, cache=True):
        """First ROLE_ARTIFACT_DOWNLOAD token of this user, or a sentinel.

        :param cache: use the short SQL cache for the lookup
        """
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
        if cache:
            artifacts_tokens = artifacts_tokens.options(
                FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        return 'NO_ARTIFACT_TOKEN_AVAILABLE'

    @classmethod
    def get(cls, user_id, cache=False):
        """Fetch a user by primary key; optionally via the short SQL cache."""
        if not user_id:
            return

        user = cls.query()
        if cache:
            user = user.options(
                FromCache("sql_cache_short", "get_users_%s" % user_id))
        return user.get(user_id)

    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        """Non-expired tokens of ``user``.

        :param role: when given, restrict to this role — ROLE_ALL tokens
            always qualify as well
        """
        # expires == -1 marks a token that never expires
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()
|
777 | ||||
|
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check ``auth_token`` against this user's non-expired tokens.

        :param auth_token: plain token value supplied by the caller
        :param roles: optional list of acceptable roles; ROLE_ALL is always
            accepted in addition to the ones given
        :param scope_repo_id: repository id the call is made against; a token
            with a repo scope only matches when the ids are equal
        :return: ``True`` when a matching, scope-valid token was found
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        # expires == -1 marks a token that never expires
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # split stored tokens into plaintext ones and crypto-hashed ones
        # (hashed values carry the backend's ENC_PREF marker)
        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                # unscoped token: valid everywhere
                return True

        return False

    @property
    def ip_addresses(self):
        # whitelisted IP entries of this user (raw ip_addr strings)
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]
|
839 | ||||
|
840 | @property | |||
|
841 | def username_and_name(self): | |||
|
842 | return '%s (%s %s)' % (self.username, self.first_name, self.last_name) | |||
|
843 | ||||
|
844 | @property | |||
|
845 | def username_or_name_or_email(self): | |||
|
846 | full_name = self.full_name if self.full_name is not ' ' else None | |||
|
847 | return self.username or full_name or self.email | |||
|
848 | ||||
|
849 | @property | |||
|
850 | def full_name(self): | |||
|
851 | return '%s %s' % (self.first_name, self.last_name) | |||
|
852 | ||||
|
853 | @property | |||
|
854 | def full_name_or_username(self): | |||
|
855 | return ('%s %s' % (self.first_name, self.last_name) | |||
|
856 | if (self.first_name and self.last_name) else self.username) | |||
|
857 | ||||
|
858 | @property | |||
|
859 | def full_contact(self): | |||
|
860 | return '%s %s <%s>' % (self.first_name, self.last_name, self.email) | |||
|
861 | ||||
|
862 | @property | |||
|
863 | def short_contact(self): | |||
|
864 | return '%s %s' % (self.first_name, self.last_name) | |||
|
865 | ||||
|
    @property
    def is_admin(self):
        # convenience alias over the ``admin`` column
        return self.admin

    @property
    def language(self):
        # stored inside the JSON user_data blob, may be None
        return self.user_data.get('language')

    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)

    @hybrid_property
    def user_data(self):
        # _user_data stores a JSON string; unset/undecodable values → {}
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data)
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            # serialization failure is logged but deliberately not re-raised
            log.error(traceback.format_exc())
|
899 | ||||
|
    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Fetch a user by username, or None.

        :param case_insensitive: compare usernames lower-cased on both sides
        :param cache: use the short SQL cache for the query
        :param identity_cache: prefer the sqlalchemy identity-map lookup
            (only consulted when ``cache`` is also set)
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()

    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False):
        """Resolve a non-expired auth token to its owning user, or None."""
        q = UserApiKeys.query()\
            .filter(UserApiKeys.api_key == auth_token)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))

        match = q.first()
        if match:
            return match.user

    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """Fetch a user by primary email, falling back to UserEmailMap."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        email_key = _hash_key(email)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_email_key_%s" % email_key))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
            # getattr guards against a missing map row (scalar() → None)
            ret = getattr(q.scalar(), 'user', None)

        return ret

    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get User objects out of commit author string

        :param author:
        """
        from rhodecode.lib.helpers import email, author_name
        # Valid email in the attribute passed, see if they're in the system
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user:
            return user
|
986 | ||||
|
    def update_userdata(self, **kwargs):
        """Merge ``kwargs`` into the JSON-backed user_data blob."""
        usr = self
        old = usr.user_data
        old.update(**kwargs)
        # re-assign so the hybrid setter serializes the dict back to JSON
        usr.user_data = old
        Session().add(usr)
        log.debug('updated userdata with %s', kwargs)

    def update_lastlogin(self):
        """Update user lastlogin"""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)

    def update_password(self, new_password):
        """Store ``new_password`` hashed by the configured crypt backend."""
        from rhodecode.lib.auth import get_crypt_password

        self.password = get_crypt_password(new_password)
        Session().add(self)

    @classmethod
    def get_first_super_admin(cls):
        """Oldest (lowest user_id) super-admin; raises when none exists."""
        user = User.query()\
            .filter(User.admin == true()) \
            .order_by(User.user_id.asc()) \
            .first()

        if user is None:
            raise Exception('FATAL: Missing administrative account!')
        return user

    @classmethod
    def get_all_super_admins(cls, only_active=False):
        """
        Returns all admin accounts sorted by username
        """
        qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
        if only_active:
            qry = qry.filter(User.active == true())
        return qry.all()

    @classmethod
    def get_all_user_ids(cls, only_active=True):
        """
        Returns all users IDs
        """
        qry = Session().query(User.user_id)

        if only_active:
            qry = qry.filter(User.active == true())
        return [x.user_id for x in qry]

    @classmethod
    def get_default_user(cls, cache=False, refresh=False):
        """The special DEFAULT_USER account; raises when it is missing."""
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('FATAL: Missing default account!')
        if refresh:
            # The default user might be based on outdated state which
            # has been loaded from the cache.
            # A call to refresh() ensures that the
            # latest state from the database is used.
            Session().refresh(user)
        return user

    @classmethod
    def get_default_user_id(cls):
        # default user id is resolved into CONFIG at application startup
        import rhodecode
        return rhodecode.CONFIG['default_user_id']

    def _get_default_perms(self, user, suffix=''):
        # delegate default-permission extraction to PermissionModel
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)
|
1063 | ||||
|
1064 | def get_api_data(self, include_secrets=False, details='full'): | |||
|
1065 | """ | |||
|
1066 | Common function for generating user related data for API | |||
|
1067 | ||||
|
1068 | :param include_secrets: By default secrets in the API data will be replaced | |||
|
1069 | by a placeholder value to prevent exposing this data by accident. In case | |||
|
1070 | this data shall be exposed, set this flag to ``True``. | |||
|
1071 | ||||
|
1072 | :param details: details can be 'basic|full' basic gives only a subset of | |||
|
1073 | the available user information that includes user_id, name and emails. | |||
|
1074 | """ | |||
|
1075 | user = self | |||
|
1076 | user_data = self.user_data | |||
|
1077 | data = { | |||
|
1078 | 'user_id': user.user_id, | |||
|
1079 | 'username': user.username, | |||
|
1080 | 'firstname': user.name, | |||
|
1081 | 'lastname': user.lastname, | |||
|
1082 | 'description': user.description, | |||
|
1083 | 'email': user.email, | |||
|
1084 | 'emails': user.emails, | |||
|
1085 | } | |||
|
1086 | if details == 'basic': | |||
|
1087 | return data | |||
|
1088 | ||||
|
1089 | auth_token_length = 40 | |||
|
1090 | auth_token_replacement = '*' * auth_token_length | |||
|
1091 | ||||
|
1092 | extras = { | |||
|
1093 | 'auth_tokens': [auth_token_replacement], | |||
|
1094 | 'active': user.active, | |||
|
1095 | 'admin': user.admin, | |||
|
1096 | 'extern_type': user.extern_type, | |||
|
1097 | 'extern_name': user.extern_name, | |||
|
1098 | 'last_login': user.last_login, | |||
|
1099 | 'last_activity': user.last_activity, | |||
|
1100 | 'ip_addresses': user.ip_addresses, | |||
|
1101 | 'language': user_data.get('language') | |||
|
1102 | } | |||
|
1103 | data.update(extras) | |||
|
1104 | ||||
|
1105 | if include_secrets: | |||
|
1106 | data['auth_tokens'] = user.auth_tokens | |||
|
1107 | return data | |||
|
1108 | ||||
|
1109 | def __json__(self): | |||
|
1110 | data = { | |||
|
1111 | 'full_name': self.full_name, | |||
|
1112 | 'full_name_or_username': self.full_name_or_username, | |||
|
1113 | 'short_contact': self.short_contact, | |||
|
1114 | 'full_contact': self.full_contact, | |||
|
1115 | } | |||
|
1116 | data.update(self.get_api_data()) | |||
|
1117 | return data | |||
|
1118 | ||||
|
1119 | ||||
|
class UserApiKeys(Base, BaseModel):
    """
    Authentication tokens (API keys) owned by a user, optionally limited to a
    role and scoped to a single repository or repository group.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
    ROLE_PASSWORD_RESET = 'token_password_reset'

    # NOTE(review): ROLE_PASSWORD_RESET is deliberately absent from ROLES —
    # presumably so it cannot be assigned as a regular token role; confirm.
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # -1 means the token never expires; otherwise a unix timestamp
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        # mask the token value unless secrets were explicitly requested
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        # -1 marks a token that never expires
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # translated human-readable label; falls back to the raw role value
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
        }.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        # repo scope wins over repo-group scope; otherwise token is global
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # first 4 characters followed by a mask; returns None for no token
        if self.api_key:
            return self.api_key[:4] + "****"
|
1222 | ||||
|
1223 | ||||
|
class UserEmailMap(Base, BaseModel):
    """
    Alternative (non-primary) email addresses attached to a user.

    Emails are stored lower-cased and must not collide with any user's
    primary email.
    """
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # BUGFIX: original message read 'is present is user table'
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lower-case on write; empty values are stored as None
        self._email = val.lower() if val else None
|
1253 | ||||
|
1254 | ||||
|
class UserIpMap(Base, BaseModel):
    """Per-user IP whitelist entries (single addresses or CIDR ranges)."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        # expand an address/CIDR into its [network, broadcast] boundaries;
        # strict=False accepts host bits set in the mask
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
|
1289 | ||||
|
1290 | ||||
|
class UserSshKeys(Base, BaseModel):
    """SSH public keys registered for a user, unique per fingerprint."""
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        data = self.__json__()
        return data
|
1325 | ||||
|
1326 | ||||
|
class UserLog(Base, BaseModel):
    """Audit-log entries recording user actions against repositories."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # schema versions of the stored action/user JSON payloads
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        return self.user_log_id

    @property
    def action_as_day(self):
        # truncate the action timestamp to a plain datetime.date
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
|
1375 | ||||
|
1376 | ||||
|
1377 | class UserGroup(Base, BaseModel): | |||
|
1378 | __tablename__ = 'users_groups' | |||
|
1379 | __table_args__ = ( | |||
|
1380 | base_table_args, | |||
|
1381 | ) | |||
|
1382 | ||||
|
1383 | users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
1384 | users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None) | |||
|
1385 | user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None) | |||
|
1386 | users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None) | |||
|
1387 | inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) | |||
|
1388 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None) | |||
|
1389 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
1390 | _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data | |||
|
1391 | ||||
|
1392 | members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined") | |||
|
1393 | users_group_to_perm = relationship('UserGroupToPerm', cascade='all') | |||
|
1394 | users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all') | |||
|
1395 | users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all') | |||
|
1396 | user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all') | |||
|
1397 | user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all') | |||
|
1398 | ||||
|
1399 | user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all') | |||
|
1400 | user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id") | |||
|
1401 | ||||
|
1402 | @classmethod | |||
|
1403 | def _load_group_data(cls, column): | |||
|
1404 | if not column: | |||
|
1405 | return {} | |||
|
1406 | ||||
|
1407 | try: | |||
|
1408 | return json.loads(column) or {} | |||
|
1409 | except TypeError: | |||
|
1410 | return {} | |||
|
1411 | ||||
|
1412 | @hybrid_property | |||
|
1413 | def description_safe(self): | |||
|
1414 | from rhodecode.lib import helpers as h | |||
|
1415 | return h.escape(self.user_group_description) | |||
|
1416 | ||||
|
1417 | @hybrid_property | |||
|
1418 | def group_data(self): | |||
|
1419 | return self._load_group_data(self._group_data) | |||
|
1420 | ||||
|
1421 | @group_data.expression | |||
|
1422 | def group_data(self, **kwargs): | |||
|
1423 | return self._group_data | |||
|
1424 | ||||
|
1425 | @group_data.setter | |||
|
1426 | def group_data(self, val): | |||
|
1427 | try: | |||
|
1428 | self._group_data = json.dumps(val) | |||
|
1429 | except Exception: | |||
|
1430 | log.error(traceback.format_exc()) | |||
|
1431 | ||||
|
1432 | @classmethod | |||
|
1433 | def _load_sync(cls, group_data): | |||
|
1434 | if group_data: | |||
|
1435 | return group_data.get('extern_type') | |||
|
1436 | ||||
|
1437 | @property | |||
|
1438 | def sync(self): | |||
|
1439 | return self._load_sync(self.group_data) | |||
|
1440 | ||||
|
1441 | def __unicode__(self): | |||
|
1442 | return u"<%s('id:%s:%s')>" % (self.__class__.__name__, | |||
|
1443 | self.users_group_id, | |||
|
1444 | self.users_group_name) | |||
|
1445 | ||||
|
1446 | @classmethod | |||
|
1447 | def get_by_group_name(cls, group_name, cache=False, | |||
|
1448 | case_insensitive=False): | |||
|
1449 | if case_insensitive: | |||
|
1450 | q = cls.query().filter(func.lower(cls.users_group_name) == | |||
|
1451 | func.lower(group_name)) | |||
|
1452 | ||||
|
1453 | else: | |||
|
1454 | q = cls.query().filter(cls.users_group_name == group_name) | |||
|
1455 | if cache: | |||
|
1456 | q = q.options( | |||
|
1457 | FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name))) | |||
|
1458 | return q.scalar() | |||
|
1459 | ||||
|
1460 | @classmethod | |||
|
1461 | def get(cls, user_group_id, cache=False): | |||
|
1462 | if not user_group_id: | |||
|
1463 | return | |||
|
1464 | ||||
|
1465 | user_group = cls.query() | |||
|
1466 | if cache: | |||
|
1467 | user_group = user_group.options( | |||
|
1468 | FromCache("sql_cache_short", "get_users_group_%s" % user_group_id)) | |||
|
1469 | return user_group.get(user_group_id) | |||
|
1470 | ||||
|
1471 | def permissions(self, with_admins=True, with_owner=True, | |||
|
1472 | expand_from_user_groups=False): | |||
|
1473 | """ | |||
|
1474 | Permissions for user groups | |||
|
1475 | """ | |||
|
1476 | _admin_perm = 'usergroup.admin' | |||
|
1477 | ||||
|
1478 | owner_row = [] | |||
|
1479 | if with_owner: | |||
|
1480 | usr = AttributeDict(self.user.get_dict()) | |||
|
1481 | usr.owner_row = True | |||
|
1482 | usr.permission = _admin_perm | |||
|
1483 | owner_row.append(usr) | |||
|
1484 | ||||
|
1485 | super_admin_ids = [] | |||
|
1486 | super_admin_rows = [] | |||
|
1487 | if with_admins: | |||
|
1488 | for usr in User.get_all_super_admins(): | |||
|
1489 | super_admin_ids.append(usr.user_id) | |||
|
1490 | # if this admin is also owner, don't double the record | |||
|
1491 | if usr.user_id == owner_row[0].user_id: | |||
|
1492 | owner_row[0].admin_row = True | |||
|
1493 | else: | |||
|
1494 | usr = AttributeDict(usr.get_dict()) | |||
|
1495 | usr.admin_row = True | |||
|
1496 | usr.permission = _admin_perm | |||
|
1497 | super_admin_rows.append(usr) | |||
|
1498 | ||||
|
1499 | q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self) | |||
|
1500 | q = q.options(joinedload(UserUserGroupToPerm.user_group), | |||
|
1501 | joinedload(UserUserGroupToPerm.user), | |||
|
1502 | joinedload(UserUserGroupToPerm.permission),) | |||
|
1503 | ||||
|
1504 | # get owners and admins and permissions. We do a trick of re-writing | |||
|
1505 | # objects from sqlalchemy to named-tuples due to sqlalchemy session | |||
|
1506 | # has a global reference and changing one object propagates to all | |||
|
1507 | # others. This means if admin is also an owner admin_row that change | |||
|
1508 | # would propagate to both objects | |||
|
1509 | perm_rows = [] | |||
|
1510 | for _usr in q.all(): | |||
|
1511 | usr = AttributeDict(_usr.user.get_dict()) | |||
|
1512 | # if this user is also owner/admin, mark as duplicate record | |||
|
1513 | if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: | |||
|
1514 | usr.duplicate_perm = True | |||
|
1515 | usr.permission = _usr.permission.permission_name | |||
|
1516 | perm_rows.append(usr) | |||
|
1517 | ||||
|
1518 | # filter the perm rows by 'default' first and then sort them by | |||
|
1519 | # admin,write,read,none permissions sorted again alphabetically in | |||
|
1520 | # each group | |||
|
1521 | perm_rows = sorted(perm_rows, key=display_user_sort) | |||
|
1522 | ||||
|
1523 | user_groups_rows = [] | |||
|
1524 | if expand_from_user_groups: | |||
|
1525 | for ug in self.permission_user_groups(with_members=True): | |||
|
1526 | for user_data in ug.members: | |||
|
1527 | user_groups_rows.append(user_data) | |||
|
1528 | ||||
|
1529 | return super_admin_rows + owner_row + perm_rows + user_groups_rows | |||
|
1530 | ||||
|
1531 | def permission_user_groups(self, with_members=False): | |||
|
1532 | q = UserGroupUserGroupToPerm.query()\ | |||
|
1533 | .filter(UserGroupUserGroupToPerm.target_user_group == self) | |||
|
1534 | q = q.options(joinedload(UserGroupUserGroupToPerm.user_group), | |||
|
1535 | joinedload(UserGroupUserGroupToPerm.target_user_group), | |||
|
1536 | joinedload(UserGroupUserGroupToPerm.permission),) | |||
|
1537 | ||||
|
1538 | perm_rows = [] | |||
|
1539 | for _user_group in q.all(): | |||
|
1540 | entry = AttributeDict(_user_group.user_group.get_dict()) | |||
|
1541 | entry.permission = _user_group.permission.permission_name | |||
|
1542 | if with_members: | |||
|
1543 | entry.members = [x.user.get_dict() | |||
|
1544 | for x in _user_group.user_group.members] | |||
|
1545 | perm_rows.append(entry) | |||
|
1546 | ||||
|
1547 | perm_rows = sorted(perm_rows, key=display_user_group_sort) | |||
|
1548 | return perm_rows | |||
|
1549 | ||||
|
1550 | def _get_default_perms(self, user_group, suffix=''): | |||
|
1551 | from rhodecode.model.permission import PermissionModel | |||
|
1552 | return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix) | |||
|
1553 | ||||
|
1554 | def get_default_perms(self, suffix=''): | |||
|
1555 | return self._get_default_perms(self, suffix) | |||
|
1556 | ||||
|
1557 | def get_api_data(self, with_group_members=True, include_secrets=False): | |||
|
1558 | """ | |||
|
1559 | :param include_secrets: See :meth:`User.get_api_data`, this parameter is | |||
|
1560 | basically forwarded. | |||
|
1561 | ||||
|
1562 | """ | |||
|
1563 | user_group = self | |||
|
1564 | data = { | |||
|
1565 | 'users_group_id': user_group.users_group_id, | |||
|
1566 | 'group_name': user_group.users_group_name, | |||
|
1567 | 'group_description': user_group.user_group_description, | |||
|
1568 | 'active': user_group.users_group_active, | |||
|
1569 | 'owner': user_group.user.username, | |||
|
1570 | 'sync': user_group.sync, | |||
|
1571 | 'owner_email': user_group.user.email, | |||
|
1572 | } | |||
|
1573 | ||||
|
1574 | if with_group_members: | |||
|
1575 | users = [] | |||
|
1576 | for user in user_group.members: | |||
|
1577 | user = user.user | |||
|
1578 | users.append(user.get_api_data(include_secrets=include_secrets)) | |||
|
1579 | data['users'] = users | |||
|
1580 | ||||
|
1581 | return data | |||
|
1582 | ||||
|
1583 | ||||
|
class UserGroupMember(Base, BaseModel):
    """Association row linking one user to one user group (membership)."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # store only the foreign keys; relationships resolve lazily
        self.users_group_id = gr_id
        self.user_id = u_id
|
1600 | ||||
|
1601 | ||||
|
class RepositoryField(Base, BaseModel):
    """User-defined extra metadata field attached to a repository."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """Form-safe key: the raw key with the 'ex_' prefix applied."""
        return 'ex_%s' % self.field_key

    @classmethod
    def un_prefix_key(cls, key):
        """Strip ``cls.PREFIX`` from *key* when present; pass through otherwise."""
        if not key.startswith(cls.PREFIX):
            return key
        return key[len(cls.PREFIX):]

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Fetch the field row for *key* on *repo*, or None."""
        return cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
|
1638 | ||||
|
1639 | ||||
|
class Repository(Base, BaseModel):
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # clone URI templates rendered for users
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # repo lifecycle states
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # sources that may have taken a lock
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")

    # no cascade, set NULL
    artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
|
1749 | ||||
|
1750 | def __unicode__(self): | |||
|
1751 | return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, | |||
|
1752 | safe_unicode(self.repo_name)) | |||
|
1753 | ||||
|
1754 | @hybrid_property | |||
|
1755 | def description_safe(self): | |||
|
1756 | from rhodecode.lib import helpers as h | |||
|
1757 | return h.escape(self.description) | |||
|
1758 | ||||
|
1759 | @hybrid_property | |||
|
1760 | def landing_rev(self): | |||
|
1761 | # always should return [rev_type, rev] | |||
|
1762 | if self._landing_revision: | |||
|
1763 | _rev_info = self._landing_revision.split(':') | |||
|
1764 | if len(_rev_info) < 2: | |||
|
1765 | _rev_info.insert(0, 'rev') | |||
|
1766 | return [_rev_info[0], _rev_info[1]] | |||
|
1767 | return [None, None] | |||
|
1768 | ||||
|
1769 | @landing_rev.setter | |||
|
1770 | def landing_rev(self, val): | |||
|
1771 | if ':' not in val: | |||
|
1772 | raise ValueError('value must be delimited with `:` and consist ' | |||
|
1773 | 'of <rev_type>:<rev>, got %s instead' % val) | |||
|
1774 | self._landing_revision = val | |||
|
1775 | ||||
|
1776 | @hybrid_property | |||
|
1777 | def locked(self): | |||
|
1778 | if self._locked: | |||
|
1779 | user_id, timelocked, reason = self._locked.split(':') | |||
|
1780 | lock_values = int(user_id), timelocked, reason | |||
|
1781 | else: | |||
|
1782 | lock_values = [None, None, None] | |||
|
1783 | return lock_values | |||
|
1784 | ||||
|
1785 | @locked.setter | |||
|
1786 | def locked(self, val): | |||
|
1787 | if val and isinstance(val, (list, tuple)): | |||
|
1788 | self._locked = ':'.join(map(str, val)) | |||
|
1789 | else: | |||
|
1790 | self._locked = None | |||
|
1791 | ||||
|
1792 | @classmethod | |||
|
1793 | def _load_changeset_cache(cls, repo_id, changeset_cache_raw): | |||
|
1794 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |||
|
1795 | dummy = EmptyCommit().__json__() | |||
|
1796 | if not changeset_cache_raw: | |||
|
1797 | dummy['source_repo_id'] = repo_id | |||
|
1798 | return json.loads(json.dumps(dummy)) | |||
|
1799 | ||||
|
1800 | try: | |||
|
1801 | return json.loads(changeset_cache_raw) | |||
|
1802 | except TypeError: | |||
|
1803 | return dummy | |||
|
1804 | except Exception: | |||
|
1805 | log.error(traceback.format_exc()) | |||
|
1806 | return dummy | |||
|
1807 | ||||
|
1808 | @hybrid_property | |||
|
1809 | def changeset_cache(self): | |||
|
1810 | return self._load_changeset_cache(self.repo_id, self._changeset_cache) | |||
|
1811 | ||||
|
1812 | @changeset_cache.setter | |||
|
1813 | def changeset_cache(self, val): | |||
|
1814 | try: | |||
|
1815 | self._changeset_cache = json.dumps(val) | |||
|
1816 | except Exception: | |||
|
1817 | log.error(traceback.format_exc()) | |||
|
1818 | ||||
|
1819 | @hybrid_property | |||
|
1820 | def repo_name(self): | |||
|
1821 | return self._repo_name | |||
|
1822 | ||||
|
1823 | @repo_name.setter | |||
|
1824 | def repo_name(self, value): | |||
|
1825 | self._repo_name = value | |||
|
1826 | self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest() | |||
|
1827 | ||||
|
1828 | @classmethod | |||
|
1829 | def normalize_repo_name(cls, repo_name): | |||
|
1830 | """ | |||
|
1831 | Normalizes os specific repo_name to the format internally stored inside | |||
|
1832 | database using URL_SEP | |||
|
1833 | ||||
|
1834 | :param cls: | |||
|
1835 | :param repo_name: | |||
|
1836 | """ | |||
|
1837 | return cls.NAME_SEP.join(repo_name.split(os.sep)) | |||
|
1838 | ||||
|
1839 | @classmethod | |||
|
1840 | def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False): | |||
|
1841 | session = Session() | |||
|
1842 | q = session.query(cls).filter(cls.repo_name == repo_name) | |||
|
1843 | ||||
|
1844 | if cache: | |||
|
1845 | if identity_cache: | |||
|
1846 | val = cls.identity_cache(session, 'repo_name', repo_name) | |||
|
1847 | if val: | |||
|
1848 | return val | |||
|
1849 | else: | |||
|
1850 | cache_key = "get_repo_by_name_%s" % _hash_key(repo_name) | |||
|
1851 | q = q.options( | |||
|
1852 | FromCache("sql_cache_short", cache_key)) | |||
|
1853 | ||||
|
1854 | return q.scalar() | |||
|
1855 | ||||
|
1856 | @classmethod | |||
|
1857 | def get_by_id_or_repo_name(cls, repoid): | |||
|
1858 | if isinstance(repoid, (int, long)): | |||
|
1859 | try: | |||
|
1860 | repo = cls.get(repoid) | |||
|
1861 | except ValueError: | |||
|
1862 | repo = None | |||
|
1863 | else: | |||
|
1864 | repo = cls.get_by_repo_name(repoid) | |||
|
1865 | return repo | |||
|
1866 | ||||
|
1867 | @classmethod | |||
|
1868 | def get_by_full_path(cls, repo_full_path): | |||
|
1869 | repo_name = repo_full_path.split(cls.base_path(), 1)[-1] | |||
|
1870 | repo_name = cls.normalize_repo_name(repo_name) | |||
|
1871 | return cls.get_by_repo_name(repo_name.strip(URL_SEP)) | |||
|
1872 | ||||
|
1873 | @classmethod | |||
|
1874 | def get_repo_forks(cls, repo_id): | |||
|
1875 | return cls.query().filter(Repository.fork_id == repo_id) | |||
|
1876 | ||||
|
1877 | @classmethod | |||
|
1878 | def base_path(cls): | |||
|
1879 | """ | |||
|
1880 | Returns base path when all repos are stored | |||
|
1881 | ||||
|
1882 | :param cls: | |||
|
1883 | """ | |||
|
1884 | q = Session().query(RhodeCodeUi)\ | |||
|
1885 | .filter(RhodeCodeUi.ui_key == cls.NAME_SEP) | |||
|
1886 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) | |||
|
1887 | return q.one().ui_value | |||
|
1888 | ||||
|
1889 | @classmethod | |||
|
1890 | def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None), | |||
|
1891 | case_insensitive=True, archived=False): | |||
|
1892 | q = Repository.query() | |||
|
1893 | ||||
|
1894 | if not archived: | |||
|
1895 | q = q.filter(Repository.archived.isnot(true())) | |||
|
1896 | ||||
|
1897 | if not isinstance(user_id, Optional): | |||
|
1898 | q = q.filter(Repository.user_id == user_id) | |||
|
1899 | ||||
|
1900 | if not isinstance(group_id, Optional): | |||
|
1901 | q = q.filter(Repository.group_id == group_id) | |||
|
1902 | ||||
|
1903 | if case_insensitive: | |||
|
1904 | q = q.order_by(func.lower(Repository.repo_name)) | |||
|
1905 | else: | |||
|
1906 | q = q.order_by(Repository.repo_name) | |||
|
1907 | ||||
|
1908 | return q.all() | |||
|
1909 | ||||
|
1910 | @property | |||
|
1911 | def repo_uid(self): | |||
|
1912 | return '_{}'.format(self.repo_id) | |||
|
1913 | ||||
|
1914 | @property | |||
|
1915 | def forks(self): | |||
|
1916 | """ | |||
|
1917 | Return forks of this repo | |||
|
1918 | """ | |||
|
1919 | return Repository.get_repo_forks(self.repo_id) | |||
|
1920 | ||||
|
1921 | @property | |||
|
1922 | def parent(self): | |||
|
1923 | """ | |||
|
1924 | Returns fork parent | |||
|
1925 | """ | |||
|
1926 | return self.fork | |||
|
1927 | ||||
|
1928 | @property | |||
|
1929 | def just_name(self): | |||
|
1930 | return self.repo_name.split(self.NAME_SEP)[-1] | |||
|
1931 | ||||
|
1932 | @property | |||
|
1933 | def groups_with_parents(self): | |||
|
1934 | groups = [] | |||
|
1935 | if self.group is None: | |||
|
1936 | return groups | |||
|
1937 | ||||
|
1938 | cur_gr = self.group | |||
|
1939 | groups.insert(0, cur_gr) | |||
|
1940 | while 1: | |||
|
1941 | gr = getattr(cur_gr, 'parent_group', None) | |||
|
1942 | cur_gr = cur_gr.parent_group | |||
|
1943 | if gr is None: | |||
|
1944 | break | |||
|
1945 | groups.insert(0, gr) | |||
|
1946 | ||||
|
1947 | return groups | |||
|
1948 | ||||
|
1949 | @property | |||
|
1950 | def groups_and_repo(self): | |||
|
1951 | return self.groups_with_parents, self | |||
|
1952 | ||||
|
1953 | @LazyProperty | |||
|
1954 | def repo_path(self): | |||
|
1955 | """ | |||
|
1956 | Returns base full path for that repository means where it actually | |||
|
1957 | exists on a filesystem | |||
|
1958 | """ | |||
|
1959 | q = Session().query(RhodeCodeUi).filter( | |||
|
1960 | RhodeCodeUi.ui_key == self.NAME_SEP) | |||
|
1961 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) | |||
|
1962 | return q.one().ui_value | |||
|
1963 | ||||
|
1964 | @property | |||
|
1965 | def repo_full_path(self): | |||
|
1966 | p = [self.repo_path] | |||
|
1967 | # we need to split the name by / since this is how we store the | |||
|
1968 | # names in the database, but that eventually needs to be converted | |||
|
1969 | # into a valid system path | |||
|
1970 | p += self.repo_name.split(self.NAME_SEP) | |||
|
1971 | return os.path.join(*map(safe_unicode, p)) | |||
|
1972 | ||||
|
1973 | @property | |||
|
1974 | def cache_keys(self): | |||
|
1975 | """ | |||
|
1976 | Returns associated cache keys for that repo | |||
|
1977 | """ | |||
|
1978 | invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( | |||
|
1979 | repo_id=self.repo_id) | |||
|
1980 | return CacheKey.query()\ | |||
|
1981 | .filter(CacheKey.cache_args == invalidation_namespace)\ | |||
|
1982 | .order_by(CacheKey.cache_key)\ | |||
|
1983 | .all() | |||
|
1984 | ||||
|
1985 | @property | |||
|
1986 | def cached_diffs_relative_dir(self): | |||
|
1987 | """ | |||
|
1988 | Return a relative to the repository store path of cached diffs | |||
|
1989 | used for safe display for users, who shouldn't know the absolute store | |||
|
1990 | path | |||
|
1991 | """ | |||
|
1992 | return os.path.join( | |||
|
1993 | os.path.dirname(self.repo_name), | |||
|
1994 | self.cached_diffs_dir.split(os.path.sep)[-1]) | |||
|
1995 | ||||
|
1996 | @property | |||
|
1997 | def cached_diffs_dir(self): | |||
|
1998 | path = self.repo_full_path | |||
|
1999 | return os.path.join( | |||
|
2000 | os.path.dirname(path), | |||
|
2001 | '.__shadow_diff_cache_repo_{}'.format(self.repo_id)) | |||
|
2002 | ||||
|
2003 | def cached_diffs(self): | |||
|
2004 | diff_cache_dir = self.cached_diffs_dir | |||
|
2005 | if os.path.isdir(diff_cache_dir): | |||
|
2006 | return os.listdir(diff_cache_dir) | |||
|
2007 | return [] | |||
|
2008 | ||||
|
2009 | def shadow_repos(self): | |||
|
2010 | shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id) | |||
|
2011 | return [ | |||
|
2012 | x for x in os.listdir(os.path.dirname(self.repo_full_path)) | |||
|
2013 | if x.startswith(shadow_repos_pattern)] | |||
|
2014 | ||||
|
2015 | def get_new_name(self, repo_name): | |||
|
2016 | """ | |||
|
2017 | returns new full repository name based on assigned group and new new | |||
|
2018 | ||||
|
2019 | :param group_name: | |||
|
2020 | """ | |||
|
2021 | path_prefix = self.group.full_path_splitted if self.group else [] | |||
|
2022 | return self.NAME_SEP.join(path_prefix + [repo_name]) | |||
|
2023 | ||||
|
2024 | @property | |||
|
2025 | def _config(self): | |||
|
2026 | """ | |||
|
2027 | Returns db based config object. | |||
|
2028 | """ | |||
|
2029 | from rhodecode.lib.utils import make_db_config | |||
|
2030 | return make_db_config(clear_session=False, repo=self) | |||
|
2031 | ||||
|
2032 | def permissions(self, with_admins=True, with_owner=True, | |||
|
2033 | expand_from_user_groups=False): | |||
|
2034 | """ | |||
|
2035 | Permissions for repositories | |||
|
2036 | """ | |||
|
2037 | _admin_perm = 'repository.admin' | |||
|
2038 | ||||
|
2039 | owner_row = [] | |||
|
2040 | if with_owner: | |||
|
2041 | usr = AttributeDict(self.user.get_dict()) | |||
|
2042 | usr.owner_row = True | |||
|
2043 | usr.permission = _admin_perm | |||
|
2044 | usr.permission_id = None | |||
|
2045 | owner_row.append(usr) | |||
|
2046 | ||||
|
2047 | super_admin_ids = [] | |||
|
2048 | super_admin_rows = [] | |||
|
2049 | if with_admins: | |||
|
2050 | for usr in User.get_all_super_admins(): | |||
|
2051 | super_admin_ids.append(usr.user_id) | |||
|
2052 | # if this admin is also owner, don't double the record | |||
|
2053 | if usr.user_id == owner_row[0].user_id: | |||
|
2054 | owner_row[0].admin_row = True | |||
|
2055 | else: | |||
|
2056 | usr = AttributeDict(usr.get_dict()) | |||
|
2057 | usr.admin_row = True | |||
|
2058 | usr.permission = _admin_perm | |||
|
2059 | usr.permission_id = None | |||
|
2060 | super_admin_rows.append(usr) | |||
|
2061 | ||||
|
2062 | q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self) | |||
|
2063 | q = q.options(joinedload(UserRepoToPerm.repository), | |||
|
2064 | joinedload(UserRepoToPerm.user), | |||
|
2065 | joinedload(UserRepoToPerm.permission),) | |||
|
2066 | ||||
|
2067 | # get owners and admins and permissions. We do a trick of re-writing | |||
|
2068 | # objects from sqlalchemy to named-tuples due to sqlalchemy session | |||
|
2069 | # has a global reference and changing one object propagates to all | |||
|
2070 | # others. This means if admin is also an owner admin_row that change | |||
|
2071 | # would propagate to both objects | |||
|
2072 | perm_rows = [] | |||
|
2073 | for _usr in q.all(): | |||
|
2074 | usr = AttributeDict(_usr.user.get_dict()) | |||
|
2075 | # if this user is also owner/admin, mark as duplicate record | |||
|
2076 | if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: | |||
|
2077 | usr.duplicate_perm = True | |||
|
2078 | # also check if this permission is maybe used by branch_permissions | |||
|
2079 | if _usr.branch_perm_entry: | |||
|
2080 | usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry] | |||
|
2081 | ||||
|
2082 | usr.permission = _usr.permission.permission_name | |||
|
2083 | usr.permission_id = _usr.repo_to_perm_id | |||
|
2084 | perm_rows.append(usr) | |||
|
2085 | ||||
|
2086 | # filter the perm rows by 'default' first and then sort them by | |||
|
2087 | # admin,write,read,none permissions sorted again alphabetically in | |||
|
2088 | # each group | |||
|
2089 | perm_rows = sorted(perm_rows, key=display_user_sort) | |||
|
2090 | ||||
|
2091 | user_groups_rows = [] | |||
|
2092 | if expand_from_user_groups: | |||
|
2093 | for ug in self.permission_user_groups(with_members=True): | |||
|
2094 | for user_data in ug.members: | |||
|
2095 | user_groups_rows.append(user_data) | |||
|
2096 | ||||
|
2097 | return super_admin_rows + owner_row + perm_rows + user_groups_rows | |||
|
2098 | ||||
|
2099 | def permission_user_groups(self, with_members=True): | |||
|
2100 | q = UserGroupRepoToPerm.query()\ | |||
|
2101 | .filter(UserGroupRepoToPerm.repository == self) | |||
|
2102 | q = q.options(joinedload(UserGroupRepoToPerm.repository), | |||
|
2103 | joinedload(UserGroupRepoToPerm.users_group), | |||
|
2104 | joinedload(UserGroupRepoToPerm.permission),) | |||
|
2105 | ||||
|
2106 | perm_rows = [] | |||
|
2107 | for _user_group in q.all(): | |||
|
2108 | entry = AttributeDict(_user_group.users_group.get_dict()) | |||
|
2109 | entry.permission = _user_group.permission.permission_name | |||
|
2110 | if with_members: | |||
|
2111 | entry.members = [x.user.get_dict() | |||
|
2112 | for x in _user_group.users_group.members] | |||
|
2113 | perm_rows.append(entry) | |||
|
2114 | ||||
|
2115 | perm_rows = sorted(perm_rows, key=display_user_group_sort) | |||
|
2116 | return perm_rows | |||
|
2117 | ||||
|
2118 | def get_api_data(self, include_secrets=False): | |||
|
2119 | """ | |||
|
2120 | Common function for generating repo api data | |||
|
2121 | ||||
|
2122 | :param include_secrets: See :meth:`User.get_api_data`. | |||
|
2123 | ||||
|
2124 | """ | |||
|
2125 | # TODO: mikhail: Here there is an anti-pattern, we probably need to | |||
|
2126 | # move this methods on models level. | |||
|
2127 | from rhodecode.model.settings import SettingsModel | |||
|
2128 | from rhodecode.model.repo import RepoModel | |||
|
2129 | ||||
|
2130 | repo = self | |||
|
2131 | _user_id, _time, _reason = self.locked | |||
|
2132 | ||||
|
2133 | data = { | |||
|
2134 | 'repo_id': repo.repo_id, | |||
|
2135 | 'repo_name': repo.repo_name, | |||
|
2136 | 'repo_type': repo.repo_type, | |||
|
2137 | 'clone_uri': repo.clone_uri or '', | |||
|
2138 | 'push_uri': repo.push_uri or '', | |||
|
2139 | 'url': RepoModel().get_url(self), | |||
|
2140 | 'private': repo.private, | |||
|
2141 | 'created_on': repo.created_on, | |||
|
2142 | 'description': repo.description_safe, | |||
|
2143 | 'landing_rev': repo.landing_rev, | |||
|
2144 | 'owner': repo.user.username, | |||
|
2145 | 'fork_of': repo.fork.repo_name if repo.fork else None, | |||
|
2146 | 'fork_of_id': repo.fork.repo_id if repo.fork else None, | |||
|
2147 | 'enable_statistics': repo.enable_statistics, | |||
|
2148 | 'enable_locking': repo.enable_locking, | |||
|
2149 | 'enable_downloads': repo.enable_downloads, | |||
|
2150 | 'last_changeset': repo.changeset_cache, | |||
|
2151 | 'locked_by': User.get(_user_id).get_api_data( | |||
|
2152 | include_secrets=include_secrets) if _user_id else None, | |||
|
2153 | 'locked_date': time_to_datetime(_time) if _time else None, | |||
|
2154 | 'lock_reason': _reason if _reason else None, | |||
|
2155 | } | |||
|
2156 | ||||
|
2157 | # TODO: mikhail: should be per-repo settings here | |||
|
2158 | rc_config = SettingsModel().get_all_settings() | |||
|
2159 | repository_fields = str2bool( | |||
|
2160 | rc_config.get('rhodecode_repository_fields')) | |||
|
2161 | if repository_fields: | |||
|
2162 | for f in self.extra_fields: | |||
|
2163 | data[f.field_key_prefixed] = f.field_value | |||
|
2164 | ||||
|
2165 | return data | |||
|
2166 | ||||
|
2167 | @classmethod | |||
|
2168 | def lock(cls, repo, user_id, lock_time=None, lock_reason=None): | |||
|
2169 | if not lock_time: | |||
|
2170 | lock_time = time.time() | |||
|
2171 | if not lock_reason: | |||
|
2172 | lock_reason = cls.LOCK_AUTOMATIC | |||
|
2173 | repo.locked = [user_id, lock_time, lock_reason] | |||
|
2174 | Session().add(repo) | |||
|
2175 | Session().commit() | |||
|
2176 | ||||
|
2177 | @classmethod | |||
|
2178 | def unlock(cls, repo): | |||
|
2179 | repo.locked = None | |||
|
2180 | Session().add(repo) | |||
|
2181 | Session().commit() | |||
|
2182 | ||||
|
2183 | @classmethod | |||
|
2184 | def getlock(cls, repo): | |||
|
2185 | return repo.locked | |||
|
2186 | ||||
|
2187 | def is_user_lock(self, user_id): | |||
|
2188 | if self.lock[0]: | |||
|
2189 | lock_user_id = safe_int(self.lock[0]) | |||
|
2190 | user_id = safe_int(user_id) | |||
|
2191 | # both are ints, and they are equal | |||
|
2192 | return all([lock_user_id, user_id]) and lock_user_id == user_id | |||
|
2193 | ||||
|
2194 | return False | |||
|
2195 | ||||
|
2196 | def get_locking_state(self, action, user_id, only_when_enabled=True): | |||
|
2197 | """ | |||
|
2198 | Checks locking on this repository, if locking is enabled and lock is | |||
|
2199 | present returns a tuple of make_lock, locked, locked_by. | |||
|
2200 | make_lock can have 3 states None (do nothing) True, make lock | |||
|
2201 | False release lock, This value is later propagated to hooks, which | |||
|
2202 | do the locking. Think about this as signals passed to hooks what to do. | |||
|
2203 | ||||
|
2204 | """ | |||
|
2205 | # TODO: johbo: This is part of the business logic and should be moved | |||
|
2206 | # into the RepositoryModel. | |||
|
2207 | ||||
|
2208 | if action not in ('push', 'pull'): | |||
|
2209 | raise ValueError("Invalid action value: %s" % repr(action)) | |||
|
2210 | ||||
|
2211 | # defines if locked error should be thrown to user | |||
|
2212 | currently_locked = False | |||
|
2213 | # defines if new lock should be made, tri-state | |||
|
2214 | make_lock = None | |||
|
2215 | repo = self | |||
|
2216 | user = User.get(user_id) | |||
|
2217 | ||||
|
2218 | lock_info = repo.locked | |||
|
2219 | ||||
|
2220 | if repo and (repo.enable_locking or not only_when_enabled): | |||
|
2221 | if action == 'push': | |||
|
2222 | # check if it's already locked !, if it is compare users | |||
|
2223 | locked_by_user_id = lock_info[0] | |||
|
2224 | if user.user_id == locked_by_user_id: | |||
|
2225 | log.debug( | |||
|
2226 | 'Got `push` action from user %s, now unlocking', user) | |||
|
2227 | # unlock if we have push from user who locked | |||
|
2228 | make_lock = False | |||
|
2229 | else: | |||
|
2230 | # we're not the same user who locked, ban with | |||
|
2231 | # code defined in settings (default is 423 HTTP Locked) ! | |||
|
2232 | log.debug('Repo %s is currently locked by %s', repo, user) | |||
|
2233 | currently_locked = True | |||
|
2234 | elif action == 'pull': | |||
|
2235 | # [0] user [1] date | |||
|
2236 | if lock_info[0] and lock_info[1]: | |||
|
2237 | log.debug('Repo %s is currently locked by %s', repo, user) | |||
|
2238 | currently_locked = True | |||
|
2239 | else: | |||
|
2240 | log.debug('Setting lock on repo %s by %s', repo, user) | |||
|
2241 | make_lock = True | |||
|
2242 | ||||
|
2243 | else: | |||
|
2244 | log.debug('Repository %s do not have locking enabled', repo) | |||
|
2245 | ||||
|
2246 | log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s', | |||
|
2247 | make_lock, currently_locked, lock_info) | |||
|
2248 | ||||
|
2249 | from rhodecode.lib.auth import HasRepoPermissionAny | |||
|
2250 | perm_check = HasRepoPermissionAny('repository.write', 'repository.admin') | |||
|
2251 | if make_lock and not perm_check(repo_name=repo.repo_name, user=user): | |||
|
2252 | # if we don't have at least write permission we cannot make a lock | |||
|
2253 | log.debug('lock state reset back to FALSE due to lack ' | |||
|
2254 | 'of at least read permission') | |||
|
2255 | make_lock = False | |||
|
2256 | ||||
|
2257 | return make_lock, currently_locked, lock_info | |||
|
2258 | ||||
|
2259 | @property | |||
|
2260 | def last_commit_cache_update_diff(self): | |||
|
2261 | return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) | |||
|
2262 | ||||
|
2263 | @classmethod | |||
|
2264 | def _load_commit_change(cls, last_commit_cache): | |||
|
2265 | from rhodecode.lib.vcs.utils.helpers import parse_datetime | |||
|
2266 | empty_date = datetime.datetime.fromtimestamp(0) | |||
|
2267 | date_latest = last_commit_cache.get('date', empty_date) | |||
|
2268 | try: | |||
|
2269 | return parse_datetime(date_latest) | |||
|
2270 | except Exception: | |||
|
2271 | return empty_date | |||
|
2272 | ||||
|
    @property
    def last_commit_change(self):
        """Parsed datetime of the last cached commit (epoch if unknown)."""
        return self._load_commit_change(self.changeset_cache)

    @property
    def last_db_change(self):
        """Datetime this database row was last updated."""
        return self.updated_on
|
2280 | ||||
|
2281 | @property | |||
|
2282 | def clone_uri_hidden(self): | |||
|
2283 | clone_uri = self.clone_uri | |||
|
2284 | if clone_uri: | |||
|
2285 | import urlobject | |||
|
2286 | url_obj = urlobject.URLObject(cleaned_uri(clone_uri)) | |||
|
2287 | if url_obj.password: | |||
|
2288 | clone_uri = url_obj.with_password('*****') | |||
|
2289 | return clone_uri | |||
|
2290 | ||||
|
2291 | @property | |||
|
2292 | def push_uri_hidden(self): | |||
|
2293 | push_uri = self.push_uri | |||
|
2294 | if push_uri: | |||
|
2295 | import urlobject | |||
|
2296 | url_obj = urlobject.URLObject(cleaned_uri(push_uri)) | |||
|
2297 | if url_obj.password: | |||
|
2298 | push_uri = url_obj.with_password('*****') | |||
|
2299 | return push_uri | |||
|
2300 | ||||
|
    def clone_url(self, **override):
        """Build the clone URL for this repo from the configured template.

        Recognized override keys (consumed, not forwarded):
          with_id  - use the id-based template (DEFAULT_CLONE_URI_ID)
          uri_tmpl - explicit template string
          ssh      - use the SSH template
        Remaining keyword args are passed through to get_clone_url().
        """
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        request = get_current_request()
        if not uri_tmpl:
            # prefer per-request config when available, else hit settings cache
            if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
                rc_config = request.call_context.rc_config
            else:
                rc_config = SettingsModel().get_all_settings(cache=True)

            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH

            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id,
                             repo_type=self.repo_type,
                             **override)
|
2340 | ||||
|
    def set_state(self, state):
        """Set repo_state; adds to the session but does not commit."""
        self.repo_state = state
        Session().add(self)
|
2344 | #========================================================================== | |||
|
2345 | # SCM PROPERTIES | |||
|
2346 | #========================================================================== | |||
|
2347 | ||||
|
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False):
        """Fetch a commit by id or index via the (cached) scm instance."""
        return get_commit_safe(
            self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable)
|
2352 | ||||
|
2353 | def get_changeset(self, rev=None, pre_load=None): | |||
|
2354 | warnings.warn("Use get_commit", DeprecationWarning) | |||
|
2355 | commit_id = None | |||
|
2356 | commit_idx = None | |||
|
2357 | if isinstance(rev, compat.string_types): | |||
|
2358 | commit_id = rev | |||
|
2359 | else: | |||
|
2360 | commit_idx = rev | |||
|
2361 | return self.get_commit(commit_id=commit_id, commit_idx=commit_idx, | |||
|
2362 | pre_load=pre_load) | |||
|
2363 | ||||
|
    def get_landing_commit(self):
        """
        Returns landing commit, or if that doesn't exist returns the tip
        """
        # landing_rev is a (type, value) pair; only the value is needed here
        _rev_type, _rev = self.landing_rev
        commit = self.get_commit(_rev)
        if isinstance(commit, EmptyCommit):
            return self.get_commit()
        return commit
|
2373 | ||||
|
    def flush_commit_cache(self):
        """Force-refresh the commit cache.

        First poisons the cache with a dummy raw_id so the follow-up
        update_commit_cache() call sees it as outdated and recomputes.
        """
        self.update_commit_cache(cs_cache={'raw_id':'0'})
        self.update_commit_cache()
|
2377 | ||||
|
2378 | def update_commit_cache(self, cs_cache=None, config=None): | |||
|
2379 | """ | |||
|
2380 | Update cache of last commit for repository | |||
|
2381 | cache_keys should be:: | |||
|
2382 | ||||
|
2383 | source_repo_id | |||
|
2384 | short_id | |||
|
2385 | raw_id | |||
|
2386 | revision | |||
|
2387 | parents | |||
|
2388 | message | |||
|
2389 | date | |||
|
2390 | author | |||
|
2391 | updated_on | |||
|
2392 | ||||
|
2393 | """ | |||
|
2394 | from rhodecode.lib.vcs.backends.base import BaseChangeset | |||
|
2395 | from rhodecode.lib.vcs.utils.helpers import parse_datetime | |||
|
2396 | empty_date = datetime.datetime.fromtimestamp(0) | |||
|
2397 | ||||
|
2398 | if cs_cache is None: | |||
|
2399 | # use no-cache version here | |||
|
2400 | try: | |||
|
2401 | scm_repo = self.scm_instance(cache=False, config=config) | |||
|
2402 | except VCSError: | |||
|
2403 | scm_repo = None | |||
|
2404 | empty = scm_repo is None or scm_repo.is_empty() | |||
|
2405 | ||||
|
2406 | if not empty: | |||
|
2407 | cs_cache = scm_repo.get_commit( | |||
|
2408 | pre_load=["author", "date", "message", "parents", "branch"]) | |||
|
2409 | else: | |||
|
2410 | cs_cache = EmptyCommit() | |||
|
2411 | ||||
|
2412 | if isinstance(cs_cache, BaseChangeset): | |||
|
2413 | cs_cache = cs_cache.__json__() | |||
|
2414 | ||||
|
2415 | def is_outdated(new_cs_cache): | |||
|
2416 | if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or | |||
|
2417 | new_cs_cache['revision'] != self.changeset_cache['revision']): | |||
|
2418 | return True | |||
|
2419 | return False | |||
|
2420 | ||||
|
2421 | # check if we have maybe already latest cached revision | |||
|
2422 | if is_outdated(cs_cache) or not self.changeset_cache: | |||
|
2423 | _current_datetime = datetime.datetime.utcnow() | |||
|
2424 | last_change = cs_cache.get('date') or _current_datetime | |||
|
2425 | # we check if last update is newer than the new value | |||
|
2426 | # if yes, we use the current timestamp instead. Imagine you get | |||
|
2427 | # old commit pushed 1y ago, we'd set last update 1y to ago. | |||
|
2428 | last_change_timestamp = datetime_to_time(last_change) | |||
|
2429 | current_timestamp = datetime_to_time(last_change) | |||
|
2430 | if last_change_timestamp > current_timestamp and not empty: | |||
|
2431 | cs_cache['date'] = _current_datetime | |||
|
2432 | ||||
|
2433 | _date_latest = parse_datetime(cs_cache.get('date') or empty_date) | |||
|
2434 | cs_cache['updated_on'] = time.time() | |||
|
2435 | self.changeset_cache = cs_cache | |||
|
2436 | self.updated_on = last_change | |||
|
2437 | Session().add(self) | |||
|
2438 | Session().commit() | |||
|
2439 | ||||
|
2440 | else: | |||
|
2441 | if empty: | |||
|
2442 | cs_cache = EmptyCommit().__json__() | |||
|
2443 | else: | |||
|
2444 | cs_cache = self.changeset_cache | |||
|
2445 | ||||
|
2446 | _date_latest = parse_datetime(cs_cache.get('date') or empty_date) | |||
|
2447 | ||||
|
2448 | cs_cache['updated_on'] = time.time() | |||
|
2449 | self.changeset_cache = cs_cache | |||
|
2450 | self.updated_on = _date_latest | |||
|
2451 | Session().add(self) | |||
|
2452 | Session().commit() | |||
|
2453 | ||||
|
2454 | log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s', | |||
|
2455 | self.repo_name, cs_cache, _date_latest) | |||
|
2456 | ||||
|
    @property
    def tip(self):
        """The repository's tip commit."""
        return self.get_commit('tip')

    @property
    def author(self):
        """Author of the tip commit."""
        return self.tip.author

    @property
    def last_change(self):
        """Last change time reported by the scm backend."""
        return self.scm_instance().last_change
|
2468 | ||||
|
2469 | def get_comments(self, revisions=None): | |||
|
2470 | """ | |||
|
2471 | Returns comments for this repository grouped by revisions | |||
|
2472 | ||||
|
2473 | :param revisions: filter query by revisions only | |||
|
2474 | """ | |||
|
2475 | cmts = ChangesetComment.query()\ | |||
|
2476 | .filter(ChangesetComment.repo == self) | |||
|
2477 | if revisions: | |||
|
2478 | cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) | |||
|
2479 | grouped = collections.defaultdict(list) | |||
|
2480 | for cmt in cmts.all(): | |||
|
2481 | grouped[cmt.revision].append(cmt) | |||
|
2482 | return grouped | |||
|
2483 | ||||
|
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :returns: dict revision -> [status, status label, pr id, pr repo name]
        """
        # version == 0 restricts to the latest (non-superseded) status rows
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # explicit statuses overwrite the implicit under-review default above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
|
2525 | ||||
|
2526 | # ========================================================================== | |||
|
2527 | # SCM CACHE INSTANCE | |||
|
2528 | # ========================================================================== | |||
|
2529 | ||||
|
    def scm_instance(self, **kwargs):
        """Return a vcs instance for this repo, cached when allowed.

        Keyword args: `config` (bypasses cache), `cache` (forwarded to the
        vcs server wire), `vcs_full_cache` (overrides the global flag).
        """
        import rhodecode

        # Passing a config will not hit the cache currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        vcs_full_cache = kwargs.pop('vcs_full_cache', None)
        if vcs_full_cache is not None:
            # allows override global config
            full_cache = vcs_full_cache
        else:
            full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use default global, else we have a full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            log.debug('Initializing pure cached instance for %s', self.repo_path)
            return self._get_instance_cached()

        # cache here is sent to the "vcs server"
        return self._get_instance(cache=bool(cache), config=config)
|
2551 | ||||
|
    def _get_instance_cached(self):
        """Return the vcs instance through the long-term dogpile cache.

        Uses a per-repo cache namespace plus an invalidation context; a
        pending invalidation signal forces a refresh of the cached value.
        """
        from rhodecode.lib import rc_cache

        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id, context_id, _cache_state_uid):
            return self._get_instance(repo_state_uid=_cache_state_uid)

        # we must use thread scoped cache here,
        # because each thread of gevent needs it's own not shared connection and cache
        # we also alter `args` so the cache key is individual for every green thread.
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.cache_data['cache_state_uid']
            args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)

            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

            log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
            return instance
|
2582 | ||||
|
    def _get_instance(self, cache=True, config=None, repo_state_uid=None):
        """Build a fresh vcs instance for this repo (no dogpile caching).

        :param cache: controls the vcs.remote wire cache
        :param config: vcs config; falls back to the repo's own config
        :param repo_state_uid: state marker forwarded on the wire
        """
        log.debug('Initializing %s instance `%s` with cache flag set to: %s',
                  self.repo_type, self.repo_path, cache)
        config = config or self._config
        custom_wire = {
            'cache': cache,  # controls the vcs.remote cache
            'repo_state_uid': repo_state_uid
        }
        repo = get_vcs_instance(
            repo_path=safe_str(self.repo_full_path),
            config=config,
            with_wire=custom_wire,
            create=False,
            _vcs_alias=self.repo_type)
        if repo is not None:
            repo.count()  # cache rebuild
        return repo
|
2600 | ||||
|
2601 | def get_shadow_repository_path(self, workspace_id): | |||
|
2602 | from rhodecode.lib.vcs.backends.base import BaseRepository | |||
|
2603 | shadow_repo_path = BaseRepository._get_shadow_repository_path( | |||
|
2604 | self.repo_full_path, self.repo_id, workspace_id) | |||
|
2605 | return shadow_repo_path | |||
|
2606 | ||||
|
    def __json__(self):
        """Extra fields merged into JSON serialization of this model."""
        return {'landing_rev': self.landing_rev}

    def get_dict(self):
        """Model dict with `_repo_name` exposed under its public name."""

        # Since we transformed `repo_name` to a hybrid property, we need to
        # keep compatibility with the code which uses `repo_name` field.

        result = super(Repository, self).get_dict()
        result['repo_name'] = result.pop('_repo_name', None)
        return result
|
2618 | ||||
|
2619 | ||||
|
class RepoGroup(Base, BaseModel):
    """Nested grouping of repositories (the `groups` table)."""
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # full path-like name; written through the `group_name` hybrid property
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration', cascade="all, delete-orphan")

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')
|
2650 | ||||
|
    def __init__(self, group_name='', parent_group=None):
        """Create a group with the given full name and optional parent."""
        self.group_name = group_name
        self.parent_group = parent_group
|
2654 | ||||
|
2655 | def __unicode__(self): | |||
|
2656 | return u"<%s('id:%s:%s')>" % ( | |||
|
2657 | self.__class__.__name__, self.group_id, self.group_name) | |||
|
2658 | ||||
|
    @hybrid_property
    def group_name(self):
        """Full path-like group name backed by the `_group_name` column."""
        return self._group_name

    @group_name.setter
    def group_name(self, value):
        # keep the lookup hash in sync with every name change
        self._group_name = value
        self.group_name_hash = self.hash_repo_group_name(value)
|
2667 | ||||
|
    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        """Deserialize the raw changeset cache; empty-commit dict on failure.

        NOTE(review): on the error paths the returned dummy does not get
        `source_repo_id` set to `repo_id` — confirm that is intended.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            # round-trip through json to normalize types
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy
|
2683 | ||||
|
    @hybrid_property
    def changeset_cache(self):
        """Deserialized changeset cache dict for this group."""
        return self._load_changeset_cache('', self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        # best-effort serialization; failures are logged, not raised
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
|
2694 | ||||
|
    @validates('group_parent_id')
    def validate_group_parent_id(self, key, val):
        """
        Check cycle references for a parent group to self
        """
        # NOTE: `assert` is stripped under python -O; callers currently rely
        # on AssertionError here, so the check is kept as-is.
        if self.group_id and val:
            assert val != self.group_id

        return val
|
2704 | ||||
|
    @hybrid_property
    def description_safe(self):
        """HTML-escaped group description, safe for template output."""
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)
|
2709 | ||||
|
2710 | @classmethod | |||
|
2711 | def hash_repo_group_name(cls, repo_group_name): | |||
|
2712 | val = remove_formatting(repo_group_name) | |||
|
2713 | val = safe_str(val).lower() | |||
|
2714 | chars = [] | |||
|
2715 | for c in val: | |||
|
2716 | if c not in string.ascii_letters: | |||
|
2717 | c = str(ord(c)) | |||
|
2718 | chars.append(c) | |||
|
2719 | ||||
|
2720 | return ''.join(chars) | |||
|
2721 | ||||
|
    @classmethod
    def _generate_choice(cls, repo_group):
        """Build one (group_id, label) select2 choice for `repo_group`.

        The label joins the split path with CHOICES_SEPARATOR and is
        wrapped in `literal` so it is treated as pre-escaped HTML.
        """
        from webhelpers2.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)
|
2727 | ||||
|
    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        """Select2 choices for repo groups, sorted by top-level path part.

        :param groups: iterable of RepoGroup; defaults to all groups
        :param show_empty_group: prepend the (-1, '-- No parent --') entry
        """
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [(-1, u'-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        # sort on the first path segment of the label
        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups
|
2742 | ||||
|
    @classmethod
    def url_sep(cls):
        """Separator used in full group names/paths."""
        return URL_SEP
|
2746 | ||||
|
    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Fetch a group by its full name, optionally via the SQL cache."""
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            # cache key is derived from the (raw) requested name
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()
|
2759 | ||||
|
2760 | @classmethod | |||
|
2761 | def get_user_personal_repo_group(cls, user_id): | |||
|
2762 | user = User.get(user_id) | |||
|
2763 | if user.username == User.DEFAULT_USER: | |||
|
2764 | return None | |||
|
2765 | ||||
|
2766 | return cls.query()\ | |||
|
2767 | .filter(cls.personal == true()) \ | |||
|
2768 | .filter(cls.user == user) \ | |||
|
2769 | .order_by(cls.group_id.asc()) \ | |||
|
2770 | .first() | |||
|
2771 | ||||
|
    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        """List repo groups, optionally filtered by owner and/or parent.

        `Optional(None)` is a sentinel: a filter is applied only when the
        caller passes a real value for that argument.
        """
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()
|
2788 | ||||
|
    @property
    def parents(self, parents_recursion_limit=10):
        """Ancestor chain of this group, root first, capped at the limit."""
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinit loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            # prepend so the list stays ordered root -> ... -> direct parent
            groups.insert(0, gr)
        return groups
|
2811 | ||||
|
2812 | @property | |||
|
2813 | def last_commit_cache_update_diff(self): | |||
|
2814 | return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) | |||
|
2815 | ||||
|
2816 | @classmethod | |||
|
2817 | def _load_commit_change(cls, last_commit_cache): | |||
|
2818 | from rhodecode.lib.vcs.utils.helpers import parse_datetime | |||
|
2819 | empty_date = datetime.datetime.fromtimestamp(0) | |||
|
2820 | date_latest = last_commit_cache.get('date', empty_date) | |||
|
2821 | try: | |||
|
2822 | return parse_datetime(date_latest) | |||
|
2823 | except Exception: | |||
|
2824 | return empty_date | |||
|
2825 | ||||
|
    @property
    def last_commit_change(self):
        """Parsed datetime of the last cached commit (epoch if unknown)."""
        return self._load_commit_change(self.changeset_cache)

    @property
    def last_db_change(self):
        """Datetime this database row was last updated."""
        return self.updated_on
|
2833 | ||||
|
    @property
    def children(self):
        """Query over the direct child groups of this group."""
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        """Last path segment of the full group name."""
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        """Full path of this group (same as its group name)."""
        return self.group_name

    @property
    def full_path_splitted(self):
        """Full group path split into its segments."""
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        """Query over repositories directly in this group, sorted by name."""
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)
|
2855 | ||||
|
2856 | @property | |||
|
2857 | def repositories_recursive_count(self): | |||
|
2858 | cnt = self.repositories.count() | |||
|
2859 | ||||
|
2860 | def children_count(group): | |||
|
2861 | cnt = 0 | |||
|
2862 | for child in group.children: | |||
|
2863 | cnt += child.repositories.count() | |||
|
2864 | cnt += children_count(child) | |||
|
2865 | return cnt | |||
|
2866 | ||||
|
2867 | return cnt + children_count(self) | |||
|
2868 | ||||
|
    def _recursive_objects(self, include_repos=True, include_groups=True):
        """Depth-first collection of this group's subtree.

        Order matters to callers: self first (when groups are included),
        then each level's repositories, then each child group followed by
        its own members.
        """
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    if include_groups:
                        all_.append(gr)
                    _get_members(gr)

        root_group = []
        if include_groups:
            root_group = [self]

        _get_members(self)
        return root_group + all_
|
2889 | ||||
|
    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def recursive_repos(self):
        """
        Returns all children repositories for this group
        """
        return self._recursive_objects(include_groups=False)
|
2907 | ||||
|
2908 | def get_new_name(self, group_name): | |||
|
2909 | """ | |||
|
2910 | returns new full group name based on parent and new name | |||
|
2911 | ||||
|
2912 | :param group_name: | |||
|
2913 | """ | |||
|
2914 | path_prefix = (self.parent_group.full_path_splitted if | |||
|
2915 | self.parent_group else []) | |||
|
2916 | return RepoGroup.url_sep().join(path_prefix + [group_name]) | |||
|
2917 | ||||
|
    def update_commit_cache(self, config=None):
        """
        Update cache of last commit for newest repository inside this repository group.
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)

        def repo_groups_and_repos(root_gr):
            # direct repos and direct child groups only; child groups are
            # presumably expected to carry their own aggregated caches —
            # deeper levels are not walked here (confirm with callers)
            for _repo in root_gr.repositories:
                yield _repo
            for child_group in root_gr.children.all():
                yield child_group

        # pick the member (repo or subgroup) with the newest cached date
        latest_repo_cs_cache = {}
        for obj in repo_groups_and_repos(self):
            repo_cs_cache = obj.changeset_cache
            date_latest = latest_repo_cs_cache.get('date', empty_date)
            date_current = repo_cs_cache.get('date', empty_date)
            current_timestamp = datetime_to_time(parse_datetime(date_latest))
            if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                latest_repo_cs_cache = repo_cs_cache
                if hasattr(obj, 'repo_id'):
                    latest_repo_cs_cache['source_repo_id'] = obj.repo_id
                else:
                    latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')

        _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)

        latest_repo_cs_cache['updated_on'] = time.time()
        self.changeset_cache = latest_repo_cs_cache
        self.updated_on = _date_latest
        Session().add(self)
        Session().commit()

        log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
                  self.group_name, latest_repo_cs_cache, _date_latest)
|
2965 | ||||
|
2966 | def permissions(self, with_admins=True, with_owner=True, | |||
|
2967 | expand_from_user_groups=False): | |||
|
2968 | """ | |||
|
2969 | Permissions for repository groups | |||
|
2970 | """ | |||
|
2971 | _admin_perm = 'group.admin' | |||
|
2972 | ||||
|
2973 | owner_row = [] | |||
|
2974 | if with_owner: | |||
|
2975 | usr = AttributeDict(self.user.get_dict()) | |||
|
2976 | usr.owner_row = True | |||
|
2977 | usr.permission = _admin_perm | |||
|
2978 | owner_row.append(usr) | |||
|
2979 | ||||
|
2980 | super_admin_ids = [] | |||
|
2981 | super_admin_rows = [] | |||
|
2982 | if with_admins: | |||
|
2983 | for usr in User.get_all_super_admins(): | |||
|
2984 | super_admin_ids.append(usr.user_id) | |||
|
2985 | # if this admin is also owner, don't double the record | |||
|
2986 | if usr.user_id == owner_row[0].user_id: | |||
|
2987 | owner_row[0].admin_row = True | |||
|
2988 | else: | |||
|
2989 | usr = AttributeDict(usr.get_dict()) | |||
|
2990 | usr.admin_row = True | |||
|
2991 | usr.permission = _admin_perm | |||
|
2992 | super_admin_rows.append(usr) | |||
|
2993 | ||||
|
2994 | q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self) | |||
|
2995 | q = q.options(joinedload(UserRepoGroupToPerm.group), | |||
|
2996 | joinedload(UserRepoGroupToPerm.user), | |||
|
2997 | joinedload(UserRepoGroupToPerm.permission),) | |||
|
2998 | ||||
|
2999 | # get owners and admins and permissions. We do a trick of re-writing | |||
|
3000 | # objects from sqlalchemy to named-tuples due to sqlalchemy session | |||
|
3001 | # has a global reference and changing one object propagates to all | |||
|
3002 | # others. This means if admin is also an owner admin_row that change | |||
|
3003 | # would propagate to both objects | |||
|
3004 | perm_rows = [] | |||
|
3005 | for _usr in q.all(): | |||
|
3006 | usr = AttributeDict(_usr.user.get_dict()) | |||
|
3007 | # if this user is also owner/admin, mark as duplicate record | |||
|
3008 | if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: | |||
|
3009 | usr.duplicate_perm = True | |||
|
3010 | usr.permission = _usr.permission.permission_name | |||
|
3011 | perm_rows.append(usr) | |||
|
3012 | ||||
|
3013 | # filter the perm rows by 'default' first and then sort them by | |||
|
3014 | # admin,write,read,none permissions sorted again alphabetically in | |||
|
3015 | # each group | |||
|
3016 | perm_rows = sorted(perm_rows, key=display_user_sort) | |||
|
3017 | ||||
|
3018 | user_groups_rows = [] | |||
|
3019 | if expand_from_user_groups: | |||
|
3020 | for ug in self.permission_user_groups(with_members=True): | |||
|
3021 | for user_data in ug.members: | |||
|
3022 | user_groups_rows.append(user_data) | |||
|
3023 | ||||
|
3024 | return super_admin_rows + owner_row + perm_rows + user_groups_rows | |||
|
3025 | ||||
|
3026 | def permission_user_groups(self, with_members=False): | |||
|
3027 | q = UserGroupRepoGroupToPerm.query()\ | |||
|
3028 | .filter(UserGroupRepoGroupToPerm.group == self) | |||
|
3029 | q = q.options(joinedload(UserGroupRepoGroupToPerm.group), | |||
|
3030 | joinedload(UserGroupRepoGroupToPerm.users_group), | |||
|
3031 | joinedload(UserGroupRepoGroupToPerm.permission),) | |||
|
3032 | ||||
|
3033 | perm_rows = [] | |||
|
3034 | for _user_group in q.all(): | |||
|
3035 | entry = AttributeDict(_user_group.users_group.get_dict()) | |||
|
3036 | entry.permission = _user_group.permission.permission_name | |||
|
3037 | if with_members: | |||
|
3038 | entry.members = [x.user.get_dict() | |||
|
3039 | for x in _user_group.users_group.members] | |||
|
3040 | perm_rows.append(entry) | |||
|
3041 | ||||
|
3042 | perm_rows = sorted(perm_rows, key=display_user_group_sort) | |||
|
3043 | return perm_rows | |||
|
3044 | ||||
|
3045 | def get_api_data(self): | |||
|
3046 | """ | |||
|
3047 | Common function for generating api data | |||
|
3048 | ||||
|
3049 | """ | |||
|
3050 | group = self | |||
|
3051 | data = { | |||
|
3052 | 'group_id': group.group_id, | |||
|
3053 | 'group_name': group.group_name, | |||
|
3054 | 'group_description': group.description_safe, | |||
|
3055 | 'parent_group': group.parent_group.group_name if group.parent_group else None, | |||
|
3056 | 'repositories': [x.repo_name for x in group.repositories], | |||
|
3057 | 'owner': group.user.username, | |||
|
3058 | } | |||
|
3059 | return data | |||
|
3060 | ||||
|
3061 | def get_dict(self): | |||
|
3062 | # Since we transformed `group_name` to a hybrid property, we need to | |||
|
3063 | # keep compatibility with the code which uses `group_name` field. | |||
|
3064 | result = super(RepoGroup, self).get_dict() | |||
|
3065 | result['group_name'] = result.pop('_group_name', None) | |||
|
3066 | return result | |||
|
3067 | ||||
|
3068 | ||||
|
class Permission(Base, BaseModel):
    """
    Registry of all permission names known to the system, plus classmethod
    helpers that resolve a user's effective permissions — granted directly
    or inherited through membership in *active* user groups.
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    # (permission_name, translated human-readable label) pairs; presumably
    # seeded into the table on setup — TODO confirm against setup code
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch, for backward compat we need same value as before so forced pushed
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        """Return the Permission row matching *key*, or None."""
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        """
        (UserRepoToPerm, Repository, Permission) tuples granted directly to
        *user_id*, optionally narrowed to a single repository.
        """
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms(cls, user_id, repo_id=None):
        """
        Branch-permission rules attached to *user_id*'s direct repository
        permissions, ordered by rule_order.
        """
        q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
            .join(
                Permission,
                UserToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserRepoToPerm,
                UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
            .filter(UserRepoToPerm.user_id == user_id)

        if repo_id:
            q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Repository permissions *user_id* inherits via membership in active
        user groups.
        """
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Branch-permission rules *user_id* inherits via active user groups,
        ordered by rule_order.
        """
        q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
            .join(
                Permission,
                UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserGroupRepoToPerm,
                UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())

        if repo_id:
            q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        """
        (UserRepoGroupToPerm, RepoGroup, Permission) tuples granted directly
        to *user_id*, optionally narrowed to one repository group.
        """
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserRepoGroupToPerm.permission_id == Permission.permission_id)\
            .join(
                RepoGroup,
                UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        """
        Repository-group permissions *user_id* inherits via membership in
        active user groups.
        """
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        """
        (UserUserGroupToPerm, UserGroup, Permission) tuples granted directly
        to *user_id*, optionally narrowed to one user group.
        """
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        """
        User-group permissions *user_id* inherits via membership in active
        user groups; the granted-on group is aliased to keep the two
        UserGroup roles (target vs. granting) distinct.
        """
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
|
3349 | ||||
|
3350 | ||||
|
class UserRepoToPerm(Base, BaseModel):
    """
    Association row granting one repository permission directly to one user.
    """
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    # branch-level rules hanging off this grant; delete-orphan removes them
    # together with the parent row
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')

    @classmethod
    def create(cls, user, repository, permission):
        """Create a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
|
3380 | ||||
|
3381 | ||||
|
class UserUserGroupToPerm(Base, BaseModel):
    """
    Association row granting one user-group permission directly to one user.
    """
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Create a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
|
3409 | ||||
|
3410 | ||||
|
class UserToPerm(Base, BaseModel):
    """
    Association row granting one global permission to one user.
    """
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly loaded: permission name is read whenever the row is
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
|
3427 | ||||
|
3428 | ||||
|
class UserGroupRepoToPerm(Base, BaseModel):
    """
    Association row granting one repository permission to a user group.
    """
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')
    # branch-level rules hanging off this grant; cascade-deleted with it
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Create a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
|
3457 | ||||
|
3458 | ||||
|
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Association row granting one user group (``user_group``) a permission
    on another user group (``target_user_group``).
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        # a group cannot hold a permission on itself
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # both relationships target UserGroup, so explicit primaryjoins are needed
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Create a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
|
3487 | ||||
|
3488 | ||||
|
class UserGroupToPerm(Base, BaseModel):
    """
    Association row granting one global permission to a user group.
    """
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
|
3502 | ||||
|
3503 | ||||
|
class UserRepoGroupToPerm(Base, BaseModel):
    """
    Association row granting one repository-group permission to one user.
    """
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Create a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
|
3528 | ||||
|
3529 | ||||
|
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Association row granting one repository-group permission to a user group.
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Create a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
|
3557 | ||||
|
3558 | ||||
|
class Statistics(Base, BaseModel):
    """
    Pre-computed statistics for a repository; one row per repository
    (repository_id is unique).
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # revision the statistics were computed up to
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)
|
3573 | ||||
|
3574 | ||||
|
class UserFollowing(Base, BaseModel):
    """
    Tracks a user following either a repository or another user; exactly
    one of the two follow targets is expected to be set — TODO confirm
    (both FK columns are nullable).
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # two FKs into users: explicit primaryjoins keep follower/followee apart
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of all followings that target repository *repo_id*."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
|
3597 | ||||
|
3598 | ||||
|
class CacheKey(Base, BaseModel):
    """
    Database-backed cache invalidation records.  Each row pairs a concrete
    ``cache_key`` with a logical namespace (``cache_args``) and a shared
    ``cache_state_uid``; invalidation flips ``cache_active`` or rotates the
    state uid for a whole namespace.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
    SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    # logical namespace the key belongs to (see *_INVALIDATION_NAMESPACE)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around the cache_args marker; yields
        # (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        """
        Return a new state uid: deterministic (uuid5) when ``based_on`` is
        given, random (uuid4) otherwise.
        """
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                # deactivate and rotate the shared state uid so readers see
                # a state change even without a row delete
                qry.update({"cache_active": False,
                            "cache_state_uid": cls.generate_new_state_uid()})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            # best-effort: roll back and keep going; invalidation failure is
            # logged, not raised
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the CacheKey row for ``cache_key`` or None."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        """Return {cache_key: CacheKey} for every row in ``namespace``."""
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
|
3706 | ||||
|
3707 | ||||
|
class ChangesetComment(Base, BaseModel):
    """
    A comment made either on a single commit (``revision`` set) or on a
    pull request (``pull_request_id`` set).  Inline comments additionally
    carry ``f_path`` and ``line_no``.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # display_state value marking a comment as outdated by a newer PR version
    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    # immutable_state values; see the ``immutable`` property below
    OP_IMMUTABLE = u'immutable'
    OP_CHANGEABLE = u'changeable'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    # commit hash for commit comments; NULL for pull-request comments
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    # line number + file path identify an inline comment
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)
    immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    # self-referential link: a comment may be resolved by other comments
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')
    history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='joined', order_by='ChangesetCommentHistory.version')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """
        Return the 1-based position of ``pr_version`` inside ``versions``
        (a list of PullRequestVersion-like objects), or None when absent.
        """
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            return num_versions.index(pr_version) +1
        except (IndexError, ValueError):
            return

    @property
    def outdated(self):
        # marked outdated by a pull request update
        return self.display_state == self.COMMENT_OUTDATED

    @property
    def immutable(self):
        return self.immutable_state == self.OP_IMMUTABLE

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            # "latest" comments have no version id; anything versioned is older
            return self.pull_request_version_id is not None

        return self.pull_request_version_id < version

    @property
    def resolved(self):
        # the first resolving comment, if any
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline comments are anchored to a file path and line number
        return self.line_no and self.f_path

    def get_index_version(self, versions):
        """1-based index of this comment's PR version within ``versions``."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            # not yet flushed to the DB; fall back to the object id
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return the dict representation used by the API layer."""
        comment = self
        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved,
            'comment_commit_id': comment.revision,
            'comment_pull_request_id': comment.pull_request_id,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
|
3843 | ||||
|
3844 | ||||
|
class ChangesetCommentHistory(Base, BaseModel):
    """
    Immutable edit history of a ChangesetComment: each edit stores the
    previous text under an increasing ``version`` number.
    """
    __tablename__ = 'changeset_comments_history'
    __table_args__ = (
        Index('cch_comment_id_idx', 'comment_id'),
        base_table_args,
    )

    comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
    comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
    version = Column("version", Integer(), nullable=False, default=0)
    created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    deleted = Column('deleted', Boolean(), default=False)

    author = relationship('User', lazy='joined')
    comment = relationship('ChangesetComment', cascade="all, delete")

    @classmethod
    def get_version(cls, comment_id):
        """
        Return the next version number for ``comment_id``'s history.

        Equals ``max(row_count, highest_stored_version) + 1`` so that gaps
        or duplicates in stored versions never produce a colliding number;
        returns 1 when the comment has no history yet.
        """
        q = Session().query(ChangesetCommentHistory).filter(
            ChangesetCommentHistory.comment_id == comment_id).order_by(
            ChangesetCommentHistory.version.desc())
        # previously this issued up to three queries (count() twice plus a
        # LIMIT/OFFSET fetch); fetch the newest row once and count once.
        latest = q.first()
        if latest is None:
            return 1
        return max(q.count(), latest.version) + 1
|
3873 | ||||
|
3874 | ||||
|
class ChangesetStatus(Base, BaseModel):
    """
    A review vote (approved / rejected / ...) attached to a commit or a
    pull request.  Rows are versioned per (repo, revision) so every vote
    is kept; the unique constraint forbids duplicate versions.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        fmt = u"<%s('%s[v%s]:%s')>"
        return fmt % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Translate a raw status value into its human-readable label."""
        status_map = dict(cls.STATUSES)
        return status_map.get(value)

    @property
    def status_lbl(self):
        """Human-readable label of this row's status."""
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return the dict representation used by the API layer."""
        return {
            'status_id': self.changeset_status_id,
            'status': self.status,
        }

    def __json__(self):
        return dict(self.get_api_data())
|
3937 | ||||
|
3938 | ||||
|
class _SetState(object):
    """
    Context processor allowing changing state for sensitive operation such as
    pull request update or merge.

    On enter the pull request state is set to ``pr_state``; on a clean exit
    it is restored to the original state (or ``back_state`` if given).  When
    the body raises, the exception is logged and propagated and the state is
    deliberately NOT restored, so the failed state stays visible.
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        self._pr = pull_request
        # state to restore on exit; defaults to the PR's current state
        self._org_state = back_state or pull_request.pull_request_state
        self._pr_state = pr_state
        self._current_state = None

    def __enter__(self):
        log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
                  self._pr, self._pr_state)
        self.set_pr_state(self._pr_state)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_val is not None:
            # bug fix: traceback.format_exc() takes a `limit` argument, not a
            # traceback object, so format_exc(exc_tb) was wrong; format the
            # propagating exception explicitly instead.
            log.error(''.join(
                traceback.format_exception(exc_type, exc_val, exc_tb)))
            # returning None lets the exception propagate
            return None

        self.set_pr_state(self._org_state)
        log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
                  self._pr, self._org_state)

    @property
    def state(self):
        # last state successfully written by set_pr_state()
        return self._current_state

    def set_pr_state(self, pr_state):
        """Persist ``pr_state`` on the pull request; re-raises on failure."""
        try:
            self._pr.pull_request_state = pr_state
            Session().add(self._pr)
            Session().commit()
            self._current_state = pr_state
        except Exception:
            log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
            raise
|
3980 | ||||
|
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    # available states
    STATE_CREATING = u'creating'
    STATE_UPDATING = u'updating'
    STATE_MERGING = u'merging'
    STATE_CREATED = u'created'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    # one of the STATE_* values above
    pull_request_state = Column("pull_request_state", String(255), nullable=True)

    # declared_attr is used so subclasses (PullRequest / PullRequestVersion)
    # each get their own Column/relationship objects
    @declared_attr
    def user_id(cls):
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    # stored as 'type:name:commit_id'; see the source_ref setter below
    _source_ref = Column('org_ref', Unicode(255), nullable=False)

    @hybrid_property
    def source_ref(self):
        return self._source_ref

    @source_ref.setter
    def source_ref(self, val):
        # refs are serialized as three colon-separated fields
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._source_ref = safe_unicode(val)

    _target_ref = Column('other_ref', Unicode(255), nullable=False)

    @hybrid_property
    def target_ref(self):
        return self._target_ref

    @target_ref.setter
    def target_ref(self, val):
        # same 'type:name:commit_id' format as source_ref
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._target_ref = safe_unicode(val)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    last_merge_metadata = Column(
        'last_merge_metadata', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        # JSON-serialized view of the reviewer_data column
        return json.dumps(self.reviewer_data)

    @property
    def work_in_progress(self):
        """checks if pull request is work in progress by checking the title"""
        title = self.title.upper()
        if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
            return True
        return False

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        # revisions are stored colon-joined in a single text column
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = u':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        # primaryjoin is built per subclass via cls.__name__
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        # Reference namedtuple-like view of source_ref
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    @staticmethod
    def unicode_to_reference(raw):
        """
        Convert a unicode (or string) to a reference object.
        If unicode evaluates to False it returns None.
        """
        if raw:
            refs = raw.split(':')
            return Reference(*refs)
        else:
            return None

    @staticmethod
    def reference_to_unicode(ref):
        """
        Convert a reference object to unicode.
        If reference is None it returns None.
        """
        if ref:
            return u':'.join(ref)
        else:
            return None

    def get_api_data(self, with_merge_state=True):
        """
        Build the API dict for this pull request.

        :param with_merge_state: when True runs a (potentially expensive)
            merge simulation via PullRequestModel().merge_status()
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_response, merge_status, msg = \
                PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status,
                'message': safe_unicode(msg),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data

    def set_state(self, pull_request_state, final_state=None):
        """
        # goes from initial state to updating to initial state.
        # initial state can be changed by specifying back_state=
        with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
            pull_request.merge()

        :param pull_request_state:
        :param final_state:

        """

        return _SetState(self, pull_request_state, back_state=final_state)
|
4257 | ||||
|
4258 | ||||
|
class PullRequest(Base, _PullRequestBase):
    """
    A live (editable) pull request.

    Most columns/behavior come from the shared ``_PullRequestBase`` mixin
    (defined earlier in this file); this class adds the primary key, the
    reviewer/status/comment/version relationships, and display helpers.
    """

    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        # Fall back to the object id for not-yet-persisted instances.
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    # delete-orphan: removing a PR removes its reviewers/statuses/comments/versions
    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
    statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
    comments = relationship('ChangesetComment', cascade="all, delete-orphan")
    # lazy='dynamic' so `versions` is a query object (see versions_count below)
    versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Build a read-only display wrapper around *pull_request_obj*.

        :param pull_request_obj: the PR (or PR version) whose API data is shown
        :param org_pull_request_obj: the original/live PR; supplies the fields
            that only exist on the live object (shadow merge ref, reviewer data)
        :param internal_methods: names resolved on the wrapper itself instead of
            the attrs dict; defaults to ``['versions']``
        :return: PullRequestDisplay instance
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # NOTE(review): if `item` is listed in self.internal but is NOT
                # defined on the class, this getattr() re-enters __getattr__ and
                # recurses infinitely. Safe for the default ['versions'] since
                # that method exists below — keep internal entries backed by
                # real attributes.
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                # oldest-first list of the wrapped PR's versions
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            def is_state_changing(self):
                return pull_request_obj.is_state_changing()

            @property
            def pull_request_version_id(self):
                # only PR *versions* carry this attribute; None for a live PR
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions
        attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
        # these live only on the original PR object, not on versions
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        """True when the PR status is STATUS_CLOSED (from the base mixin)."""
        return self.status == self.STATUS_CLOSED

    def is_state_changing(self):
        """True while the PR is in any transient state other than 'created'."""
        return self.pull_request_state != PullRequest.STATE_CREATED

    def __json__(self):
        return {
            'revisions': self.revisions,
            'versions': self.versions_count
        }

    def calculated_review_status(self):
        # deferred import to avoid a circular dependency with the model layer
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """
        Return the VCS instance of this PR's shadow merge repository, or
        implicitly None when the shadow workspace directory does not exist.
        """
        workspace_id = self.workspace_id
        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
        if os.path.isdir(shadow_repository_path):
            vcs_obj = self.target_repo.scm_instance()
            return vcs_obj.get_shadow_instance(shadow_repository_path)

    @property
    def versions_count(self):
        """
        return number of versions this PR have, e.g a PR that once been
        updated will have 2 versions
        """
        # versions is a dynamic relationship, so .count() runs a SQL COUNT;
        # +1 counts the live PR itself
        return self.versions.count() + 1
|
4385 | ||||
|
4386 | ||||
|
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request, created each time the PR is
    updated. Shares all data columns with PullRequest via _PullRequestBase;
    most accessors simply delegate to the parent live PR.
    """

    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # reviewers are tracked only on the live PR
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def is_state_changing(self):
        # state is tracked on the live PR, not the snapshot
        return self.pull_request.pull_request_state != PullRequest.STATE_CREATED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
|
4426 | ||||
|
4427 | ||||
|
class PullRequestReviewers(Base, BaseModel):
    """
    Association of a User with a PullRequest as a reviewer, together with
    the reasons the reviewer was added and any review-rule metadata.
    """

    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )

    @hybrid_property
    def reasons(self):
        # normalize a NULL/empty column value to an empty list
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        # reasons must be a flat list of strings (stored as JSON)
        if any(not isinstance(x, compat.string_types) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON-serialized list column; MutationList tracks in-place changes
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    # review-rule metadata captured when the reviewer was attached
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer

        Returns a dict with 'id'/'name'/'vote_rule' keys when this reviewer
        came from a user group with voting rules, an empty dict when
        rule_data has 'vote_rule' but no group entry, and implicitly None
        when no vote rule is present.
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        # Python 2 style repr hook (this file still supports py2 via `compat`)
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
|
4485 | ||||
|
4486 | ||||
|
class Notification(Base, BaseModel):
    """
    A notification message fanned out to one or more users through the
    UserNotification association table.
    """

    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    # known notification types; stored in the `type` column
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
    TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # trailing underscore avoids shadowing the builtin `type`
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete-orphan")

    @property
    def recipients(self):
        """All users linked to this notification, ordered by user_id."""
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to every user in *recipients*.

        :param created_by: User instance that authored the notification
        :param subject: notification subject text
        :param body: notification body text
        :param recipients: iterable of User instances to notify
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        :return: the new (session-pending, uncommitted) Notification

        NOTE: objects are added to the session but not committed here —
        the caller is responsible for committing.
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
|
4546 | ||||
|
4547 | ||||
|
class UserNotification(Base, BaseModel):
    """
    Association table linking a Notification to a recipient User, with a
    per-user read flag.
    """

    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    # composite primary key: (user_id, notification_id)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        # adds to the session only; caller commits
        self.read = True
        Session().add(self)
|
4567 | ||||
|
4568 | ||||
|
class UserNotice(Base, BaseModel):
    """
    A one-off notice/message shown to a single user (e.g. admin broadcast),
    with a severity level and a read flag.
    """

    __tablename__ = 'user_notices'
    __table_args__ = (
        base_table_args
    )

    NOTIFICATION_TYPE_MESSAGE = 'message'
    NOTIFICATION_TYPE_NOTICE = 'notice'

    NOTIFICATION_LEVEL_INFO = 'info'
    NOTIFICATION_LEVEL_WARNING = 'warning'
    NOTIFICATION_LEVEL_ERROR = 'error'

    # NOTE(review): the underlying column is named 'gist_id' — an apparent
    # copy-paste from the Gist model. Renaming it would require a schema
    # migration, so the attribute name papers over it instead.
    user_notice_id = Column('gist_id', Integer(), primary_key=True)

    notice_subject = Column('notice_subject', Unicode(512), nullable=True)
    notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)

    notice_read = Column('notice_read', Boolean, default=False)

    notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
    notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)

    notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
    notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
    user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')

    @classmethod
    def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
        """
        Create and commit a notice for *user*.

        :param user: user id, username or User instance (resolved via UserModel)
        :param subject: notice subject text
        :param body: notice body text
        :param notice_level: one of the NOTIFICATION_LEVEL_* constants;
            silently returns None for unknown levels
        :param allow_duplicate: when False (default), skip creation if an
            unread notice with the same body already exists for the user
        :return: implicitly None in all cases
        """

        # silently ignore unknown levels rather than raising
        if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
                                cls.NOTIFICATION_LEVEL_WARNING,
                                cls.NOTIFICATION_LEVEL_INFO]:
            return

        # deferred import to avoid a circular dependency with the model layer
        from rhodecode.model.user import UserModel
        user = UserModel().get_user(user)

        new_notice = UserNotice()
        if not allow_duplicate:
            # dedupe on identical, still-unread body for this user
            existing_msg = UserNotice().query() \
                .filter(UserNotice.user == user) \
                .filter(UserNotice.notice_body == body) \
                .filter(UserNotice.notice_read == false()) \
                .scalar()
            if existing_msg:
                log.warning('Ignoring duplicate notice for user %s', user)
                return

        new_notice.user = user
        new_notice.notice_subject = subject
        new_notice.notice_body = body
        new_notice.notification_level = notice_level
        # unlike most helpers in this module, this one commits immediately
        Session().add(new_notice)
        Session().commit()
|
4626 | ||||
|
4627 | ||||
|
class Gist(Base, BaseModel):
    """
    A gist: a small snippet repository stored on disk under the gist store
    location, with public/private visibility and an ACL level.
    """

    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    # short public identifier used in gist URLs
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    # expiry as a unix timestamp (float); presumably -1/sentinel for never — TODO confirm
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        """HTML-escaped gist description, safe to render in templates."""
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by access id or raise HTTPNotFound (id_ is the access id,
        not the numeric primary key)."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        # root storage path comes from the RhodeCodeUi entry keyed by URL_SEP
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            # content is filled in separately by API callers when requested
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Get an instance of VCS Repository

        :param kwargs:
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
|
4730 | ||||
|
4731 | ||||
|
class ExternalIdentity(Base, BaseModel):
    """
    Link between a local User and an identity at an external auth provider
    (OAuth etc.), including the provider-issued tokens.
    Composite primary key: (external_id, local_user_id, provider_name).
    """

    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :return: ExternalIdentity

        NOTE(review): the local_user_id filter uses a truthiness check, so a
        falsy id (0) would be ignored; user ids presumably start at 1 — confirm.
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        # implicit join: User rows matched against this table's columns
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity query (not yet executed)
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        """Load the EE auth plugin registered under *plugin_id*."""
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
|
4797 | ||||
|
4798 | ||||
|
class Integration(Base, BaseModel):
    """
    A configured integration (webhook, Slack, etc.). Scope is global, a
    single repo, or a repo group (optionally restricted to direct children).
    """

    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # for repo-group scope: restrict to direct child repos only;
    # for global scope: restrict to root-level repos
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    # arbitrary per-integration settings stored as JSON
    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        """Human-readable scope description used in __repr__ and UI."""
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
|
4840 | ||||
|
4841 | ||||
|
class RepoReviewRuleUser(Base, BaseModel):
    """
    A single user attached to a repository review rule, with a flag marking
    whether their review is mandatory.
    """

    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')

    def rule_data(self):
        """Per-reviewer rule payload consumed by RepoReviewRule.review_users."""
        return {
            'mandatory': self.mandatory
        }
|
4858 | ||||
|
4859 | ||||
|
class RepoReviewRuleUserGroup(Base, BaseModel):
    """
    A user group attached to a repository review rule, with a voting rule:
    VOTE_RULE_ALL (-1) means every member must vote; a positive number is
    the minimum number of votes required.
    """

    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel: all group members must vote
    VOTE_RULE_ALL = -1

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        """Per-group rule payload consumed by RepoReviewRule.review_users."""
        return {
            'mandatory': self.mandatory,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        """Human-readable label for the vote rule (NULL/0 counts as 'all')."""
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
|
4887 | ||||
|
4888 | ||||
|
class RepoReviewRule(Base, BaseModel):
    """
    A per-repository review rule: glob/regex patterns for source branch,
    target branch and changed files, plus the users and user groups that
    must review matching pull requests.

    Pattern columns accept either a glob (default) or a regex when the
    value is prefixed with 're:'. '*' means "match everything".
    """

    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', backref='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
    forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # raises re.error if the glob does not translate to a valid regex;
        # result is discarded — this is a validation side effect only
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        # branches match vacuously when neither branch name was given
        branch_matches = True
        if source_branch or target_branch:
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                if self.source_branch_pattern.startswith('re:'):
                    # explicit regex, used as-is (unanchored search)
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    # glob, translated and fully anchored
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        # files match when ANY changed file matches the pattern; note the
        # glob branch is deliberately unanchored here (unlike branches)
        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for filename in files_changed:
                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        users = collections.OrderedDict()

        # direct users first — they take precedence over group membership
        for rule_user in self.rule_users:
            if rule_user.user.active:
                if rule_user.user not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self, user_id):
        """Return the rule's user-group entries that *user_id* belongs to."""

        rules = []
        if not self.rule_user_groups:
            return rules

        for user_group in self.rule_user_groups:
            user_group_members = [x.user_id for x in user_group.users_group.members]
            if user_id in user_group_members:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)
|
5050 | ||||
|
5051 | ||||
|
5052 | class ScheduleEntry(Base, BaseModel): | |||
|
5053 | __tablename__ = 'schedule_entries' | |||
|
5054 | __table_args__ = ( | |||
|
5055 | UniqueConstraint('schedule_name', name='s_schedule_name_idx'), | |||
|
5056 | UniqueConstraint('task_uid', name='s_task_uid_idx'), | |||
|
5057 | base_table_args, | |||
|
5058 | ) | |||
|
5059 | ||||
|
5060 | schedule_types = ['crontab', 'timedelta', 'integer'] | |||
|
5061 | schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True) | |||
|
5062 | ||||
|
5063 | schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None) | |||
|
5064 | schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None) | |||
|
5065 | schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True) | |||
|
5066 | ||||
|
5067 | _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None) | |||
|
5068 | schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT())))) | |||
|
5069 | ||||
|
5070 | schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None) | |||
|
5071 | schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0) | |||
|
5072 | ||||
|
5073 | # task | |||
|
5074 | task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None) | |||
|
5075 | task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None) | |||
|
5076 | task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT())))) | |||
|
5077 | task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT())))) | |||
|
5078 | ||||
|
5079 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
5080 | updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None) | |||
|
5081 | ||||
|
5082 | @hybrid_property | |||
|
5083 | def schedule_type(self): | |||
|
5084 | return self._schedule_type | |||
|
5085 | ||||
|
5086 | @schedule_type.setter | |||
|
5087 | def schedule_type(self, val): | |||
|
5088 | if val not in self.schedule_types: | |||
|
5089 | raise ValueError('Value must be on of `{}` and got `{}`'.format( | |||
|
5090 | val, self.schedule_type)) | |||
|
5091 | ||||
|
5092 | self._schedule_type = val | |||
|
5093 | ||||
|
5094 | @classmethod | |||
|
5095 | def get_uid(cls, obj): | |||
|
5096 | args = obj.task_args | |||
|
5097 | kwargs = obj.task_kwargs | |||
|
5098 | if isinstance(args, JsonRaw): | |||
|
5099 | try: | |||
|
5100 | args = json.loads(args) | |||
|
5101 | except ValueError: | |||
|
5102 | args = tuple() | |||
|
5103 | ||||
|
5104 | if isinstance(kwargs, JsonRaw): | |||
|
5105 | try: | |||
|
5106 | kwargs = json.loads(kwargs) | |||
|
5107 | except ValueError: | |||
|
5108 | kwargs = dict() | |||
|
5109 | ||||
|
5110 | dot_notation = obj.task_dot_notation | |||
|
5111 | val = '.'.join(map(safe_str, [ | |||
|
5112 | sorted(dot_notation), args, sorted(kwargs.items())])) | |||
|
5113 | return hashlib.sha1(val).hexdigest() | |||
|
5114 | ||||
|
5115 | @classmethod | |||
|
5116 | def get_by_schedule_name(cls, schedule_name): | |||
|
5117 | return cls.query().filter(cls.schedule_name == schedule_name).scalar() | |||
|
5118 | ||||
|
5119 | @classmethod | |||
|
5120 | def get_by_schedule_id(cls, schedule_id): | |||
|
5121 | return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar() | |||
|
5122 | ||||
|
5123 | @property | |||
|
5124 | def task(self): | |||
|
5125 | return self.task_dot_notation | |||
|
5126 | ||||
|
5127 | @property | |||
|
5128 | def schedule(self): | |||
|
5129 | from rhodecode.lib.celerylib.utils import raw_2_schedule | |||
|
5130 | schedule = raw_2_schedule(self.schedule_definition, self.schedule_type) | |||
|
5131 | return schedule | |||
|
5132 | ||||
|
5133 | @property | |||
|
5134 | def args(self): | |||
|
5135 | try: | |||
|
5136 | return list(self.task_args or []) | |||
|
5137 | except ValueError: | |||
|
5138 | return list() | |||
|
5139 | ||||
|
5140 | @property | |||
|
5141 | def kwargs(self): | |||
|
5142 | try: | |||
|
5143 | return dict(self.task_kwargs or {}) | |||
|
5144 | except ValueError: | |||
|
5145 | return dict() | |||
|
5146 | ||||
|
5147 | def _as_raw(self, val): | |||
|
5148 | if hasattr(val, 'de_coerce'): | |||
|
5149 | val = val.de_coerce() | |||
|
5150 | if val: | |||
|
5151 | val = json.dumps(val) | |||
|
5152 | ||||
|
5153 | return val | |||
|
5154 | ||||
|
5155 | @property | |||
|
5156 | def schedule_definition_raw(self): | |||
|
5157 | return self._as_raw(self.schedule_definition) | |||
|
5158 | ||||
|
5159 | @property | |||
|
5160 | def args_raw(self): | |||
|
5161 | return self._as_raw(self.task_args) | |||
|
5162 | ||||
|
5163 | @property | |||
|
5164 | def kwargs_raw(self): | |||
|
5165 | return self._as_raw(self.task_kwargs) | |||
|
5166 | ||||
|
5167 | def __repr__(self): | |||
|
5168 | return '<DB:ScheduleEntry({}:{})>'.format( | |||
|
5169 | self.schedule_entry_id, self.schedule_name) | |||
|
5170 | ||||
|
5171 | ||||
|
5172 | @event.listens_for(ScheduleEntry, 'before_update') | |||
|
5173 | def update_task_uid(mapper, connection, target): | |||
|
5174 | target.task_uid = ScheduleEntry.get_uid(target) | |||
|
5175 | ||||
|
5176 | ||||
|
5177 | @event.listens_for(ScheduleEntry, 'before_insert') | |||
|
5178 | def set_task_uid(mapper, connection, target): | |||
|
5179 | target.task_uid = ScheduleEntry.get_uid(target) | |||
|
5180 | ||||
|
5181 | ||||
|
5182 | class _BaseBranchPerms(BaseModel): | |||
|
5183 | @classmethod | |||
|
5184 | def compute_hash(cls, value): | |||
|
5185 | return sha1_safe(value) | |||
|
5186 | ||||
|
5187 | @hybrid_property | |||
|
5188 | def branch_pattern(self): | |||
|
5189 | return self._branch_pattern or '*' | |||
|
5190 | ||||
|
5191 | @hybrid_property | |||
|
5192 | def branch_hash(self): | |||
|
5193 | return self._branch_hash | |||
|
5194 | ||||
|
5195 | def _validate_glob(self, value): | |||
|
5196 | re.compile('^' + glob2re(value) + '$') | |||
|
5197 | ||||
|
5198 | @branch_pattern.setter | |||
|
5199 | def branch_pattern(self, value): | |||
|
5200 | self._validate_glob(value) | |||
|
5201 | self._branch_pattern = value or '*' | |||
|
5202 | # set the Hash when setting the branch pattern | |||
|
5203 | self._branch_hash = self.compute_hash(self._branch_pattern) | |||
|
5204 | ||||
|
5205 | def matches(self, branch): | |||
|
5206 | """ | |||
|
5207 | Check if this the branch matches entry | |||
|
5208 | ||||
|
5209 | :param branch: branch name for the commit | |||
|
5210 | """ | |||
|
5211 | ||||
|
5212 | branch = branch or '' | |||
|
5213 | ||||
|
5214 | branch_matches = True | |||
|
5215 | if branch: | |||
|
5216 | branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$') | |||
|
5217 | branch_matches = bool(branch_regex.search(branch)) | |||
|
5218 | ||||
|
5219 | return branch_matches | |||
|
5220 | ||||
|
5221 | ||||
|
5222 | class UserToRepoBranchPermission(Base, _BaseBranchPerms): | |||
|
5223 | __tablename__ = 'user_to_repo_branch_permissions' | |||
|
5224 | __table_args__ = ( | |||
|
5225 | base_table_args | |||
|
5226 | ) | |||
|
5227 | ||||
|
5228 | branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True) | |||
|
5229 | ||||
|
5230 | repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |||
|
5231 | repo = relationship('Repository', backref='user_branch_perms') | |||
|
5232 | ||||
|
5233 | permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |||
|
5234 | permission = relationship('Permission') | |||
|
5235 | ||||
|
5236 | rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None) | |||
|
5237 | user_repo_to_perm = relationship('UserRepoToPerm') | |||
|
5238 | ||||
|
5239 | rule_order = Column('rule_order', Integer(), nullable=False) | |||
|
5240 | _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob | |||
|
5241 | _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql')) | |||
|
5242 | ||||
|
5243 | def __unicode__(self): | |||
|
5244 | return u'<UserBranchPermission(%s => %r)>' % ( | |||
|
5245 | self.user_repo_to_perm, self.branch_pattern) | |||
|
5246 | ||||
|
5247 | ||||
|
5248 | class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms): | |||
|
5249 | __tablename__ = 'user_group_to_repo_branch_permissions' | |||
|
5250 | __table_args__ = ( | |||
|
5251 | base_table_args | |||
|
5252 | ) | |||
|
5253 | ||||
|
5254 | branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True) | |||
|
5255 | ||||
|
5256 | repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None) | |||
|
5257 | repo = relationship('Repository', backref='user_group_branch_perms') | |||
|
5258 | ||||
|
5259 | permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None) | |||
|
5260 | permission = relationship('Permission') | |||
|
5261 | ||||
|
5262 | rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None) | |||
|
5263 | user_group_repo_to_perm = relationship('UserGroupRepoToPerm') | |||
|
5264 | ||||
|
5265 | rule_order = Column('rule_order', Integer(), nullable=False) | |||
|
5266 | _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob | |||
|
5267 | _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql')) | |||
|
5268 | ||||
|
5269 | def __unicode__(self): | |||
|
5270 | return u'<UserBranchPermission(%s => %r)>' % ( | |||
|
5271 | self.user_group_repo_to_perm, self.branch_pattern) | |||
|
5272 | ||||
|
5273 | ||||
|
5274 | class UserBookmark(Base, BaseModel): | |||
|
5275 | __tablename__ = 'user_bookmarks' | |||
|
5276 | __table_args__ = ( | |||
|
5277 | UniqueConstraint('user_id', 'bookmark_repo_id'), | |||
|
5278 | UniqueConstraint('user_id', 'bookmark_repo_group_id'), | |||
|
5279 | UniqueConstraint('user_id', 'bookmark_position'), | |||
|
5280 | base_table_args | |||
|
5281 | ) | |||
|
5282 | ||||
|
5283 | user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |||
|
5284 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None) | |||
|
5285 | position = Column("bookmark_position", Integer(), nullable=False) | |||
|
5286 | title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None) | |||
|
5287 | redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None) | |||
|
5288 | created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
5289 | ||||
|
5290 | bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None) | |||
|
5291 | bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None) | |||
|
5292 | ||||
|
5293 | user = relationship("User") | |||
|
5294 | ||||
|
5295 | repository = relationship("Repository") | |||
|
5296 | repository_group = relationship("RepoGroup") | |||
|
5297 | ||||
|
5298 | @classmethod | |||
|
5299 | def get_by_position_for_user(cls, position, user_id): | |||
|
5300 | return cls.query() \ | |||
|
5301 | .filter(UserBookmark.user_id == user_id) \ | |||
|
5302 | .filter(UserBookmark.position == position).scalar() | |||
|
5303 | ||||
|
5304 | @classmethod | |||
|
5305 | def get_bookmarks_for_user(cls, user_id, cache=True): | |||
|
5306 | bookmarks = cls.query() \ | |||
|
5307 | .filter(UserBookmark.user_id == user_id) \ | |||
|
5308 | .options(joinedload(UserBookmark.repository)) \ | |||
|
5309 | .options(joinedload(UserBookmark.repository_group)) \ | |||
|
5310 | .order_by(UserBookmark.position.asc()) | |||
|
5311 | ||||
|
5312 | if cache: | |||
|
5313 | bookmarks = bookmarks.options( | |||
|
5314 | FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id)) | |||
|
5315 | ) | |||
|
5316 | ||||
|
5317 | return bookmarks.all() | |||
|
5318 | ||||
|
5319 | def __unicode__(self): | |||
|
5320 | return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url) | |||
|
5321 | ||||
|
5322 | ||||
|
5323 | class FileStore(Base, BaseModel): | |||
|
5324 | __tablename__ = 'file_store' | |||
|
5325 | __table_args__ = ( | |||
|
5326 | base_table_args | |||
|
5327 | ) | |||
|
5328 | ||||
|
5329 | file_store_id = Column('file_store_id', Integer(), primary_key=True) | |||
|
5330 | file_uid = Column('file_uid', String(1024), nullable=False) | |||
|
5331 | file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True) | |||
|
5332 | file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True) | |||
|
5333 | file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False) | |||
|
5334 | ||||
|
5335 | # sha256 hash | |||
|
5336 | file_hash = Column('file_hash', String(512), nullable=False) | |||
|
5337 | file_size = Column('file_size', BigInteger(), nullable=False) | |||
|
5338 | ||||
|
5339 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) | |||
|
5340 | accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True) | |||
|
5341 | accessed_count = Column('accessed_count', Integer(), default=0) | |||
|
5342 | ||||
|
5343 | enabled = Column('enabled', Boolean(), nullable=False, default=True) | |||
|
5344 | ||||
|
5345 | # if repo/repo_group reference is set, check for permissions | |||
|
5346 | check_acl = Column('check_acl', Boolean(), nullable=False, default=True) | |||
|
5347 | ||||
|
5348 | # hidden defines an attachment that should be hidden from showing in artifact listing | |||
|
5349 | hidden = Column('hidden', Boolean(), nullable=False, default=False) | |||
|
5350 | ||||
|
5351 | user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False) | |||
|
5352 | upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id') | |||
|
5353 | ||||
|
5354 | file_metadata = relationship('FileStoreMetadata', lazy='joined') | |||
|
5355 | ||||
|
5356 | # scope limited to user, which requester have access to | |||
|
5357 | scope_user_id = Column( | |||
|
5358 | 'scope_user_id', Integer(), ForeignKey('users.user_id'), | |||
|
5359 | nullable=True, unique=None, default=None) | |||
|
5360 | user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id') | |||
|
5361 | ||||
|
5362 | # scope limited to user group, which requester have access to | |||
|
5363 | scope_user_group_id = Column( | |||
|
5364 | 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'), | |||
|
5365 | nullable=True, unique=None, default=None) | |||
|
5366 | user_group = relationship('UserGroup', lazy='joined') | |||
|
5367 | ||||
|
5368 | # scope limited to repo, which requester have access to | |||
|
5369 | scope_repo_id = Column( | |||
|
5370 | 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'), | |||
|
5371 | nullable=True, unique=None, default=None) | |||
|
5372 | repo = relationship('Repository', lazy='joined') | |||
|
5373 | ||||
|
5374 | # scope limited to repo group, which requester have access to | |||
|
5375 | scope_repo_group_id = Column( | |||
|
5376 | 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'), | |||
|
5377 | nullable=True, unique=None, default=None) | |||
|
5378 | repo_group = relationship('RepoGroup', lazy='joined') | |||
|
5379 | ||||
|
5380 | @classmethod | |||
|
5381 | def get_by_store_uid(cls, file_store_uid): | |||
|
5382 | return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar() | |||
|
5383 | ||||
|
5384 | @classmethod | |||
|
5385 | def create(cls, file_uid, filename, file_hash, file_size, file_display_name='', | |||
|
5386 | file_description='', enabled=True, hidden=False, check_acl=True, | |||
|
5387 | user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None): | |||
|
5388 | ||||
|
5389 | store_entry = FileStore() | |||
|
5390 | store_entry.file_uid = file_uid | |||
|
5391 | store_entry.file_display_name = file_display_name | |||
|
5392 | store_entry.file_org_name = filename | |||
|
5393 | store_entry.file_size = file_size | |||
|
5394 | store_entry.file_hash = file_hash | |||
|
5395 | store_entry.file_description = file_description | |||
|
5396 | ||||
|
5397 | store_entry.check_acl = check_acl | |||
|
5398 | store_entry.enabled = enabled | |||
|
5399 | store_entry.hidden = hidden | |||
|
5400 | ||||
|
5401 | store_entry.user_id = user_id | |||
|
5402 | store_entry.scope_user_id = scope_user_id | |||
|
5403 | store_entry.scope_repo_id = scope_repo_id | |||
|
5404 | store_entry.scope_repo_group_id = scope_repo_group_id | |||
|
5405 | ||||
|
5406 | return store_entry | |||
|
5407 | ||||
|
5408 | @classmethod | |||
|
5409 | def store_metadata(cls, file_store_id, args, commit=True): | |||
|
5410 | file_store = FileStore.get(file_store_id) | |||
|
5411 | if file_store is None: | |||
|
5412 | return | |||
|
5413 | ||||
|
5414 | for section, key, value, value_type in args: | |||
|
5415 | has_key = FileStoreMetadata().query() \ | |||
|
5416 | .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \ | |||
|
5417 | .filter(FileStoreMetadata.file_store_meta_section == section) \ | |||
|
5418 | .filter(FileStoreMetadata.file_store_meta_key == key) \ | |||
|
5419 | .scalar() | |||
|
5420 | if has_key: | |||
|
5421 | msg = 'key `{}` already defined under section `{}` for this file.'\ | |||
|
5422 | .format(key, section) | |||
|
5423 | raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key) | |||
|
5424 | ||||
|
5425 | # NOTE(marcink): raises ArtifactMetadataBadValueType | |||
|
5426 | FileStoreMetadata.valid_value_type(value_type) | |||
|
5427 | ||||
|
5428 | meta_entry = FileStoreMetadata() | |||
|
5429 | meta_entry.file_store = file_store | |||
|
5430 | meta_entry.file_store_meta_section = section | |||
|
5431 | meta_entry.file_store_meta_key = key | |||
|
5432 | meta_entry.file_store_meta_value_type = value_type | |||
|
5433 | meta_entry.file_store_meta_value = value | |||
|
5434 | ||||
|
5435 | Session().add(meta_entry) | |||
|
5436 | ||||
|
5437 | try: | |||
|
5438 | if commit: | |||
|
5439 | Session().commit() | |||
|
5440 | except IntegrityError: | |||
|
5441 | Session().rollback() | |||
|
5442 | raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.') | |||
|
5443 | ||||
|
5444 | @classmethod | |||
|
5445 | def bump_access_counter(cls, file_uid, commit=True): | |||
|
5446 | FileStore().query()\ | |||
|
5447 | .filter(FileStore.file_uid == file_uid)\ | |||
|
5448 | .update({FileStore.accessed_count: (FileStore.accessed_count + 1), | |||
|
5449 | FileStore.accessed_on: datetime.datetime.now()}) | |||
|
5450 | if commit: | |||
|
5451 | Session().commit() | |||
|
5452 | ||||
|
5453 | def __json__(self): | |||
|
5454 | data = { | |||
|
5455 | 'filename': self.file_display_name, | |||
|
5456 | 'filename_org': self.file_org_name, | |||
|
5457 | 'file_uid': self.file_uid, | |||
|
5458 | 'description': self.file_description, | |||
|
5459 | 'hidden': self.hidden, | |||
|
5460 | 'size': self.file_size, | |||
|
5461 | 'created_on': self.created_on, | |||
|
5462 | 'uploaded_by': self.upload_user.get_api_data(details='basic'), | |||
|
5463 | 'downloaded_times': self.accessed_count, | |||
|
5464 | 'sha256': self.file_hash, | |||
|
5465 | 'metadata': self.file_metadata, | |||
|
5466 | } | |||
|
5467 | ||||
|
5468 | return data | |||
|
5469 | ||||
|
5470 | def __repr__(self): | |||
|
5471 | return '<FileStore({})>'.format(self.file_store_id) | |||
|
5472 | ||||
|
5473 | ||||
|
5474 | class FileStoreMetadata(Base, BaseModel): | |||
|
5475 | __tablename__ = 'file_store_metadata' | |||
|
5476 | __table_args__ = ( | |||
|
5477 | UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'), | |||
|
5478 | Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255), | |||
|
5479 | Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255), | |||
|
5480 | base_table_args | |||
|
5481 | ) | |||
|
5482 | SETTINGS_TYPES = { | |||
|
5483 | 'str': safe_str, | |||
|
5484 | 'int': safe_int, | |||
|
5485 | 'unicode': safe_unicode, | |||
|
5486 | 'bool': str2bool, | |||
|
5487 | 'list': functools.partial(aslist, sep=',') | |||
|
5488 | } | |||
|
5489 | ||||
|
5490 | file_store_meta_id = Column( | |||
|
5491 | "file_store_meta_id", Integer(), nullable=False, unique=True, default=None, | |||
|
5492 | primary_key=True) | |||
|
5493 | _file_store_meta_section = Column( | |||
|
5494 | "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'), | |||
|
5495 | nullable=True, unique=None, default=None) | |||
|
5496 | _file_store_meta_section_hash = Column( | |||
|
5497 | "file_store_meta_section_hash", String(255), | |||
|
5498 | nullable=True, unique=None, default=None) | |||
|
5499 | _file_store_meta_key = Column( | |||
|
5500 | "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'), | |||
|
5501 | nullable=True, unique=None, default=None) | |||
|
5502 | _file_store_meta_key_hash = Column( | |||
|
5503 | "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None) | |||
|
5504 | _file_store_meta_value = Column( | |||
|
5505 | "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'), | |||
|
5506 | nullable=True, unique=None, default=None) | |||
|
5507 | _file_store_meta_value_type = Column( | |||
|
5508 | "file_store_meta_value_type", String(255), nullable=True, unique=None, | |||
|
5509 | default='unicode') | |||
|
5510 | ||||
|
5511 | file_store_id = Column( | |||
|
5512 | 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'), | |||
|
5513 | nullable=True, unique=None, default=None) | |||
|
5514 | ||||
|
5515 | file_store = relationship('FileStore', lazy='joined') | |||
|
5516 | ||||
|
5517 | @classmethod | |||
|
5518 | def valid_value_type(cls, value): | |||
|
5519 | if value.split('.')[0] not in cls.SETTINGS_TYPES: | |||
|
5520 | raise ArtifactMetadataBadValueType( | |||
|
5521 | 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value)) | |||
|
5522 | ||||
|
5523 | @hybrid_property | |||
|
5524 | def file_store_meta_section(self): | |||
|
5525 | return self._file_store_meta_section | |||
|
5526 | ||||
|
5527 | @file_store_meta_section.setter | |||
|
5528 | def file_store_meta_section(self, value): | |||
|
5529 | self._file_store_meta_section = value | |||
|
5530 | self._file_store_meta_section_hash = _hash_key(value) | |||
|
5531 | ||||
|
5532 | @hybrid_property | |||
|
5533 | def file_store_meta_key(self): | |||
|
5534 | return self._file_store_meta_key | |||
|
5535 | ||||
|
5536 | @file_store_meta_key.setter | |||
|
5537 | def file_store_meta_key(self, value): | |||
|
5538 | self._file_store_meta_key = value | |||
|
5539 | self._file_store_meta_key_hash = _hash_key(value) | |||
|
5540 | ||||
|
5541 | @hybrid_property | |||
|
5542 | def file_store_meta_value(self): | |||
|
5543 | val = self._file_store_meta_value | |||
|
5544 | ||||
|
5545 | if self._file_store_meta_value_type: | |||
|
5546 | # e.g unicode.encrypted == unicode | |||
|
5547 | _type = self._file_store_meta_value_type.split('.')[0] | |||
|
5548 | # decode the encrypted value if it's encrypted field type | |||
|
5549 | if '.encrypted' in self._file_store_meta_value_type: | |||
|
5550 | cipher = EncryptedTextValue() | |||
|
5551 | val = safe_unicode(cipher.process_result_value(val, None)) | |||
|
5552 | # do final type conversion | |||
|
5553 | converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode'] | |||
|
5554 | val = converter(val) | |||
|
5555 | ||||
|
5556 | return val | |||
|
5557 | ||||
|
5558 | @file_store_meta_value.setter | |||
|
5559 | def file_store_meta_value(self, val): | |||
|
5560 | val = safe_unicode(val) | |||
|
5561 | # encode the encrypted value | |||
|
5562 | if '.encrypted' in self.file_store_meta_value_type: | |||
|
5563 | cipher = EncryptedTextValue() | |||
|
5564 | val = safe_unicode(cipher.process_bind_param(val, None)) | |||
|
5565 | self._file_store_meta_value = val | |||
|
5566 | ||||
|
5567 | @hybrid_property | |||
|
5568 | def file_store_meta_value_type(self): | |||
|
5569 | return self._file_store_meta_value_type | |||
|
5570 | ||||
|
5571 | @file_store_meta_value_type.setter | |||
|
5572 | def file_store_meta_value_type(self, val): | |||
|
5573 | # e.g unicode.encrypted | |||
|
5574 | self.valid_value_type(val) | |||
|
5575 | self._file_store_meta_value_type = val | |||
|
5576 | ||||
|
5577 | def __json__(self): | |||
|
5578 | data = { | |||
|
5579 | 'artifact': self.file_store.file_uid, | |||
|
5580 | 'section': self.file_store_meta_section, | |||
|
5581 | 'key': self.file_store_meta_key, | |||
|
5582 | 'value': self.file_store_meta_value, | |||
|
5583 | } | |||
|
5584 | ||||
|
5585 | return data | |||
|
5586 | ||||
|
5587 | def __repr__(self): | |||
|
5588 | return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section, | |||
|
5589 | self.file_store_meta_key, self.file_store_meta_value) | |||
|
5590 | ||||
|
5591 | ||||
|
5592 | class DbMigrateVersion(Base, BaseModel): | |||
|
5593 | __tablename__ = 'db_migrate_version' | |||
|
5594 | __table_args__ = ( | |||
|
5595 | base_table_args, | |||
|
5596 | ) | |||
|
5597 | ||||
|
5598 | repository_id = Column('repository_id', String(250), primary_key=True) | |||
|
5599 | repository_path = Column('repository_path', Text) | |||
|
5600 | version = Column('version', Integer) | |||
|
5601 | ||||
|
5602 | @classmethod | |||
|
5603 | def set_version(cls, version): | |||
|
5604 | """ | |||
|
5605 | Helper for forcing a different version, usually for debugging purposes via ishell. | |||
|
5606 | """ | |||
|
5607 | ver = DbMigrateVersion.query().first() | |||
|
5608 | ver.version = version | |||
|
5609 | Session().commit() | |||
|
5610 | ||||
|
5611 | ||||
|
5612 | class DbSession(Base, BaseModel): | |||
|
5613 | __tablename__ = 'db_session' | |||
|
5614 | __table_args__ = ( | |||
|
5615 | base_table_args, | |||
|
5616 | ) | |||
|
5617 | ||||
|
5618 | def __repr__(self): | |||
|
5619 | return '<DB:DbSession({})>'.format(self.id) | |||
|
5620 | ||||
|
5621 | id = Column('id', Integer()) | |||
|
5622 | namespace = Column('namespace', String(255), primary_key=True) | |||
|
5623 | accessed = Column('accessed', DateTime, nullable=False) | |||
|
5624 | created = Column('created', DateTime, nullable=False) | |||
|
5625 | data = Column('data', PickleType, nullable=False) |
@@ -0,0 +1,35 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | import logging | |||
|
4 | from sqlalchemy import * | |||
|
5 | ||||
|
6 | from alembic.migration import MigrationContext | |||
|
7 | from alembic.operations import Operations | |||
|
8 | from sqlalchemy import BigInteger | |||
|
9 | ||||
|
10 | from rhodecode.lib.dbmigrate.versions import _reset_base | |||
|
11 | from rhodecode.model import init_model_encryption | |||
|
12 | ||||
|
13 | ||||
|
14 | log = logging.getLogger(__name__) | |||
|
15 | ||||
|
16 | ||||
|
17 | def upgrade(migrate_engine): | |||
|
18 | """ | |||
|
19 | Upgrade operations go here. | |||
|
20 | Don't create your own engine; bind migrate_engine to your metadata | |||
|
21 | """ | |||
|
22 | _reset_base(migrate_engine) | |||
|
23 | from rhodecode.lib.dbmigrate.schema import db_4_19_0_2 as db | |||
|
24 | ||||
|
25 | init_model_encryption(db) | |||
|
26 | db.ChangesetCommentHistory().__table__.create() | |||
|
27 | ||||
|
28 | ||||
|
29 | def downgrade(migrate_engine): | |||
|
30 | meta = MetaData() | |||
|
31 | meta.bind = migrate_engine | |||
|
32 | ||||
|
33 | ||||
|
34 | def fixups(models, _SESSION): | |||
|
35 | pass |
@@ -0,0 +1,25 b'' | |||||
|
1 | # -*- coding: utf-8 -*- | |||
|
2 | ||||
|
3 | # Copyright (C) 2020-2020 RhodeCode GmbH | |||
|
4 | # | |||
|
5 | # This program is free software: you can redistribute it and/or modify | |||
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |||
|
7 | # (only), as published by the Free Software Foundation. | |||
|
8 | # | |||
|
9 | # This program is distributed in the hope that it will be useful, | |||
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
|
12 | # GNU General Public License for more details. | |||
|
13 | # | |||
|
14 | # You should have received a copy of the GNU Affero General Public License | |||
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |||
|
16 | # | |||
|
17 | # This program is dual-licensed. If you wish to learn more about the | |||
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |||
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |||
|
20 | ||||
|
21 | ## base64 filter e.g ${ example | base64,n } | |||
|
22 | def base64(text): | |||
|
23 | import base64 | |||
|
24 | from rhodecode.lib.helpers import safe_str | |||
|
25 | return base64.encodestring(safe_str(text)) |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
@@ -1,60 +1,60 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import os |
|
21 | import os | |
22 | from collections import OrderedDict |
|
22 | from collections import OrderedDict | |
23 |
|
23 | |||
24 | import sys |
|
24 | import sys | |
25 | import platform |
|
25 | import platform | |
26 |
|
26 | |||
27 | VERSION = tuple(open(os.path.join( |
|
27 | VERSION = tuple(open(os.path.join( | |
28 | os.path.dirname(__file__), 'VERSION')).read().split('.')) |
|
28 | os.path.dirname(__file__), 'VERSION')).read().split('.')) | |
29 |
|
29 | |||
30 | BACKENDS = OrderedDict() |
|
30 | BACKENDS = OrderedDict() | |
31 |
|
31 | |||
32 | BACKENDS['hg'] = 'Mercurial repository' |
|
32 | BACKENDS['hg'] = 'Mercurial repository' | |
33 | BACKENDS['git'] = 'Git repository' |
|
33 | BACKENDS['git'] = 'Git repository' | |
34 | BACKENDS['svn'] = 'Subversion repository' |
|
34 | BACKENDS['svn'] = 'Subversion repository' | |
35 |
|
35 | |||
36 |
|
36 | |||
37 | CELERY_ENABLED = False |
|
37 | CELERY_ENABLED = False | |
38 | CELERY_EAGER = False |
|
38 | CELERY_EAGER = False | |
39 |
|
39 | |||
40 | # link to config for pyramid |
|
40 | # link to config for pyramid | |
41 | CONFIG = {} |
|
41 | CONFIG = {} | |
42 |
|
42 | |||
43 | # Populated with the settings dictionary from application init in |
|
43 | # Populated with the settings dictionary from application init in | |
44 | # rhodecode.conf.environment.load_pyramid_environment |
|
44 | # rhodecode.conf.environment.load_pyramid_environment | |
45 | PYRAMID_SETTINGS = {} |
|
45 | PYRAMID_SETTINGS = {} | |
46 |
|
46 | |||
47 | # Linked module for extensions |
|
47 | # Linked module for extensions | |
48 | EXTENSIONS = {} |
|
48 | EXTENSIONS = {} | |
49 |
|
49 | |||
50 | __version__ = ('.'.join((str(each) for each in VERSION[:3]))) |
|
50 | __version__ = ('.'.join((str(each) for each in VERSION[:3]))) | |
51 |
__dbversion__ = 10 |
|
51 | __dbversion__ = 108 # defines current db version for migrations | |
52 | __platform__ = platform.system() |
|
52 | __platform__ = platform.system() | |
53 | __license__ = 'AGPLv3, and Commercial License' |
|
53 | __license__ = 'AGPLv3, and Commercial License' | |
54 | __author__ = 'RhodeCode GmbH' |
|
54 | __author__ = 'RhodeCode GmbH' | |
55 | __url__ = 'https://code.rhodecode.com' |
|
55 | __url__ = 'https://code.rhodecode.com' | |
56 |
|
56 | |||
57 | is_windows = __platform__ in ['Windows'] |
|
57 | is_windows = __platform__ in ['Windows'] | |
58 | is_unix = not is_windows |
|
58 | is_unix = not is_windows | |
59 | is_test = False |
|
59 | is_test = False | |
60 | disable_error_handler = False |
|
60 | disable_error_handler = False |
@@ -1,520 +1,533 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 | from rhodecode.apps._base import add_route_with_slash |
|
20 | from rhodecode.apps._base import add_route_with_slash | |
21 |
|
21 | |||
22 |
|
22 | |||
23 | def includeme(config): |
|
23 | def includeme(config): | |
24 |
|
24 | |||
25 | # repo creating checks, special cases that aren't repo routes |
|
25 | # repo creating checks, special cases that aren't repo routes | |
26 | config.add_route( |
|
26 | config.add_route( | |
27 | name='repo_creating', |
|
27 | name='repo_creating', | |
28 | pattern='/{repo_name:.*?[^/]}/repo_creating') |
|
28 | pattern='/{repo_name:.*?[^/]}/repo_creating') | |
29 |
|
29 | |||
30 | config.add_route( |
|
30 | config.add_route( | |
31 | name='repo_creating_check', |
|
31 | name='repo_creating_check', | |
32 | pattern='/{repo_name:.*?[^/]}/repo_creating_check') |
|
32 | pattern='/{repo_name:.*?[^/]}/repo_creating_check') | |
33 |
|
33 | |||
34 | # Summary |
|
34 | # Summary | |
35 | # NOTE(marcink): one additional route is defined in very bottom, catch |
|
35 | # NOTE(marcink): one additional route is defined in very bottom, catch | |
36 | # all pattern |
|
36 | # all pattern | |
37 | config.add_route( |
|
37 | config.add_route( | |
38 | name='repo_summary_explicit', |
|
38 | name='repo_summary_explicit', | |
39 | pattern='/{repo_name:.*?[^/]}/summary', repo_route=True) |
|
39 | pattern='/{repo_name:.*?[^/]}/summary', repo_route=True) | |
40 | config.add_route( |
|
40 | config.add_route( | |
41 | name='repo_summary_commits', |
|
41 | name='repo_summary_commits', | |
42 | pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True) |
|
42 | pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True) | |
43 |
|
43 | |||
44 | # Commits |
|
44 | # Commits | |
45 | config.add_route( |
|
45 | config.add_route( | |
46 | name='repo_commit', |
|
46 | name='repo_commit', | |
47 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True) |
|
47 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True) | |
48 |
|
48 | |||
49 | config.add_route( |
|
49 | config.add_route( | |
50 | name='repo_commit_children', |
|
50 | name='repo_commit_children', | |
51 | pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True) |
|
51 | pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True) | |
52 |
|
52 | |||
53 | config.add_route( |
|
53 | config.add_route( | |
54 | name='repo_commit_parents', |
|
54 | name='repo_commit_parents', | |
55 | pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True) |
|
55 | pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True) | |
56 |
|
56 | |||
57 | config.add_route( |
|
57 | config.add_route( | |
58 | name='repo_commit_raw', |
|
58 | name='repo_commit_raw', | |
59 | pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True) |
|
59 | pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True) | |
60 |
|
60 | |||
61 | config.add_route( |
|
61 | config.add_route( | |
62 | name='repo_commit_patch', |
|
62 | name='repo_commit_patch', | |
63 | pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True) |
|
63 | pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True) | |
64 |
|
64 | |||
65 | config.add_route( |
|
65 | config.add_route( | |
66 | name='repo_commit_download', |
|
66 | name='repo_commit_download', | |
67 | pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True) |
|
67 | pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True) | |
68 |
|
68 | |||
69 | config.add_route( |
|
69 | config.add_route( | |
70 | name='repo_commit_data', |
|
70 | name='repo_commit_data', | |
71 | pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True) |
|
71 | pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True) | |
72 |
|
72 | |||
73 | config.add_route( |
|
73 | config.add_route( | |
74 | name='repo_commit_comment_create', |
|
74 | name='repo_commit_comment_create', | |
75 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True) |
|
75 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True) | |
76 |
|
76 | |||
77 | config.add_route( |
|
77 | config.add_route( | |
78 | name='repo_commit_comment_preview', |
|
78 | name='repo_commit_comment_preview', | |
79 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True) |
|
79 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True) | |
80 |
|
80 | |||
81 | config.add_route( |
|
81 | config.add_route( | |
|
82 | name='repo_commit_comment_history_view', | |||
|
83 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_history_id}/history_view', repo_route=True) | |||
|
84 | ||||
|
85 | config.add_route( | |||
82 | name='repo_commit_comment_attachment_upload', |
|
86 | name='repo_commit_comment_attachment_upload', | |
83 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/attachment_upload', repo_route=True) |
|
87 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/attachment_upload', repo_route=True) | |
84 |
|
88 | |||
85 | config.add_route( |
|
89 | config.add_route( | |
86 | name='repo_commit_comment_delete', |
|
90 | name='repo_commit_comment_delete', | |
87 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True) |
|
91 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True) | |
88 |
|
92 | |||
|
93 | config.add_route( | |||
|
94 | name='repo_commit_comment_edit', | |||
|
95 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/edit', repo_route=True) | |||
|
96 | ||||
89 | # still working url for backward compat. |
|
97 | # still working url for backward compat. | |
90 | config.add_route( |
|
98 | config.add_route( | |
91 | name='repo_commit_raw_deprecated', |
|
99 | name='repo_commit_raw_deprecated', | |
92 | pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True) |
|
100 | pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True) | |
93 |
|
101 | |||
94 | # Files |
|
102 | # Files | |
95 | config.add_route( |
|
103 | config.add_route( | |
96 | name='repo_archivefile', |
|
104 | name='repo_archivefile', | |
97 | pattern='/{repo_name:.*?[^/]}/archive/{fname:.*}', repo_route=True) |
|
105 | pattern='/{repo_name:.*?[^/]}/archive/{fname:.*}', repo_route=True) | |
98 |
|
106 | |||
99 | config.add_route( |
|
107 | config.add_route( | |
100 | name='repo_files_diff', |
|
108 | name='repo_files_diff', | |
101 | pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True) |
|
109 | pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True) | |
102 | config.add_route( # legacy route to make old links work |
|
110 | config.add_route( # legacy route to make old links work | |
103 | name='repo_files_diff_2way_redirect', |
|
111 | name='repo_files_diff_2way_redirect', | |
104 | pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True) |
|
112 | pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True) | |
105 |
|
113 | |||
106 | config.add_route( |
|
114 | config.add_route( | |
107 | name='repo_files', |
|
115 | name='repo_files', | |
108 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True) |
|
116 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True) | |
109 | config.add_route( |
|
117 | config.add_route( | |
110 | name='repo_files:default_path', |
|
118 | name='repo_files:default_path', | |
111 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True) |
|
119 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True) | |
112 | config.add_route( |
|
120 | config.add_route( | |
113 | name='repo_files:default_commit', |
|
121 | name='repo_files:default_commit', | |
114 | pattern='/{repo_name:.*?[^/]}/files', repo_route=True) |
|
122 | pattern='/{repo_name:.*?[^/]}/files', repo_route=True) | |
115 |
|
123 | |||
116 | config.add_route( |
|
124 | config.add_route( | |
117 | name='repo_files:rendered', |
|
125 | name='repo_files:rendered', | |
118 | pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True) |
|
126 | pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True) | |
119 |
|
127 | |||
120 | config.add_route( |
|
128 | config.add_route( | |
121 | name='repo_files:annotated', |
|
129 | name='repo_files:annotated', | |
122 | pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True) |
|
130 | pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True) | |
123 | config.add_route( |
|
131 | config.add_route( | |
124 | name='repo_files:annotated_previous', |
|
132 | name='repo_files:annotated_previous', | |
125 | pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True) |
|
133 | pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True) | |
126 |
|
134 | |||
127 | config.add_route( |
|
135 | config.add_route( | |
128 | name='repo_nodetree_full', |
|
136 | name='repo_nodetree_full', | |
129 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True) |
|
137 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True) | |
130 | config.add_route( |
|
138 | config.add_route( | |
131 | name='repo_nodetree_full:default_path', |
|
139 | name='repo_nodetree_full:default_path', | |
132 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True) |
|
140 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True) | |
133 |
|
141 | |||
134 | config.add_route( |
|
142 | config.add_route( | |
135 | name='repo_files_nodelist', |
|
143 | name='repo_files_nodelist', | |
136 | pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True) |
|
144 | pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True) | |
137 |
|
145 | |||
138 | config.add_route( |
|
146 | config.add_route( | |
139 | name='repo_file_raw', |
|
147 | name='repo_file_raw', | |
140 | pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True) |
|
148 | pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True) | |
141 |
|
149 | |||
142 | config.add_route( |
|
150 | config.add_route( | |
143 | name='repo_file_download', |
|
151 | name='repo_file_download', | |
144 | pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True) |
|
152 | pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True) | |
145 | config.add_route( # backward compat to keep old links working |
|
153 | config.add_route( # backward compat to keep old links working | |
146 | name='repo_file_download:legacy', |
|
154 | name='repo_file_download:legacy', | |
147 | pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}', |
|
155 | pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}', | |
148 | repo_route=True) |
|
156 | repo_route=True) | |
149 |
|
157 | |||
150 | config.add_route( |
|
158 | config.add_route( | |
151 | name='repo_file_history', |
|
159 | name='repo_file_history', | |
152 | pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True) |
|
160 | pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True) | |
153 |
|
161 | |||
154 | config.add_route( |
|
162 | config.add_route( | |
155 | name='repo_file_authors', |
|
163 | name='repo_file_authors', | |
156 | pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True) |
|
164 | pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True) | |
157 |
|
165 | |||
158 | config.add_route( |
|
166 | config.add_route( | |
159 | name='repo_files_check_head', |
|
167 | name='repo_files_check_head', | |
160 | pattern='/{repo_name:.*?[^/]}/check_head/{commit_id}/{f_path:.*}', |
|
168 | pattern='/{repo_name:.*?[^/]}/check_head/{commit_id}/{f_path:.*}', | |
161 | repo_route=True) |
|
169 | repo_route=True) | |
162 | config.add_route( |
|
170 | config.add_route( | |
163 | name='repo_files_remove_file', |
|
171 | name='repo_files_remove_file', | |
164 | pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}', |
|
172 | pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}', | |
165 | repo_route=True) |
|
173 | repo_route=True) | |
166 | config.add_route( |
|
174 | config.add_route( | |
167 | name='repo_files_delete_file', |
|
175 | name='repo_files_delete_file', | |
168 | pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}', |
|
176 | pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}', | |
169 | repo_route=True) |
|
177 | repo_route=True) | |
170 | config.add_route( |
|
178 | config.add_route( | |
171 | name='repo_files_edit_file', |
|
179 | name='repo_files_edit_file', | |
172 | pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}', |
|
180 | pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}', | |
173 | repo_route=True) |
|
181 | repo_route=True) | |
174 | config.add_route( |
|
182 | config.add_route( | |
175 | name='repo_files_update_file', |
|
183 | name='repo_files_update_file', | |
176 | pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}', |
|
184 | pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}', | |
177 | repo_route=True) |
|
185 | repo_route=True) | |
178 | config.add_route( |
|
186 | config.add_route( | |
179 | name='repo_files_add_file', |
|
187 | name='repo_files_add_file', | |
180 | pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}', |
|
188 | pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}', | |
181 | repo_route=True) |
|
189 | repo_route=True) | |
182 | config.add_route( |
|
190 | config.add_route( | |
183 | name='repo_files_upload_file', |
|
191 | name='repo_files_upload_file', | |
184 | pattern='/{repo_name:.*?[^/]}/upload_file/{commit_id}/{f_path:.*}', |
|
192 | pattern='/{repo_name:.*?[^/]}/upload_file/{commit_id}/{f_path:.*}', | |
185 | repo_route=True) |
|
193 | repo_route=True) | |
186 | config.add_route( |
|
194 | config.add_route( | |
187 | name='repo_files_create_file', |
|
195 | name='repo_files_create_file', | |
188 | pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}', |
|
196 | pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}', | |
189 | repo_route=True) |
|
197 | repo_route=True) | |
190 |
|
198 | |||
191 | # Refs data |
|
199 | # Refs data | |
192 | config.add_route( |
|
200 | config.add_route( | |
193 | name='repo_refs_data', |
|
201 | name='repo_refs_data', | |
194 | pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True) |
|
202 | pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True) | |
195 |
|
203 | |||
196 | config.add_route( |
|
204 | config.add_route( | |
197 | name='repo_refs_changelog_data', |
|
205 | name='repo_refs_changelog_data', | |
198 | pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True) |
|
206 | pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True) | |
199 |
|
207 | |||
200 | config.add_route( |
|
208 | config.add_route( | |
201 | name='repo_stats', |
|
209 | name='repo_stats', | |
202 | pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True) |
|
210 | pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True) | |
203 |
|
211 | |||
204 | # Commits |
|
212 | # Commits | |
205 | config.add_route( |
|
213 | config.add_route( | |
206 | name='repo_commits', |
|
214 | name='repo_commits', | |
207 | pattern='/{repo_name:.*?[^/]}/commits', repo_route=True) |
|
215 | pattern='/{repo_name:.*?[^/]}/commits', repo_route=True) | |
208 | config.add_route( |
|
216 | config.add_route( | |
209 | name='repo_commits_file', |
|
217 | name='repo_commits_file', | |
210 | pattern='/{repo_name:.*?[^/]}/commits/{commit_id}/{f_path:.*}', repo_route=True) |
|
218 | pattern='/{repo_name:.*?[^/]}/commits/{commit_id}/{f_path:.*}', repo_route=True) | |
211 | config.add_route( |
|
219 | config.add_route( | |
212 | name='repo_commits_elements', |
|
220 | name='repo_commits_elements', | |
213 | pattern='/{repo_name:.*?[^/]}/commits_elements', repo_route=True) |
|
221 | pattern='/{repo_name:.*?[^/]}/commits_elements', repo_route=True) | |
214 | config.add_route( |
|
222 | config.add_route( | |
215 | name='repo_commits_elements_file', |
|
223 | name='repo_commits_elements_file', | |
216 | pattern='/{repo_name:.*?[^/]}/commits_elements/{commit_id}/{f_path:.*}', repo_route=True) |
|
224 | pattern='/{repo_name:.*?[^/]}/commits_elements/{commit_id}/{f_path:.*}', repo_route=True) | |
217 |
|
225 | |||
218 | # Changelog (old deprecated name for commits page) |
|
226 | # Changelog (old deprecated name for commits page) | |
219 | config.add_route( |
|
227 | config.add_route( | |
220 | name='repo_changelog', |
|
228 | name='repo_changelog', | |
221 | pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True) |
|
229 | pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True) | |
222 | config.add_route( |
|
230 | config.add_route( | |
223 | name='repo_changelog_file', |
|
231 | name='repo_changelog_file', | |
224 | pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True) |
|
232 | pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True) | |
225 |
|
233 | |||
226 | # Compare |
|
234 | # Compare | |
227 | config.add_route( |
|
235 | config.add_route( | |
228 | name='repo_compare_select', |
|
236 | name='repo_compare_select', | |
229 | pattern='/{repo_name:.*?[^/]}/compare', repo_route=True) |
|
237 | pattern='/{repo_name:.*?[^/]}/compare', repo_route=True) | |
230 |
|
238 | |||
231 | config.add_route( |
|
239 | config.add_route( | |
232 | name='repo_compare', |
|
240 | name='repo_compare', | |
233 | pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True) |
|
241 | pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True) | |
234 |
|
242 | |||
235 | # Tags |
|
243 | # Tags | |
236 | config.add_route( |
|
244 | config.add_route( | |
237 | name='tags_home', |
|
245 | name='tags_home', | |
238 | pattern='/{repo_name:.*?[^/]}/tags', repo_route=True) |
|
246 | pattern='/{repo_name:.*?[^/]}/tags', repo_route=True) | |
239 |
|
247 | |||
240 | # Branches |
|
248 | # Branches | |
241 | config.add_route( |
|
249 | config.add_route( | |
242 | name='branches_home', |
|
250 | name='branches_home', | |
243 | pattern='/{repo_name:.*?[^/]}/branches', repo_route=True) |
|
251 | pattern='/{repo_name:.*?[^/]}/branches', repo_route=True) | |
244 |
|
252 | |||
245 | # Bookmarks |
|
253 | # Bookmarks | |
246 | config.add_route( |
|
254 | config.add_route( | |
247 | name='bookmarks_home', |
|
255 | name='bookmarks_home', | |
248 | pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True) |
|
256 | pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True) | |
249 |
|
257 | |||
250 | # Forks |
|
258 | # Forks | |
251 | config.add_route( |
|
259 | config.add_route( | |
252 | name='repo_fork_new', |
|
260 | name='repo_fork_new', | |
253 | pattern='/{repo_name:.*?[^/]}/fork', repo_route=True, |
|
261 | pattern='/{repo_name:.*?[^/]}/fork', repo_route=True, | |
254 | repo_forbid_when_archived=True, |
|
262 | repo_forbid_when_archived=True, | |
255 | repo_accepted_types=['hg', 'git']) |
|
263 | repo_accepted_types=['hg', 'git']) | |
256 |
|
264 | |||
257 | config.add_route( |
|
265 | config.add_route( | |
258 | name='repo_fork_create', |
|
266 | name='repo_fork_create', | |
259 | pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True, |
|
267 | pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True, | |
260 | repo_forbid_when_archived=True, |
|
268 | repo_forbid_when_archived=True, | |
261 | repo_accepted_types=['hg', 'git']) |
|
269 | repo_accepted_types=['hg', 'git']) | |
262 |
|
270 | |||
263 | config.add_route( |
|
271 | config.add_route( | |
264 | name='repo_forks_show_all', |
|
272 | name='repo_forks_show_all', | |
265 | pattern='/{repo_name:.*?[^/]}/forks', repo_route=True, |
|
273 | pattern='/{repo_name:.*?[^/]}/forks', repo_route=True, | |
266 | repo_accepted_types=['hg', 'git']) |
|
274 | repo_accepted_types=['hg', 'git']) | |
267 | config.add_route( |
|
275 | config.add_route( | |
268 | name='repo_forks_data', |
|
276 | name='repo_forks_data', | |
269 | pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True, |
|
277 | pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True, | |
270 | repo_accepted_types=['hg', 'git']) |
|
278 | repo_accepted_types=['hg', 'git']) | |
271 |
|
279 | |||
272 | # Pull Requests |
|
280 | # Pull Requests | |
273 | config.add_route( |
|
281 | config.add_route( | |
274 | name='pullrequest_show', |
|
282 | name='pullrequest_show', | |
275 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}', |
|
283 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}', | |
276 | repo_route=True) |
|
284 | repo_route=True) | |
277 |
|
285 | |||
278 | config.add_route( |
|
286 | config.add_route( | |
279 | name='pullrequest_show_all', |
|
287 | name='pullrequest_show_all', | |
280 | pattern='/{repo_name:.*?[^/]}/pull-request', |
|
288 | pattern='/{repo_name:.*?[^/]}/pull-request', | |
281 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
289 | repo_route=True, repo_accepted_types=['hg', 'git']) | |
282 |
|
290 | |||
283 | config.add_route( |
|
291 | config.add_route( | |
284 | name='pullrequest_show_all_data', |
|
292 | name='pullrequest_show_all_data', | |
285 | pattern='/{repo_name:.*?[^/]}/pull-request-data', |
|
293 | pattern='/{repo_name:.*?[^/]}/pull-request-data', | |
286 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
294 | repo_route=True, repo_accepted_types=['hg', 'git']) | |
287 |
|
295 | |||
288 | config.add_route( |
|
296 | config.add_route( | |
289 | name='pullrequest_repo_refs', |
|
297 | name='pullrequest_repo_refs', | |
290 | pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
298 | pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}', | |
291 | repo_route=True) |
|
299 | repo_route=True) | |
292 |
|
300 | |||
293 | config.add_route( |
|
301 | config.add_route( | |
294 | name='pullrequest_repo_targets', |
|
302 | name='pullrequest_repo_targets', | |
295 | pattern='/{repo_name:.*?[^/]}/pull-request/repo-targets', |
|
303 | pattern='/{repo_name:.*?[^/]}/pull-request/repo-targets', | |
296 | repo_route=True) |
|
304 | repo_route=True) | |
297 |
|
305 | |||
298 | config.add_route( |
|
306 | config.add_route( | |
299 | name='pullrequest_new', |
|
307 | name='pullrequest_new', | |
300 | pattern='/{repo_name:.*?[^/]}/pull-request/new', |
|
308 | pattern='/{repo_name:.*?[^/]}/pull-request/new', | |
301 | repo_route=True, repo_accepted_types=['hg', 'git'], |
|
309 | repo_route=True, repo_accepted_types=['hg', 'git'], | |
302 | repo_forbid_when_archived=True) |
|
310 | repo_forbid_when_archived=True) | |
303 |
|
311 | |||
304 | config.add_route( |
|
312 | config.add_route( | |
305 | name='pullrequest_create', |
|
313 | name='pullrequest_create', | |
306 | pattern='/{repo_name:.*?[^/]}/pull-request/create', |
|
314 | pattern='/{repo_name:.*?[^/]}/pull-request/create', | |
307 | repo_route=True, repo_accepted_types=['hg', 'git'], |
|
315 | repo_route=True, repo_accepted_types=['hg', 'git'], | |
308 | repo_forbid_when_archived=True) |
|
316 | repo_forbid_when_archived=True) | |
309 |
|
317 | |||
310 | config.add_route( |
|
318 | config.add_route( | |
311 | name='pullrequest_update', |
|
319 | name='pullrequest_update', | |
312 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update', |
|
320 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update', | |
313 | repo_route=True, repo_forbid_when_archived=True) |
|
321 | repo_route=True, repo_forbid_when_archived=True) | |
314 |
|
322 | |||
315 | config.add_route( |
|
323 | config.add_route( | |
316 | name='pullrequest_merge', |
|
324 | name='pullrequest_merge', | |
317 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge', |
|
325 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge', | |
318 | repo_route=True, repo_forbid_when_archived=True) |
|
326 | repo_route=True, repo_forbid_when_archived=True) | |
319 |
|
327 | |||
320 | config.add_route( |
|
328 | config.add_route( | |
321 | name='pullrequest_delete', |
|
329 | name='pullrequest_delete', | |
322 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete', |
|
330 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete', | |
323 | repo_route=True, repo_forbid_when_archived=True) |
|
331 | repo_route=True, repo_forbid_when_archived=True) | |
324 |
|
332 | |||
325 | config.add_route( |
|
333 | config.add_route( | |
326 | name='pullrequest_comment_create', |
|
334 | name='pullrequest_comment_create', | |
327 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment', |
|
335 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment', | |
328 | repo_route=True) |
|
336 | repo_route=True) | |
329 |
|
337 | |||
330 | config.add_route( |
|
338 | config.add_route( | |
|
339 | name='pullrequest_comment_edit', | |||
|
340 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/edit', | |||
|
341 | repo_route=True, repo_accepted_types=['hg', 'git']) | |||
|
342 | ||||
|
343 | config.add_route( | |||
331 | name='pullrequest_comment_delete', |
|
344 | name='pullrequest_comment_delete', | |
332 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete', |
|
345 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete', | |
333 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
346 | repo_route=True, repo_accepted_types=['hg', 'git']) | |
334 |
|
347 | |||
335 | # Artifacts, (EE feature) |
|
348 | # Artifacts, (EE feature) | |
336 | config.add_route( |
|
349 | config.add_route( | |
337 | name='repo_artifacts_list', |
|
350 | name='repo_artifacts_list', | |
338 | pattern='/{repo_name:.*?[^/]}/artifacts', repo_route=True) |
|
351 | pattern='/{repo_name:.*?[^/]}/artifacts', repo_route=True) | |
339 |
|
352 | |||
340 | # Settings |
|
353 | # Settings | |
341 | config.add_route( |
|
354 | config.add_route( | |
342 | name='edit_repo', |
|
355 | name='edit_repo', | |
343 | pattern='/{repo_name:.*?[^/]}/settings', repo_route=True) |
|
356 | pattern='/{repo_name:.*?[^/]}/settings', repo_route=True) | |
344 | # update is POST on edit_repo |
|
357 | # update is POST on edit_repo | |
345 |
|
358 | |||
346 | # Settings advanced |
|
359 | # Settings advanced | |
347 | config.add_route( |
|
360 | config.add_route( | |
348 | name='edit_repo_advanced', |
|
361 | name='edit_repo_advanced', | |
349 | pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True) |
|
362 | pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True) | |
350 | config.add_route( |
|
363 | config.add_route( | |
351 | name='edit_repo_advanced_archive', |
|
364 | name='edit_repo_advanced_archive', | |
352 | pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True) |
|
365 | pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True) | |
353 | config.add_route( |
|
366 | config.add_route( | |
354 | name='edit_repo_advanced_delete', |
|
367 | name='edit_repo_advanced_delete', | |
355 | pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True) |
|
368 | pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True) | |
356 | config.add_route( |
|
369 | config.add_route( | |
357 | name='edit_repo_advanced_locking', |
|
370 | name='edit_repo_advanced_locking', | |
358 | pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True) |
|
371 | pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True) | |
359 | config.add_route( |
|
372 | config.add_route( | |
360 | name='edit_repo_advanced_journal', |
|
373 | name='edit_repo_advanced_journal', | |
361 | pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True) |
|
374 | pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True) | |
362 | config.add_route( |
|
375 | config.add_route( | |
363 | name='edit_repo_advanced_fork', |
|
376 | name='edit_repo_advanced_fork', | |
364 | pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True) |
|
377 | pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True) | |
365 |
|
378 | |||
366 | config.add_route( |
|
379 | config.add_route( | |
367 | name='edit_repo_advanced_hooks', |
|
380 | name='edit_repo_advanced_hooks', | |
368 | pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True) |
|
381 | pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True) | |
369 |
|
382 | |||
370 | # Caches |
|
383 | # Caches | |
371 | config.add_route( |
|
384 | config.add_route( | |
372 | name='edit_repo_caches', |
|
385 | name='edit_repo_caches', | |
373 | pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True) |
|
386 | pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True) | |
374 |
|
387 | |||
375 | # Permissions |
|
388 | # Permissions | |
376 | config.add_route( |
|
389 | config.add_route( | |
377 | name='edit_repo_perms', |
|
390 | name='edit_repo_perms', | |
378 | pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True) |
|
391 | pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True) | |
379 |
|
392 | |||
380 | config.add_route( |
|
393 | config.add_route( | |
381 | name='edit_repo_perms_set_private', |
|
394 | name='edit_repo_perms_set_private', | |
382 | pattern='/{repo_name:.*?[^/]}/settings/permissions/set_private', repo_route=True) |
|
395 | pattern='/{repo_name:.*?[^/]}/settings/permissions/set_private', repo_route=True) | |
383 |
|
396 | |||
384 | # Permissions Branch (EE feature) |
|
397 | # Permissions Branch (EE feature) | |
385 | config.add_route( |
|
398 | config.add_route( | |
386 | name='edit_repo_perms_branch', |
|
399 | name='edit_repo_perms_branch', | |
387 | pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True) |
|
400 | pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True) | |
388 | config.add_route( |
|
401 | config.add_route( | |
389 | name='edit_repo_perms_branch_delete', |
|
402 | name='edit_repo_perms_branch_delete', | |
390 | pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete', |
|
403 | pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete', | |
391 | repo_route=True) |
|
404 | repo_route=True) | |
392 |
|
405 | |||
393 | # Maintenance |
|
406 | # Maintenance | |
394 | config.add_route( |
|
407 | config.add_route( | |
395 | name='edit_repo_maintenance', |
|
408 | name='edit_repo_maintenance', | |
396 | pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True) |
|
409 | pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True) | |
397 |
|
410 | |||
398 | config.add_route( |
|
411 | config.add_route( | |
399 | name='edit_repo_maintenance_execute', |
|
412 | name='edit_repo_maintenance_execute', | |
400 | pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True) |
|
413 | pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True) | |
401 |
|
414 | |||
402 | # Fields |
|
415 | # Fields | |
403 | config.add_route( |
|
416 | config.add_route( | |
404 | name='edit_repo_fields', |
|
417 | name='edit_repo_fields', | |
405 | pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True) |
|
418 | pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True) | |
406 | config.add_route( |
|
419 | config.add_route( | |
407 | name='edit_repo_fields_create', |
|
420 | name='edit_repo_fields_create', | |
408 | pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True) |
|
421 | pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True) | |
409 | config.add_route( |
|
422 | config.add_route( | |
410 | name='edit_repo_fields_delete', |
|
423 | name='edit_repo_fields_delete', | |
411 | pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True) |
|
424 | pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True) | |
412 |
|
425 | |||
413 | # Locking |
|
426 | # Locking | |
414 | config.add_route( |
|
427 | config.add_route( | |
415 | name='repo_edit_toggle_locking', |
|
428 | name='repo_edit_toggle_locking', | |
416 | pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True) |
|
429 | pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True) | |
417 |
|
430 | |||
418 | # Remote |
|
431 | # Remote | |
419 | config.add_route( |
|
432 | config.add_route( | |
420 | name='edit_repo_remote', |
|
433 | name='edit_repo_remote', | |
421 | pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True) |
|
434 | pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True) | |
422 | config.add_route( |
|
435 | config.add_route( | |
423 | name='edit_repo_remote_pull', |
|
436 | name='edit_repo_remote_pull', | |
424 | pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True) |
|
437 | pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True) | |
425 | config.add_route( |
|
438 | config.add_route( | |
426 | name='edit_repo_remote_push', |
|
439 | name='edit_repo_remote_push', | |
427 | pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True) |
|
440 | pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True) | |
428 |
|
441 | |||
429 | # Statistics |
|
442 | # Statistics | |
430 | config.add_route( |
|
443 | config.add_route( | |
431 | name='edit_repo_statistics', |
|
444 | name='edit_repo_statistics', | |
432 | pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True) |
|
445 | pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True) | |
433 | config.add_route( |
|
446 | config.add_route( | |
434 | name='edit_repo_statistics_reset', |
|
447 | name='edit_repo_statistics_reset', | |
435 | pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True) |
|
448 | pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True) | |
436 |
|
449 | |||
437 | # Issue trackers |
|
450 | # Issue trackers | |
438 | config.add_route( |
|
451 | config.add_route( | |
439 | name='edit_repo_issuetracker', |
|
452 | name='edit_repo_issuetracker', | |
440 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True) |
|
453 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True) | |
441 | config.add_route( |
|
454 | config.add_route( | |
442 | name='edit_repo_issuetracker_test', |
|
455 | name='edit_repo_issuetracker_test', | |
443 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True) |
|
456 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True) | |
444 | config.add_route( |
|
457 | config.add_route( | |
445 | name='edit_repo_issuetracker_delete', |
|
458 | name='edit_repo_issuetracker_delete', | |
446 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True) |
|
459 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True) | |
447 | config.add_route( |
|
460 | config.add_route( | |
448 | name='edit_repo_issuetracker_update', |
|
461 | name='edit_repo_issuetracker_update', | |
449 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True) |
|
462 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True) | |
450 |
|
463 | |||
451 | # VCS Settings |
|
464 | # VCS Settings | |
452 | config.add_route( |
|
465 | config.add_route( | |
453 | name='edit_repo_vcs', |
|
466 | name='edit_repo_vcs', | |
454 | pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True) |
|
467 | pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True) | |
455 | config.add_route( |
|
468 | config.add_route( | |
456 | name='edit_repo_vcs_update', |
|
469 | name='edit_repo_vcs_update', | |
457 | pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True) |
|
470 | pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True) | |
458 |
|
471 | |||
459 | # svn pattern |
|
472 | # svn pattern | |
460 | config.add_route( |
|
473 | config.add_route( | |
461 | name='edit_repo_vcs_svn_pattern_delete', |
|
474 | name='edit_repo_vcs_svn_pattern_delete', | |
462 | pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True) |
|
475 | pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True) | |
463 |
|
476 | |||
464 | # Repo Review Rules (EE feature) |
|
477 | # Repo Review Rules (EE feature) | |
465 | config.add_route( |
|
478 | config.add_route( | |
466 | name='repo_reviewers', |
|
479 | name='repo_reviewers', | |
467 | pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True) |
|
480 | pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True) | |
468 |
|
481 | |||
469 | config.add_route( |
|
482 | config.add_route( | |
470 | name='repo_default_reviewers_data', |
|
483 | name='repo_default_reviewers_data', | |
471 | pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True) |
|
484 | pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True) | |
472 |
|
485 | |||
473 | # Repo Automation (EE feature) |
|
486 | # Repo Automation (EE feature) | |
474 | config.add_route( |
|
487 | config.add_route( | |
475 | name='repo_automation', |
|
488 | name='repo_automation', | |
476 | pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True) |
|
489 | pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True) | |
477 |
|
490 | |||
478 | # Strip |
|
491 | # Strip | |
479 | config.add_route( |
|
492 | config.add_route( | |
480 | name='edit_repo_strip', |
|
493 | name='edit_repo_strip', | |
481 | pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True) |
|
494 | pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True) | |
482 |
|
495 | |||
483 | config.add_route( |
|
496 | config.add_route( | |
484 | name='strip_check', |
|
497 | name='strip_check', | |
485 | pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True) |
|
498 | pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True) | |
486 |
|
499 | |||
487 | config.add_route( |
|
500 | config.add_route( | |
488 | name='strip_execute', |
|
501 | name='strip_execute', | |
489 | pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True) |
|
502 | pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True) | |
490 |
|
503 | |||
491 | # Audit logs |
|
504 | # Audit logs | |
492 | config.add_route( |
|
505 | config.add_route( | |
493 | name='edit_repo_audit_logs', |
|
506 | name='edit_repo_audit_logs', | |
494 | pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True) |
|
507 | pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True) | |
495 |
|
508 | |||
496 | # ATOM/RSS Feed, shouldn't contain slashes for outlook compatibility |
|
509 | # ATOM/RSS Feed, shouldn't contain slashes for outlook compatibility | |
497 | config.add_route( |
|
510 | config.add_route( | |
498 | name='rss_feed_home', |
|
511 | name='rss_feed_home', | |
499 | pattern='/{repo_name:.*?[^/]}/feed-rss', repo_route=True) |
|
512 | pattern='/{repo_name:.*?[^/]}/feed-rss', repo_route=True) | |
500 |
|
513 | |||
501 | config.add_route( |
|
514 | config.add_route( | |
502 | name='atom_feed_home', |
|
515 | name='atom_feed_home', | |
503 | pattern='/{repo_name:.*?[^/]}/feed-atom', repo_route=True) |
|
516 | pattern='/{repo_name:.*?[^/]}/feed-atom', repo_route=True) | |
504 |
|
517 | |||
505 | config.add_route( |
|
518 | config.add_route( | |
506 | name='rss_feed_home_old', |
|
519 | name='rss_feed_home_old', | |
507 | pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True) |
|
520 | pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True) | |
508 |
|
521 | |||
509 | config.add_route( |
|
522 | config.add_route( | |
510 | name='atom_feed_home_old', |
|
523 | name='atom_feed_home_old', | |
511 | pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True) |
|
524 | pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True) | |
512 |
|
525 | |||
513 | # NOTE(marcink): needs to be at the end for catch-all |
|
526 | # NOTE(marcink): needs to be at the end for catch-all | |
514 | add_route_with_slash( |
|
527 | add_route_with_slash( | |
515 | config, |
|
528 | config, | |
516 | name='repo_summary', |
|
529 | name='repo_summary', | |
517 | pattern='/{repo_name:.*?[^/]}', repo_route=True) |
|
530 | pattern='/{repo_name:.*?[^/]}', repo_route=True) | |
518 |
|
531 | |||
519 | # Scan module for configuration decorators. |
|
532 | # Scan module for configuration decorators. | |
520 | config.scan('.views', ignore='.tests') |
|
533 | config.scan('.views', ignore='.tests') |
@@ -1,348 +1,507 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import pytest |
|
21 | import pytest | |
22 |
|
22 | |||
23 | from rhodecode.tests import TestController |
|
23 | from rhodecode.tests import TestController | |
24 |
|
24 | |||
25 | from rhodecode.model.db import ChangesetComment, Notification |
|
25 | from rhodecode.model.db import ChangesetComment, Notification | |
26 | from rhodecode.model.meta import Session |
|
26 | from rhodecode.model.meta import Session | |
27 | from rhodecode.lib import helpers as h |
|
27 | from rhodecode.lib import helpers as h | |
28 |
|
28 | |||
29 |
|
29 | |||
30 | def route_path(name, params=None, **kwargs): |
|
30 | def route_path(name, params=None, **kwargs): | |
31 | import urllib |
|
31 | import urllib | |
32 |
|
32 | |||
33 | base_url = { |
|
33 | base_url = { | |
34 | 'repo_commit': '/{repo_name}/changeset/{commit_id}', |
|
34 | 'repo_commit': '/{repo_name}/changeset/{commit_id}', | |
35 | 'repo_commit_comment_create': '/{repo_name}/changeset/{commit_id}/comment/create', |
|
35 | 'repo_commit_comment_create': '/{repo_name}/changeset/{commit_id}/comment/create', | |
36 | 'repo_commit_comment_preview': '/{repo_name}/changeset/{commit_id}/comment/preview', |
|
36 | 'repo_commit_comment_preview': '/{repo_name}/changeset/{commit_id}/comment/preview', | |
37 | 'repo_commit_comment_delete': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete', |
|
37 | 'repo_commit_comment_delete': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete', | |
|
38 | 'repo_commit_comment_edit': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/edit', | |||
38 | }[name].format(**kwargs) |
|
39 | }[name].format(**kwargs) | |
39 |
|
40 | |||
40 | if params: |
|
41 | if params: | |
41 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
42 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) | |
42 | return base_url |
|
43 | return base_url | |
43 |
|
44 | |||
44 |
|
45 | |||
45 | @pytest.mark.backends("git", "hg", "svn") |
|
46 | @pytest.mark.backends("git", "hg", "svn") | |
46 | class TestRepoCommitCommentsView(TestController): |
|
47 | class TestRepoCommitCommentsView(TestController): | |
47 |
|
48 | |||
48 | @pytest.fixture(autouse=True) |
|
49 | @pytest.fixture(autouse=True) | |
49 | def prepare(self, request, baseapp): |
|
50 | def prepare(self, request, baseapp): | |
50 | for x in ChangesetComment.query().all(): |
|
51 | for x in ChangesetComment.query().all(): | |
51 | Session().delete(x) |
|
52 | Session().delete(x) | |
52 | Session().commit() |
|
53 | Session().commit() | |
53 |
|
54 | |||
54 | for x in Notification.query().all(): |
|
55 | for x in Notification.query().all(): | |
55 | Session().delete(x) |
|
56 | Session().delete(x) | |
56 | Session().commit() |
|
57 | Session().commit() | |
57 |
|
58 | |||
58 | request.addfinalizer(self.cleanup) |
|
59 | request.addfinalizer(self.cleanup) | |
59 |
|
60 | |||
60 | def cleanup(self): |
|
61 | def cleanup(self): | |
61 | for x in ChangesetComment.query().all(): |
|
62 | for x in ChangesetComment.query().all(): | |
62 | Session().delete(x) |
|
63 | Session().delete(x) | |
63 | Session().commit() |
|
64 | Session().commit() | |
64 |
|
65 | |||
65 | for x in Notification.query().all(): |
|
66 | for x in Notification.query().all(): | |
66 | Session().delete(x) |
|
67 | Session().delete(x) | |
67 | Session().commit() |
|
68 | Session().commit() | |
68 |
|
69 | |||
69 | @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES) |
|
70 | @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES) | |
70 | def test_create(self, comment_type, backend): |
|
71 | def test_create(self, comment_type, backend): | |
71 | self.log_user() |
|
72 | self.log_user() | |
72 | commit = backend.repo.get_commit('300') |
|
73 | commit = backend.repo.get_commit('300') | |
73 | commit_id = commit.raw_id |
|
74 | commit_id = commit.raw_id | |
74 | text = u'CommentOnCommit' |
|
75 | text = u'CommentOnCommit' | |
75 |
|
76 | |||
76 | params = {'text': text, 'csrf_token': self.csrf_token, |
|
77 | params = {'text': text, 'csrf_token': self.csrf_token, | |
77 | 'comment_type': comment_type} |
|
78 | 'comment_type': comment_type} | |
78 | self.app.post( |
|
79 | self.app.post( | |
79 | route_path('repo_commit_comment_create', |
|
80 | route_path('repo_commit_comment_create', | |
80 | repo_name=backend.repo_name, commit_id=commit_id), |
|
81 | repo_name=backend.repo_name, commit_id=commit_id), | |
81 | params=params) |
|
82 | params=params) | |
82 |
|
83 | |||
83 | response = self.app.get( |
|
84 | response = self.app.get( | |
84 | route_path('repo_commit', |
|
85 | route_path('repo_commit', | |
85 | repo_name=backend.repo_name, commit_id=commit_id)) |
|
86 | repo_name=backend.repo_name, commit_id=commit_id)) | |
86 |
|
87 | |||
87 | # test DB |
|
88 | # test DB | |
88 | assert ChangesetComment.query().count() == 1 |
|
89 | assert ChangesetComment.query().count() == 1 | |
89 | assert_comment_links(response, ChangesetComment.query().count(), 0) |
|
90 | assert_comment_links(response, ChangesetComment.query().count(), 0) | |
90 |
|
91 | |||
91 | assert Notification.query().count() == 1 |
|
92 | assert Notification.query().count() == 1 | |
92 | assert ChangesetComment.query().count() == 1 |
|
93 | assert ChangesetComment.query().count() == 1 | |
93 |
|
94 | |||
94 | notification = Notification.query().all()[0] |
|
95 | notification = Notification.query().all()[0] | |
95 |
|
96 | |||
96 | comment_id = ChangesetComment.query().first().comment_id |
|
97 | comment_id = ChangesetComment.query().first().comment_id | |
97 | assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT |
|
98 | assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT | |
98 |
|
99 | |||
99 | author = notification.created_by_user.username_and_name |
|
100 | author = notification.created_by_user.username_and_name | |
100 | sbj = '@{0} left a {1} on commit `{2}` in the `{3}` repository'.format( |
|
101 | sbj = '@{0} left a {1} on commit `{2}` in the `{3}` repository'.format( | |
101 | author, comment_type, h.show_id(commit), backend.repo_name) |
|
102 | author, comment_type, h.show_id(commit), backend.repo_name) | |
102 | assert sbj == notification.subject |
|
103 | assert sbj == notification.subject | |
103 |
|
104 | |||
104 | lnk = (u'/{0}/changeset/{1}#comment-{2}'.format( |
|
105 | lnk = (u'/{0}/changeset/{1}#comment-{2}'.format( | |
105 | backend.repo_name, commit_id, comment_id)) |
|
106 | backend.repo_name, commit_id, comment_id)) | |
106 | assert lnk in notification.body |
|
107 | assert lnk in notification.body | |
107 |
|
108 | |||
108 | @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES) |
|
109 | @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES) | |
109 | def test_create_inline(self, comment_type, backend): |
|
110 | def test_create_inline(self, comment_type, backend): | |
110 | self.log_user() |
|
111 | self.log_user() | |
111 | commit = backend.repo.get_commit('300') |
|
112 | commit = backend.repo.get_commit('300') | |
112 | commit_id = commit.raw_id |
|
113 | commit_id = commit.raw_id | |
113 | text = u'CommentOnCommit' |
|
114 | text = u'CommentOnCommit' | |
114 | f_path = 'vcs/web/simplevcs/views/repository.py' |
|
115 | f_path = 'vcs/web/simplevcs/views/repository.py' | |
115 | line = 'n1' |
|
116 | line = 'n1' | |
116 |
|
117 | |||
117 | params = {'text': text, 'f_path': f_path, 'line': line, |
|
118 | params = {'text': text, 'f_path': f_path, 'line': line, | |
118 | 'comment_type': comment_type, |
|
119 | 'comment_type': comment_type, | |
119 | 'csrf_token': self.csrf_token} |
|
120 | 'csrf_token': self.csrf_token} | |
120 |
|
121 | |||
121 | self.app.post( |
|
122 | self.app.post( | |
122 | route_path('repo_commit_comment_create', |
|
123 | route_path('repo_commit_comment_create', | |
123 | repo_name=backend.repo_name, commit_id=commit_id), |
|
124 | repo_name=backend.repo_name, commit_id=commit_id), | |
124 | params=params) |
|
125 | params=params) | |
125 |
|
126 | |||
126 | response = self.app.get( |
|
127 | response = self.app.get( | |
127 | route_path('repo_commit', |
|
128 | route_path('repo_commit', | |
128 | repo_name=backend.repo_name, commit_id=commit_id)) |
|
129 | repo_name=backend.repo_name, commit_id=commit_id)) | |
129 |
|
130 | |||
130 | # test DB |
|
131 | # test DB | |
131 | assert ChangesetComment.query().count() == 1 |
|
132 | assert ChangesetComment.query().count() == 1 | |
132 | assert_comment_links(response, 0, ChangesetComment.query().count()) |
|
133 | assert_comment_links(response, 0, ChangesetComment.query().count()) | |
133 |
|
134 | |||
134 | if backend.alias == 'svn': |
|
135 | if backend.alias == 'svn': | |
135 | response.mustcontain( |
|
136 | response.mustcontain( | |
136 | '''data-f-path="vcs/commands/summary.py" ''' |
|
137 | '''data-f-path="vcs/commands/summary.py" ''' | |
137 | '''data-anchor-id="c-300-ad05457a43f8"''' |
|
138 | '''data-anchor-id="c-300-ad05457a43f8"''' | |
138 | ) |
|
139 | ) | |
139 | if backend.alias == 'git': |
|
140 | if backend.alias == 'git': | |
140 | response.mustcontain( |
|
141 | response.mustcontain( | |
141 | '''data-f-path="vcs/backends/hg.py" ''' |
|
142 | '''data-f-path="vcs/backends/hg.py" ''' | |
142 | '''data-anchor-id="c-883e775e89ea-9c390eb52cd6"''' |
|
143 | '''data-anchor-id="c-883e775e89ea-9c390eb52cd6"''' | |
143 | ) |
|
144 | ) | |
144 |
|
145 | |||
145 | if backend.alias == 'hg': |
|
146 | if backend.alias == 'hg': | |
146 | response.mustcontain( |
|
147 | response.mustcontain( | |
147 | '''data-f-path="vcs/backends/hg.py" ''' |
|
148 | '''data-f-path="vcs/backends/hg.py" ''' | |
148 | '''data-anchor-id="c-e58d85a3973b-9c390eb52cd6"''' |
|
149 | '''data-anchor-id="c-e58d85a3973b-9c390eb52cd6"''' | |
149 | ) |
|
150 | ) | |
150 |
|
151 | |||
151 | assert Notification.query().count() == 1 |
|
152 | assert Notification.query().count() == 1 | |
152 | assert ChangesetComment.query().count() == 1 |
|
153 | assert ChangesetComment.query().count() == 1 | |
153 |
|
154 | |||
154 | notification = Notification.query().all()[0] |
|
155 | notification = Notification.query().all()[0] | |
155 | comment = ChangesetComment.query().first() |
|
156 | comment = ChangesetComment.query().first() | |
156 | assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT |
|
157 | assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT | |
157 |
|
158 | |||
158 | assert comment.revision == commit_id |
|
159 | assert comment.revision == commit_id | |
159 |
|
160 | |||
160 | author = notification.created_by_user.username_and_name |
|
161 | author = notification.created_by_user.username_and_name | |
161 | sbj = '@{0} left a {1} on file `{2}` in commit `{3}` in the `{4}` repository'.format( |
|
162 | sbj = '@{0} left a {1} on file `{2}` in commit `{3}` in the `{4}` repository'.format( | |
162 | author, comment_type, f_path, h.show_id(commit), backend.repo_name) |
|
163 | author, comment_type, f_path, h.show_id(commit), backend.repo_name) | |
163 |
|
164 | |||
164 | assert sbj == notification.subject |
|
165 | assert sbj == notification.subject | |
165 |
|
166 | |||
166 | lnk = (u'/{0}/changeset/{1}#comment-{2}'.format( |
|
167 | lnk = (u'/{0}/changeset/{1}#comment-{2}'.format( | |
167 | backend.repo_name, commit_id, comment.comment_id)) |
|
168 | backend.repo_name, commit_id, comment.comment_id)) | |
168 | assert lnk in notification.body |
|
169 | assert lnk in notification.body | |
169 | assert 'on line n1' in notification.body |
|
170 | assert 'on line n1' in notification.body | |
170 |
|
171 | |||
171 | def test_create_with_mention(self, backend): |
|
172 | def test_create_with_mention(self, backend): | |
172 | self.log_user() |
|
173 | self.log_user() | |
173 |
|
174 | |||
174 | commit_id = backend.repo.get_commit('300').raw_id |
|
175 | commit_id = backend.repo.get_commit('300').raw_id | |
175 | text = u'@test_regular check CommentOnCommit' |
|
176 | text = u'@test_regular check CommentOnCommit' | |
176 |
|
177 | |||
177 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
178 | params = {'text': text, 'csrf_token': self.csrf_token} | |
178 | self.app.post( |
|
179 | self.app.post( | |
179 | route_path('repo_commit_comment_create', |
|
180 | route_path('repo_commit_comment_create', | |
180 | repo_name=backend.repo_name, commit_id=commit_id), |
|
181 | repo_name=backend.repo_name, commit_id=commit_id), | |
181 | params=params) |
|
182 | params=params) | |
182 |
|
183 | |||
183 | response = self.app.get( |
|
184 | response = self.app.get( | |
184 | route_path('repo_commit', |
|
185 | route_path('repo_commit', | |
185 | repo_name=backend.repo_name, commit_id=commit_id)) |
|
186 | repo_name=backend.repo_name, commit_id=commit_id)) | |
186 | # test DB |
|
187 | # test DB | |
187 | assert ChangesetComment.query().count() == 1 |
|
188 | assert ChangesetComment.query().count() == 1 | |
188 | assert_comment_links(response, ChangesetComment.query().count(), 0) |
|
189 | assert_comment_links(response, ChangesetComment.query().count(), 0) | |
189 |
|
190 | |||
190 | notification = Notification.query().one() |
|
191 | notification = Notification.query().one() | |
191 |
|
192 | |||
192 | assert len(notification.recipients) == 2 |
|
193 | assert len(notification.recipients) == 2 | |
193 | users = [x.username for x in notification.recipients] |
|
194 | users = [x.username for x in notification.recipients] | |
194 |
|
195 | |||
195 | # test_regular gets notification by @mention |
|
196 | # test_regular gets notification by @mention | |
196 | assert sorted(users) == [u'test_admin', u'test_regular'] |
|
197 | assert sorted(users) == [u'test_admin', u'test_regular'] | |
197 |
|
198 | |||
198 | def test_create_with_status_change(self, backend): |
|
199 | def test_create_with_status_change(self, backend): | |
199 | self.log_user() |
|
200 | self.log_user() | |
200 | commit = backend.repo.get_commit('300') |
|
201 | commit = backend.repo.get_commit('300') | |
201 | commit_id = commit.raw_id |
|
202 | commit_id = commit.raw_id | |
202 | text = u'CommentOnCommit' |
|
203 | text = u'CommentOnCommit' | |
203 | f_path = 'vcs/web/simplevcs/views/repository.py' |
|
204 | f_path = 'vcs/web/simplevcs/views/repository.py' | |
204 | line = 'n1' |
|
205 | line = 'n1' | |
205 |
|
206 | |||
206 | params = {'text': text, 'changeset_status': 'approved', |
|
207 | params = {'text': text, 'changeset_status': 'approved', | |
207 | 'csrf_token': self.csrf_token} |
|
208 | 'csrf_token': self.csrf_token} | |
208 |
|
209 | |||
209 | self.app.post( |
|
210 | self.app.post( | |
210 | route_path( |
|
211 | route_path( | |
211 | 'repo_commit_comment_create', |
|
212 | 'repo_commit_comment_create', | |
212 | repo_name=backend.repo_name, commit_id=commit_id), |
|
213 | repo_name=backend.repo_name, commit_id=commit_id), | |
213 | params=params) |
|
214 | params=params) | |
214 |
|
215 | |||
215 | response = self.app.get( |
|
216 | response = self.app.get( | |
216 | route_path('repo_commit', |
|
217 | route_path('repo_commit', | |
217 | repo_name=backend.repo_name, commit_id=commit_id)) |
|
218 | repo_name=backend.repo_name, commit_id=commit_id)) | |
218 |
|
219 | |||
219 | # test DB |
|
220 | # test DB | |
220 | assert ChangesetComment.query().count() == 1 |
|
221 | assert ChangesetComment.query().count() == 1 | |
221 | assert_comment_links(response, ChangesetComment.query().count(), 0) |
|
222 | assert_comment_links(response, ChangesetComment.query().count(), 0) | |
222 |
|
223 | |||
223 | assert Notification.query().count() == 1 |
|
224 | assert Notification.query().count() == 1 | |
224 | assert ChangesetComment.query().count() == 1 |
|
225 | assert ChangesetComment.query().count() == 1 | |
225 |
|
226 | |||
226 | notification = Notification.query().all()[0] |
|
227 | notification = Notification.query().all()[0] | |
227 |
|
228 | |||
228 | comment_id = ChangesetComment.query().first().comment_id |
|
229 | comment_id = ChangesetComment.query().first().comment_id | |
229 | assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT |
|
230 | assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT | |
230 |
|
231 | |||
231 | author = notification.created_by_user.username_and_name |
|
232 | author = notification.created_by_user.username_and_name | |
232 | sbj = '[status: Approved] @{0} left a note on commit `{1}` in the `{2}` repository'.format( |
|
233 | sbj = '[status: Approved] @{0} left a note on commit `{1}` in the `{2}` repository'.format( | |
233 | author, h.show_id(commit), backend.repo_name) |
|
234 | author, h.show_id(commit), backend.repo_name) | |
234 | assert sbj == notification.subject |
|
235 | assert sbj == notification.subject | |
235 |
|
236 | |||
236 | lnk = (u'/{0}/changeset/{1}#comment-{2}'.format( |
|
237 | lnk = (u'/{0}/changeset/{1}#comment-{2}'.format( | |
237 | backend.repo_name, commit_id, comment_id)) |
|
238 | backend.repo_name, commit_id, comment_id)) | |
238 | assert lnk in notification.body |
|
239 | assert lnk in notification.body | |
239 |
|
240 | |||
240 | def test_delete(self, backend): |
|
241 | def test_delete(self, backend): | |
241 | self.log_user() |
|
242 | self.log_user() | |
242 | commit_id = backend.repo.get_commit('300').raw_id |
|
243 | commit_id = backend.repo.get_commit('300').raw_id | |
243 | text = u'CommentOnCommit' |
|
244 | text = u'CommentOnCommit' | |
244 |
|
245 | |||
245 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
246 | params = {'text': text, 'csrf_token': self.csrf_token} | |
246 | self.app.post( |
|
247 | self.app.post( | |
247 | route_path( |
|
248 | route_path( | |
248 | 'repo_commit_comment_create', |
|
249 | 'repo_commit_comment_create', | |
249 | repo_name=backend.repo_name, commit_id=commit_id), |
|
250 | repo_name=backend.repo_name, commit_id=commit_id), | |
250 | params=params) |
|
251 | params=params) | |
251 |
|
252 | |||
252 | comments = ChangesetComment.query().all() |
|
253 | comments = ChangesetComment.query().all() | |
253 | assert len(comments) == 1 |
|
254 | assert len(comments) == 1 | |
254 | comment_id = comments[0].comment_id |
|
255 | comment_id = comments[0].comment_id | |
255 |
|
256 | |||
256 | self.app.post( |
|
257 | self.app.post( | |
257 | route_path('repo_commit_comment_delete', |
|
258 | route_path('repo_commit_comment_delete', | |
258 | repo_name=backend.repo_name, |
|
259 | repo_name=backend.repo_name, | |
259 | commit_id=commit_id, |
|
260 | commit_id=commit_id, | |
260 | comment_id=comment_id), |
|
261 | comment_id=comment_id), | |
261 | params={'csrf_token': self.csrf_token}) |
|
262 | params={'csrf_token': self.csrf_token}) | |
262 |
|
263 | |||
263 | comments = ChangesetComment.query().all() |
|
264 | comments = ChangesetComment.query().all() | |
264 | assert len(comments) == 0 |
|
265 | assert len(comments) == 0 | |
265 |
|
266 | |||
266 | response = self.app.get( |
|
267 | response = self.app.get( | |
267 | route_path('repo_commit', |
|
268 | route_path('repo_commit', | |
268 | repo_name=backend.repo_name, commit_id=commit_id)) |
|
269 | repo_name=backend.repo_name, commit_id=commit_id)) | |
269 | assert_comment_links(response, 0, 0) |
|
270 | assert_comment_links(response, 0, 0) | |
270 |
|
271 | |||
|
272 | def test_edit(self, backend): | |||
|
273 | self.log_user() | |||
|
274 | commit_id = backend.repo.get_commit('300').raw_id | |||
|
275 | text = u'CommentOnCommit' | |||
|
276 | ||||
|
277 | params = {'text': text, 'csrf_token': self.csrf_token} | |||
|
278 | self.app.post( | |||
|
279 | route_path( | |||
|
280 | 'repo_commit_comment_create', | |||
|
281 | repo_name=backend.repo_name, commit_id=commit_id), | |||
|
282 | params=params) | |||
|
283 | ||||
|
284 | comments = ChangesetComment.query().all() | |||
|
285 | assert len(comments) == 1 | |||
|
286 | comment_id = comments[0].comment_id | |||
|
287 | test_text = 'test_text' | |||
|
288 | self.app.post( | |||
|
289 | route_path( | |||
|
290 | 'repo_commit_comment_edit', | |||
|
291 | repo_name=backend.repo_name, | |||
|
292 | commit_id=commit_id, | |||
|
293 | comment_id=comment_id, | |||
|
294 | ), | |||
|
295 | params={ | |||
|
296 | 'csrf_token': self.csrf_token, | |||
|
297 | 'text': test_text, | |||
|
298 | 'version': '0', | |||
|
299 | }) | |||
|
300 | ||||
|
301 | text_form_db = ChangesetComment.query().filter( | |||
|
302 | ChangesetComment.comment_id == comment_id).first().text | |||
|
303 | assert test_text == text_form_db | |||
|
304 | ||||
|
305 | def test_edit_without_change(self, backend): | |||
|
306 | self.log_user() | |||
|
307 | commit_id = backend.repo.get_commit('300').raw_id | |||
|
308 | text = u'CommentOnCommit' | |||
|
309 | ||||
|
310 | params = {'text': text, 'csrf_token': self.csrf_token} | |||
|
311 | self.app.post( | |||
|
312 | route_path( | |||
|
313 | 'repo_commit_comment_create', | |||
|
314 | repo_name=backend.repo_name, commit_id=commit_id), | |||
|
315 | params=params) | |||
|
316 | ||||
|
317 | comments = ChangesetComment.query().all() | |||
|
318 | assert len(comments) == 1 | |||
|
319 | comment_id = comments[0].comment_id | |||
|
320 | ||||
|
321 | response = self.app.post( | |||
|
322 | route_path( | |||
|
323 | 'repo_commit_comment_edit', | |||
|
324 | repo_name=backend.repo_name, | |||
|
325 | commit_id=commit_id, | |||
|
326 | comment_id=comment_id, | |||
|
327 | ), | |||
|
328 | params={ | |||
|
329 | 'csrf_token': self.csrf_token, | |||
|
330 | 'text': text, | |||
|
331 | 'version': '0', | |||
|
332 | }, | |||
|
333 | status=404, | |||
|
334 | ) | |||
|
335 | assert response.status_int == 404 | |||
|
336 | ||||
|
337 | def test_edit_try_edit_already_edited(self, backend): | |||
|
338 | self.log_user() | |||
|
339 | commit_id = backend.repo.get_commit('300').raw_id | |||
|
340 | text = u'CommentOnCommit' | |||
|
341 | ||||
|
342 | params = {'text': text, 'csrf_token': self.csrf_token} | |||
|
343 | self.app.post( | |||
|
344 | route_path( | |||
|
345 | 'repo_commit_comment_create', | |||
|
346 | repo_name=backend.repo_name, commit_id=commit_id | |||
|
347 | ), | |||
|
348 | params=params, | |||
|
349 | ) | |||
|
350 | ||||
|
351 | comments = ChangesetComment.query().all() | |||
|
352 | assert len(comments) == 1 | |||
|
353 | comment_id = comments[0].comment_id | |||
|
354 | test_text = 'test_text' | |||
|
355 | self.app.post( | |||
|
356 | route_path( | |||
|
357 | 'repo_commit_comment_edit', | |||
|
358 | repo_name=backend.repo_name, | |||
|
359 | commit_id=commit_id, | |||
|
360 | comment_id=comment_id, | |||
|
361 | ), | |||
|
362 | params={ | |||
|
363 | 'csrf_token': self.csrf_token, | |||
|
364 | 'text': test_text, | |||
|
365 | 'version': '0', | |||
|
366 | } | |||
|
367 | ) | |||
|
368 | test_text_v2 = 'test_v2' | |||
|
369 | response = self.app.post( | |||
|
370 | route_path( | |||
|
371 | 'repo_commit_comment_edit', | |||
|
372 | repo_name=backend.repo_name, | |||
|
373 | commit_id=commit_id, | |||
|
374 | comment_id=comment_id, | |||
|
375 | ), | |||
|
376 | params={ | |||
|
377 | 'csrf_token': self.csrf_token, | |||
|
378 | 'text': test_text_v2, | |||
|
379 | 'version': '0', | |||
|
380 | }, | |||
|
381 | status=404, | |||
|
382 | ) | |||
|
383 | assert response.status_int == 404 | |||
|
384 | ||||
|
385 | text_form_db = ChangesetComment.query().filter( | |||
|
386 | ChangesetComment.comment_id == comment_id).first().text | |||
|
387 | ||||
|
388 | assert test_text == text_form_db | |||
|
389 | assert test_text_v2 != text_form_db | |||
|
390 | ||||
|
391 | def test_edit_forbidden_for_immutable_comments(self, backend): | |||
|
392 | self.log_user() | |||
|
393 | commit_id = backend.repo.get_commit('300').raw_id | |||
|
394 | text = u'CommentOnCommit' | |||
|
395 | ||||
|
396 | params = {'text': text, 'csrf_token': self.csrf_token, 'version': '0'} | |||
|
397 | self.app.post( | |||
|
398 | route_path( | |||
|
399 | 'repo_commit_comment_create', | |||
|
400 | repo_name=backend.repo_name, | |||
|
401 | commit_id=commit_id, | |||
|
402 | ), | |||
|
403 | params=params | |||
|
404 | ) | |||
|
405 | ||||
|
406 | comments = ChangesetComment.query().all() | |||
|
407 | assert len(comments) == 1 | |||
|
408 | comment_id = comments[0].comment_id | |||
|
409 | ||||
|
410 | comment = ChangesetComment.get(comment_id) | |||
|
411 | comment.immutable_state = ChangesetComment.OP_IMMUTABLE | |||
|
412 | Session().add(comment) | |||
|
413 | Session().commit() | |||
|
414 | ||||
|
415 | response = self.app.post( | |||
|
416 | route_path( | |||
|
417 | 'repo_commit_comment_edit', | |||
|
418 | repo_name=backend.repo_name, | |||
|
419 | commit_id=commit_id, | |||
|
420 | comment_id=comment_id, | |||
|
421 | ), | |||
|
422 | params={ | |||
|
423 | 'csrf_token': self.csrf_token, | |||
|
424 | 'text': 'test_text', | |||
|
425 | }, | |||
|
426 | status=403, | |||
|
427 | ) | |||
|
428 | assert response.status_int == 403 | |||
|
429 | ||||
271 | def test_delete_forbidden_for_immutable_comments(self, backend): |
|
430 | def test_delete_forbidden_for_immutable_comments(self, backend): | |
272 | self.log_user() |
|
431 | self.log_user() | |
273 | commit_id = backend.repo.get_commit('300').raw_id |
|
432 | commit_id = backend.repo.get_commit('300').raw_id | |
274 | text = u'CommentOnCommit' |
|
433 | text = u'CommentOnCommit' | |
275 |
|
434 | |||
276 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
435 | params = {'text': text, 'csrf_token': self.csrf_token} | |
277 | self.app.post( |
|
436 | self.app.post( | |
278 | route_path( |
|
437 | route_path( | |
279 | 'repo_commit_comment_create', |
|
438 | 'repo_commit_comment_create', | |
280 | repo_name=backend.repo_name, commit_id=commit_id), |
|
439 | repo_name=backend.repo_name, commit_id=commit_id), | |
281 | params=params) |
|
440 | params=params) | |
282 |
|
441 | |||
283 | comments = ChangesetComment.query().all() |
|
442 | comments = ChangesetComment.query().all() | |
284 | assert len(comments) == 1 |
|
443 | assert len(comments) == 1 | |
285 | comment_id = comments[0].comment_id |
|
444 | comment_id = comments[0].comment_id | |
286 |
|
445 | |||
287 | comment = ChangesetComment.get(comment_id) |
|
446 | comment = ChangesetComment.get(comment_id) | |
288 | comment.immutable_state = ChangesetComment.OP_IMMUTABLE |
|
447 | comment.immutable_state = ChangesetComment.OP_IMMUTABLE | |
289 | Session().add(comment) |
|
448 | Session().add(comment) | |
290 | Session().commit() |
|
449 | Session().commit() | |
291 |
|
450 | |||
292 | self.app.post( |
|
451 | self.app.post( | |
293 | route_path('repo_commit_comment_delete', |
|
452 | route_path('repo_commit_comment_delete', | |
294 | repo_name=backend.repo_name, |
|
453 | repo_name=backend.repo_name, | |
295 | commit_id=commit_id, |
|
454 | commit_id=commit_id, | |
296 | comment_id=comment_id), |
|
455 | comment_id=comment_id), | |
297 | params={'csrf_token': self.csrf_token}, |
|
456 | params={'csrf_token': self.csrf_token}, | |
298 | status=403) |
|
457 | status=403) | |
299 |
|
458 | |||
300 | @pytest.mark.parametrize('renderer, text_input, output', [ |
|
459 | @pytest.mark.parametrize('renderer, text_input, output', [ | |
301 | ('rst', 'plain text', '<p>plain text</p>'), |
|
460 | ('rst', 'plain text', '<p>plain text</p>'), | |
302 | ('rst', 'header\n======', '<h1 class="title">header</h1>'), |
|
461 | ('rst', 'header\n======', '<h1 class="title">header</h1>'), | |
303 | ('rst', '*italics*', '<em>italics</em>'), |
|
462 | ('rst', '*italics*', '<em>italics</em>'), | |
304 | ('rst', '**bold**', '<strong>bold</strong>'), |
|
463 | ('rst', '**bold**', '<strong>bold</strong>'), | |
305 | ('markdown', 'plain text', '<p>plain text</p>'), |
|
464 | ('markdown', 'plain text', '<p>plain text</p>'), | |
306 | ('markdown', '# header', '<h1>header</h1>'), |
|
465 | ('markdown', '# header', '<h1>header</h1>'), | |
307 | ('markdown', '*italics*', '<em>italics</em>'), |
|
466 | ('markdown', '*italics*', '<em>italics</em>'), | |
308 | ('markdown', '**bold**', '<strong>bold</strong>'), |
|
467 | ('markdown', '**bold**', '<strong>bold</strong>'), | |
309 | ], ids=['rst-plain', 'rst-header', 'rst-italics', 'rst-bold', 'md-plain', |
|
468 | ], ids=['rst-plain', 'rst-header', 'rst-italics', 'rst-bold', 'md-plain', | |
310 | 'md-header', 'md-italics', 'md-bold', ]) |
|
469 | 'md-header', 'md-italics', 'md-bold', ]) | |
311 | def test_preview(self, renderer, text_input, output, backend, xhr_header): |
|
470 | def test_preview(self, renderer, text_input, output, backend, xhr_header): | |
312 | self.log_user() |
|
471 | self.log_user() | |
313 | params = { |
|
472 | params = { | |
314 | 'renderer': renderer, |
|
473 | 'renderer': renderer, | |
315 | 'text': text_input, |
|
474 | 'text': text_input, | |
316 | 'csrf_token': self.csrf_token |
|
475 | 'csrf_token': self.csrf_token | |
317 | } |
|
476 | } | |
318 | commit_id = '0' * 16 # fake this for tests |
|
477 | commit_id = '0' * 16 # fake this for tests | |
319 | response = self.app.post( |
|
478 | response = self.app.post( | |
320 | route_path('repo_commit_comment_preview', |
|
479 | route_path('repo_commit_comment_preview', | |
321 | repo_name=backend.repo_name, commit_id=commit_id,), |
|
480 | repo_name=backend.repo_name, commit_id=commit_id,), | |
322 | params=params, |
|
481 | params=params, | |
323 | extra_environ=xhr_header) |
|
482 | extra_environ=xhr_header) | |
324 |
|
483 | |||
325 | response.mustcontain(output) |
|
484 | response.mustcontain(output) | |
326 |
|
485 | |||
327 |
|
486 | |||
328 | def assert_comment_links(response, comments, inline_comments): |
|
487 | def assert_comment_links(response, comments, inline_comments): | |
329 | if comments == 1: |
|
488 | if comments == 1: | |
330 | comments_text = "%d General" % comments |
|
489 | comments_text = "%d General" % comments | |
331 | else: |
|
490 | else: | |
332 | comments_text = "%d General" % comments |
|
491 | comments_text = "%d General" % comments | |
333 |
|
492 | |||
334 | if inline_comments == 1: |
|
493 | if inline_comments == 1: | |
335 | inline_comments_text = "%d Inline" % inline_comments |
|
494 | inline_comments_text = "%d Inline" % inline_comments | |
336 | else: |
|
495 | else: | |
337 | inline_comments_text = "%d Inline" % inline_comments |
|
496 | inline_comments_text = "%d Inline" % inline_comments | |
338 |
|
497 | |||
339 | if comments: |
|
498 | if comments: | |
340 | response.mustcontain('<a href="#comments">%s</a>,' % comments_text) |
|
499 | response.mustcontain('<a href="#comments">%s</a>,' % comments_text) | |
341 | else: |
|
500 | else: | |
342 | response.mustcontain(comments_text) |
|
501 | response.mustcontain(comments_text) | |
343 |
|
502 | |||
344 | if inline_comments: |
|
503 | if inline_comments: | |
345 | response.mustcontain( |
|
504 | response.mustcontain( | |
346 | 'id="inline-comments-counter">%s' % inline_comments_text) |
|
505 | 'id="inline-comments-counter">%s' % inline_comments_text) | |
347 | else: |
|
506 | else: | |
348 | response.mustcontain(inline_comments_text) |
|
507 | response.mustcontain(inline_comments_text) |
@@ -1,1217 +1,1427 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 | import mock |
|
20 | import mock | |
21 | import pytest |
|
21 | import pytest | |
22 |
|
22 | |||
23 | import rhodecode |
|
23 | import rhodecode | |
24 | from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason |
|
24 | from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason | |
25 | from rhodecode.lib.vcs.nodes import FileNode |
|
25 | from rhodecode.lib.vcs.nodes import FileNode | |
26 | from rhodecode.lib import helpers as h |
|
26 | from rhodecode.lib import helpers as h | |
27 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
27 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
28 | from rhodecode.model.db import ( |
|
28 | from rhodecode.model.db import ( | |
29 | PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository) |
|
29 | PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository) | |
30 | from rhodecode.model.meta import Session |
|
30 | from rhodecode.model.meta import Session | |
31 | from rhodecode.model.pull_request import PullRequestModel |
|
31 | from rhodecode.model.pull_request import PullRequestModel | |
32 | from rhodecode.model.user import UserModel |
|
32 | from rhodecode.model.user import UserModel | |
|
33 | from rhodecode.model.comment import CommentsModel | |||
33 | from rhodecode.tests import ( |
|
34 | from rhodecode.tests import ( | |
34 | assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN) |
|
35 | assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN) | |
35 |
|
36 | |||
36 |
|
37 | |||
37 | def route_path(name, params=None, **kwargs): |
|
38 | def route_path(name, params=None, **kwargs): | |
38 | import urllib |
|
39 | import urllib | |
39 |
|
40 | |||
40 | base_url = { |
|
41 | base_url = { | |
41 | 'repo_changelog': '/{repo_name}/changelog', |
|
42 | 'repo_changelog': '/{repo_name}/changelog', | |
42 | 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}', |
|
43 | 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}', | |
43 | 'repo_commits': '/{repo_name}/commits', |
|
44 | 'repo_commits': '/{repo_name}/commits', | |
44 | 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}', |
|
45 | 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}', | |
45 | 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}', |
|
46 | 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}', | |
46 | 'pullrequest_show_all': '/{repo_name}/pull-request', |
|
47 | 'pullrequest_show_all': '/{repo_name}/pull-request', | |
47 | 'pullrequest_show_all_data': '/{repo_name}/pull-request-data', |
|
48 | 'pullrequest_show_all_data': '/{repo_name}/pull-request-data', | |
48 | 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
49 | 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', | |
49 | 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations', |
|
50 | 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations', | |
50 | 'pullrequest_new': '/{repo_name}/pull-request/new', |
|
51 | 'pullrequest_new': '/{repo_name}/pull-request/new', | |
51 | 'pullrequest_create': '/{repo_name}/pull-request/create', |
|
52 | 'pullrequest_create': '/{repo_name}/pull-request/create', | |
52 | 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update', |
|
53 | 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update', | |
53 | 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge', |
|
54 | 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge', | |
54 | 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete', |
|
55 | 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete', | |
55 | 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment', |
|
56 | 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment', | |
56 | 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete', |
|
57 | 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete', | |
|
58 | 'pullrequest_comment_edit': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/edit', | |||
57 | }[name].format(**kwargs) |
|
59 | }[name].format(**kwargs) | |
58 |
|
60 | |||
59 | if params: |
|
61 | if params: | |
60 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
62 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) | |
61 | return base_url |
|
63 | return base_url | |
62 |
|
64 | |||
63 |
|
65 | |||
64 | @pytest.mark.usefixtures('app', 'autologin_user') |
|
66 | @pytest.mark.usefixtures('app', 'autologin_user') | |
65 | @pytest.mark.backends("git", "hg") |
|
67 | @pytest.mark.backends("git", "hg") | |
66 | class TestPullrequestsView(object): |
|
68 | class TestPullrequestsView(object): | |
67 |
|
69 | |||
68 | def test_index(self, backend): |
|
70 | def test_index(self, backend): | |
69 | self.app.get(route_path( |
|
71 | self.app.get(route_path( | |
70 | 'pullrequest_new', |
|
72 | 'pullrequest_new', | |
71 | repo_name=backend.repo_name)) |
|
73 | repo_name=backend.repo_name)) | |
72 |
|
74 | |||
73 | def test_option_menu_create_pull_request_exists(self, backend): |
|
75 | def test_option_menu_create_pull_request_exists(self, backend): | |
74 | repo_name = backend.repo_name |
|
76 | repo_name = backend.repo_name | |
75 | response = self.app.get(h.route_path('repo_summary', repo_name=repo_name)) |
|
77 | response = self.app.get(h.route_path('repo_summary', repo_name=repo_name)) | |
76 |
|
78 | |||
77 | create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path( |
|
79 | create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path( | |
78 | 'pullrequest_new', repo_name=repo_name) |
|
80 | 'pullrequest_new', repo_name=repo_name) | |
79 | response.mustcontain(create_pr_link) |
|
81 | response.mustcontain(create_pr_link) | |
80 |
|
82 | |||
81 | def test_create_pr_form_with_raw_commit_id(self, backend): |
|
83 | def test_create_pr_form_with_raw_commit_id(self, backend): | |
82 | repo = backend.repo |
|
84 | repo = backend.repo | |
83 |
|
85 | |||
84 | self.app.get( |
|
86 | self.app.get( | |
85 | route_path('pullrequest_new', repo_name=repo.repo_name, |
|
87 | route_path('pullrequest_new', repo_name=repo.repo_name, | |
86 | commit=repo.get_commit().raw_id), |
|
88 | commit=repo.get_commit().raw_id), | |
87 | status=200) |
|
89 | status=200) | |
88 |
|
90 | |||
89 | @pytest.mark.parametrize('pr_merge_enabled', [True, False]) |
|
91 | @pytest.mark.parametrize('pr_merge_enabled', [True, False]) | |
90 | @pytest.mark.parametrize('range_diff', ["0", "1"]) |
|
92 | @pytest.mark.parametrize('range_diff', ["0", "1"]) | |
91 | def test_show(self, pr_util, pr_merge_enabled, range_diff): |
|
93 | def test_show(self, pr_util, pr_merge_enabled, range_diff): | |
92 | pull_request = pr_util.create_pull_request( |
|
94 | pull_request = pr_util.create_pull_request( | |
93 | mergeable=pr_merge_enabled, enable_notifications=False) |
|
95 | mergeable=pr_merge_enabled, enable_notifications=False) | |
94 |
|
96 | |||
95 | response = self.app.get(route_path( |
|
97 | response = self.app.get(route_path( | |
96 | 'pullrequest_show', |
|
98 | 'pullrequest_show', | |
97 | repo_name=pull_request.target_repo.scm_instance().name, |
|
99 | repo_name=pull_request.target_repo.scm_instance().name, | |
98 | pull_request_id=pull_request.pull_request_id, |
|
100 | pull_request_id=pull_request.pull_request_id, | |
99 | params={'range-diff': range_diff})) |
|
101 | params={'range-diff': range_diff})) | |
100 |
|
102 | |||
101 | for commit_id in pull_request.revisions: |
|
103 | for commit_id in pull_request.revisions: | |
102 | response.mustcontain(commit_id) |
|
104 | response.mustcontain(commit_id) | |
103 |
|
105 | |||
104 | response.mustcontain(pull_request.target_ref_parts.type) |
|
106 | response.mustcontain(pull_request.target_ref_parts.type) | |
105 | response.mustcontain(pull_request.target_ref_parts.name) |
|
107 | response.mustcontain(pull_request.target_ref_parts.name) | |
106 |
|
108 | |||
107 | response.mustcontain('class="pull-request-merge"') |
|
109 | response.mustcontain('class="pull-request-merge"') | |
108 |
|
110 | |||
109 | if pr_merge_enabled: |
|
111 | if pr_merge_enabled: | |
110 | response.mustcontain('Pull request reviewer approval is pending') |
|
112 | response.mustcontain('Pull request reviewer approval is pending') | |
111 | else: |
|
113 | else: | |
112 | response.mustcontain('Server-side pull request merging is disabled.') |
|
114 | response.mustcontain('Server-side pull request merging is disabled.') | |
113 |
|
115 | |||
114 | if range_diff == "1": |
|
116 | if range_diff == "1": | |
115 | response.mustcontain('Turn off: Show the diff as commit range') |
|
117 | response.mustcontain('Turn off: Show the diff as commit range') | |
116 |
|
118 | |||
117 | def test_close_status_visibility(self, pr_util, user_util, csrf_token): |
|
119 | def test_close_status_visibility(self, pr_util, user_util, csrf_token): | |
118 | # Logout |
|
120 | # Logout | |
119 | response = self.app.post( |
|
121 | response = self.app.post( | |
120 | h.route_path('logout'), |
|
122 | h.route_path('logout'), | |
121 | params={'csrf_token': csrf_token}) |
|
123 | params={'csrf_token': csrf_token}) | |
122 | # Login as regular user |
|
124 | # Login as regular user | |
123 | response = self.app.post(h.route_path('login'), |
|
125 | response = self.app.post(h.route_path('login'), | |
124 | {'username': TEST_USER_REGULAR_LOGIN, |
|
126 | {'username': TEST_USER_REGULAR_LOGIN, | |
125 | 'password': 'test12'}) |
|
127 | 'password': 'test12'}) | |
126 |
|
128 | |||
127 | pull_request = pr_util.create_pull_request( |
|
129 | pull_request = pr_util.create_pull_request( | |
128 | author=TEST_USER_REGULAR_LOGIN) |
|
130 | author=TEST_USER_REGULAR_LOGIN) | |
129 |
|
131 | |||
130 | response = self.app.get(route_path( |
|
132 | response = self.app.get(route_path( | |
131 | 'pullrequest_show', |
|
133 | 'pullrequest_show', | |
132 | repo_name=pull_request.target_repo.scm_instance().name, |
|
134 | repo_name=pull_request.target_repo.scm_instance().name, | |
133 | pull_request_id=pull_request.pull_request_id)) |
|
135 | pull_request_id=pull_request.pull_request_id)) | |
134 |
|
136 | |||
135 | response.mustcontain('Server-side pull request merging is disabled.') |
|
137 | response.mustcontain('Server-side pull request merging is disabled.') | |
136 |
|
138 | |||
137 | assert_response = response.assert_response() |
|
139 | assert_response = response.assert_response() | |
138 | # for regular user without a merge permissions, we don't see it |
|
140 | # for regular user without a merge permissions, we don't see it | |
139 | assert_response.no_element_exists('#close-pull-request-action') |
|
141 | assert_response.no_element_exists('#close-pull-request-action') | |
140 |
|
142 | |||
141 | user_util.grant_user_permission_to_repo( |
|
143 | user_util.grant_user_permission_to_repo( | |
142 | pull_request.target_repo, |
|
144 | pull_request.target_repo, | |
143 | UserModel().get_by_username(TEST_USER_REGULAR_LOGIN), |
|
145 | UserModel().get_by_username(TEST_USER_REGULAR_LOGIN), | |
144 | 'repository.write') |
|
146 | 'repository.write') | |
145 | response = self.app.get(route_path( |
|
147 | response = self.app.get(route_path( | |
146 | 'pullrequest_show', |
|
148 | 'pullrequest_show', | |
147 | repo_name=pull_request.target_repo.scm_instance().name, |
|
149 | repo_name=pull_request.target_repo.scm_instance().name, | |
148 | pull_request_id=pull_request.pull_request_id)) |
|
150 | pull_request_id=pull_request.pull_request_id)) | |
149 |
|
151 | |||
150 | response.mustcontain('Server-side pull request merging is disabled.') |
|
152 | response.mustcontain('Server-side pull request merging is disabled.') | |
151 |
|
153 | |||
152 | assert_response = response.assert_response() |
|
154 | assert_response = response.assert_response() | |
153 | # now regular user has a merge permissions, we have CLOSE button |
|
155 | # now regular user has a merge permissions, we have CLOSE button | |
154 | assert_response.one_element_exists('#close-pull-request-action') |
|
156 | assert_response.one_element_exists('#close-pull-request-action') | |
155 |
|
157 | |||
156 | def test_show_invalid_commit_id(self, pr_util): |
|
158 | def test_show_invalid_commit_id(self, pr_util): | |
157 | # Simulating invalid revisions which will cause a lookup error |
|
159 | # Simulating invalid revisions which will cause a lookup error | |
158 | pull_request = pr_util.create_pull_request() |
|
160 | pull_request = pr_util.create_pull_request() | |
159 | pull_request.revisions = ['invalid'] |
|
161 | pull_request.revisions = ['invalid'] | |
160 | Session().add(pull_request) |
|
162 | Session().add(pull_request) | |
161 | Session().commit() |
|
163 | Session().commit() | |
162 |
|
164 | |||
163 | response = self.app.get(route_path( |
|
165 | response = self.app.get(route_path( | |
164 | 'pullrequest_show', |
|
166 | 'pullrequest_show', | |
165 | repo_name=pull_request.target_repo.scm_instance().name, |
|
167 | repo_name=pull_request.target_repo.scm_instance().name, | |
166 | pull_request_id=pull_request.pull_request_id)) |
|
168 | pull_request_id=pull_request.pull_request_id)) | |
167 |
|
169 | |||
168 | for commit_id in pull_request.revisions: |
|
170 | for commit_id in pull_request.revisions: | |
169 | response.mustcontain(commit_id) |
|
171 | response.mustcontain(commit_id) | |
170 |
|
172 | |||
171 | def test_show_invalid_source_reference(self, pr_util): |
|
173 | def test_show_invalid_source_reference(self, pr_util): | |
172 | pull_request = pr_util.create_pull_request() |
|
174 | pull_request = pr_util.create_pull_request() | |
173 | pull_request.source_ref = 'branch:b:invalid' |
|
175 | pull_request.source_ref = 'branch:b:invalid' | |
174 | Session().add(pull_request) |
|
176 | Session().add(pull_request) | |
175 | Session().commit() |
|
177 | Session().commit() | |
176 |
|
178 | |||
177 | self.app.get(route_path( |
|
179 | self.app.get(route_path( | |
178 | 'pullrequest_show', |
|
180 | 'pullrequest_show', | |
179 | repo_name=pull_request.target_repo.scm_instance().name, |
|
181 | repo_name=pull_request.target_repo.scm_instance().name, | |
180 | pull_request_id=pull_request.pull_request_id)) |
|
182 | pull_request_id=pull_request.pull_request_id)) | |
181 |
|
183 | |||
182 | def test_edit_title_description(self, pr_util, csrf_token): |
|
184 | def test_edit_title_description(self, pr_util, csrf_token): | |
183 | pull_request = pr_util.create_pull_request() |
|
185 | pull_request = pr_util.create_pull_request() | |
184 | pull_request_id = pull_request.pull_request_id |
|
186 | pull_request_id = pull_request.pull_request_id | |
185 |
|
187 | |||
186 | response = self.app.post( |
|
188 | response = self.app.post( | |
187 | route_path('pullrequest_update', |
|
189 | route_path('pullrequest_update', | |
188 | repo_name=pull_request.target_repo.repo_name, |
|
190 | repo_name=pull_request.target_repo.repo_name, | |
189 | pull_request_id=pull_request_id), |
|
191 | pull_request_id=pull_request_id), | |
190 | params={ |
|
192 | params={ | |
191 | 'edit_pull_request': 'true', |
|
193 | 'edit_pull_request': 'true', | |
192 | 'title': 'New title', |
|
194 | 'title': 'New title', | |
193 | 'description': 'New description', |
|
195 | 'description': 'New description', | |
194 | 'csrf_token': csrf_token}) |
|
196 | 'csrf_token': csrf_token}) | |
195 |
|
197 | |||
196 | assert_session_flash( |
|
198 | assert_session_flash( | |
197 | response, u'Pull request title & description updated.', |
|
199 | response, u'Pull request title & description updated.', | |
198 | category='success') |
|
200 | category='success') | |
199 |
|
201 | |||
200 | pull_request = PullRequest.get(pull_request_id) |
|
202 | pull_request = PullRequest.get(pull_request_id) | |
201 | assert pull_request.title == 'New title' |
|
203 | assert pull_request.title == 'New title' | |
202 | assert pull_request.description == 'New description' |
|
204 | assert pull_request.description == 'New description' | |
203 |
|
205 | |||
204 | def test_edit_title_description_closed(self, pr_util, csrf_token): |
|
206 | def test_edit_title_description_closed(self, pr_util, csrf_token): | |
205 | pull_request = pr_util.create_pull_request() |
|
207 | pull_request = pr_util.create_pull_request() | |
206 | pull_request_id = pull_request.pull_request_id |
|
208 | pull_request_id = pull_request.pull_request_id | |
207 | repo_name = pull_request.target_repo.repo_name |
|
209 | repo_name = pull_request.target_repo.repo_name | |
208 | pr_util.close() |
|
210 | pr_util.close() | |
209 |
|
211 | |||
210 | response = self.app.post( |
|
212 | response = self.app.post( | |
211 | route_path('pullrequest_update', |
|
213 | route_path('pullrequest_update', | |
212 | repo_name=repo_name, pull_request_id=pull_request_id), |
|
214 | repo_name=repo_name, pull_request_id=pull_request_id), | |
213 | params={ |
|
215 | params={ | |
214 | 'edit_pull_request': 'true', |
|
216 | 'edit_pull_request': 'true', | |
215 | 'title': 'New title', |
|
217 | 'title': 'New title', | |
216 | 'description': 'New description', |
|
218 | 'description': 'New description', | |
217 | 'csrf_token': csrf_token}, status=200) |
|
219 | 'csrf_token': csrf_token}, status=200) | |
218 | assert_session_flash( |
|
220 | assert_session_flash( | |
219 | response, u'Cannot update closed pull requests.', |
|
221 | response, u'Cannot update closed pull requests.', | |
220 | category='error') |
|
222 | category='error') | |
221 |
|
223 | |||
222 | def test_update_invalid_source_reference(self, pr_util, csrf_token): |
|
224 | def test_update_invalid_source_reference(self, pr_util, csrf_token): | |
223 | from rhodecode.lib.vcs.backends.base import UpdateFailureReason |
|
225 | from rhodecode.lib.vcs.backends.base import UpdateFailureReason | |
224 |
|
226 | |||
225 | pull_request = pr_util.create_pull_request() |
|
227 | pull_request = pr_util.create_pull_request() | |
226 | pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id' |
|
228 | pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id' | |
227 | Session().add(pull_request) |
|
229 | Session().add(pull_request) | |
228 | Session().commit() |
|
230 | Session().commit() | |
229 |
|
231 | |||
230 | pull_request_id = pull_request.pull_request_id |
|
232 | pull_request_id = pull_request.pull_request_id | |
231 |
|
233 | |||
232 | response = self.app.post( |
|
234 | response = self.app.post( | |
233 | route_path('pullrequest_update', |
|
235 | route_path('pullrequest_update', | |
234 | repo_name=pull_request.target_repo.repo_name, |
|
236 | repo_name=pull_request.target_repo.repo_name, | |
235 | pull_request_id=pull_request_id), |
|
237 | pull_request_id=pull_request_id), | |
236 | params={'update_commits': 'true', 'csrf_token': csrf_token}) |
|
238 | params={'update_commits': 'true', 'csrf_token': csrf_token}) | |
237 |
|
239 | |||
238 | expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[ |
|
240 | expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[ | |
239 | UpdateFailureReason.MISSING_SOURCE_REF]) |
|
241 | UpdateFailureReason.MISSING_SOURCE_REF]) | |
240 | assert_session_flash(response, expected_msg, category='error') |
|
242 | assert_session_flash(response, expected_msg, category='error') | |
241 |
|
243 | |||
242 | def test_missing_target_reference(self, pr_util, csrf_token): |
|
244 | def test_missing_target_reference(self, pr_util, csrf_token): | |
243 | from rhodecode.lib.vcs.backends.base import MergeFailureReason |
|
245 | from rhodecode.lib.vcs.backends.base import MergeFailureReason | |
244 | pull_request = pr_util.create_pull_request( |
|
246 | pull_request = pr_util.create_pull_request( | |
245 | approved=True, mergeable=True) |
|
247 | approved=True, mergeable=True) | |
246 | unicode_reference = u'branch:invalid-branch:invalid-commit-id' |
|
248 | unicode_reference = u'branch:invalid-branch:invalid-commit-id' | |
247 | pull_request.target_ref = unicode_reference |
|
249 | pull_request.target_ref = unicode_reference | |
248 | Session().add(pull_request) |
|
250 | Session().add(pull_request) | |
249 | Session().commit() |
|
251 | Session().commit() | |
250 |
|
252 | |||
251 | pull_request_id = pull_request.pull_request_id |
|
253 | pull_request_id = pull_request.pull_request_id | |
252 | pull_request_url = route_path( |
|
254 | pull_request_url = route_path( | |
253 | 'pullrequest_show', |
|
255 | 'pullrequest_show', | |
254 | repo_name=pull_request.target_repo.repo_name, |
|
256 | repo_name=pull_request.target_repo.repo_name, | |
255 | pull_request_id=pull_request_id) |
|
257 | pull_request_id=pull_request_id) | |
256 |
|
258 | |||
257 | response = self.app.get(pull_request_url) |
|
259 | response = self.app.get(pull_request_url) | |
258 | target_ref_id = 'invalid-branch' |
|
260 | target_ref_id = 'invalid-branch' | |
259 | merge_resp = MergeResponse( |
|
261 | merge_resp = MergeResponse( | |
260 | True, True, '', MergeFailureReason.MISSING_TARGET_REF, |
|
262 | True, True, '', MergeFailureReason.MISSING_TARGET_REF, | |
261 | metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)}) |
|
263 | metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)}) | |
262 | response.assert_response().element_contains( |
|
264 | response.assert_response().element_contains( | |
263 | 'div[data-role="merge-message"]', merge_resp.merge_status_message) |
|
265 | 'div[data-role="merge-message"]', merge_resp.merge_status_message) | |
264 |
|
266 | |||
265 | def test_comment_and_close_pull_request_custom_message_approved( |
|
267 | def test_comment_and_close_pull_request_custom_message_approved( | |
266 | self, pr_util, csrf_token, xhr_header): |
|
268 | self, pr_util, csrf_token, xhr_header): | |
267 |
|
269 | |||
268 | pull_request = pr_util.create_pull_request(approved=True) |
|
270 | pull_request = pr_util.create_pull_request(approved=True) | |
269 | pull_request_id = pull_request.pull_request_id |
|
271 | pull_request_id = pull_request.pull_request_id | |
270 | author = pull_request.user_id |
|
272 | author = pull_request.user_id | |
271 | repo = pull_request.target_repo.repo_id |
|
273 | repo = pull_request.target_repo.repo_id | |
272 |
|
274 | |||
273 | self.app.post( |
|
275 | self.app.post( | |
274 | route_path('pullrequest_comment_create', |
|
276 | route_path('pullrequest_comment_create', | |
275 | repo_name=pull_request.target_repo.scm_instance().name, |
|
277 | repo_name=pull_request.target_repo.scm_instance().name, | |
276 | pull_request_id=pull_request_id), |
|
278 | pull_request_id=pull_request_id), | |
277 | params={ |
|
279 | params={ | |
278 | 'close_pull_request': '1', |
|
280 | 'close_pull_request': '1', | |
279 | 'text': 'Closing a PR', |
|
281 | 'text': 'Closing a PR', | |
280 | 'csrf_token': csrf_token}, |
|
282 | 'csrf_token': csrf_token}, | |
281 | extra_environ=xhr_header,) |
|
283 | extra_environ=xhr_header,) | |
282 |
|
284 | |||
283 | journal = UserLog.query()\ |
|
285 | journal = UserLog.query()\ | |
284 | .filter(UserLog.user_id == author)\ |
|
286 | .filter(UserLog.user_id == author)\ | |
285 | .filter(UserLog.repository_id == repo) \ |
|
287 | .filter(UserLog.repository_id == repo) \ | |
286 | .order_by(UserLog.user_log_id.asc()) \ |
|
288 | .order_by(UserLog.user_log_id.asc()) \ | |
287 | .all() |
|
289 | .all() | |
288 | assert journal[-1].action == 'repo.pull_request.close' |
|
290 | assert journal[-1].action == 'repo.pull_request.close' | |
289 |
|
291 | |||
290 | pull_request = PullRequest.get(pull_request_id) |
|
292 | pull_request = PullRequest.get(pull_request_id) | |
291 | assert pull_request.is_closed() |
|
293 | assert pull_request.is_closed() | |
292 |
|
294 | |||
293 | status = ChangesetStatusModel().get_status( |
|
295 | status = ChangesetStatusModel().get_status( | |
294 | pull_request.source_repo, pull_request=pull_request) |
|
296 | pull_request.source_repo, pull_request=pull_request) | |
295 | assert status == ChangesetStatus.STATUS_APPROVED |
|
297 | assert status == ChangesetStatus.STATUS_APPROVED | |
296 | comments = ChangesetComment().query() \ |
|
298 | comments = ChangesetComment().query() \ | |
297 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
299 | .filter(ChangesetComment.pull_request == pull_request) \ | |
298 | .order_by(ChangesetComment.comment_id.asc())\ |
|
300 | .order_by(ChangesetComment.comment_id.asc())\ | |
299 | .all() |
|
301 | .all() | |
300 | assert comments[-1].text == 'Closing a PR' |
|
302 | assert comments[-1].text == 'Closing a PR' | |
301 |
|
303 | |||
302 | def test_comment_force_close_pull_request_rejected( |
|
304 | def test_comment_force_close_pull_request_rejected( | |
303 | self, pr_util, csrf_token, xhr_header): |
|
305 | self, pr_util, csrf_token, xhr_header): | |
304 | pull_request = pr_util.create_pull_request() |
|
306 | pull_request = pr_util.create_pull_request() | |
305 | pull_request_id = pull_request.pull_request_id |
|
307 | pull_request_id = pull_request.pull_request_id | |
306 | PullRequestModel().update_reviewers( |
|
308 | PullRequestModel().update_reviewers( | |
307 | pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])], |
|
309 | pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])], | |
308 | pull_request.author) |
|
310 | pull_request.author) | |
309 | author = pull_request.user_id |
|
311 | author = pull_request.user_id | |
310 | repo = pull_request.target_repo.repo_id |
|
312 | repo = pull_request.target_repo.repo_id | |
311 |
|
313 | |||
312 | self.app.post( |
|
314 | self.app.post( | |
313 | route_path('pullrequest_comment_create', |
|
315 | route_path('pullrequest_comment_create', | |
314 | repo_name=pull_request.target_repo.scm_instance().name, |
|
316 | repo_name=pull_request.target_repo.scm_instance().name, | |
315 | pull_request_id=pull_request_id), |
|
317 | pull_request_id=pull_request_id), | |
316 | params={ |
|
318 | params={ | |
317 | 'close_pull_request': '1', |
|
319 | 'close_pull_request': '1', | |
318 | 'csrf_token': csrf_token}, |
|
320 | 'csrf_token': csrf_token}, | |
319 | extra_environ=xhr_header) |
|
321 | extra_environ=xhr_header) | |
320 |
|
322 | |||
321 | pull_request = PullRequest.get(pull_request_id) |
|
323 | pull_request = PullRequest.get(pull_request_id) | |
322 |
|
324 | |||
323 | journal = UserLog.query()\ |
|
325 | journal = UserLog.query()\ | |
324 | .filter(UserLog.user_id == author, UserLog.repository_id == repo) \ |
|
326 | .filter(UserLog.user_id == author, UserLog.repository_id == repo) \ | |
325 | .order_by(UserLog.user_log_id.asc()) \ |
|
327 | .order_by(UserLog.user_log_id.asc()) \ | |
326 | .all() |
|
328 | .all() | |
327 | assert journal[-1].action == 'repo.pull_request.close' |
|
329 | assert journal[-1].action == 'repo.pull_request.close' | |
328 |
|
330 | |||
329 | # check only the latest status, not the review status |
|
331 | # check only the latest status, not the review status | |
330 | status = ChangesetStatusModel().get_status( |
|
332 | status = ChangesetStatusModel().get_status( | |
331 | pull_request.source_repo, pull_request=pull_request) |
|
333 | pull_request.source_repo, pull_request=pull_request) | |
332 | assert status == ChangesetStatus.STATUS_REJECTED |
|
334 | assert status == ChangesetStatus.STATUS_REJECTED | |
333 |
|
335 | |||
334 | def test_comment_and_close_pull_request( |
|
336 | def test_comment_and_close_pull_request( | |
335 | self, pr_util, csrf_token, xhr_header): |
|
337 | self, pr_util, csrf_token, xhr_header): | |
336 | pull_request = pr_util.create_pull_request() |
|
338 | pull_request = pr_util.create_pull_request() | |
337 | pull_request_id = pull_request.pull_request_id |
|
339 | pull_request_id = pull_request.pull_request_id | |
338 |
|
340 | |||
339 | response = self.app.post( |
|
341 | response = self.app.post( | |
340 | route_path('pullrequest_comment_create', |
|
342 | route_path('pullrequest_comment_create', | |
341 | repo_name=pull_request.target_repo.scm_instance().name, |
|
343 | repo_name=pull_request.target_repo.scm_instance().name, | |
342 | pull_request_id=pull_request.pull_request_id), |
|
344 | pull_request_id=pull_request.pull_request_id), | |
343 | params={ |
|
345 | params={ | |
344 | 'close_pull_request': 'true', |
|
346 | 'close_pull_request': 'true', | |
345 | 'csrf_token': csrf_token}, |
|
347 | 'csrf_token': csrf_token}, | |
346 | extra_environ=xhr_header) |
|
348 | extra_environ=xhr_header) | |
347 |
|
349 | |||
348 | assert response.json |
|
350 | assert response.json | |
349 |
|
351 | |||
350 | pull_request = PullRequest.get(pull_request_id) |
|
352 | pull_request = PullRequest.get(pull_request_id) | |
351 | assert pull_request.is_closed() |
|
353 | assert pull_request.is_closed() | |
352 |
|
354 | |||
353 | # check only the latest status, not the review status |
|
355 | # check only the latest status, not the review status | |
354 | status = ChangesetStatusModel().get_status( |
|
356 | status = ChangesetStatusModel().get_status( | |
355 | pull_request.source_repo, pull_request=pull_request) |
|
357 | pull_request.source_repo, pull_request=pull_request) | |
356 | assert status == ChangesetStatus.STATUS_REJECTED |
|
358 | assert status == ChangesetStatus.STATUS_REJECTED | |
357 |
|
359 | |||
|
360 | def test_comment_and_close_pull_request_try_edit_comment( | |||
|
361 | self, pr_util, csrf_token, xhr_header | |||
|
362 | ): | |||
|
363 | pull_request = pr_util.create_pull_request() | |||
|
364 | pull_request_id = pull_request.pull_request_id | |||
|
365 | ||||
|
366 | response = self.app.post( | |||
|
367 | route_path( | |||
|
368 | 'pullrequest_comment_create', | |||
|
369 | repo_name=pull_request.target_repo.scm_instance().name, | |||
|
370 | pull_request_id=pull_request.pull_request_id, | |||
|
371 | ), | |||
|
372 | params={ | |||
|
373 | 'close_pull_request': 'true', | |||
|
374 | 'csrf_token': csrf_token, | |||
|
375 | }, | |||
|
376 | extra_environ=xhr_header) | |||
|
377 | ||||
|
378 | assert response.json | |||
|
379 | ||||
|
380 | pull_request = PullRequest.get(pull_request_id) | |||
|
381 | assert pull_request.is_closed() | |||
|
382 | ||||
|
383 | # check only the latest status, not the review status | |||
|
384 | status = ChangesetStatusModel().get_status( | |||
|
385 | pull_request.source_repo, pull_request=pull_request) | |||
|
386 | assert status == ChangesetStatus.STATUS_REJECTED | |||
|
387 | ||||
|
388 | comment_id = response.json.get('comment_id', None) | |||
|
389 | test_text = 'test' | |||
|
390 | response = self.app.post( | |||
|
391 | route_path( | |||
|
392 | 'pullrequest_comment_edit', | |||
|
393 | repo_name=pull_request.target_repo.scm_instance().name, | |||
|
394 | pull_request_id=pull_request.pull_request_id, | |||
|
395 | comment_id=comment_id, | |||
|
396 | ), | |||
|
397 | extra_environ=xhr_header, | |||
|
398 | params={ | |||
|
399 | 'csrf_token': csrf_token, | |||
|
400 | 'text': test_text, | |||
|
401 | }, | |||
|
402 | status=403, | |||
|
403 | ) | |||
|
404 | assert response.status_int == 403 | |||
|
405 | ||||
|
406 | def test_comment_and_comment_edit( | |||
|
407 | self, pr_util, csrf_token, xhr_header | |||
|
408 | ): | |||
|
409 | pull_request = pr_util.create_pull_request() | |||
|
410 | response = self.app.post( | |||
|
411 | route_path( | |||
|
412 | 'pullrequest_comment_create', | |||
|
413 | repo_name=pull_request.target_repo.scm_instance().name, | |||
|
414 | pull_request_id=pull_request.pull_request_id), | |||
|
415 | params={ | |||
|
416 | 'csrf_token': csrf_token, | |||
|
417 | 'text': 'init', | |||
|
418 | }, | |||
|
419 | extra_environ=xhr_header, | |||
|
420 | ) | |||
|
421 | assert response.json | |||
|
422 | ||||
|
423 | comment_id = response.json.get('comment_id', None) | |||
|
424 | assert comment_id | |||
|
425 | test_text = 'test' | |||
|
426 | self.app.post( | |||
|
427 | route_path( | |||
|
428 | 'pullrequest_comment_edit', | |||
|
429 | repo_name=pull_request.target_repo.scm_instance().name, | |||
|
430 | pull_request_id=pull_request.pull_request_id, | |||
|
431 | comment_id=comment_id, | |||
|
432 | ), | |||
|
433 | extra_environ=xhr_header, | |||
|
434 | params={ | |||
|
435 | 'csrf_token': csrf_token, | |||
|
436 | 'text': test_text, | |||
|
437 | 'version': '0', | |||
|
438 | }, | |||
|
439 | ||||
|
440 | ) | |||
|
441 | text_form_db = ChangesetComment.query().filter( | |||
|
442 | ChangesetComment.comment_id == comment_id).first().text | |||
|
443 | assert test_text == text_form_db | |||
|
444 | ||||
|
445 | def test_comment_and_comment_edit( | |||
|
446 | self, pr_util, csrf_token, xhr_header | |||
|
447 | ): | |||
|
448 | pull_request = pr_util.create_pull_request() | |||
|
449 | response = self.app.post( | |||
|
450 | route_path( | |||
|
451 | 'pullrequest_comment_create', | |||
|
452 | repo_name=pull_request.target_repo.scm_instance().name, | |||
|
453 | pull_request_id=pull_request.pull_request_id), | |||
|
454 | params={ | |||
|
455 | 'csrf_token': csrf_token, | |||
|
456 | 'text': 'init', | |||
|
457 | }, | |||
|
458 | extra_environ=xhr_header, | |||
|
459 | ) | |||
|
460 | assert response.json | |||
|
461 | ||||
|
462 | comment_id = response.json.get('comment_id', None) | |||
|
463 | assert comment_id | |||
|
464 | test_text = 'init' | |||
|
465 | response = self.app.post( | |||
|
466 | route_path( | |||
|
467 | 'pullrequest_comment_edit', | |||
|
468 | repo_name=pull_request.target_repo.scm_instance().name, | |||
|
469 | pull_request_id=pull_request.pull_request_id, | |||
|
470 | comment_id=comment_id, | |||
|
471 | ), | |||
|
472 | extra_environ=xhr_header, | |||
|
473 | params={ | |||
|
474 | 'csrf_token': csrf_token, | |||
|
475 | 'text': test_text, | |||
|
476 | 'version': '0', | |||
|
477 | }, | |||
|
478 | status=404, | |||
|
479 | ||||
|
480 | ) | |||
|
481 | assert response.status_int == 404 | |||
|
482 | ||||
|
483 | def test_comment_and_try_edit_already_edited( | |||
|
484 | self, pr_util, csrf_token, xhr_header | |||
|
485 | ): | |||
|
486 | pull_request = pr_util.create_pull_request() | |||
|
487 | response = self.app.post( | |||
|
488 | route_path( | |||
|
489 | 'pullrequest_comment_create', | |||
|
490 | repo_name=pull_request.target_repo.scm_instance().name, | |||
|
491 | pull_request_id=pull_request.pull_request_id), | |||
|
492 | params={ | |||
|
493 | 'csrf_token': csrf_token, | |||
|
494 | 'text': 'init', | |||
|
495 | }, | |||
|
496 | extra_environ=xhr_header, | |||
|
497 | ) | |||
|
498 | assert response.json | |||
|
499 | comment_id = response.json.get('comment_id', None) | |||
|
500 | assert comment_id | |||
|
501 | test_text = 'test' | |||
|
502 | response = self.app.post( | |||
|
503 | route_path( | |||
|
504 | 'pullrequest_comment_edit', | |||
|
505 | repo_name=pull_request.target_repo.scm_instance().name, | |||
|
506 | pull_request_id=pull_request.pull_request_id, | |||
|
507 | comment_id=comment_id, | |||
|
508 | ), | |||
|
509 | extra_environ=xhr_header, | |||
|
510 | params={ | |||
|
511 | 'csrf_token': csrf_token, | |||
|
512 | 'text': test_text, | |||
|
513 | 'version': '0', | |||
|
514 | }, | |||
|
515 | ||||
|
516 | ) | |||
|
517 | test_text_v2 = 'test_v2' | |||
|
518 | response = self.app.post( | |||
|
519 | route_path( | |||
|
520 | 'pullrequest_comment_edit', | |||
|
521 | repo_name=pull_request.target_repo.scm_instance().name, | |||
|
522 | pull_request_id=pull_request.pull_request_id, | |||
|
523 | comment_id=comment_id, | |||
|
524 | ), | |||
|
525 | extra_environ=xhr_header, | |||
|
526 | params={ | |||
|
527 | 'csrf_token': csrf_token, | |||
|
528 | 'text': test_text_v2, | |||
|
529 | 'version': '0', | |||
|
530 | }, | |||
|
531 | status=404, | |||
|
532 | ) | |||
|
533 | assert response.status_int == 404 | |||
|
534 | ||||
|
535 | text_form_db = ChangesetComment.query().filter( | |||
|
536 | ChangesetComment.comment_id == comment_id).first().text | |||
|
537 | ||||
|
538 | assert test_text == text_form_db | |||
|
539 | assert test_text_v2 != text_form_db | |||
|
540 | ||||
|
541 | def test_comment_and_comment_edit_permissions_forbidden( | |||
|
542 | self, autologin_regular_user, user_regular, user_admin, pr_util, | |||
|
543 | csrf_token, xhr_header): | |||
|
544 | pull_request = pr_util.create_pull_request( | |||
|
545 | author=user_admin.username, enable_notifications=False) | |||
|
546 | comment = CommentsModel().create( | |||
|
547 | text='test', | |||
|
548 | repo=pull_request.target_repo.scm_instance().name, | |||
|
549 | user=user_admin, | |||
|
550 | pull_request=pull_request, | |||
|
551 | ) | |||
|
552 | response = self.app.post( | |||
|
553 | route_path( | |||
|
554 | 'pullrequest_comment_edit', | |||
|
555 | repo_name=pull_request.target_repo.scm_instance().name, | |||
|
556 | pull_request_id=pull_request.pull_request_id, | |||
|
557 | comment_id=comment.comment_id, | |||
|
558 | ), | |||
|
559 | extra_environ=xhr_header, | |||
|
560 | params={ | |||
|
561 | 'csrf_token': csrf_token, | |||
|
562 | 'text': 'test_text', | |||
|
563 | }, | |||
|
564 | status=403, | |||
|
565 | ) | |||
|
566 | assert response.status_int == 403 | |||
|
567 | ||||
358 | def test_create_pull_request(self, backend, csrf_token): |
|
568 | def test_create_pull_request(self, backend, csrf_token): | |
359 | commits = [ |
|
569 | commits = [ | |
360 | {'message': 'ancestor'}, |
|
570 | {'message': 'ancestor'}, | |
361 | {'message': 'change'}, |
|
571 | {'message': 'change'}, | |
362 | {'message': 'change2'}, |
|
572 | {'message': 'change2'}, | |
363 | ] |
|
573 | ] | |
364 | commit_ids = backend.create_master_repo(commits) |
|
574 | commit_ids = backend.create_master_repo(commits) | |
365 | target = backend.create_repo(heads=['ancestor']) |
|
575 | target = backend.create_repo(heads=['ancestor']) | |
366 | source = backend.create_repo(heads=['change2']) |
|
576 | source = backend.create_repo(heads=['change2']) | |
367 |
|
577 | |||
368 | response = self.app.post( |
|
578 | response = self.app.post( | |
369 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
579 | route_path('pullrequest_create', repo_name=source.repo_name), | |
370 | [ |
|
580 | [ | |
371 | ('source_repo', source.repo_name), |
|
581 | ('source_repo', source.repo_name), | |
372 | ('source_ref', 'branch:default:' + commit_ids['change2']), |
|
582 | ('source_ref', 'branch:default:' + commit_ids['change2']), | |
373 | ('target_repo', target.repo_name), |
|
583 | ('target_repo', target.repo_name), | |
374 | ('target_ref', 'branch:default:' + commit_ids['ancestor']), |
|
584 | ('target_ref', 'branch:default:' + commit_ids['ancestor']), | |
375 | ('common_ancestor', commit_ids['ancestor']), |
|
585 | ('common_ancestor', commit_ids['ancestor']), | |
376 | ('pullrequest_title', 'Title'), |
|
586 | ('pullrequest_title', 'Title'), | |
377 | ('pullrequest_desc', 'Description'), |
|
587 | ('pullrequest_desc', 'Description'), | |
378 | ('description_renderer', 'markdown'), |
|
588 | ('description_renderer', 'markdown'), | |
379 | ('__start__', 'review_members:sequence'), |
|
589 | ('__start__', 'review_members:sequence'), | |
380 | ('__start__', 'reviewer:mapping'), |
|
590 | ('__start__', 'reviewer:mapping'), | |
381 | ('user_id', '1'), |
|
591 | ('user_id', '1'), | |
382 | ('__start__', 'reasons:sequence'), |
|
592 | ('__start__', 'reasons:sequence'), | |
383 | ('reason', 'Some reason'), |
|
593 | ('reason', 'Some reason'), | |
384 | ('__end__', 'reasons:sequence'), |
|
594 | ('__end__', 'reasons:sequence'), | |
385 | ('__start__', 'rules:sequence'), |
|
595 | ('__start__', 'rules:sequence'), | |
386 | ('__end__', 'rules:sequence'), |
|
596 | ('__end__', 'rules:sequence'), | |
387 | ('mandatory', 'False'), |
|
597 | ('mandatory', 'False'), | |
388 | ('__end__', 'reviewer:mapping'), |
|
598 | ('__end__', 'reviewer:mapping'), | |
389 | ('__end__', 'review_members:sequence'), |
|
599 | ('__end__', 'review_members:sequence'), | |
390 | ('__start__', 'revisions:sequence'), |
|
600 | ('__start__', 'revisions:sequence'), | |
391 | ('revisions', commit_ids['change']), |
|
601 | ('revisions', commit_ids['change']), | |
392 | ('revisions', commit_ids['change2']), |
|
602 | ('revisions', commit_ids['change2']), | |
393 | ('__end__', 'revisions:sequence'), |
|
603 | ('__end__', 'revisions:sequence'), | |
394 | ('user', ''), |
|
604 | ('user', ''), | |
395 | ('csrf_token', csrf_token), |
|
605 | ('csrf_token', csrf_token), | |
396 | ], |
|
606 | ], | |
397 | status=302) |
|
607 | status=302) | |
398 |
|
608 | |||
399 | location = response.headers['Location'] |
|
609 | location = response.headers['Location'] | |
400 | pull_request_id = location.rsplit('/', 1)[1] |
|
610 | pull_request_id = location.rsplit('/', 1)[1] | |
401 | assert pull_request_id != 'new' |
|
611 | assert pull_request_id != 'new' | |
402 | pull_request = PullRequest.get(int(pull_request_id)) |
|
612 | pull_request = PullRequest.get(int(pull_request_id)) | |
403 |
|
613 | |||
404 | # check that we have now both revisions |
|
614 | # check that we have now both revisions | |
405 | assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']] |
|
615 | assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']] | |
406 | assert pull_request.source_ref == 'branch:default:' + commit_ids['change2'] |
|
616 | assert pull_request.source_ref == 'branch:default:' + commit_ids['change2'] | |
407 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] |
|
617 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] | |
408 | assert pull_request.target_ref == expected_target_ref |
|
618 | assert pull_request.target_ref == expected_target_ref | |
409 |
|
619 | |||
410 | def test_reviewer_notifications(self, backend, csrf_token): |
|
620 | def test_reviewer_notifications(self, backend, csrf_token): | |
411 | # We have to use the app.post for this test so it will create the |
|
621 | # We have to use the app.post for this test so it will create the | |
412 | # notifications properly with the new PR |
|
622 | # notifications properly with the new PR | |
413 | commits = [ |
|
623 | commits = [ | |
414 | {'message': 'ancestor', |
|
624 | {'message': 'ancestor', | |
415 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, |
|
625 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, | |
416 | {'message': 'change', |
|
626 | {'message': 'change', | |
417 | 'added': [FileNode('file_a', content='content_of_change')]}, |
|
627 | 'added': [FileNode('file_a', content='content_of_change')]}, | |
418 | {'message': 'change-child'}, |
|
628 | {'message': 'change-child'}, | |
419 | {'message': 'ancestor-child', 'parents': ['ancestor'], |
|
629 | {'message': 'ancestor-child', 'parents': ['ancestor'], | |
420 | 'added': [ |
|
630 | 'added': [ | |
421 | FileNode('file_B', content='content_of_ancestor_child')]}, |
|
631 | FileNode('file_B', content='content_of_ancestor_child')]}, | |
422 | {'message': 'ancestor-child-2'}, |
|
632 | {'message': 'ancestor-child-2'}, | |
423 | ] |
|
633 | ] | |
424 | commit_ids = backend.create_master_repo(commits) |
|
634 | commit_ids = backend.create_master_repo(commits) | |
425 | target = backend.create_repo(heads=['ancestor-child']) |
|
635 | target = backend.create_repo(heads=['ancestor-child']) | |
426 | source = backend.create_repo(heads=['change']) |
|
636 | source = backend.create_repo(heads=['change']) | |
427 |
|
637 | |||
428 | response = self.app.post( |
|
638 | response = self.app.post( | |
429 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
639 | route_path('pullrequest_create', repo_name=source.repo_name), | |
430 | [ |
|
640 | [ | |
431 | ('source_repo', source.repo_name), |
|
641 | ('source_repo', source.repo_name), | |
432 | ('source_ref', 'branch:default:' + commit_ids['change']), |
|
642 | ('source_ref', 'branch:default:' + commit_ids['change']), | |
433 | ('target_repo', target.repo_name), |
|
643 | ('target_repo', target.repo_name), | |
434 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), |
|
644 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), | |
435 | ('common_ancestor', commit_ids['ancestor']), |
|
645 | ('common_ancestor', commit_ids['ancestor']), | |
436 | ('pullrequest_title', 'Title'), |
|
646 | ('pullrequest_title', 'Title'), | |
437 | ('pullrequest_desc', 'Description'), |
|
647 | ('pullrequest_desc', 'Description'), | |
438 | ('description_renderer', 'markdown'), |
|
648 | ('description_renderer', 'markdown'), | |
439 | ('__start__', 'review_members:sequence'), |
|
649 | ('__start__', 'review_members:sequence'), | |
440 | ('__start__', 'reviewer:mapping'), |
|
650 | ('__start__', 'reviewer:mapping'), | |
441 | ('user_id', '2'), |
|
651 | ('user_id', '2'), | |
442 | ('__start__', 'reasons:sequence'), |
|
652 | ('__start__', 'reasons:sequence'), | |
443 | ('reason', 'Some reason'), |
|
653 | ('reason', 'Some reason'), | |
444 | ('__end__', 'reasons:sequence'), |
|
654 | ('__end__', 'reasons:sequence'), | |
445 | ('__start__', 'rules:sequence'), |
|
655 | ('__start__', 'rules:sequence'), | |
446 | ('__end__', 'rules:sequence'), |
|
656 | ('__end__', 'rules:sequence'), | |
447 | ('mandatory', 'False'), |
|
657 | ('mandatory', 'False'), | |
448 | ('__end__', 'reviewer:mapping'), |
|
658 | ('__end__', 'reviewer:mapping'), | |
449 | ('__end__', 'review_members:sequence'), |
|
659 | ('__end__', 'review_members:sequence'), | |
450 | ('__start__', 'revisions:sequence'), |
|
660 | ('__start__', 'revisions:sequence'), | |
451 | ('revisions', commit_ids['change']), |
|
661 | ('revisions', commit_ids['change']), | |
452 | ('__end__', 'revisions:sequence'), |
|
662 | ('__end__', 'revisions:sequence'), | |
453 | ('user', ''), |
|
663 | ('user', ''), | |
454 | ('csrf_token', csrf_token), |
|
664 | ('csrf_token', csrf_token), | |
455 | ], |
|
665 | ], | |
456 | status=302) |
|
666 | status=302) | |
457 |
|
667 | |||
458 | location = response.headers['Location'] |
|
668 | location = response.headers['Location'] | |
459 |
|
669 | |||
460 | pull_request_id = location.rsplit('/', 1)[1] |
|
670 | pull_request_id = location.rsplit('/', 1)[1] | |
461 | assert pull_request_id != 'new' |
|
671 | assert pull_request_id != 'new' | |
462 | pull_request = PullRequest.get(int(pull_request_id)) |
|
672 | pull_request = PullRequest.get(int(pull_request_id)) | |
463 |
|
673 | |||
464 | # Check that a notification was made |
|
674 | # Check that a notification was made | |
465 | notifications = Notification.query()\ |
|
675 | notifications = Notification.query()\ | |
466 | .filter(Notification.created_by == pull_request.author.user_id, |
|
676 | .filter(Notification.created_by == pull_request.author.user_id, | |
467 | Notification.type_ == Notification.TYPE_PULL_REQUEST, |
|
677 | Notification.type_ == Notification.TYPE_PULL_REQUEST, | |
468 | Notification.subject.contains( |
|
678 | Notification.subject.contains( | |
469 | "requested a pull request review. !%s" % pull_request_id)) |
|
679 | "requested a pull request review. !%s" % pull_request_id)) | |
470 | assert len(notifications.all()) == 1 |
|
680 | assert len(notifications.all()) == 1 | |
471 |
|
681 | |||
472 | # Change reviewers and check that a notification was made |
|
682 | # Change reviewers and check that a notification was made | |
473 | PullRequestModel().update_reviewers( |
|
683 | PullRequestModel().update_reviewers( | |
474 | pull_request.pull_request_id, [(1, [], False, [])], |
|
684 | pull_request.pull_request_id, [(1, [], False, [])], | |
475 | pull_request.author) |
|
685 | pull_request.author) | |
476 | assert len(notifications.all()) == 2 |
|
686 | assert len(notifications.all()) == 2 | |
477 |
|
687 | |||
478 | def test_create_pull_request_stores_ancestor_commit_id(self, backend, |
|
688 | def test_create_pull_request_stores_ancestor_commit_id(self, backend, | |
479 | csrf_token): |
|
689 | csrf_token): | |
480 | commits = [ |
|
690 | commits = [ | |
481 | {'message': 'ancestor', |
|
691 | {'message': 'ancestor', | |
482 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, |
|
692 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, | |
483 | {'message': 'change', |
|
693 | {'message': 'change', | |
484 | 'added': [FileNode('file_a', content='content_of_change')]}, |
|
694 | 'added': [FileNode('file_a', content='content_of_change')]}, | |
485 | {'message': 'change-child'}, |
|
695 | {'message': 'change-child'}, | |
486 | {'message': 'ancestor-child', 'parents': ['ancestor'], |
|
696 | {'message': 'ancestor-child', 'parents': ['ancestor'], | |
487 | 'added': [ |
|
697 | 'added': [ | |
488 | FileNode('file_B', content='content_of_ancestor_child')]}, |
|
698 | FileNode('file_B', content='content_of_ancestor_child')]}, | |
489 | {'message': 'ancestor-child-2'}, |
|
699 | {'message': 'ancestor-child-2'}, | |
490 | ] |
|
700 | ] | |
491 | commit_ids = backend.create_master_repo(commits) |
|
701 | commit_ids = backend.create_master_repo(commits) | |
492 | target = backend.create_repo(heads=['ancestor-child']) |
|
702 | target = backend.create_repo(heads=['ancestor-child']) | |
493 | source = backend.create_repo(heads=['change']) |
|
703 | source = backend.create_repo(heads=['change']) | |
494 |
|
704 | |||
495 | response = self.app.post( |
|
705 | response = self.app.post( | |
496 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
706 | route_path('pullrequest_create', repo_name=source.repo_name), | |
497 | [ |
|
707 | [ | |
498 | ('source_repo', source.repo_name), |
|
708 | ('source_repo', source.repo_name), | |
499 | ('source_ref', 'branch:default:' + commit_ids['change']), |
|
709 | ('source_ref', 'branch:default:' + commit_ids['change']), | |
500 | ('target_repo', target.repo_name), |
|
710 | ('target_repo', target.repo_name), | |
501 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), |
|
711 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), | |
502 | ('common_ancestor', commit_ids['ancestor']), |
|
712 | ('common_ancestor', commit_ids['ancestor']), | |
503 | ('pullrequest_title', 'Title'), |
|
713 | ('pullrequest_title', 'Title'), | |
504 | ('pullrequest_desc', 'Description'), |
|
714 | ('pullrequest_desc', 'Description'), | |
505 | ('description_renderer', 'markdown'), |
|
715 | ('description_renderer', 'markdown'), | |
506 | ('__start__', 'review_members:sequence'), |
|
716 | ('__start__', 'review_members:sequence'), | |
507 | ('__start__', 'reviewer:mapping'), |
|
717 | ('__start__', 'reviewer:mapping'), | |
508 | ('user_id', '1'), |
|
718 | ('user_id', '1'), | |
509 | ('__start__', 'reasons:sequence'), |
|
719 | ('__start__', 'reasons:sequence'), | |
510 | ('reason', 'Some reason'), |
|
720 | ('reason', 'Some reason'), | |
511 | ('__end__', 'reasons:sequence'), |
|
721 | ('__end__', 'reasons:sequence'), | |
512 | ('__start__', 'rules:sequence'), |
|
722 | ('__start__', 'rules:sequence'), | |
513 | ('__end__', 'rules:sequence'), |
|
723 | ('__end__', 'rules:sequence'), | |
514 | ('mandatory', 'False'), |
|
724 | ('mandatory', 'False'), | |
515 | ('__end__', 'reviewer:mapping'), |
|
725 | ('__end__', 'reviewer:mapping'), | |
516 | ('__end__', 'review_members:sequence'), |
|
726 | ('__end__', 'review_members:sequence'), | |
517 | ('__start__', 'revisions:sequence'), |
|
727 | ('__start__', 'revisions:sequence'), | |
518 | ('revisions', commit_ids['change']), |
|
728 | ('revisions', commit_ids['change']), | |
519 | ('__end__', 'revisions:sequence'), |
|
729 | ('__end__', 'revisions:sequence'), | |
520 | ('user', ''), |
|
730 | ('user', ''), | |
521 | ('csrf_token', csrf_token), |
|
731 | ('csrf_token', csrf_token), | |
522 | ], |
|
732 | ], | |
523 | status=302) |
|
733 | status=302) | |
524 |
|
734 | |||
525 | location = response.headers['Location'] |
|
735 | location = response.headers['Location'] | |
526 |
|
736 | |||
527 | pull_request_id = location.rsplit('/', 1)[1] |
|
737 | pull_request_id = location.rsplit('/', 1)[1] | |
528 | assert pull_request_id != 'new' |
|
738 | assert pull_request_id != 'new' | |
529 | pull_request = PullRequest.get(int(pull_request_id)) |
|
739 | pull_request = PullRequest.get(int(pull_request_id)) | |
530 |
|
740 | |||
531 | # target_ref has to point to the ancestor's commit_id in order to |
|
741 | # target_ref has to point to the ancestor's commit_id in order to | |
532 | # show the correct diff |
|
742 | # show the correct diff | |
533 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] |
|
743 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] | |
534 | assert pull_request.target_ref == expected_target_ref |
|
744 | assert pull_request.target_ref == expected_target_ref | |
535 |
|
745 | |||
536 | # Check generated diff contents |
|
746 | # Check generated diff contents | |
537 | response = response.follow() |
|
747 | response = response.follow() | |
538 | response.mustcontain(no=['content_of_ancestor']) |
|
748 | response.mustcontain(no=['content_of_ancestor']) | |
539 | response.mustcontain(no=['content_of_ancestor-child']) |
|
749 | response.mustcontain(no=['content_of_ancestor-child']) | |
540 | response.mustcontain('content_of_change') |
|
750 | response.mustcontain('content_of_change') | |
541 |
|
751 | |||
542 | def test_merge_pull_request_enabled(self, pr_util, csrf_token): |
|
752 | def test_merge_pull_request_enabled(self, pr_util, csrf_token): | |
543 | # Clear any previous calls to rcextensions |
|
753 | # Clear any previous calls to rcextensions | |
544 | rhodecode.EXTENSIONS.calls.clear() |
|
754 | rhodecode.EXTENSIONS.calls.clear() | |
545 |
|
755 | |||
546 | pull_request = pr_util.create_pull_request( |
|
756 | pull_request = pr_util.create_pull_request( | |
547 | approved=True, mergeable=True) |
|
757 | approved=True, mergeable=True) | |
548 | pull_request_id = pull_request.pull_request_id |
|
758 | pull_request_id = pull_request.pull_request_id | |
549 | repo_name = pull_request.target_repo.scm_instance().name, |
|
759 | repo_name = pull_request.target_repo.scm_instance().name, | |
550 |
|
760 | |||
551 | url = route_path('pullrequest_merge', |
|
761 | url = route_path('pullrequest_merge', | |
552 | repo_name=str(repo_name[0]), |
|
762 | repo_name=str(repo_name[0]), | |
553 | pull_request_id=pull_request_id) |
|
763 | pull_request_id=pull_request_id) | |
554 | response = self.app.post(url, params={'csrf_token': csrf_token}).follow() |
|
764 | response = self.app.post(url, params={'csrf_token': csrf_token}).follow() | |
555 |
|
765 | |||
556 | pull_request = PullRequest.get(pull_request_id) |
|
766 | pull_request = PullRequest.get(pull_request_id) | |
557 |
|
767 | |||
558 | assert response.status_int == 200 |
|
768 | assert response.status_int == 200 | |
559 | assert pull_request.is_closed() |
|
769 | assert pull_request.is_closed() | |
560 | assert_pull_request_status( |
|
770 | assert_pull_request_status( | |
561 | pull_request, ChangesetStatus.STATUS_APPROVED) |
|
771 | pull_request, ChangesetStatus.STATUS_APPROVED) | |
562 |
|
772 | |||
563 | # Check the relevant log entries were added |
|
773 | # Check the relevant log entries were added | |
564 | user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3) |
|
774 | user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3) | |
565 | actions = [log.action for log in user_logs] |
|
775 | actions = [log.action for log in user_logs] | |
566 | pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request) |
|
776 | pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request) | |
567 | expected_actions = [ |
|
777 | expected_actions = [ | |
568 | u'repo.pull_request.close', |
|
778 | u'repo.pull_request.close', | |
569 | u'repo.pull_request.merge', |
|
779 | u'repo.pull_request.merge', | |
570 | u'repo.pull_request.comment.create' |
|
780 | u'repo.pull_request.comment.create' | |
571 | ] |
|
781 | ] | |
572 | assert actions == expected_actions |
|
782 | assert actions == expected_actions | |
573 |
|
783 | |||
574 | user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4) |
|
784 | user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4) | |
575 | actions = [log for log in user_logs] |
|
785 | actions = [log for log in user_logs] | |
576 | assert actions[-1].action == 'user.push' |
|
786 | assert actions[-1].action == 'user.push' | |
577 | assert actions[-1].action_data['commit_ids'] == pr_commit_ids |
|
787 | assert actions[-1].action_data['commit_ids'] == pr_commit_ids | |
578 |
|
788 | |||
579 | # Check post_push rcextension was really executed |
|
789 | # Check post_push rcextension was really executed | |
580 | push_calls = rhodecode.EXTENSIONS.calls['_push_hook'] |
|
790 | push_calls = rhodecode.EXTENSIONS.calls['_push_hook'] | |
581 | assert len(push_calls) == 1 |
|
791 | assert len(push_calls) == 1 | |
582 | unused_last_call_args, last_call_kwargs = push_calls[0] |
|
792 | unused_last_call_args, last_call_kwargs = push_calls[0] | |
583 | assert last_call_kwargs['action'] == 'push' |
|
793 | assert last_call_kwargs['action'] == 'push' | |
584 | assert last_call_kwargs['commit_ids'] == pr_commit_ids |
|
794 | assert last_call_kwargs['commit_ids'] == pr_commit_ids | |
585 |
|
795 | |||
586 | def test_merge_pull_request_disabled(self, pr_util, csrf_token): |
|
796 | def test_merge_pull_request_disabled(self, pr_util, csrf_token): | |
587 | pull_request = pr_util.create_pull_request(mergeable=False) |
|
797 | pull_request = pr_util.create_pull_request(mergeable=False) | |
588 | pull_request_id = pull_request.pull_request_id |
|
798 | pull_request_id = pull_request.pull_request_id | |
589 | pull_request = PullRequest.get(pull_request_id) |
|
799 | pull_request = PullRequest.get(pull_request_id) | |
590 |
|
800 | |||
591 | response = self.app.post( |
|
801 | response = self.app.post( | |
592 | route_path('pullrequest_merge', |
|
802 | route_path('pullrequest_merge', | |
593 | repo_name=pull_request.target_repo.scm_instance().name, |
|
803 | repo_name=pull_request.target_repo.scm_instance().name, | |
594 | pull_request_id=pull_request.pull_request_id), |
|
804 | pull_request_id=pull_request.pull_request_id), | |
595 | params={'csrf_token': csrf_token}).follow() |
|
805 | params={'csrf_token': csrf_token}).follow() | |
596 |
|
806 | |||
597 | assert response.status_int == 200 |
|
807 | assert response.status_int == 200 | |
598 | response.mustcontain( |
|
808 | response.mustcontain( | |
599 | 'Merge is not currently possible because of below failed checks.') |
|
809 | 'Merge is not currently possible because of below failed checks.') | |
600 | response.mustcontain('Server-side pull request merging is disabled.') |
|
810 | response.mustcontain('Server-side pull request merging is disabled.') | |
601 |
|
811 | |||
602 | @pytest.mark.skip_backends('svn') |
|
812 | @pytest.mark.skip_backends('svn') | |
603 | def test_merge_pull_request_not_approved(self, pr_util, csrf_token): |
|
813 | def test_merge_pull_request_not_approved(self, pr_util, csrf_token): | |
604 | pull_request = pr_util.create_pull_request(mergeable=True) |
|
814 | pull_request = pr_util.create_pull_request(mergeable=True) | |
605 | pull_request_id = pull_request.pull_request_id |
|
815 | pull_request_id = pull_request.pull_request_id | |
606 | repo_name = pull_request.target_repo.scm_instance().name |
|
816 | repo_name = pull_request.target_repo.scm_instance().name | |
607 |
|
817 | |||
608 | response = self.app.post( |
|
818 | response = self.app.post( | |
609 | route_path('pullrequest_merge', |
|
819 | route_path('pullrequest_merge', | |
610 | repo_name=repo_name, pull_request_id=pull_request_id), |
|
820 | repo_name=repo_name, pull_request_id=pull_request_id), | |
611 | params={'csrf_token': csrf_token}).follow() |
|
821 | params={'csrf_token': csrf_token}).follow() | |
612 |
|
822 | |||
613 | assert response.status_int == 200 |
|
823 | assert response.status_int == 200 | |
614 |
|
824 | |||
615 | response.mustcontain( |
|
825 | response.mustcontain( | |
616 | 'Merge is not currently possible because of below failed checks.') |
|
826 | 'Merge is not currently possible because of below failed checks.') | |
617 | response.mustcontain('Pull request reviewer approval is pending.') |
|
827 | response.mustcontain('Pull request reviewer approval is pending.') | |
618 |
|
828 | |||
619 | def test_merge_pull_request_renders_failure_reason( |
|
829 | def test_merge_pull_request_renders_failure_reason( | |
620 | self, user_regular, csrf_token, pr_util): |
|
830 | self, user_regular, csrf_token, pr_util): | |
621 | pull_request = pr_util.create_pull_request(mergeable=True, approved=True) |
|
831 | pull_request = pr_util.create_pull_request(mergeable=True, approved=True) | |
622 | pull_request_id = pull_request.pull_request_id |
|
832 | pull_request_id = pull_request.pull_request_id | |
623 | repo_name = pull_request.target_repo.scm_instance().name |
|
833 | repo_name = pull_request.target_repo.scm_instance().name | |
624 |
|
834 | |||
625 | merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID', |
|
835 | merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID', | |
626 | MergeFailureReason.PUSH_FAILED, |
|
836 | MergeFailureReason.PUSH_FAILED, | |
627 | metadata={'target': 'shadow repo', |
|
837 | metadata={'target': 'shadow repo', | |
628 | 'merge_commit': 'xxx'}) |
|
838 | 'merge_commit': 'xxx'}) | |
629 | model_patcher = mock.patch.multiple( |
|
839 | model_patcher = mock.patch.multiple( | |
630 | PullRequestModel, |
|
840 | PullRequestModel, | |
631 | merge_repo=mock.Mock(return_value=merge_resp), |
|
841 | merge_repo=mock.Mock(return_value=merge_resp), | |
632 | merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE'))) |
|
842 | merge_status=mock.Mock(return_value=(None, True, 'WRONG_MESSAGE'))) | |
633 |
|
843 | |||
634 | with model_patcher: |
|
844 | with model_patcher: | |
635 | response = self.app.post( |
|
845 | response = self.app.post( | |
636 | route_path('pullrequest_merge', |
|
846 | route_path('pullrequest_merge', | |
637 | repo_name=repo_name, |
|
847 | repo_name=repo_name, | |
638 | pull_request_id=pull_request_id), |
|
848 | pull_request_id=pull_request_id), | |
639 | params={'csrf_token': csrf_token}, status=302) |
|
849 | params={'csrf_token': csrf_token}, status=302) | |
640 |
|
850 | |||
641 | merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED, |
|
851 | merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED, | |
642 | metadata={'target': 'shadow repo', |
|
852 | metadata={'target': 'shadow repo', | |
643 | 'merge_commit': 'xxx'}) |
|
853 | 'merge_commit': 'xxx'}) | |
644 | assert_session_flash(response, merge_resp.merge_status_message) |
|
854 | assert_session_flash(response, merge_resp.merge_status_message) | |
645 |
|
855 | |||
646 | def test_update_source_revision(self, backend, csrf_token): |
|
856 | def test_update_source_revision(self, backend, csrf_token): | |
647 | commits = [ |
|
857 | commits = [ | |
648 | {'message': 'ancestor'}, |
|
858 | {'message': 'ancestor'}, | |
649 | {'message': 'change'}, |
|
859 | {'message': 'change'}, | |
650 | {'message': 'change-2'}, |
|
860 | {'message': 'change-2'}, | |
651 | ] |
|
861 | ] | |
652 | commit_ids = backend.create_master_repo(commits) |
|
862 | commit_ids = backend.create_master_repo(commits) | |
653 | target = backend.create_repo(heads=['ancestor']) |
|
863 | target = backend.create_repo(heads=['ancestor']) | |
654 | source = backend.create_repo(heads=['change']) |
|
864 | source = backend.create_repo(heads=['change']) | |
655 |
|
865 | |||
656 | # create pr from a in source to A in target |
|
866 | # create pr from a in source to A in target | |
657 | pull_request = PullRequest() |
|
867 | pull_request = PullRequest() | |
658 |
|
868 | |||
659 | pull_request.source_repo = source |
|
869 | pull_request.source_repo = source | |
660 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
870 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( | |
661 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
871 | branch=backend.default_branch_name, commit_id=commit_ids['change']) | |
662 |
|
872 | |||
663 | pull_request.target_repo = target |
|
873 | pull_request.target_repo = target | |
664 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
874 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( | |
665 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) |
|
875 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) | |
666 |
|
876 | |||
667 | pull_request.revisions = [commit_ids['change']] |
|
877 | pull_request.revisions = [commit_ids['change']] | |
668 | pull_request.title = u"Test" |
|
878 | pull_request.title = u"Test" | |
669 | pull_request.description = u"Description" |
|
879 | pull_request.description = u"Description" | |
670 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
880 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) | |
671 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
881 | pull_request.pull_request_state = PullRequest.STATE_CREATED | |
672 | Session().add(pull_request) |
|
882 | Session().add(pull_request) | |
673 | Session().commit() |
|
883 | Session().commit() | |
674 | pull_request_id = pull_request.pull_request_id |
|
884 | pull_request_id = pull_request.pull_request_id | |
675 |
|
885 | |||
676 | # source has ancestor - change - change-2 |
|
886 | # source has ancestor - change - change-2 | |
677 | backend.pull_heads(source, heads=['change-2']) |
|
887 | backend.pull_heads(source, heads=['change-2']) | |
678 |
|
888 | |||
679 | # update PR |
|
889 | # update PR | |
680 | self.app.post( |
|
890 | self.app.post( | |
681 | route_path('pullrequest_update', |
|
891 | route_path('pullrequest_update', | |
682 | repo_name=target.repo_name, pull_request_id=pull_request_id), |
|
892 | repo_name=target.repo_name, pull_request_id=pull_request_id), | |
683 | params={'update_commits': 'true', 'csrf_token': csrf_token}) |
|
893 | params={'update_commits': 'true', 'csrf_token': csrf_token}) | |
684 |
|
894 | |||
685 | response = self.app.get( |
|
895 | response = self.app.get( | |
686 | route_path('pullrequest_show', |
|
896 | route_path('pullrequest_show', | |
687 | repo_name=target.repo_name, |
|
897 | repo_name=target.repo_name, | |
688 | pull_request_id=pull_request.pull_request_id)) |
|
898 | pull_request_id=pull_request.pull_request_id)) | |
689 |
|
899 | |||
690 | assert response.status_int == 200 |
|
900 | assert response.status_int == 200 | |
691 | response.mustcontain('Pull request updated to') |
|
901 | response.mustcontain('Pull request updated to') | |
692 | response.mustcontain('with 1 added, 0 removed commits.') |
|
902 | response.mustcontain('with 1 added, 0 removed commits.') | |
693 |
|
903 | |||
694 | # check that we have now both revisions |
|
904 | # check that we have now both revisions | |
695 | pull_request = PullRequest.get(pull_request_id) |
|
905 | pull_request = PullRequest.get(pull_request_id) | |
696 | assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']] |
|
906 | assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']] | |
697 |
|
907 | |||
698 | def test_update_target_revision(self, backend, csrf_token): |
|
908 | def test_update_target_revision(self, backend, csrf_token): | |
699 | commits = [ |
|
909 | commits = [ | |
700 | {'message': 'ancestor'}, |
|
910 | {'message': 'ancestor'}, | |
701 | {'message': 'change'}, |
|
911 | {'message': 'change'}, | |
702 | {'message': 'ancestor-new', 'parents': ['ancestor']}, |
|
912 | {'message': 'ancestor-new', 'parents': ['ancestor']}, | |
703 | {'message': 'change-rebased'}, |
|
913 | {'message': 'change-rebased'}, | |
704 | ] |
|
914 | ] | |
705 | commit_ids = backend.create_master_repo(commits) |
|
915 | commit_ids = backend.create_master_repo(commits) | |
706 | target = backend.create_repo(heads=['ancestor']) |
|
916 | target = backend.create_repo(heads=['ancestor']) | |
707 | source = backend.create_repo(heads=['change']) |
|
917 | source = backend.create_repo(heads=['change']) | |
708 |
|
918 | |||
709 | # create pr from a in source to A in target |
|
919 | # create pr from a in source to A in target | |
710 | pull_request = PullRequest() |
|
920 | pull_request = PullRequest() | |
711 |
|
921 | |||
712 | pull_request.source_repo = source |
|
922 | pull_request.source_repo = source | |
713 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
923 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( | |
714 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
924 | branch=backend.default_branch_name, commit_id=commit_ids['change']) | |
715 |
|
925 | |||
716 | pull_request.target_repo = target |
|
926 | pull_request.target_repo = target | |
717 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
927 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( | |
718 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) |
|
928 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) | |
719 |
|
929 | |||
720 | pull_request.revisions = [commit_ids['change']] |
|
930 | pull_request.revisions = [commit_ids['change']] | |
721 | pull_request.title = u"Test" |
|
931 | pull_request.title = u"Test" | |
722 | pull_request.description = u"Description" |
|
932 | pull_request.description = u"Description" | |
723 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
933 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) | |
724 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
934 | pull_request.pull_request_state = PullRequest.STATE_CREATED | |
725 |
|
935 | |||
726 | Session().add(pull_request) |
|
936 | Session().add(pull_request) | |
727 | Session().commit() |
|
937 | Session().commit() | |
728 | pull_request_id = pull_request.pull_request_id |
|
938 | pull_request_id = pull_request.pull_request_id | |
729 |
|
939 | |||
730 | # target has ancestor - ancestor-new |
|
940 | # target has ancestor - ancestor-new | |
731 | # source has ancestor - ancestor-new - change-rebased |
|
941 | # source has ancestor - ancestor-new - change-rebased | |
732 | backend.pull_heads(target, heads=['ancestor-new']) |
|
942 | backend.pull_heads(target, heads=['ancestor-new']) | |
733 | backend.pull_heads(source, heads=['change-rebased']) |
|
943 | backend.pull_heads(source, heads=['change-rebased']) | |
734 |
|
944 | |||
735 | # update PR |
|
945 | # update PR | |
736 | url = route_path('pullrequest_update', |
|
946 | url = route_path('pullrequest_update', | |
737 | repo_name=target.repo_name, |
|
947 | repo_name=target.repo_name, | |
738 | pull_request_id=pull_request_id) |
|
948 | pull_request_id=pull_request_id) | |
739 | self.app.post(url, |
|
949 | self.app.post(url, | |
740 | params={'update_commits': 'true', 'csrf_token': csrf_token}, |
|
950 | params={'update_commits': 'true', 'csrf_token': csrf_token}, | |
741 | status=200) |
|
951 | status=200) | |
742 |
|
952 | |||
743 | # check that we have now both revisions |
|
953 | # check that we have now both revisions | |
744 | pull_request = PullRequest.get(pull_request_id) |
|
954 | pull_request = PullRequest.get(pull_request_id) | |
745 | assert pull_request.revisions == [commit_ids['change-rebased']] |
|
955 | assert pull_request.revisions == [commit_ids['change-rebased']] | |
746 | assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format( |
|
956 | assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format( | |
747 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new']) |
|
957 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new']) | |
748 |
|
958 | |||
749 | response = self.app.get( |
|
959 | response = self.app.get( | |
750 | route_path('pullrequest_show', |
|
960 | route_path('pullrequest_show', | |
751 | repo_name=target.repo_name, |
|
961 | repo_name=target.repo_name, | |
752 | pull_request_id=pull_request.pull_request_id)) |
|
962 | pull_request_id=pull_request.pull_request_id)) | |
753 | assert response.status_int == 200 |
|
963 | assert response.status_int == 200 | |
754 | response.mustcontain('Pull request updated to') |
|
964 | response.mustcontain('Pull request updated to') | |
755 | response.mustcontain('with 1 added, 1 removed commits.') |
|
965 | response.mustcontain('with 1 added, 1 removed commits.') | |
756 |
|
966 | |||
757 | def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token): |
|
967 | def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token): | |
758 | backend = backend_git |
|
968 | backend = backend_git | |
759 | commits = [ |
|
969 | commits = [ | |
760 | {'message': 'master-commit-1'}, |
|
970 | {'message': 'master-commit-1'}, | |
761 | {'message': 'master-commit-2-change-1'}, |
|
971 | {'message': 'master-commit-2-change-1'}, | |
762 | {'message': 'master-commit-3-change-2'}, |
|
972 | {'message': 'master-commit-3-change-2'}, | |
763 |
|
973 | |||
764 | {'message': 'feat-commit-1', 'parents': ['master-commit-1']}, |
|
974 | {'message': 'feat-commit-1', 'parents': ['master-commit-1']}, | |
765 | {'message': 'feat-commit-2'}, |
|
975 | {'message': 'feat-commit-2'}, | |
766 | ] |
|
976 | ] | |
767 | commit_ids = backend.create_master_repo(commits) |
|
977 | commit_ids = backend.create_master_repo(commits) | |
768 | target = backend.create_repo(heads=['master-commit-3-change-2']) |
|
978 | target = backend.create_repo(heads=['master-commit-3-change-2']) | |
769 | source = backend.create_repo(heads=['feat-commit-2']) |
|
979 | source = backend.create_repo(heads=['feat-commit-2']) | |
770 |
|
980 | |||
771 | # create pr from a in source to A in target |
|
981 | # create pr from a in source to A in target | |
772 | pull_request = PullRequest() |
|
982 | pull_request = PullRequest() | |
773 | pull_request.source_repo = source |
|
983 | pull_request.source_repo = source | |
774 |
|
984 | |||
775 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
985 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( | |
776 | branch=backend.default_branch_name, |
|
986 | branch=backend.default_branch_name, | |
777 | commit_id=commit_ids['master-commit-3-change-2']) |
|
987 | commit_id=commit_ids['master-commit-3-change-2']) | |
778 |
|
988 | |||
779 | pull_request.target_repo = target |
|
989 | pull_request.target_repo = target | |
780 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
990 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( | |
781 | branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2']) |
|
991 | branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2']) | |
782 |
|
992 | |||
783 | pull_request.revisions = [ |
|
993 | pull_request.revisions = [ | |
784 | commit_ids['feat-commit-1'], |
|
994 | commit_ids['feat-commit-1'], | |
785 | commit_ids['feat-commit-2'] |
|
995 | commit_ids['feat-commit-2'] | |
786 | ] |
|
996 | ] | |
787 | pull_request.title = u"Test" |
|
997 | pull_request.title = u"Test" | |
788 | pull_request.description = u"Description" |
|
998 | pull_request.description = u"Description" | |
789 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
999 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) | |
790 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1000 | pull_request.pull_request_state = PullRequest.STATE_CREATED | |
791 | Session().add(pull_request) |
|
1001 | Session().add(pull_request) | |
792 | Session().commit() |
|
1002 | Session().commit() | |
793 | pull_request_id = pull_request.pull_request_id |
|
1003 | pull_request_id = pull_request.pull_request_id | |
794 |
|
1004 | |||
795 | # PR is created, now we simulate a force-push into target, |
|
1005 | # PR is created, now we simulate a force-push into target, | |
796 | # that drops a 2 last commits |
|
1006 | # that drops a 2 last commits | |
797 | vcsrepo = target.scm_instance() |
|
1007 | vcsrepo = target.scm_instance() | |
798 | vcsrepo.config.clear_section('hooks') |
|
1008 | vcsrepo.config.clear_section('hooks') | |
799 | vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2']) |
|
1009 | vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2']) | |
800 |
|
1010 | |||
801 | # update PR |
|
1011 | # update PR | |
802 | url = route_path('pullrequest_update', |
|
1012 | url = route_path('pullrequest_update', | |
803 | repo_name=target.repo_name, |
|
1013 | repo_name=target.repo_name, | |
804 | pull_request_id=pull_request_id) |
|
1014 | pull_request_id=pull_request_id) | |
805 | self.app.post(url, |
|
1015 | self.app.post(url, | |
806 | params={'update_commits': 'true', 'csrf_token': csrf_token}, |
|
1016 | params={'update_commits': 'true', 'csrf_token': csrf_token}, | |
807 | status=200) |
|
1017 | status=200) | |
808 |
|
1018 | |||
809 | response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name)) |
|
1019 | response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name)) | |
810 | assert response.status_int == 200 |
|
1020 | assert response.status_int == 200 | |
811 | response.mustcontain('Pull request updated to') |
|
1021 | response.mustcontain('Pull request updated to') | |
812 | response.mustcontain('with 0 added, 0 removed commits.') |
|
1022 | response.mustcontain('with 0 added, 0 removed commits.') | |
813 |
|
1023 | |||
814 | def test_update_of_ancestor_reference(self, backend, csrf_token): |
|
1024 | def test_update_of_ancestor_reference(self, backend, csrf_token): | |
815 | commits = [ |
|
1025 | commits = [ | |
816 | {'message': 'ancestor'}, |
|
1026 | {'message': 'ancestor'}, | |
817 | {'message': 'change'}, |
|
1027 | {'message': 'change'}, | |
818 | {'message': 'change-2'}, |
|
1028 | {'message': 'change-2'}, | |
819 | {'message': 'ancestor-new', 'parents': ['ancestor']}, |
|
1029 | {'message': 'ancestor-new', 'parents': ['ancestor']}, | |
820 | {'message': 'change-rebased'}, |
|
1030 | {'message': 'change-rebased'}, | |
821 | ] |
|
1031 | ] | |
822 | commit_ids = backend.create_master_repo(commits) |
|
1032 | commit_ids = backend.create_master_repo(commits) | |
823 | target = backend.create_repo(heads=['ancestor']) |
|
1033 | target = backend.create_repo(heads=['ancestor']) | |
824 | source = backend.create_repo(heads=['change']) |
|
1034 | source = backend.create_repo(heads=['change']) | |
825 |
|
1035 | |||
826 | # create pr from a in source to A in target |
|
1036 | # create pr from a in source to A in target | |
827 | pull_request = PullRequest() |
|
1037 | pull_request = PullRequest() | |
828 | pull_request.source_repo = source |
|
1038 | pull_request.source_repo = source | |
829 |
|
1039 | |||
830 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
1040 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( | |
831 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
1041 | branch=backend.default_branch_name, commit_id=commit_ids['change']) | |
832 | pull_request.target_repo = target |
|
1042 | pull_request.target_repo = target | |
833 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
1043 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( | |
834 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) |
|
1044 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) | |
835 | pull_request.revisions = [commit_ids['change']] |
|
1045 | pull_request.revisions = [commit_ids['change']] | |
836 | pull_request.title = u"Test" |
|
1046 | pull_request.title = u"Test" | |
837 | pull_request.description = u"Description" |
|
1047 | pull_request.description = u"Description" | |
838 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1048 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) | |
839 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1049 | pull_request.pull_request_state = PullRequest.STATE_CREATED | |
840 | Session().add(pull_request) |
|
1050 | Session().add(pull_request) | |
841 | Session().commit() |
|
1051 | Session().commit() | |
842 | pull_request_id = pull_request.pull_request_id |
|
1052 | pull_request_id = pull_request.pull_request_id | |
843 |
|
1053 | |||
844 | # target has ancestor - ancestor-new |
|
1054 | # target has ancestor - ancestor-new | |
845 | # source has ancestor - ancestor-new - change-rebased |
|
1055 | # source has ancestor - ancestor-new - change-rebased | |
846 | backend.pull_heads(target, heads=['ancestor-new']) |
|
1056 | backend.pull_heads(target, heads=['ancestor-new']) | |
847 | backend.pull_heads(source, heads=['change-rebased']) |
|
1057 | backend.pull_heads(source, heads=['change-rebased']) | |
848 |
|
1058 | |||
849 | # update PR |
|
1059 | # update PR | |
850 | self.app.post( |
|
1060 | self.app.post( | |
851 | route_path('pullrequest_update', |
|
1061 | route_path('pullrequest_update', | |
852 | repo_name=target.repo_name, pull_request_id=pull_request_id), |
|
1062 | repo_name=target.repo_name, pull_request_id=pull_request_id), | |
853 | params={'update_commits': 'true', 'csrf_token': csrf_token}, |
|
1063 | params={'update_commits': 'true', 'csrf_token': csrf_token}, | |
854 | status=200) |
|
1064 | status=200) | |
855 |
|
1065 | |||
856 | # Expect the target reference to be updated correctly |
|
1066 | # Expect the target reference to be updated correctly | |
857 | pull_request = PullRequest.get(pull_request_id) |
|
1067 | pull_request = PullRequest.get(pull_request_id) | |
858 | assert pull_request.revisions == [commit_ids['change-rebased']] |
|
1068 | assert pull_request.revisions == [commit_ids['change-rebased']] | |
859 | expected_target_ref = 'branch:{branch}:{commit_id}'.format( |
|
1069 | expected_target_ref = 'branch:{branch}:{commit_id}'.format( | |
860 | branch=backend.default_branch_name, |
|
1070 | branch=backend.default_branch_name, | |
861 | commit_id=commit_ids['ancestor-new']) |
|
1071 | commit_id=commit_ids['ancestor-new']) | |
862 | assert pull_request.target_ref == expected_target_ref |
|
1072 | assert pull_request.target_ref == expected_target_ref | |
863 |
|
1073 | |||
864 | def test_remove_pull_request_branch(self, backend_git, csrf_token): |
|
1074 | def test_remove_pull_request_branch(self, backend_git, csrf_token): | |
865 | branch_name = 'development' |
|
1075 | branch_name = 'development' | |
866 | commits = [ |
|
1076 | commits = [ | |
867 | {'message': 'initial-commit'}, |
|
1077 | {'message': 'initial-commit'}, | |
868 | {'message': 'old-feature'}, |
|
1078 | {'message': 'old-feature'}, | |
869 | {'message': 'new-feature', 'branch': branch_name}, |
|
1079 | {'message': 'new-feature', 'branch': branch_name}, | |
870 | ] |
|
1080 | ] | |
871 | repo = backend_git.create_repo(commits) |
|
1081 | repo = backend_git.create_repo(commits) | |
872 | repo_name = repo.repo_name |
|
1082 | repo_name = repo.repo_name | |
873 | commit_ids = backend_git.commit_ids |
|
1083 | commit_ids = backend_git.commit_ids | |
874 |
|
1084 | |||
875 | pull_request = PullRequest() |
|
1085 | pull_request = PullRequest() | |
876 | pull_request.source_repo = repo |
|
1086 | pull_request.source_repo = repo | |
877 | pull_request.target_repo = repo |
|
1087 | pull_request.target_repo = repo | |
878 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
1088 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( | |
879 | branch=branch_name, commit_id=commit_ids['new-feature']) |
|
1089 | branch=branch_name, commit_id=commit_ids['new-feature']) | |
880 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
1090 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( | |
881 | branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature']) |
|
1091 | branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature']) | |
882 | pull_request.revisions = [commit_ids['new-feature']] |
|
1092 | pull_request.revisions = [commit_ids['new-feature']] | |
883 | pull_request.title = u"Test" |
|
1093 | pull_request.title = u"Test" | |
884 | pull_request.description = u"Description" |
|
1094 | pull_request.description = u"Description" | |
885 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1095 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) | |
886 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1096 | pull_request.pull_request_state = PullRequest.STATE_CREATED | |
887 | Session().add(pull_request) |
|
1097 | Session().add(pull_request) | |
888 | Session().commit() |
|
1098 | Session().commit() | |
889 |
|
1099 | |||
890 | pull_request_id = pull_request.pull_request_id |
|
1100 | pull_request_id = pull_request.pull_request_id | |
891 |
|
1101 | |||
892 | vcs = repo.scm_instance() |
|
1102 | vcs = repo.scm_instance() | |
893 | vcs.remove_ref('refs/heads/{}'.format(branch_name)) |
|
1103 | vcs.remove_ref('refs/heads/{}'.format(branch_name)) | |
894 | # NOTE(marcink): run GC to ensure the commits are gone |
|
1104 | # NOTE(marcink): run GC to ensure the commits are gone | |
895 | vcs.run_gc() |
|
1105 | vcs.run_gc() | |
896 |
|
1106 | |||
897 | response = self.app.get(route_path( |
|
1107 | response = self.app.get(route_path( | |
898 | 'pullrequest_show', |
|
1108 | 'pullrequest_show', | |
899 | repo_name=repo_name, |
|
1109 | repo_name=repo_name, | |
900 | pull_request_id=pull_request_id)) |
|
1110 | pull_request_id=pull_request_id)) | |
901 |
|
1111 | |||
902 | assert response.status_int == 200 |
|
1112 | assert response.status_int == 200 | |
903 |
|
1113 | |||
904 | response.assert_response().element_contains( |
|
1114 | response.assert_response().element_contains( | |
905 | '#changeset_compare_view_content .alert strong', |
|
1115 | '#changeset_compare_view_content .alert strong', | |
906 | 'Missing commits') |
|
1116 | 'Missing commits') | |
907 | response.assert_response().element_contains( |
|
1117 | response.assert_response().element_contains( | |
908 | '#changeset_compare_view_content .alert', |
|
1118 | '#changeset_compare_view_content .alert', | |
909 | 'This pull request cannot be displayed, because one or more' |
|
1119 | 'This pull request cannot be displayed, because one or more' | |
910 | ' commits no longer exist in the source repository.') |
|
1120 | ' commits no longer exist in the source repository.') | |
911 |
|
1121 | |||
912 | def test_strip_commits_from_pull_request( |
|
1122 | def test_strip_commits_from_pull_request( | |
913 | self, backend, pr_util, csrf_token): |
|
1123 | self, backend, pr_util, csrf_token): | |
914 | commits = [ |
|
1124 | commits = [ | |
915 | {'message': 'initial-commit'}, |
|
1125 | {'message': 'initial-commit'}, | |
916 | {'message': 'old-feature'}, |
|
1126 | {'message': 'old-feature'}, | |
917 | {'message': 'new-feature', 'parents': ['initial-commit']}, |
|
1127 | {'message': 'new-feature', 'parents': ['initial-commit']}, | |
918 | ] |
|
1128 | ] | |
919 | pull_request = pr_util.create_pull_request( |
|
1129 | pull_request = pr_util.create_pull_request( | |
920 | commits, target_head='initial-commit', source_head='new-feature', |
|
1130 | commits, target_head='initial-commit', source_head='new-feature', | |
921 | revisions=['new-feature']) |
|
1131 | revisions=['new-feature']) | |
922 |
|
1132 | |||
923 | vcs = pr_util.source_repository.scm_instance() |
|
1133 | vcs = pr_util.source_repository.scm_instance() | |
924 | if backend.alias == 'git': |
|
1134 | if backend.alias == 'git': | |
925 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') |
|
1135 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') | |
926 | else: |
|
1136 | else: | |
927 | vcs.strip(pr_util.commit_ids['new-feature']) |
|
1137 | vcs.strip(pr_util.commit_ids['new-feature']) | |
928 |
|
1138 | |||
929 | response = self.app.get(route_path( |
|
1139 | response = self.app.get(route_path( | |
930 | 'pullrequest_show', |
|
1140 | 'pullrequest_show', | |
931 | repo_name=pr_util.target_repository.repo_name, |
|
1141 | repo_name=pr_util.target_repository.repo_name, | |
932 | pull_request_id=pull_request.pull_request_id)) |
|
1142 | pull_request_id=pull_request.pull_request_id)) | |
933 |
|
1143 | |||
934 | assert response.status_int == 200 |
|
1144 | assert response.status_int == 200 | |
935 |
|
1145 | |||
936 | response.assert_response().element_contains( |
|
1146 | response.assert_response().element_contains( | |
937 | '#changeset_compare_view_content .alert strong', |
|
1147 | '#changeset_compare_view_content .alert strong', | |
938 | 'Missing commits') |
|
1148 | 'Missing commits') | |
939 | response.assert_response().element_contains( |
|
1149 | response.assert_response().element_contains( | |
940 | '#changeset_compare_view_content .alert', |
|
1150 | '#changeset_compare_view_content .alert', | |
941 | 'This pull request cannot be displayed, because one or more' |
|
1151 | 'This pull request cannot be displayed, because one or more' | |
942 | ' commits no longer exist in the source repository.') |
|
1152 | ' commits no longer exist in the source repository.') | |
943 | response.assert_response().element_contains( |
|
1153 | response.assert_response().element_contains( | |
944 | '#update_commits', |
|
1154 | '#update_commits', | |
945 | 'Update commits') |
|
1155 | 'Update commits') | |
946 |
|
1156 | |||
947 | def test_strip_commits_and_update( |
|
1157 | def test_strip_commits_and_update( | |
948 | self, backend, pr_util, csrf_token): |
|
1158 | self, backend, pr_util, csrf_token): | |
949 | commits = [ |
|
1159 | commits = [ | |
950 | {'message': 'initial-commit'}, |
|
1160 | {'message': 'initial-commit'}, | |
951 | {'message': 'old-feature'}, |
|
1161 | {'message': 'old-feature'}, | |
952 | {'message': 'new-feature', 'parents': ['old-feature']}, |
|
1162 | {'message': 'new-feature', 'parents': ['old-feature']}, | |
953 | ] |
|
1163 | ] | |
954 | pull_request = pr_util.create_pull_request( |
|
1164 | pull_request = pr_util.create_pull_request( | |
955 | commits, target_head='old-feature', source_head='new-feature', |
|
1165 | commits, target_head='old-feature', source_head='new-feature', | |
956 | revisions=['new-feature'], mergeable=True) |
|
1166 | revisions=['new-feature'], mergeable=True) | |
957 |
|
1167 | |||
958 | vcs = pr_util.source_repository.scm_instance() |
|
1168 | vcs = pr_util.source_repository.scm_instance() | |
959 | if backend.alias == 'git': |
|
1169 | if backend.alias == 'git': | |
960 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') |
|
1170 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') | |
961 | else: |
|
1171 | else: | |
962 | vcs.strip(pr_util.commit_ids['new-feature']) |
|
1172 | vcs.strip(pr_util.commit_ids['new-feature']) | |
963 |
|
1173 | |||
964 | url = route_path('pullrequest_update', |
|
1174 | url = route_path('pullrequest_update', | |
965 | repo_name=pull_request.target_repo.repo_name, |
|
1175 | repo_name=pull_request.target_repo.repo_name, | |
966 | pull_request_id=pull_request.pull_request_id) |
|
1176 | pull_request_id=pull_request.pull_request_id) | |
967 | response = self.app.post(url, |
|
1177 | response = self.app.post(url, | |
968 | params={'update_commits': 'true', |
|
1178 | params={'update_commits': 'true', | |
969 | 'csrf_token': csrf_token}) |
|
1179 | 'csrf_token': csrf_token}) | |
970 |
|
1180 | |||
971 | assert response.status_int == 200 |
|
1181 | assert response.status_int == 200 | |
972 | assert response.body == '{"response": true, "redirect_url": null}' |
|
1182 | assert response.body == '{"response": true, "redirect_url": null}' | |
973 |
|
1183 | |||
974 | # Make sure that after update, it won't raise 500 errors |
|
1184 | # Make sure that after update, it won't raise 500 errors | |
975 | response = self.app.get(route_path( |
|
1185 | response = self.app.get(route_path( | |
976 | 'pullrequest_show', |
|
1186 | 'pullrequest_show', | |
977 | repo_name=pr_util.target_repository.repo_name, |
|
1187 | repo_name=pr_util.target_repository.repo_name, | |
978 | pull_request_id=pull_request.pull_request_id)) |
|
1188 | pull_request_id=pull_request.pull_request_id)) | |
979 |
|
1189 | |||
980 | assert response.status_int == 200 |
|
1190 | assert response.status_int == 200 | |
981 | response.assert_response().element_contains( |
|
1191 | response.assert_response().element_contains( | |
982 | '#changeset_compare_view_content .alert strong', |
|
1192 | '#changeset_compare_view_content .alert strong', | |
983 | 'Missing commits') |
|
1193 | 'Missing commits') | |
984 |
|
1194 | |||
985 | def test_branch_is_a_link(self, pr_util): |
|
1195 | def test_branch_is_a_link(self, pr_util): | |
986 | pull_request = pr_util.create_pull_request() |
|
1196 | pull_request = pr_util.create_pull_request() | |
987 | pull_request.source_ref = 'branch:origin:1234567890abcdef' |
|
1197 | pull_request.source_ref = 'branch:origin:1234567890abcdef' | |
988 | pull_request.target_ref = 'branch:target:abcdef1234567890' |
|
1198 | pull_request.target_ref = 'branch:target:abcdef1234567890' | |
989 | Session().add(pull_request) |
|
1199 | Session().add(pull_request) | |
990 | Session().commit() |
|
1200 | Session().commit() | |
991 |
|
1201 | |||
992 | response = self.app.get(route_path( |
|
1202 | response = self.app.get(route_path( | |
993 | 'pullrequest_show', |
|
1203 | 'pullrequest_show', | |
994 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1204 | repo_name=pull_request.target_repo.scm_instance().name, | |
995 | pull_request_id=pull_request.pull_request_id)) |
|
1205 | pull_request_id=pull_request.pull_request_id)) | |
996 | assert response.status_int == 200 |
|
1206 | assert response.status_int == 200 | |
997 |
|
1207 | |||
998 | source = response.assert_response().get_element('.pr-source-info') |
|
1208 | source = response.assert_response().get_element('.pr-source-info') | |
999 | source_parent = source.getparent() |
|
1209 | source_parent = source.getparent() | |
1000 | assert len(source_parent) == 1 |
|
1210 | assert len(source_parent) == 1 | |
1001 |
|
1211 | |||
1002 | target = response.assert_response().get_element('.pr-target-info') |
|
1212 | target = response.assert_response().get_element('.pr-target-info') | |
1003 | target_parent = target.getparent() |
|
1213 | target_parent = target.getparent() | |
1004 | assert len(target_parent) == 1 |
|
1214 | assert len(target_parent) == 1 | |
1005 |
|
1215 | |||
1006 | expected_origin_link = route_path( |
|
1216 | expected_origin_link = route_path( | |
1007 | 'repo_commits', |
|
1217 | 'repo_commits', | |
1008 | repo_name=pull_request.source_repo.scm_instance().name, |
|
1218 | repo_name=pull_request.source_repo.scm_instance().name, | |
1009 | params=dict(branch='origin')) |
|
1219 | params=dict(branch='origin')) | |
1010 | expected_target_link = route_path( |
|
1220 | expected_target_link = route_path( | |
1011 | 'repo_commits', |
|
1221 | 'repo_commits', | |
1012 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1222 | repo_name=pull_request.target_repo.scm_instance().name, | |
1013 | params=dict(branch='target')) |
|
1223 | params=dict(branch='target')) | |
1014 | assert source_parent.attrib['href'] == expected_origin_link |
|
1224 | assert source_parent.attrib['href'] == expected_origin_link | |
1015 | assert target_parent.attrib['href'] == expected_target_link |
|
1225 | assert target_parent.attrib['href'] == expected_target_link | |
1016 |
|
1226 | |||
1017 | def test_bookmark_is_not_a_link(self, pr_util): |
|
1227 | def test_bookmark_is_not_a_link(self, pr_util): | |
1018 | pull_request = pr_util.create_pull_request() |
|
1228 | pull_request = pr_util.create_pull_request() | |
1019 | pull_request.source_ref = 'bookmark:origin:1234567890abcdef' |
|
1229 | pull_request.source_ref = 'bookmark:origin:1234567890abcdef' | |
1020 | pull_request.target_ref = 'bookmark:target:abcdef1234567890' |
|
1230 | pull_request.target_ref = 'bookmark:target:abcdef1234567890' | |
1021 | Session().add(pull_request) |
|
1231 | Session().add(pull_request) | |
1022 | Session().commit() |
|
1232 | Session().commit() | |
1023 |
|
1233 | |||
1024 | response = self.app.get(route_path( |
|
1234 | response = self.app.get(route_path( | |
1025 | 'pullrequest_show', |
|
1235 | 'pullrequest_show', | |
1026 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1236 | repo_name=pull_request.target_repo.scm_instance().name, | |
1027 | pull_request_id=pull_request.pull_request_id)) |
|
1237 | pull_request_id=pull_request.pull_request_id)) | |
1028 | assert response.status_int == 200 |
|
1238 | assert response.status_int == 200 | |
1029 |
|
1239 | |||
1030 | source = response.assert_response().get_element('.pr-source-info') |
|
1240 | source = response.assert_response().get_element('.pr-source-info') | |
1031 | assert source.text.strip() == 'bookmark:origin' |
|
1241 | assert source.text.strip() == 'bookmark:origin' | |
1032 | assert source.getparent().attrib.get('href') is None |
|
1242 | assert source.getparent().attrib.get('href') is None | |
1033 |
|
1243 | |||
1034 | target = response.assert_response().get_element('.pr-target-info') |
|
1244 | target = response.assert_response().get_element('.pr-target-info') | |
1035 | assert target.text.strip() == 'bookmark:target' |
|
1245 | assert target.text.strip() == 'bookmark:target' | |
1036 | assert target.getparent().attrib.get('href') is None |
|
1246 | assert target.getparent().attrib.get('href') is None | |
1037 |
|
1247 | |||
1038 | def test_tag_is_not_a_link(self, pr_util): |
|
1248 | def test_tag_is_not_a_link(self, pr_util): | |
1039 | pull_request = pr_util.create_pull_request() |
|
1249 | pull_request = pr_util.create_pull_request() | |
1040 | pull_request.source_ref = 'tag:origin:1234567890abcdef' |
|
1250 | pull_request.source_ref = 'tag:origin:1234567890abcdef' | |
1041 | pull_request.target_ref = 'tag:target:abcdef1234567890' |
|
1251 | pull_request.target_ref = 'tag:target:abcdef1234567890' | |
1042 | Session().add(pull_request) |
|
1252 | Session().add(pull_request) | |
1043 | Session().commit() |
|
1253 | Session().commit() | |
1044 |
|
1254 | |||
1045 | response = self.app.get(route_path( |
|
1255 | response = self.app.get(route_path( | |
1046 | 'pullrequest_show', |
|
1256 | 'pullrequest_show', | |
1047 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1257 | repo_name=pull_request.target_repo.scm_instance().name, | |
1048 | pull_request_id=pull_request.pull_request_id)) |
|
1258 | pull_request_id=pull_request.pull_request_id)) | |
1049 | assert response.status_int == 200 |
|
1259 | assert response.status_int == 200 | |
1050 |
|
1260 | |||
1051 | source = response.assert_response().get_element('.pr-source-info') |
|
1261 | source = response.assert_response().get_element('.pr-source-info') | |
1052 | assert source.text.strip() == 'tag:origin' |
|
1262 | assert source.text.strip() == 'tag:origin' | |
1053 | assert source.getparent().attrib.get('href') is None |
|
1263 | assert source.getparent().attrib.get('href') is None | |
1054 |
|
1264 | |||
1055 | target = response.assert_response().get_element('.pr-target-info') |
|
1265 | target = response.assert_response().get_element('.pr-target-info') | |
1056 | assert target.text.strip() == 'tag:target' |
|
1266 | assert target.text.strip() == 'tag:target' | |
1057 | assert target.getparent().attrib.get('href') is None |
|
1267 | assert target.getparent().attrib.get('href') is None | |
1058 |
|
1268 | |||
1059 | @pytest.mark.parametrize('mergeable', [True, False]) |
|
1269 | @pytest.mark.parametrize('mergeable', [True, False]) | |
1060 | def test_shadow_repository_link( |
|
1270 | def test_shadow_repository_link( | |
1061 | self, mergeable, pr_util, http_host_only_stub): |
|
1271 | self, mergeable, pr_util, http_host_only_stub): | |
1062 | """ |
|
1272 | """ | |
1063 | Check that the pull request summary page displays a link to the shadow |
|
1273 | Check that the pull request summary page displays a link to the shadow | |
1064 | repository if the pull request is mergeable. If it is not mergeable |
|
1274 | repository if the pull request is mergeable. If it is not mergeable | |
1065 | the link should not be displayed. |
|
1275 | the link should not be displayed. | |
1066 | """ |
|
1276 | """ | |
1067 | pull_request = pr_util.create_pull_request( |
|
1277 | pull_request = pr_util.create_pull_request( | |
1068 | mergeable=mergeable, enable_notifications=False) |
|
1278 | mergeable=mergeable, enable_notifications=False) | |
1069 | target_repo = pull_request.target_repo.scm_instance() |
|
1279 | target_repo = pull_request.target_repo.scm_instance() | |
1070 | pr_id = pull_request.pull_request_id |
|
1280 | pr_id = pull_request.pull_request_id | |
1071 | shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format( |
|
1281 | shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format( | |
1072 | host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id) |
|
1282 | host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id) | |
1073 |
|
1283 | |||
1074 | response = self.app.get(route_path( |
|
1284 | response = self.app.get(route_path( | |
1075 | 'pullrequest_show', |
|
1285 | 'pullrequest_show', | |
1076 | repo_name=target_repo.name, |
|
1286 | repo_name=target_repo.name, | |
1077 | pull_request_id=pr_id)) |
|
1287 | pull_request_id=pr_id)) | |
1078 |
|
1288 | |||
1079 | if mergeable: |
|
1289 | if mergeable: | |
1080 | response.assert_response().element_value_contains( |
|
1290 | response.assert_response().element_value_contains( | |
1081 | 'input.pr-mergeinfo', shadow_url) |
|
1291 | 'input.pr-mergeinfo', shadow_url) | |
1082 | response.assert_response().element_value_contains( |
|
1292 | response.assert_response().element_value_contains( | |
1083 | 'input.pr-mergeinfo ', 'pr-merge') |
|
1293 | 'input.pr-mergeinfo ', 'pr-merge') | |
1084 | else: |
|
1294 | else: | |
1085 | response.assert_response().no_element_exists('.pr-mergeinfo') |
|
1295 | response.assert_response().no_element_exists('.pr-mergeinfo') | |
1086 |
|
1296 | |||
1087 |
|
1297 | |||
1088 | @pytest.mark.usefixtures('app') |
|
1298 | @pytest.mark.usefixtures('app') | |
1089 | @pytest.mark.backends("git", "hg") |
|
1299 | @pytest.mark.backends("git", "hg") | |
1090 | class TestPullrequestsControllerDelete(object): |
|
1300 | class TestPullrequestsControllerDelete(object): | |
1091 | def test_pull_request_delete_button_permissions_admin( |
|
1301 | def test_pull_request_delete_button_permissions_admin( | |
1092 | self, autologin_user, user_admin, pr_util): |
|
1302 | self, autologin_user, user_admin, pr_util): | |
1093 | pull_request = pr_util.create_pull_request( |
|
1303 | pull_request = pr_util.create_pull_request( | |
1094 | author=user_admin.username, enable_notifications=False) |
|
1304 | author=user_admin.username, enable_notifications=False) | |
1095 |
|
1305 | |||
1096 | response = self.app.get(route_path( |
|
1306 | response = self.app.get(route_path( | |
1097 | 'pullrequest_show', |
|
1307 | 'pullrequest_show', | |
1098 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1308 | repo_name=pull_request.target_repo.scm_instance().name, | |
1099 | pull_request_id=pull_request.pull_request_id)) |
|
1309 | pull_request_id=pull_request.pull_request_id)) | |
1100 |
|
1310 | |||
1101 | response.mustcontain('id="delete_pullrequest"') |
|
1311 | response.mustcontain('id="delete_pullrequest"') | |
1102 | response.mustcontain('Confirm to delete this pull request') |
|
1312 | response.mustcontain('Confirm to delete this pull request') | |
1103 |
|
1313 | |||
1104 | def test_pull_request_delete_button_permissions_owner( |
|
1314 | def test_pull_request_delete_button_permissions_owner( | |
1105 | self, autologin_regular_user, user_regular, pr_util): |
|
1315 | self, autologin_regular_user, user_regular, pr_util): | |
1106 | pull_request = pr_util.create_pull_request( |
|
1316 | pull_request = pr_util.create_pull_request( | |
1107 | author=user_regular.username, enable_notifications=False) |
|
1317 | author=user_regular.username, enable_notifications=False) | |
1108 |
|
1318 | |||
1109 | response = self.app.get(route_path( |
|
1319 | response = self.app.get(route_path( | |
1110 | 'pullrequest_show', |
|
1320 | 'pullrequest_show', | |
1111 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1321 | repo_name=pull_request.target_repo.scm_instance().name, | |
1112 | pull_request_id=pull_request.pull_request_id)) |
|
1322 | pull_request_id=pull_request.pull_request_id)) | |
1113 |
|
1323 | |||
1114 | response.mustcontain('id="delete_pullrequest"') |
|
1324 | response.mustcontain('id="delete_pullrequest"') | |
1115 | response.mustcontain('Confirm to delete this pull request') |
|
1325 | response.mustcontain('Confirm to delete this pull request') | |
1116 |
|
1326 | |||
1117 | def test_pull_request_delete_button_permissions_forbidden( |
|
1327 | def test_pull_request_delete_button_permissions_forbidden( | |
1118 | self, autologin_regular_user, user_regular, user_admin, pr_util): |
|
1328 | self, autologin_regular_user, user_regular, user_admin, pr_util): | |
1119 | pull_request = pr_util.create_pull_request( |
|
1329 | pull_request = pr_util.create_pull_request( | |
1120 | author=user_admin.username, enable_notifications=False) |
|
1330 | author=user_admin.username, enable_notifications=False) | |
1121 |
|
1331 | |||
1122 | response = self.app.get(route_path( |
|
1332 | response = self.app.get(route_path( | |
1123 | 'pullrequest_show', |
|
1333 | 'pullrequest_show', | |
1124 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1334 | repo_name=pull_request.target_repo.scm_instance().name, | |
1125 | pull_request_id=pull_request.pull_request_id)) |
|
1335 | pull_request_id=pull_request.pull_request_id)) | |
1126 | response.mustcontain(no=['id="delete_pullrequest"']) |
|
1336 | response.mustcontain(no=['id="delete_pullrequest"']) | |
1127 | response.mustcontain(no=['Confirm to delete this pull request']) |
|
1337 | response.mustcontain(no=['Confirm to delete this pull request']) | |
1128 |
|
1338 | |||
1129 | def test_pull_request_delete_button_permissions_can_update_cannot_delete( |
|
1339 | def test_pull_request_delete_button_permissions_can_update_cannot_delete( | |
1130 | self, autologin_regular_user, user_regular, user_admin, pr_util, |
|
1340 | self, autologin_regular_user, user_regular, user_admin, pr_util, | |
1131 | user_util): |
|
1341 | user_util): | |
1132 |
|
1342 | |||
1133 | pull_request = pr_util.create_pull_request( |
|
1343 | pull_request = pr_util.create_pull_request( | |
1134 | author=user_admin.username, enable_notifications=False) |
|
1344 | author=user_admin.username, enable_notifications=False) | |
1135 |
|
1345 | |||
1136 | user_util.grant_user_permission_to_repo( |
|
1346 | user_util.grant_user_permission_to_repo( | |
1137 | pull_request.target_repo, user_regular, |
|
1347 | pull_request.target_repo, user_regular, | |
1138 | 'repository.write') |
|
1348 | 'repository.write') | |
1139 |
|
1349 | |||
1140 | response = self.app.get(route_path( |
|
1350 | response = self.app.get(route_path( | |
1141 | 'pullrequest_show', |
|
1351 | 'pullrequest_show', | |
1142 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1352 | repo_name=pull_request.target_repo.scm_instance().name, | |
1143 | pull_request_id=pull_request.pull_request_id)) |
|
1353 | pull_request_id=pull_request.pull_request_id)) | |
1144 |
|
1354 | |||
1145 | response.mustcontain('id="open_edit_pullrequest"') |
|
1355 | response.mustcontain('id="open_edit_pullrequest"') | |
1146 | response.mustcontain('id="delete_pullrequest"') |
|
1356 | response.mustcontain('id="delete_pullrequest"') | |
1147 | response.mustcontain(no=['Confirm to delete this pull request']) |
|
1357 | response.mustcontain(no=['Confirm to delete this pull request']) | |
1148 |
|
1358 | |||
1149 | def test_delete_comment_returns_404_if_comment_does_not_exist( |
|
1359 | def test_delete_comment_returns_404_if_comment_does_not_exist( | |
1150 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): |
|
1360 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): | |
1151 |
|
1361 | |||
1152 | pull_request = pr_util.create_pull_request( |
|
1362 | pull_request = pr_util.create_pull_request( | |
1153 | author=user_admin.username, enable_notifications=False) |
|
1363 | author=user_admin.username, enable_notifications=False) | |
1154 |
|
1364 | |||
1155 | self.app.post( |
|
1365 | self.app.post( | |
1156 | route_path( |
|
1366 | route_path( | |
1157 | 'pullrequest_comment_delete', |
|
1367 | 'pullrequest_comment_delete', | |
1158 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1368 | repo_name=pull_request.target_repo.scm_instance().name, | |
1159 | pull_request_id=pull_request.pull_request_id, |
|
1369 | pull_request_id=pull_request.pull_request_id, | |
1160 | comment_id=1024404), |
|
1370 | comment_id=1024404), | |
1161 | extra_environ=xhr_header, |
|
1371 | extra_environ=xhr_header, | |
1162 | params={'csrf_token': csrf_token}, |
|
1372 | params={'csrf_token': csrf_token}, | |
1163 | status=404 |
|
1373 | status=404 | |
1164 | ) |
|
1374 | ) | |
1165 |
|
1375 | |||
1166 | def test_delete_comment( |
|
1376 | def test_delete_comment( | |
1167 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): |
|
1377 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): | |
1168 |
|
1378 | |||
1169 | pull_request = pr_util.create_pull_request( |
|
1379 | pull_request = pr_util.create_pull_request( | |
1170 | author=user_admin.username, enable_notifications=False) |
|
1380 | author=user_admin.username, enable_notifications=False) | |
1171 | comment = pr_util.create_comment() |
|
1381 | comment = pr_util.create_comment() | |
1172 | comment_id = comment.comment_id |
|
1382 | comment_id = comment.comment_id | |
1173 |
|
1383 | |||
1174 | response = self.app.post( |
|
1384 | response = self.app.post( | |
1175 | route_path( |
|
1385 | route_path( | |
1176 | 'pullrequest_comment_delete', |
|
1386 | 'pullrequest_comment_delete', | |
1177 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1387 | repo_name=pull_request.target_repo.scm_instance().name, | |
1178 | pull_request_id=pull_request.pull_request_id, |
|
1388 | pull_request_id=pull_request.pull_request_id, | |
1179 | comment_id=comment_id), |
|
1389 | comment_id=comment_id), | |
1180 | extra_environ=xhr_header, |
|
1390 | extra_environ=xhr_header, | |
1181 | params={'csrf_token': csrf_token}, |
|
1391 | params={'csrf_token': csrf_token}, | |
1182 | status=200 |
|
1392 | status=200 | |
1183 | ) |
|
1393 | ) | |
1184 | assert response.body == 'true' |
|
1394 | assert response.body == 'true' | |
1185 |
|
1395 | |||
1186 | @pytest.mark.parametrize('url_type', [ |
|
1396 | @pytest.mark.parametrize('url_type', [ | |
1187 | 'pullrequest_new', |
|
1397 | 'pullrequest_new', | |
1188 | 'pullrequest_create', |
|
1398 | 'pullrequest_create', | |
1189 | 'pullrequest_update', |
|
1399 | 'pullrequest_update', | |
1190 | 'pullrequest_merge', |
|
1400 | 'pullrequest_merge', | |
1191 | ]) |
|
1401 | ]) | |
1192 | def test_pull_request_is_forbidden_on_archived_repo( |
|
1402 | def test_pull_request_is_forbidden_on_archived_repo( | |
1193 | self, autologin_user, backend, xhr_header, user_util, url_type): |
|
1403 | self, autologin_user, backend, xhr_header, user_util, url_type): | |
1194 |
|
1404 | |||
1195 | # create a temporary repo |
|
1405 | # create a temporary repo | |
1196 | source = user_util.create_repo(repo_type=backend.alias) |
|
1406 | source = user_util.create_repo(repo_type=backend.alias) | |
1197 | repo_name = source.repo_name |
|
1407 | repo_name = source.repo_name | |
1198 | repo = Repository.get_by_repo_name(repo_name) |
|
1408 | repo = Repository.get_by_repo_name(repo_name) | |
1199 | repo.archived = True |
|
1409 | repo.archived = True | |
1200 | Session().commit() |
|
1410 | Session().commit() | |
1201 |
|
1411 | |||
1202 | response = self.app.get( |
|
1412 | response = self.app.get( | |
1203 | route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302) |
|
1413 | route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302) | |
1204 |
|
1414 | |||
1205 | msg = 'Action not supported for archived repository.' |
|
1415 | msg = 'Action not supported for archived repository.' | |
1206 | assert_session_flash(response, msg) |
|
1416 | assert_session_flash(response, msg) | |
1207 |
|
1417 | |||
1208 |
|
1418 | |||
1209 | def assert_pull_request_status(pull_request, expected_status): |
|
1419 | def assert_pull_request_status(pull_request, expected_status): | |
1210 | status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request) |
|
1420 | status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request) | |
1211 | assert status == expected_status |
|
1421 | assert status == expected_status | |
1212 |
|
1422 | |||
1213 |
|
1423 | |||
1214 | @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create']) |
|
1424 | @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create']) | |
1215 | @pytest.mark.usefixtures("autologin_user") |
|
1425 | @pytest.mark.usefixtures("autologin_user") | |
1216 | def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route): |
|
1426 | def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route): | |
1217 | app.get(route_path(route, repo_name=backend_svn.repo_name), status=404) |
|
1427 | app.get(route_path(route, repo_name=backend_svn.repo_name), status=404) |
@@ -1,610 +1,700 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import logging |
|
22 | import logging | |
23 | import collections |
|
23 | import collections | |
24 |
|
24 | |||
25 | from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden |
|
25 | from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden | |
26 | from pyramid.view import view_config |
|
26 | from pyramid.view import view_config | |
27 | from pyramid.renderers import render |
|
27 | from pyramid.renderers import render | |
28 | from pyramid.response import Response |
|
28 | from pyramid.response import Response | |
29 |
|
29 | |||
30 | from rhodecode.apps._base import RepoAppView |
|
30 | from rhodecode.apps._base import RepoAppView | |
31 | from rhodecode.apps.file_store import utils as store_utils |
|
31 | from rhodecode.apps.file_store import utils as store_utils | |
32 | from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException |
|
32 | from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException | |
33 |
|
33 | |||
34 | from rhodecode.lib import diffs, codeblocks |
|
34 | from rhodecode.lib import diffs, codeblocks | |
35 | from rhodecode.lib.auth import ( |
|
35 | from rhodecode.lib.auth import ( | |
36 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired) |
|
36 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired) | |
37 |
|
37 | |||
38 | from rhodecode.lib.compat import OrderedDict |
|
38 | from rhodecode.lib.compat import OrderedDict | |
39 | from rhodecode.lib.diffs import ( |
|
39 | from rhodecode.lib.diffs import ( | |
40 | cache_diff, load_cached_diff, diff_cache_exist, get_diff_context, |
|
40 | cache_diff, load_cached_diff, diff_cache_exist, get_diff_context, | |
41 | get_diff_whitespace_flag) |
|
41 | get_diff_whitespace_flag) | |
42 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError |
|
42 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError | |
43 | import rhodecode.lib.helpers as h |
|
43 | import rhodecode.lib.helpers as h | |
44 | from rhodecode.lib.utils2 import safe_unicode, str2bool |
|
44 | from rhodecode.lib.utils2 import safe_unicode, str2bool | |
45 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
45 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
46 | from rhodecode.lib.vcs.exceptions import ( |
|
46 | from rhodecode.lib.vcs.exceptions import ( | |
47 | RepositoryError, CommitDoesNotExistError) |
|
47 | RepositoryError, CommitDoesNotExistError) | |
48 | from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore |
|
48 | from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \ | |
|
49 | ChangesetCommentHistory | |||
49 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
50 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
50 | from rhodecode.model.comment import CommentsModel |
|
51 | from rhodecode.model.comment import CommentsModel | |
51 | from rhodecode.model.meta import Session |
|
52 | from rhodecode.model.meta import Session | |
52 | from rhodecode.model.settings import VcsSettingsModel |
|
53 | from rhodecode.model.settings import VcsSettingsModel | |
53 |
|
54 | |||
54 | log = logging.getLogger(__name__) |
|
55 | log = logging.getLogger(__name__) | |
55 |
|
56 | |||
56 |
|
57 | |||
57 | def _update_with_GET(params, request): |
|
58 | def _update_with_GET(params, request): | |
58 | for k in ['diff1', 'diff2', 'diff']: |
|
59 | for k in ['diff1', 'diff2', 'diff']: | |
59 | params[k] += request.GET.getall(k) |
|
60 | params[k] += request.GET.getall(k) | |
60 |
|
61 | |||
61 |
|
62 | |||
62 | class RepoCommitsView(RepoAppView): |
|
63 | class RepoCommitsView(RepoAppView): | |
63 | def load_default_context(self): |
|
64 | def load_default_context(self): | |
64 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
65 | c = self._get_local_tmpl_context(include_app_defaults=True) | |
65 | c.rhodecode_repo = self.rhodecode_vcs_repo |
|
66 | c.rhodecode_repo = self.rhodecode_vcs_repo | |
66 |
|
67 | |||
67 | return c |
|
68 | return c | |
68 |
|
69 | |||
69 | def _is_diff_cache_enabled(self, target_repo): |
|
70 | def _is_diff_cache_enabled(self, target_repo): | |
70 | caching_enabled = self._get_general_setting( |
|
71 | caching_enabled = self._get_general_setting( | |
71 | target_repo, 'rhodecode_diff_cache') |
|
72 | target_repo, 'rhodecode_diff_cache') | |
72 | log.debug('Diff caching enabled: %s', caching_enabled) |
|
73 | log.debug('Diff caching enabled: %s', caching_enabled) | |
73 | return caching_enabled |
|
74 | return caching_enabled | |
74 |
|
75 | |||
75 | def _commit(self, commit_id_range, method): |
|
76 | def _commit(self, commit_id_range, method): | |
76 | _ = self.request.translate |
|
77 | _ = self.request.translate | |
77 | c = self.load_default_context() |
|
78 | c = self.load_default_context() | |
78 | c.fulldiff = self.request.GET.get('fulldiff') |
|
79 | c.fulldiff = self.request.GET.get('fulldiff') | |
79 |
|
80 | |||
80 | # fetch global flags of ignore ws or context lines |
|
81 | # fetch global flags of ignore ws or context lines | |
81 | diff_context = get_diff_context(self.request) |
|
82 | diff_context = get_diff_context(self.request) | |
82 | hide_whitespace_changes = get_diff_whitespace_flag(self.request) |
|
83 | hide_whitespace_changes = get_diff_whitespace_flag(self.request) | |
83 |
|
84 | |||
84 | # diff_limit will cut off the whole diff if the limit is applied |
|
85 | # diff_limit will cut off the whole diff if the limit is applied | |
85 | # otherwise it will just hide the big files from the front-end |
|
86 | # otherwise it will just hide the big files from the front-end | |
86 | diff_limit = c.visual.cut_off_limit_diff |
|
87 | diff_limit = c.visual.cut_off_limit_diff | |
87 | file_limit = c.visual.cut_off_limit_file |
|
88 | file_limit = c.visual.cut_off_limit_file | |
88 |
|
89 | |||
89 | # get ranges of commit ids if preset |
|
90 | # get ranges of commit ids if preset | |
90 | commit_range = commit_id_range.split('...')[:2] |
|
91 | commit_range = commit_id_range.split('...')[:2] | |
91 |
|
92 | |||
92 | try: |
|
93 | try: | |
93 | pre_load = ['affected_files', 'author', 'branch', 'date', |
|
94 | pre_load = ['affected_files', 'author', 'branch', 'date', | |
94 | 'message', 'parents'] |
|
95 | 'message', 'parents'] | |
95 | if self.rhodecode_vcs_repo.alias == 'hg': |
|
96 | if self.rhodecode_vcs_repo.alias == 'hg': | |
96 | pre_load += ['hidden', 'obsolete', 'phase'] |
|
97 | pre_load += ['hidden', 'obsolete', 'phase'] | |
97 |
|
98 | |||
98 | if len(commit_range) == 2: |
|
99 | if len(commit_range) == 2: | |
99 | commits = self.rhodecode_vcs_repo.get_commits( |
|
100 | commits = self.rhodecode_vcs_repo.get_commits( | |
100 | start_id=commit_range[0], end_id=commit_range[1], |
|
101 | start_id=commit_range[0], end_id=commit_range[1], | |
101 | pre_load=pre_load, translate_tags=False) |
|
102 | pre_load=pre_load, translate_tags=False) | |
102 | commits = list(commits) |
|
103 | commits = list(commits) | |
103 | else: |
|
104 | else: | |
104 | commits = [self.rhodecode_vcs_repo.get_commit( |
|
105 | commits = [self.rhodecode_vcs_repo.get_commit( | |
105 | commit_id=commit_id_range, pre_load=pre_load)] |
|
106 | commit_id=commit_id_range, pre_load=pre_load)] | |
106 |
|
107 | |||
107 | c.commit_ranges = commits |
|
108 | c.commit_ranges = commits | |
108 | if not c.commit_ranges: |
|
109 | if not c.commit_ranges: | |
109 | raise RepositoryError('The commit range returned an empty result') |
|
110 | raise RepositoryError('The commit range returned an empty result') | |
110 | except CommitDoesNotExistError as e: |
|
111 | except CommitDoesNotExistError as e: | |
111 | msg = _('No such commit exists. Org exception: `{}`').format(e) |
|
112 | msg = _('No such commit exists. Org exception: `{}`').format(e) | |
112 | h.flash(msg, category='error') |
|
113 | h.flash(msg, category='error') | |
113 | raise HTTPNotFound() |
|
114 | raise HTTPNotFound() | |
114 | except Exception: |
|
115 | except Exception: | |
115 | log.exception("General failure") |
|
116 | log.exception("General failure") | |
116 | raise HTTPNotFound() |
|
117 | raise HTTPNotFound() | |
117 |
|
118 | |||
118 | c.changes = OrderedDict() |
|
119 | c.changes = OrderedDict() | |
119 | c.lines_added = 0 |
|
120 | c.lines_added = 0 | |
120 | c.lines_deleted = 0 |
|
121 | c.lines_deleted = 0 | |
121 |
|
122 | |||
122 | # auto collapse if we have more than limit |
|
123 | # auto collapse if we have more than limit | |
123 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
124 | collapse_limit = diffs.DiffProcessor._collapse_commits_over | |
124 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
125 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit | |
125 |
|
126 | |||
126 | c.commit_statuses = ChangesetStatus.STATUSES |
|
127 | c.commit_statuses = ChangesetStatus.STATUSES | |
127 | c.inline_comments = [] |
|
128 | c.inline_comments = [] | |
128 | c.files = [] |
|
129 | c.files = [] | |
129 |
|
130 | |||
130 | c.statuses = [] |
|
131 | c.statuses = [] | |
131 | c.comments = [] |
|
132 | c.comments = [] | |
132 | c.unresolved_comments = [] |
|
133 | c.unresolved_comments = [] | |
133 | c.resolved_comments = [] |
|
134 | c.resolved_comments = [] | |
134 | if len(c.commit_ranges) == 1: |
|
135 | if len(c.commit_ranges) == 1: | |
135 | commit = c.commit_ranges[0] |
|
136 | commit = c.commit_ranges[0] | |
136 | c.comments = CommentsModel().get_comments( |
|
137 | c.comments = CommentsModel().get_comments( | |
137 | self.db_repo.repo_id, |
|
138 | self.db_repo.repo_id, | |
138 | revision=commit.raw_id) |
|
139 | revision=commit.raw_id) | |
139 | c.statuses.append(ChangesetStatusModel().get_status( |
|
140 | c.statuses.append(ChangesetStatusModel().get_status( | |
140 | self.db_repo.repo_id, commit.raw_id)) |
|
141 | self.db_repo.repo_id, commit.raw_id)) | |
141 | # comments from PR |
|
142 | # comments from PR | |
142 | statuses = ChangesetStatusModel().get_statuses( |
|
143 | statuses = ChangesetStatusModel().get_statuses( | |
143 | self.db_repo.repo_id, commit.raw_id, |
|
144 | self.db_repo.repo_id, commit.raw_id, | |
144 | with_revisions=True) |
|
145 | with_revisions=True) | |
145 | prs = set(st.pull_request for st in statuses |
|
146 | prs = set(st.pull_request for st in statuses | |
146 | if st.pull_request is not None) |
|
147 | if st.pull_request is not None) | |
147 | # from associated statuses, check the pull requests, and |
|
148 | # from associated statuses, check the pull requests, and | |
148 | # show comments from them |
|
149 | # show comments from them | |
149 | for pr in prs: |
|
150 | for pr in prs: | |
150 | c.comments.extend(pr.comments) |
|
151 | c.comments.extend(pr.comments) | |
151 |
|
152 | |||
152 | c.unresolved_comments = CommentsModel()\ |
|
153 | c.unresolved_comments = CommentsModel()\ | |
153 | .get_commit_unresolved_todos(commit.raw_id) |
|
154 | .get_commit_unresolved_todos(commit.raw_id) | |
154 | c.resolved_comments = CommentsModel()\ |
|
155 | c.resolved_comments = CommentsModel()\ | |
155 | .get_commit_resolved_todos(commit.raw_id) |
|
156 | .get_commit_resolved_todos(commit.raw_id) | |
156 |
|
157 | |||
157 | diff = None |
|
158 | diff = None | |
158 | # Iterate over ranges (default commit view is always one commit) |
|
159 | # Iterate over ranges (default commit view is always one commit) | |
159 | for commit in c.commit_ranges: |
|
160 | for commit in c.commit_ranges: | |
160 | c.changes[commit.raw_id] = [] |
|
161 | c.changes[commit.raw_id] = [] | |
161 |
|
162 | |||
162 | commit2 = commit |
|
163 | commit2 = commit | |
163 | commit1 = commit.first_parent |
|
164 | commit1 = commit.first_parent | |
164 |
|
165 | |||
165 | if method == 'show': |
|
166 | if method == 'show': | |
166 | inline_comments = CommentsModel().get_inline_comments( |
|
167 | inline_comments = CommentsModel().get_inline_comments( | |
167 | self.db_repo.repo_id, revision=commit.raw_id) |
|
168 | self.db_repo.repo_id, revision=commit.raw_id) | |
168 | c.inline_cnt = CommentsModel().get_inline_comments_count( |
|
169 | c.inline_cnt = CommentsModel().get_inline_comments_count( | |
169 | inline_comments) |
|
170 | inline_comments) | |
170 | c.inline_comments = inline_comments |
|
171 | c.inline_comments = inline_comments | |
171 |
|
172 | |||
172 | cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path( |
|
173 | cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path( | |
173 | self.db_repo) |
|
174 | self.db_repo) | |
174 | cache_file_path = diff_cache_exist( |
|
175 | cache_file_path = diff_cache_exist( | |
175 | cache_path, 'diff', commit.raw_id, |
|
176 | cache_path, 'diff', commit.raw_id, | |
176 | hide_whitespace_changes, diff_context, c.fulldiff) |
|
177 | hide_whitespace_changes, diff_context, c.fulldiff) | |
177 |
|
178 | |||
178 | caching_enabled = self._is_diff_cache_enabled(self.db_repo) |
|
179 | caching_enabled = self._is_diff_cache_enabled(self.db_repo) | |
179 | force_recache = str2bool(self.request.GET.get('force_recache')) |
|
180 | force_recache = str2bool(self.request.GET.get('force_recache')) | |
180 |
|
181 | |||
181 | cached_diff = None |
|
182 | cached_diff = None | |
182 | if caching_enabled: |
|
183 | if caching_enabled: | |
183 | cached_diff = load_cached_diff(cache_file_path) |
|
184 | cached_diff = load_cached_diff(cache_file_path) | |
184 |
|
185 | |||
185 | has_proper_diff_cache = cached_diff and cached_diff.get('diff') |
|
186 | has_proper_diff_cache = cached_diff and cached_diff.get('diff') | |
186 | if not force_recache and has_proper_diff_cache: |
|
187 | if not force_recache and has_proper_diff_cache: | |
187 | diffset = cached_diff['diff'] |
|
188 | diffset = cached_diff['diff'] | |
188 | else: |
|
189 | else: | |
189 | vcs_diff = self.rhodecode_vcs_repo.get_diff( |
|
190 | vcs_diff = self.rhodecode_vcs_repo.get_diff( | |
190 | commit1, commit2, |
|
191 | commit1, commit2, | |
191 | ignore_whitespace=hide_whitespace_changes, |
|
192 | ignore_whitespace=hide_whitespace_changes, | |
192 | context=diff_context) |
|
193 | context=diff_context) | |
193 |
|
194 | |||
194 | diff_processor = diffs.DiffProcessor( |
|
195 | diff_processor = diffs.DiffProcessor( | |
195 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
196 | vcs_diff, format='newdiff', diff_limit=diff_limit, | |
196 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
197 | file_limit=file_limit, show_full_diff=c.fulldiff) | |
197 |
|
198 | |||
198 | _parsed = diff_processor.prepare() |
|
199 | _parsed = diff_processor.prepare() | |
199 |
|
200 | |||
200 | diffset = codeblocks.DiffSet( |
|
201 | diffset = codeblocks.DiffSet( | |
201 | repo_name=self.db_repo_name, |
|
202 | repo_name=self.db_repo_name, | |
202 | source_node_getter=codeblocks.diffset_node_getter(commit1), |
|
203 | source_node_getter=codeblocks.diffset_node_getter(commit1), | |
203 | target_node_getter=codeblocks.diffset_node_getter(commit2)) |
|
204 | target_node_getter=codeblocks.diffset_node_getter(commit2)) | |
204 |
|
205 | |||
205 | diffset = self.path_filter.render_patchset_filtered( |
|
206 | diffset = self.path_filter.render_patchset_filtered( | |
206 | diffset, _parsed, commit1.raw_id, commit2.raw_id) |
|
207 | diffset, _parsed, commit1.raw_id, commit2.raw_id) | |
207 |
|
208 | |||
208 | # save cached diff |
|
209 | # save cached diff | |
209 | if caching_enabled: |
|
210 | if caching_enabled: | |
210 | cache_diff(cache_file_path, diffset, None) |
|
211 | cache_diff(cache_file_path, diffset, None) | |
211 |
|
212 | |||
212 | c.limited_diff = diffset.limited_diff |
|
213 | c.limited_diff = diffset.limited_diff | |
213 | c.changes[commit.raw_id] = diffset |
|
214 | c.changes[commit.raw_id] = diffset | |
214 | else: |
|
215 | else: | |
215 | # TODO(marcink): no cache usage here... |
|
216 | # TODO(marcink): no cache usage here... | |
216 | _diff = self.rhodecode_vcs_repo.get_diff( |
|
217 | _diff = self.rhodecode_vcs_repo.get_diff( | |
217 | commit1, commit2, |
|
218 | commit1, commit2, | |
218 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
219 | ignore_whitespace=hide_whitespace_changes, context=diff_context) | |
219 | diff_processor = diffs.DiffProcessor( |
|
220 | diff_processor = diffs.DiffProcessor( | |
220 | _diff, format='newdiff', diff_limit=diff_limit, |
|
221 | _diff, format='newdiff', diff_limit=diff_limit, | |
221 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
222 | file_limit=file_limit, show_full_diff=c.fulldiff) | |
222 | # downloads/raw we only need RAW diff nothing else |
|
223 | # downloads/raw we only need RAW diff nothing else | |
223 | diff = self.path_filter.get_raw_patch(diff_processor) |
|
224 | diff = self.path_filter.get_raw_patch(diff_processor) | |
224 | c.changes[commit.raw_id] = [None, None, None, None, diff, None, None] |
|
225 | c.changes[commit.raw_id] = [None, None, None, None, diff, None, None] | |
225 |
|
226 | |||
226 | # sort comments by how they were generated |
|
227 | # sort comments by how they were generated | |
227 | c.comments = sorted(c.comments, key=lambda x: x.comment_id) |
|
228 | c.comments = sorted(c.comments, key=lambda x: x.comment_id) | |
228 |
|
229 | |||
229 | if len(c.commit_ranges) == 1: |
|
230 | if len(c.commit_ranges) == 1: | |
230 | c.commit = c.commit_ranges[0] |
|
231 | c.commit = c.commit_ranges[0] | |
231 | c.parent_tmpl = ''.join( |
|
232 | c.parent_tmpl = ''.join( | |
232 | '# Parent %s\n' % x.raw_id for x in c.commit.parents) |
|
233 | '# Parent %s\n' % x.raw_id for x in c.commit.parents) | |
233 |
|
234 | |||
234 | if method == 'download': |
|
235 | if method == 'download': | |
235 | response = Response(diff) |
|
236 | response = Response(diff) | |
236 | response.content_type = 'text/plain' |
|
237 | response.content_type = 'text/plain' | |
237 | response.content_disposition = ( |
|
238 | response.content_disposition = ( | |
238 | 'attachment; filename=%s.diff' % commit_id_range[:12]) |
|
239 | 'attachment; filename=%s.diff' % commit_id_range[:12]) | |
239 | return response |
|
240 | return response | |
240 | elif method == 'patch': |
|
241 | elif method == 'patch': | |
241 | c.diff = safe_unicode(diff) |
|
242 | c.diff = safe_unicode(diff) | |
242 | patch = render( |
|
243 | patch = render( | |
243 | 'rhodecode:templates/changeset/patch_changeset.mako', |
|
244 | 'rhodecode:templates/changeset/patch_changeset.mako', | |
244 | self._get_template_context(c), self.request) |
|
245 | self._get_template_context(c), self.request) | |
245 | response = Response(patch) |
|
246 | response = Response(patch) | |
246 | response.content_type = 'text/plain' |
|
247 | response.content_type = 'text/plain' | |
247 | return response |
|
248 | return response | |
248 | elif method == 'raw': |
|
249 | elif method == 'raw': | |
249 | response = Response(diff) |
|
250 | response = Response(diff) | |
250 | response.content_type = 'text/plain' |
|
251 | response.content_type = 'text/plain' | |
251 | return response |
|
252 | return response | |
252 | elif method == 'show': |
|
253 | elif method == 'show': | |
253 | if len(c.commit_ranges) == 1: |
|
254 | if len(c.commit_ranges) == 1: | |
254 | html = render( |
|
255 | html = render( | |
255 | 'rhodecode:templates/changeset/changeset.mako', |
|
256 | 'rhodecode:templates/changeset/changeset.mako', | |
256 | self._get_template_context(c), self.request) |
|
257 | self._get_template_context(c), self.request) | |
257 | return Response(html) |
|
258 | return Response(html) | |
258 | else: |
|
259 | else: | |
259 | c.ancestor = None |
|
260 | c.ancestor = None | |
260 | c.target_repo = self.db_repo |
|
261 | c.target_repo = self.db_repo | |
261 | html = render( |
|
262 | html = render( | |
262 | 'rhodecode:templates/changeset/changeset_range.mako', |
|
263 | 'rhodecode:templates/changeset/changeset_range.mako', | |
263 | self._get_template_context(c), self.request) |
|
264 | self._get_template_context(c), self.request) | |
264 | return Response(html) |
|
265 | return Response(html) | |
265 |
|
266 | |||
266 | raise HTTPBadRequest() |
|
267 | raise HTTPBadRequest() | |
267 |
|
268 | |||
268 | @LoginRequired() |
|
269 | @LoginRequired() | |
269 | @HasRepoPermissionAnyDecorator( |
|
270 | @HasRepoPermissionAnyDecorator( | |
270 | 'repository.read', 'repository.write', 'repository.admin') |
|
271 | 'repository.read', 'repository.write', 'repository.admin') | |
271 | @view_config( |
|
272 | @view_config( | |
272 | route_name='repo_commit', request_method='GET', |
|
273 | route_name='repo_commit', request_method='GET', | |
273 | renderer=None) |
|
274 | renderer=None) | |
274 | def repo_commit_show(self): |
|
275 | def repo_commit_show(self): | |
275 | commit_id = self.request.matchdict['commit_id'] |
|
276 | commit_id = self.request.matchdict['commit_id'] | |
276 | return self._commit(commit_id, method='show') |
|
277 | return self._commit(commit_id, method='show') | |
277 |
|
278 | |||
278 | @LoginRequired() |
|
279 | @LoginRequired() | |
279 | @HasRepoPermissionAnyDecorator( |
|
280 | @HasRepoPermissionAnyDecorator( | |
280 | 'repository.read', 'repository.write', 'repository.admin') |
|
281 | 'repository.read', 'repository.write', 'repository.admin') | |
281 | @view_config( |
|
282 | @view_config( | |
282 | route_name='repo_commit_raw', request_method='GET', |
|
283 | route_name='repo_commit_raw', request_method='GET', | |
283 | renderer=None) |
|
284 | renderer=None) | |
284 | @view_config( |
|
285 | @view_config( | |
285 | route_name='repo_commit_raw_deprecated', request_method='GET', |
|
286 | route_name='repo_commit_raw_deprecated', request_method='GET', | |
286 | renderer=None) |
|
287 | renderer=None) | |
287 | def repo_commit_raw(self): |
|
288 | def repo_commit_raw(self): | |
288 | commit_id = self.request.matchdict['commit_id'] |
|
289 | commit_id = self.request.matchdict['commit_id'] | |
289 | return self._commit(commit_id, method='raw') |
|
290 | return self._commit(commit_id, method='raw') | |
290 |
|
291 | |||
291 | @LoginRequired() |
|
292 | @LoginRequired() | |
292 | @HasRepoPermissionAnyDecorator( |
|
293 | @HasRepoPermissionAnyDecorator( | |
293 | 'repository.read', 'repository.write', 'repository.admin') |
|
294 | 'repository.read', 'repository.write', 'repository.admin') | |
294 | @view_config( |
|
295 | @view_config( | |
295 | route_name='repo_commit_patch', request_method='GET', |
|
296 | route_name='repo_commit_patch', request_method='GET', | |
296 | renderer=None) |
|
297 | renderer=None) | |
297 | def repo_commit_patch(self): |
|
298 | def repo_commit_patch(self): | |
298 | commit_id = self.request.matchdict['commit_id'] |
|
299 | commit_id = self.request.matchdict['commit_id'] | |
299 | return self._commit(commit_id, method='patch') |
|
300 | return self._commit(commit_id, method='patch') | |
300 |
|
301 | |||
301 | @LoginRequired() |
|
302 | @LoginRequired() | |
302 | @HasRepoPermissionAnyDecorator( |
|
303 | @HasRepoPermissionAnyDecorator( | |
303 | 'repository.read', 'repository.write', 'repository.admin') |
|
304 | 'repository.read', 'repository.write', 'repository.admin') | |
304 | @view_config( |
|
305 | @view_config( | |
305 | route_name='repo_commit_download', request_method='GET', |
|
306 | route_name='repo_commit_download', request_method='GET', | |
306 | renderer=None) |
|
307 | renderer=None) | |
307 | def repo_commit_download(self): |
|
308 | def repo_commit_download(self): | |
308 | commit_id = self.request.matchdict['commit_id'] |
|
309 | commit_id = self.request.matchdict['commit_id'] | |
309 | return self._commit(commit_id, method='download') |
|
310 | return self._commit(commit_id, method='download') | |
310 |
|
311 | |||
311 | @LoginRequired() |
|
312 | @LoginRequired() | |
312 | @NotAnonymous() |
|
313 | @NotAnonymous() | |
313 | @HasRepoPermissionAnyDecorator( |
|
314 | @HasRepoPermissionAnyDecorator( | |
314 | 'repository.read', 'repository.write', 'repository.admin') |
|
315 | 'repository.read', 'repository.write', 'repository.admin') | |
315 | @CSRFRequired() |
|
316 | @CSRFRequired() | |
316 | @view_config( |
|
317 | @view_config( | |
317 | route_name='repo_commit_comment_create', request_method='POST', |
|
318 | route_name='repo_commit_comment_create', request_method='POST', | |
318 | renderer='json_ext') |
|
319 | renderer='json_ext') | |
319 | def repo_commit_comment_create(self): |
|
320 | def repo_commit_comment_create(self): | |
320 | _ = self.request.translate |
|
321 | _ = self.request.translate | |
321 | commit_id = self.request.matchdict['commit_id'] |
|
322 | commit_id = self.request.matchdict['commit_id'] | |
322 |
|
323 | |||
323 | c = self.load_default_context() |
|
324 | c = self.load_default_context() | |
324 | status = self.request.POST.get('changeset_status', None) |
|
325 | status = self.request.POST.get('changeset_status', None) | |
325 | text = self.request.POST.get('text') |
|
326 | text = self.request.POST.get('text') | |
326 | comment_type = self.request.POST.get('comment_type') |
|
327 | comment_type = self.request.POST.get('comment_type') | |
327 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) |
|
328 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) | |
328 |
|
329 | |||
329 | if status: |
|
330 | if status: | |
330 | text = text or (_('Status change %(transition_icon)s %(status)s') |
|
331 | text = text or (_('Status change %(transition_icon)s %(status)s') | |
331 | % {'transition_icon': '>', |
|
332 | % {'transition_icon': '>', | |
332 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
333 | 'status': ChangesetStatus.get_status_lbl(status)}) | |
333 |
|
334 | |||
334 | multi_commit_ids = [] |
|
335 | multi_commit_ids = [] | |
335 | for _commit_id in self.request.POST.get('commit_ids', '').split(','): |
|
336 | for _commit_id in self.request.POST.get('commit_ids', '').split(','): | |
336 | if _commit_id not in ['', None, EmptyCommit.raw_id]: |
|
337 | if _commit_id not in ['', None, EmptyCommit.raw_id]: | |
337 | if _commit_id not in multi_commit_ids: |
|
338 | if _commit_id not in multi_commit_ids: | |
338 | multi_commit_ids.append(_commit_id) |
|
339 | multi_commit_ids.append(_commit_id) | |
339 |
|
340 | |||
340 | commit_ids = multi_commit_ids or [commit_id] |
|
341 | commit_ids = multi_commit_ids or [commit_id] | |
341 |
|
342 | |||
342 | comment = None |
|
343 | comment = None | |
343 | for current_id in filter(None, commit_ids): |
|
344 | for current_id in filter(None, commit_ids): | |
344 | comment = CommentsModel().create( |
|
345 | comment = CommentsModel().create( | |
345 | text=text, |
|
346 | text=text, | |
346 | repo=self.db_repo.repo_id, |
|
347 | repo=self.db_repo.repo_id, | |
347 | user=self._rhodecode_db_user.user_id, |
|
348 | user=self._rhodecode_db_user.user_id, | |
348 | commit_id=current_id, |
|
349 | commit_id=current_id, | |
349 | f_path=self.request.POST.get('f_path'), |
|
350 | f_path=self.request.POST.get('f_path'), | |
350 | line_no=self.request.POST.get('line'), |
|
351 | line_no=self.request.POST.get('line'), | |
351 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
352 | status_change=(ChangesetStatus.get_status_lbl(status) | |
352 | if status else None), |
|
353 | if status else None), | |
353 | status_change_type=status, |
|
354 | status_change_type=status, | |
354 | comment_type=comment_type, |
|
355 | comment_type=comment_type, | |
355 | resolves_comment_id=resolves_comment_id, |
|
356 | resolves_comment_id=resolves_comment_id, | |
356 | auth_user=self._rhodecode_user |
|
357 | auth_user=self._rhodecode_user | |
357 | ) |
|
358 | ) | |
358 |
|
359 | |||
359 | # get status if set ! |
|
360 | # get status if set ! | |
360 | if status: |
|
361 | if status: | |
361 | # if latest status was from pull request and it's closed |
|
362 | # if latest status was from pull request and it's closed | |
362 | # disallow changing status ! |
|
363 | # disallow changing status ! | |
363 | # dont_allow_on_closed_pull_request = True ! |
|
364 | # dont_allow_on_closed_pull_request = True ! | |
364 |
|
365 | |||
365 | try: |
|
366 | try: | |
366 | ChangesetStatusModel().set_status( |
|
367 | ChangesetStatusModel().set_status( | |
367 | self.db_repo.repo_id, |
|
368 | self.db_repo.repo_id, | |
368 | status, |
|
369 | status, | |
369 | self._rhodecode_db_user.user_id, |
|
370 | self._rhodecode_db_user.user_id, | |
370 | comment, |
|
371 | comment, | |
371 | revision=current_id, |
|
372 | revision=current_id, | |
372 | dont_allow_on_closed_pull_request=True |
|
373 | dont_allow_on_closed_pull_request=True | |
373 | ) |
|
374 | ) | |
374 | except StatusChangeOnClosedPullRequestError: |
|
375 | except StatusChangeOnClosedPullRequestError: | |
375 | msg = _('Changing the status of a commit associated with ' |
|
376 | msg = _('Changing the status of a commit associated with ' | |
376 | 'a closed pull request is not allowed') |
|
377 | 'a closed pull request is not allowed') | |
377 | log.exception(msg) |
|
378 | log.exception(msg) | |
378 | h.flash(msg, category='warning') |
|
379 | h.flash(msg, category='warning') | |
379 | raise HTTPFound(h.route_path( |
|
380 | raise HTTPFound(h.route_path( | |
380 | 'repo_commit', repo_name=self.db_repo_name, |
|
381 | 'repo_commit', repo_name=self.db_repo_name, | |
381 | commit_id=current_id)) |
|
382 | commit_id=current_id)) | |
382 |
|
383 | |||
383 | commit = self.db_repo.get_commit(current_id) |
|
384 | commit = self.db_repo.get_commit(current_id) | |
384 | CommentsModel().trigger_commit_comment_hook( |
|
385 | CommentsModel().trigger_commit_comment_hook( | |
385 | self.db_repo, self._rhodecode_user, 'create', |
|
386 | self.db_repo, self._rhodecode_user, 'create', | |
386 | data={'comment': comment, 'commit': commit}) |
|
387 | data={'comment': comment, 'commit': commit}) | |
387 |
|
388 | |||
388 | # finalize, commit and redirect |
|
389 | # finalize, commit and redirect | |
389 | Session().commit() |
|
390 | Session().commit() | |
390 |
|
391 | |||
391 | data = { |
|
392 | data = { | |
392 | 'target_id': h.safeid(h.safe_unicode( |
|
393 | 'target_id': h.safeid(h.safe_unicode( | |
393 | self.request.POST.get('f_path'))), |
|
394 | self.request.POST.get('f_path'))), | |
394 | } |
|
395 | } | |
395 | if comment: |
|
396 | if comment: | |
396 | c.co = comment |
|
397 | c.co = comment | |
397 | rendered_comment = render( |
|
398 | rendered_comment = render( | |
398 | 'rhodecode:templates/changeset/changeset_comment_block.mako', |
|
399 | 'rhodecode:templates/changeset/changeset_comment_block.mako', | |
399 | self._get_template_context(c), self.request) |
|
400 | self._get_template_context(c), self.request) | |
400 |
|
401 | |||
401 | data.update(comment.get_dict()) |
|
402 | data.update(comment.get_dict()) | |
402 | data.update({'rendered_text': rendered_comment}) |
|
403 | data.update({'rendered_text': rendered_comment}) | |
403 |
|
404 | |||
404 | return data |
|
405 | return data | |
405 |
|
406 | |||
406 | @LoginRequired() |
|
407 | @LoginRequired() | |
407 | @NotAnonymous() |
|
408 | @NotAnonymous() | |
408 | @HasRepoPermissionAnyDecorator( |
|
409 | @HasRepoPermissionAnyDecorator( | |
409 | 'repository.read', 'repository.write', 'repository.admin') |
|
410 | 'repository.read', 'repository.write', 'repository.admin') | |
410 | @CSRFRequired() |
|
411 | @CSRFRequired() | |
411 | @view_config( |
|
412 | @view_config( | |
412 | route_name='repo_commit_comment_preview', request_method='POST', |
|
413 | route_name='repo_commit_comment_preview', request_method='POST', | |
413 | renderer='string', xhr=True) |
|
414 | renderer='string', xhr=True) | |
414 | def repo_commit_comment_preview(self): |
|
415 | def repo_commit_comment_preview(self): | |
415 | # Technically a CSRF token is not needed as no state changes with this |
|
416 | # Technically a CSRF token is not needed as no state changes with this | |
416 | # call. However, as this is a POST is better to have it, so automated |
|
417 | # call. However, as this is a POST is better to have it, so automated | |
417 | # tools don't flag it as potential CSRF. |
|
418 | # tools don't flag it as potential CSRF. | |
418 | # Post is required because the payload could be bigger than the maximum |
|
419 | # Post is required because the payload could be bigger than the maximum | |
419 | # allowed by GET. |
|
420 | # allowed by GET. | |
420 |
|
421 | |||
421 | text = self.request.POST.get('text') |
|
422 | text = self.request.POST.get('text') | |
422 | renderer = self.request.POST.get('renderer') or 'rst' |
|
423 | renderer = self.request.POST.get('renderer') or 'rst' | |
423 | if text: |
|
424 | if text: | |
424 | return h.render(text, renderer=renderer, mentions=True, |
|
425 | return h.render(text, renderer=renderer, mentions=True, | |
425 | repo_name=self.db_repo_name) |
|
426 | repo_name=self.db_repo_name) | |
426 | return '' |
|
427 | return '' | |
427 |
|
428 | |||
428 | @LoginRequired() |
|
429 | @LoginRequired() | |
429 | @NotAnonymous() |
|
430 | @NotAnonymous() | |
430 | @HasRepoPermissionAnyDecorator( |
|
431 | @HasRepoPermissionAnyDecorator( | |
431 | 'repository.read', 'repository.write', 'repository.admin') |
|
432 | 'repository.read', 'repository.write', 'repository.admin') | |
432 | @CSRFRequired() |
|
433 | @CSRFRequired() | |
433 | @view_config( |
|
434 | @view_config( | |
|
435 | route_name='repo_commit_comment_history_view', request_method='POST', | |||
|
436 | renderer='string', xhr=True) | |||
|
437 | def repo_commit_comment_history_view(self): | |||
|
438 | commit_id = self.request.matchdict['commit_id'] | |||
|
439 | comment_history_id = self.request.matchdict['comment_history_id'] | |||
|
440 | comment_history = ChangesetCommentHistory.get_or_404(comment_history_id) | |||
|
441 | c = self.load_default_context() | |||
|
442 | c.comment_history = comment_history | |||
|
443 | ||||
|
444 | rendered_comment = render( | |||
|
445 | 'rhodecode:templates/changeset/comment_history.mako', | |||
|
446 | self._get_template_context(c) | |||
|
447 | , self.request) | |||
|
448 | return rendered_comment | |||
|
449 | ||||
|
450 | @LoginRequired() | |||
|
451 | @NotAnonymous() | |||
|
452 | @HasRepoPermissionAnyDecorator( | |||
|
453 | 'repository.read', 'repository.write', 'repository.admin') | |||
|
454 | @CSRFRequired() | |||
|
455 | @view_config( | |||
434 | route_name='repo_commit_comment_attachment_upload', request_method='POST', |
|
456 | route_name='repo_commit_comment_attachment_upload', request_method='POST', | |
435 | renderer='json_ext', xhr=True) |
|
457 | renderer='json_ext', xhr=True) | |
436 | def repo_commit_comment_attachment_upload(self): |
|
458 | def repo_commit_comment_attachment_upload(self): | |
437 | c = self.load_default_context() |
|
459 | c = self.load_default_context() | |
438 | upload_key = 'attachment' |
|
460 | upload_key = 'attachment' | |
439 |
|
461 | |||
440 | file_obj = self.request.POST.get(upload_key) |
|
462 | file_obj = self.request.POST.get(upload_key) | |
441 |
|
463 | |||
442 | if file_obj is None: |
|
464 | if file_obj is None: | |
443 | self.request.response.status = 400 |
|
465 | self.request.response.status = 400 | |
444 | return {'store_fid': None, |
|
466 | return {'store_fid': None, | |
445 | 'access_path': None, |
|
467 | 'access_path': None, | |
446 | 'error': '{} data field is missing'.format(upload_key)} |
|
468 | 'error': '{} data field is missing'.format(upload_key)} | |
447 |
|
469 | |||
448 | if not hasattr(file_obj, 'filename'): |
|
470 | if not hasattr(file_obj, 'filename'): | |
449 | self.request.response.status = 400 |
|
471 | self.request.response.status = 400 | |
450 | return {'store_fid': None, |
|
472 | return {'store_fid': None, | |
451 | 'access_path': None, |
|
473 | 'access_path': None, | |
452 | 'error': 'filename cannot be read from the data field'} |
|
474 | 'error': 'filename cannot be read from the data field'} | |
453 |
|
475 | |||
454 | filename = file_obj.filename |
|
476 | filename = file_obj.filename | |
455 | file_display_name = filename |
|
477 | file_display_name = filename | |
456 |
|
478 | |||
457 | metadata = { |
|
479 | metadata = { | |
458 | 'user_uploaded': {'username': self._rhodecode_user.username, |
|
480 | 'user_uploaded': {'username': self._rhodecode_user.username, | |
459 | 'user_id': self._rhodecode_user.user_id, |
|
481 | 'user_id': self._rhodecode_user.user_id, | |
460 | 'ip': self._rhodecode_user.ip_addr}} |
|
482 | 'ip': self._rhodecode_user.ip_addr}} | |
461 |
|
483 | |||
462 | # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size |
|
484 | # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size | |
463 | allowed_extensions = [ |
|
485 | allowed_extensions = [ | |
464 | 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf', |
|
486 | 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf', | |
465 | '.pptx', '.txt', '.xlsx', '.zip'] |
|
487 | '.pptx', '.txt', '.xlsx', '.zip'] | |
466 | max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js |
|
488 | max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js | |
467 |
|
489 | |||
468 | try: |
|
490 | try: | |
469 | storage = store_utils.get_file_storage(self.request.registry.settings) |
|
491 | storage = store_utils.get_file_storage(self.request.registry.settings) | |
470 | store_uid, metadata = storage.save_file( |
|
492 | store_uid, metadata = storage.save_file( | |
471 | file_obj.file, filename, extra_metadata=metadata, |
|
493 | file_obj.file, filename, extra_metadata=metadata, | |
472 | extensions=allowed_extensions, max_filesize=max_file_size) |
|
494 | extensions=allowed_extensions, max_filesize=max_file_size) | |
473 | except FileNotAllowedException: |
|
495 | except FileNotAllowedException: | |
474 | self.request.response.status = 400 |
|
496 | self.request.response.status = 400 | |
475 | permitted_extensions = ', '.join(allowed_extensions) |
|
497 | permitted_extensions = ', '.join(allowed_extensions) | |
476 | error_msg = 'File `{}` is not allowed. ' \ |
|
498 | error_msg = 'File `{}` is not allowed. ' \ | |
477 | 'Only following extensions are permitted: {}'.format( |
|
499 | 'Only following extensions are permitted: {}'.format( | |
478 | filename, permitted_extensions) |
|
500 | filename, permitted_extensions) | |
479 | return {'store_fid': None, |
|
501 | return {'store_fid': None, | |
480 | 'access_path': None, |
|
502 | 'access_path': None, | |
481 | 'error': error_msg} |
|
503 | 'error': error_msg} | |
482 | except FileOverSizeException: |
|
504 | except FileOverSizeException: | |
483 | self.request.response.status = 400 |
|
505 | self.request.response.status = 400 | |
484 | limit_mb = h.format_byte_size_binary(max_file_size) |
|
506 | limit_mb = h.format_byte_size_binary(max_file_size) | |
485 | return {'store_fid': None, |
|
507 | return {'store_fid': None, | |
486 | 'access_path': None, |
|
508 | 'access_path': None, | |
487 | 'error': 'File {} is exceeding allowed limit of {}.'.format( |
|
509 | 'error': 'File {} is exceeding allowed limit of {}.'.format( | |
488 | filename, limit_mb)} |
|
510 | filename, limit_mb)} | |
489 |
|
511 | |||
490 | try: |
|
512 | try: | |
491 | entry = FileStore.create( |
|
513 | entry = FileStore.create( | |
492 | file_uid=store_uid, filename=metadata["filename"], |
|
514 | file_uid=store_uid, filename=metadata["filename"], | |
493 | file_hash=metadata["sha256"], file_size=metadata["size"], |
|
515 | file_hash=metadata["sha256"], file_size=metadata["size"], | |
494 | file_display_name=file_display_name, |
|
516 | file_display_name=file_display_name, | |
495 | file_description=u'comment attachment `{}`'.format(safe_unicode(filename)), |
|
517 | file_description=u'comment attachment `{}`'.format(safe_unicode(filename)), | |
496 | hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id, |
|
518 | hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id, | |
497 | scope_repo_id=self.db_repo.repo_id |
|
519 | scope_repo_id=self.db_repo.repo_id | |
498 | ) |
|
520 | ) | |
499 | Session().add(entry) |
|
521 | Session().add(entry) | |
500 | Session().commit() |
|
522 | Session().commit() | |
501 | log.debug('Stored upload in DB as %s', entry) |
|
523 | log.debug('Stored upload in DB as %s', entry) | |
502 | except Exception: |
|
524 | except Exception: | |
503 | log.exception('Failed to store file %s', filename) |
|
525 | log.exception('Failed to store file %s', filename) | |
504 | self.request.response.status = 400 |
|
526 | self.request.response.status = 400 | |
505 | return {'store_fid': None, |
|
527 | return {'store_fid': None, | |
506 | 'access_path': None, |
|
528 | 'access_path': None, | |
507 | 'error': 'File {} failed to store in DB.'.format(filename)} |
|
529 | 'error': 'File {} failed to store in DB.'.format(filename)} | |
508 |
|
530 | |||
509 | Session().commit() |
|
531 | Session().commit() | |
510 |
|
532 | |||
511 | return { |
|
533 | return { | |
512 | 'store_fid': store_uid, |
|
534 | 'store_fid': store_uid, | |
513 | 'access_path': h.route_path( |
|
535 | 'access_path': h.route_path( | |
514 | 'download_file', fid=store_uid), |
|
536 | 'download_file', fid=store_uid), | |
515 | 'fqn_access_path': h.route_url( |
|
537 | 'fqn_access_path': h.route_url( | |
516 | 'download_file', fid=store_uid), |
|
538 | 'download_file', fid=store_uid), | |
517 | 'repo_access_path': h.route_path( |
|
539 | 'repo_access_path': h.route_path( | |
518 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), |
|
540 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), | |
519 | 'repo_fqn_access_path': h.route_url( |
|
541 | 'repo_fqn_access_path': h.route_url( | |
520 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), |
|
542 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), | |
521 | } |
|
543 | } | |
522 |
|
544 | |||
523 | @LoginRequired() |
|
545 | @LoginRequired() | |
524 | @NotAnonymous() |
|
546 | @NotAnonymous() | |
525 | @HasRepoPermissionAnyDecorator( |
|
547 | @HasRepoPermissionAnyDecorator( | |
526 | 'repository.read', 'repository.write', 'repository.admin') |
|
548 | 'repository.read', 'repository.write', 'repository.admin') | |
527 | @CSRFRequired() |
|
549 | @CSRFRequired() | |
528 | @view_config( |
|
550 | @view_config( | |
529 | route_name='repo_commit_comment_delete', request_method='POST', |
|
551 | route_name='repo_commit_comment_delete', request_method='POST', | |
530 | renderer='json_ext') |
|
552 | renderer='json_ext') | |
531 | def repo_commit_comment_delete(self): |
|
553 | def repo_commit_comment_delete(self): | |
532 | commit_id = self.request.matchdict['commit_id'] |
|
554 | commit_id = self.request.matchdict['commit_id'] | |
533 | comment_id = self.request.matchdict['comment_id'] |
|
555 | comment_id = self.request.matchdict['comment_id'] | |
534 |
|
556 | |||
535 | comment = ChangesetComment.get_or_404(comment_id) |
|
557 | comment = ChangesetComment.get_or_404(comment_id) | |
536 | if not comment: |
|
558 | if not comment: | |
537 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
559 | log.debug('Comment with id:%s not found, skipping', comment_id) | |
538 | # comment already deleted in another call probably |
|
560 | # comment already deleted in another call probably | |
539 | return True |
|
561 | return True | |
540 |
|
562 | |||
541 | if comment.immutable: |
|
563 | if comment.immutable: | |
542 | # don't allow deleting comments that are immutable |
|
564 | # don't allow deleting comments that are immutable | |
543 | raise HTTPForbidden() |
|
565 | raise HTTPForbidden() | |
544 |
|
566 | |||
545 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
567 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) | |
546 | super_admin = h.HasPermissionAny('hg.admin')() |
|
568 | super_admin = h.HasPermissionAny('hg.admin')() | |
547 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) |
|
569 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) | |
548 | is_repo_comment = comment.repo.repo_name == self.db_repo_name |
|
570 | is_repo_comment = comment.repo.repo_name == self.db_repo_name | |
549 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
571 | comment_repo_admin = is_repo_admin and is_repo_comment | |
550 |
|
572 | |||
551 | if super_admin or comment_owner or comment_repo_admin: |
|
573 | if super_admin or comment_owner or comment_repo_admin: | |
552 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) |
|
574 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) | |
553 | Session().commit() |
|
575 | Session().commit() | |
554 | return True |
|
576 | return True | |
555 | else: |
|
577 | else: | |
556 | log.warning('No permissions for user %s to delete comment_id: %s', |
|
578 | log.warning('No permissions for user %s to delete comment_id: %s', | |
557 | self._rhodecode_db_user, comment_id) |
|
579 | self._rhodecode_db_user, comment_id) | |
558 | raise HTTPNotFound() |
|
580 | raise HTTPNotFound() | |
559 |
|
581 | |||
560 | @LoginRequired() |
|
582 | @LoginRequired() | |
|
583 | @NotAnonymous() | |||
|
584 | @HasRepoPermissionAnyDecorator( | |||
|
585 | 'repository.read', 'repository.write', 'repository.admin') | |||
|
586 | @CSRFRequired() | |||
|
587 | @view_config( | |||
|
588 | route_name='repo_commit_comment_edit', request_method='POST', | |||
|
589 | renderer='json_ext') | |||
|
590 | def repo_commit_comment_edit(self): | |||
|
591 | commit_id = self.request.matchdict['commit_id'] | |||
|
592 | comment_id = self.request.matchdict['comment_id'] | |||
|
593 | ||||
|
594 | comment = ChangesetComment.get_or_404(comment_id) | |||
|
595 | ||||
|
596 | if comment.immutable: | |||
|
597 | # don't allow deleting comments that are immutable | |||
|
598 | raise HTTPForbidden() | |||
|
599 | ||||
|
600 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) | |||
|
601 | super_admin = h.HasPermissionAny('hg.admin')() | |||
|
602 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) | |||
|
603 | is_repo_comment = comment.repo.repo_name == self.db_repo_name | |||
|
604 | comment_repo_admin = is_repo_admin and is_repo_comment | |||
|
605 | ||||
|
606 | if super_admin or comment_owner or comment_repo_admin: | |||
|
607 | text = self.request.POST.get('text') | |||
|
608 | version = self.request.POST.get('version') | |||
|
609 | if text == comment.text: | |||
|
610 | log.warning( | |||
|
611 | 'Comment(repo): ' | |||
|
612 | 'Trying to create new version ' | |||
|
613 | 'of existing comment {}'.format( | |||
|
614 | comment_id, | |||
|
615 | ) | |||
|
616 | ) | |||
|
617 | raise HTTPNotFound() | |||
|
618 | if version.isdigit(): | |||
|
619 | version = int(version) | |||
|
620 | else: | |||
|
621 | log.warning( | |||
|
622 | 'Comment(repo): Wrong version type {} {} ' | |||
|
623 | 'for comment {}'.format( | |||
|
624 | version, | |||
|
625 | type(version), | |||
|
626 | comment_id, | |||
|
627 | ) | |||
|
628 | ) | |||
|
629 | raise HTTPNotFound() | |||
|
630 | ||||
|
631 | comment_history = CommentsModel().edit( | |||
|
632 | comment_id=comment_id, | |||
|
633 | text=text, | |||
|
634 | auth_user=self._rhodecode_user, | |||
|
635 | version=version, | |||
|
636 | ) | |||
|
637 | if not comment_history: | |||
|
638 | raise HTTPNotFound() | |||
|
639 | Session().commit() | |||
|
640 | return { | |||
|
641 | 'comment_history_id': comment_history.comment_history_id, | |||
|
642 | 'comment_id': comment.comment_id, | |||
|
643 | 'comment_version': comment_history.version, | |||
|
644 | } | |||
|
645 | else: | |||
|
646 | log.warning('No permissions for user %s to edit comment_id: %s', | |||
|
647 | self._rhodecode_db_user, comment_id) | |||
|
648 | raise HTTPNotFound() | |||
|
649 | ||||
|
650 | @LoginRequired() | |||
561 | @HasRepoPermissionAnyDecorator( |
|
651 | @HasRepoPermissionAnyDecorator( | |
562 | 'repository.read', 'repository.write', 'repository.admin') |
|
652 | 'repository.read', 'repository.write', 'repository.admin') | |
563 | @view_config( |
|
653 | @view_config( | |
564 | route_name='repo_commit_data', request_method='GET', |
|
654 | route_name='repo_commit_data', request_method='GET', | |
565 | renderer='json_ext', xhr=True) |
|
655 | renderer='json_ext', xhr=True) | |
566 | def repo_commit_data(self): |
|
656 | def repo_commit_data(self): | |
567 | commit_id = self.request.matchdict['commit_id'] |
|
657 | commit_id = self.request.matchdict['commit_id'] | |
568 | self.load_default_context() |
|
658 | self.load_default_context() | |
569 |
|
659 | |||
570 | try: |
|
660 | try: | |
571 | return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
661 | return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) | |
572 | except CommitDoesNotExistError as e: |
|
662 | except CommitDoesNotExistError as e: | |
573 | return EmptyCommit(message=str(e)) |
|
663 | return EmptyCommit(message=str(e)) | |
574 |
|
664 | |||
575 | @LoginRequired() |
|
665 | @LoginRequired() | |
576 | @HasRepoPermissionAnyDecorator( |
|
666 | @HasRepoPermissionAnyDecorator( | |
577 | 'repository.read', 'repository.write', 'repository.admin') |
|
667 | 'repository.read', 'repository.write', 'repository.admin') | |
578 | @view_config( |
|
668 | @view_config( | |
579 | route_name='repo_commit_children', request_method='GET', |
|
669 | route_name='repo_commit_children', request_method='GET', | |
580 | renderer='json_ext', xhr=True) |
|
670 | renderer='json_ext', xhr=True) | |
581 | def repo_commit_children(self): |
|
671 | def repo_commit_children(self): | |
582 | commit_id = self.request.matchdict['commit_id'] |
|
672 | commit_id = self.request.matchdict['commit_id'] | |
583 | self.load_default_context() |
|
673 | self.load_default_context() | |
584 |
|
674 | |||
585 | try: |
|
675 | try: | |
586 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
676 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) | |
587 | children = commit.children |
|
677 | children = commit.children | |
588 | except CommitDoesNotExistError: |
|
678 | except CommitDoesNotExistError: | |
589 | children = [] |
|
679 | children = [] | |
590 |
|
680 | |||
591 | result = {"results": children} |
|
681 | result = {"results": children} | |
592 | return result |
|
682 | return result | |
593 |
|
683 | |||
594 | @LoginRequired() |
|
684 | @LoginRequired() | |
595 | @HasRepoPermissionAnyDecorator( |
|
685 | @HasRepoPermissionAnyDecorator( | |
596 | 'repository.read', 'repository.write', 'repository.admin') |
|
686 | 'repository.read', 'repository.write', 'repository.admin') | |
597 | @view_config( |
|
687 | @view_config( | |
598 | route_name='repo_commit_parents', request_method='GET', |
|
688 | route_name='repo_commit_parents', request_method='GET', | |
599 | renderer='json_ext') |
|
689 | renderer='json_ext') | |
600 | def repo_commit_parents(self): |
|
690 | def repo_commit_parents(self): | |
601 | commit_id = self.request.matchdict['commit_id'] |
|
691 | commit_id = self.request.matchdict['commit_id'] | |
602 | self.load_default_context() |
|
692 | self.load_default_context() | |
603 |
|
693 | |||
604 | try: |
|
694 | try: | |
605 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
695 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) | |
606 | parents = commit.parents |
|
696 | parents = commit.parents | |
607 | except CommitDoesNotExistError: |
|
697 | except CommitDoesNotExistError: | |
608 | parents = [] |
|
698 | parents = [] | |
609 | result = {"results": parents} |
|
699 | result = {"results": parents} | |
610 | return result |
|
700 | return result |
@@ -1,1520 +1,1607 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 | import collections |
|
22 | import collections | |
23 |
|
23 | |||
24 | import formencode |
|
24 | import formencode | |
25 | import formencode.htmlfill |
|
25 | import formencode.htmlfill | |
26 | import peppercorn |
|
26 | import peppercorn | |
27 | from pyramid.httpexceptions import ( |
|
27 | from pyramid.httpexceptions import ( | |
28 | HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest) |
|
28 | HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest) | |
29 | from pyramid.view import view_config |
|
29 | from pyramid.view import view_config | |
30 | from pyramid.renderers import render |
|
30 | from pyramid.renderers import render | |
31 |
|
31 | |||
32 | from rhodecode.apps._base import RepoAppView, DataGridAppView |
|
32 | from rhodecode.apps._base import RepoAppView, DataGridAppView | |
33 |
|
33 | |||
34 | from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream |
|
34 | from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream | |
35 | from rhodecode.lib.base import vcs_operation_context |
|
35 | from rhodecode.lib.base import vcs_operation_context | |
36 | from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist |
|
36 | from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist | |
37 | from rhodecode.lib.ext_json import json |
|
37 | from rhodecode.lib.ext_json import json | |
38 | from rhodecode.lib.auth import ( |
|
38 | from rhodecode.lib.auth import ( | |
39 | LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator, |
|
39 | LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator, | |
40 | NotAnonymous, CSRFRequired) |
|
40 | NotAnonymous, CSRFRequired) | |
41 | from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode |
|
41 | from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode | |
42 | from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason |
|
42 | from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason | |
43 | from rhodecode.lib.vcs.exceptions import ( |
|
43 | from rhodecode.lib.vcs.exceptions import ( | |
44 | CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError) |
|
44 | CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError) | |
45 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
45 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
46 | from rhodecode.model.comment import CommentsModel |
|
46 | from rhodecode.model.comment import CommentsModel | |
47 | from rhodecode.model.db import ( |
|
47 | from rhodecode.model.db import ( | |
48 | func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository) |
|
48 | func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository) | |
49 | from rhodecode.model.forms import PullRequestForm |
|
49 | from rhodecode.model.forms import PullRequestForm | |
50 | from rhodecode.model.meta import Session |
|
50 | from rhodecode.model.meta import Session | |
51 | from rhodecode.model.pull_request import PullRequestModel, MergeCheck |
|
51 | from rhodecode.model.pull_request import PullRequestModel, MergeCheck | |
52 | from rhodecode.model.scm import ScmModel |
|
52 | from rhodecode.model.scm import ScmModel | |
53 |
|
53 | |||
54 | log = logging.getLogger(__name__) |
|
54 | log = logging.getLogger(__name__) | |
55 |
|
55 | |||
56 |
|
56 | |||
57 | class RepoPullRequestsView(RepoAppView, DataGridAppView): |
|
57 | class RepoPullRequestsView(RepoAppView, DataGridAppView): | |
58 |
|
58 | |||
59 | def load_default_context(self): |
|
59 | def load_default_context(self): | |
60 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
60 | c = self._get_local_tmpl_context(include_app_defaults=True) | |
61 | c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED |
|
61 | c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED | |
62 | c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED |
|
62 | c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED | |
63 | # backward compat., we use for OLD PRs a plain renderer |
|
63 | # backward compat., we use for OLD PRs a plain renderer | |
64 | c.renderer = 'plain' |
|
64 | c.renderer = 'plain' | |
65 | return c |
|
65 | return c | |
66 |
|
66 | |||
67 | def _get_pull_requests_list( |
|
67 | def _get_pull_requests_list( | |
68 | self, repo_name, source, filter_type, opened_by, statuses): |
|
68 | self, repo_name, source, filter_type, opened_by, statuses): | |
69 |
|
69 | |||
70 | draw, start, limit = self._extract_chunk(self.request) |
|
70 | draw, start, limit = self._extract_chunk(self.request) | |
71 | search_q, order_by, order_dir = self._extract_ordering(self.request) |
|
71 | search_q, order_by, order_dir = self._extract_ordering(self.request) | |
72 | _render = self.request.get_partial_renderer( |
|
72 | _render = self.request.get_partial_renderer( | |
73 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
73 | 'rhodecode:templates/data_table/_dt_elements.mako') | |
74 |
|
74 | |||
75 | # pagination |
|
75 | # pagination | |
76 |
|
76 | |||
77 | if filter_type == 'awaiting_review': |
|
77 | if filter_type == 'awaiting_review': | |
78 | pull_requests = PullRequestModel().get_awaiting_review( |
|
78 | pull_requests = PullRequestModel().get_awaiting_review( | |
79 | repo_name, search_q=search_q, source=source, opened_by=opened_by, |
|
79 | repo_name, search_q=search_q, source=source, opened_by=opened_by, | |
80 | statuses=statuses, offset=start, length=limit, |
|
80 | statuses=statuses, offset=start, length=limit, | |
81 | order_by=order_by, order_dir=order_dir) |
|
81 | order_by=order_by, order_dir=order_dir) | |
82 | pull_requests_total_count = PullRequestModel().count_awaiting_review( |
|
82 | pull_requests_total_count = PullRequestModel().count_awaiting_review( | |
83 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
83 | repo_name, search_q=search_q, source=source, statuses=statuses, | |
84 | opened_by=opened_by) |
|
84 | opened_by=opened_by) | |
85 | elif filter_type == 'awaiting_my_review': |
|
85 | elif filter_type == 'awaiting_my_review': | |
86 | pull_requests = PullRequestModel().get_awaiting_my_review( |
|
86 | pull_requests = PullRequestModel().get_awaiting_my_review( | |
87 | repo_name, search_q=search_q, source=source, opened_by=opened_by, |
|
87 | repo_name, search_q=search_q, source=source, opened_by=opened_by, | |
88 | user_id=self._rhodecode_user.user_id, statuses=statuses, |
|
88 | user_id=self._rhodecode_user.user_id, statuses=statuses, | |
89 | offset=start, length=limit, order_by=order_by, |
|
89 | offset=start, length=limit, order_by=order_by, | |
90 | order_dir=order_dir) |
|
90 | order_dir=order_dir) | |
91 | pull_requests_total_count = PullRequestModel().count_awaiting_my_review( |
|
91 | pull_requests_total_count = PullRequestModel().count_awaiting_my_review( | |
92 | repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id, |
|
92 | repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id, | |
93 | statuses=statuses, opened_by=opened_by) |
|
93 | statuses=statuses, opened_by=opened_by) | |
94 | else: |
|
94 | else: | |
95 | pull_requests = PullRequestModel().get_all( |
|
95 | pull_requests = PullRequestModel().get_all( | |
96 | repo_name, search_q=search_q, source=source, opened_by=opened_by, |
|
96 | repo_name, search_q=search_q, source=source, opened_by=opened_by, | |
97 | statuses=statuses, offset=start, length=limit, |
|
97 | statuses=statuses, offset=start, length=limit, | |
98 | order_by=order_by, order_dir=order_dir) |
|
98 | order_by=order_by, order_dir=order_dir) | |
99 | pull_requests_total_count = PullRequestModel().count_all( |
|
99 | pull_requests_total_count = PullRequestModel().count_all( | |
100 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
100 | repo_name, search_q=search_q, source=source, statuses=statuses, | |
101 | opened_by=opened_by) |
|
101 | opened_by=opened_by) | |
102 |
|
102 | |||
103 | data = [] |
|
103 | data = [] | |
104 | comments_model = CommentsModel() |
|
104 | comments_model = CommentsModel() | |
105 | for pr in pull_requests: |
|
105 | for pr in pull_requests: | |
106 | comments = comments_model.get_all_comments( |
|
106 | comments = comments_model.get_all_comments( | |
107 | self.db_repo.repo_id, pull_request=pr) |
|
107 | self.db_repo.repo_id, pull_request=pr) | |
108 |
|
108 | |||
109 | data.append({ |
|
109 | data.append({ | |
110 | 'name': _render('pullrequest_name', |
|
110 | 'name': _render('pullrequest_name', | |
111 | pr.pull_request_id, pr.pull_request_state, |
|
111 | pr.pull_request_id, pr.pull_request_state, | |
112 | pr.work_in_progress, pr.target_repo.repo_name), |
|
112 | pr.work_in_progress, pr.target_repo.repo_name), | |
113 | 'name_raw': pr.pull_request_id, |
|
113 | 'name_raw': pr.pull_request_id, | |
114 | 'status': _render('pullrequest_status', |
|
114 | 'status': _render('pullrequest_status', | |
115 | pr.calculated_review_status()), |
|
115 | pr.calculated_review_status()), | |
116 | 'title': _render('pullrequest_title', pr.title, pr.description), |
|
116 | 'title': _render('pullrequest_title', pr.title, pr.description), | |
117 | 'description': h.escape(pr.description), |
|
117 | 'description': h.escape(pr.description), | |
118 | 'updated_on': _render('pullrequest_updated_on', |
|
118 | 'updated_on': _render('pullrequest_updated_on', | |
119 | h.datetime_to_time(pr.updated_on)), |
|
119 | h.datetime_to_time(pr.updated_on)), | |
120 | 'updated_on_raw': h.datetime_to_time(pr.updated_on), |
|
120 | 'updated_on_raw': h.datetime_to_time(pr.updated_on), | |
121 | 'created_on': _render('pullrequest_updated_on', |
|
121 | 'created_on': _render('pullrequest_updated_on', | |
122 | h.datetime_to_time(pr.created_on)), |
|
122 | h.datetime_to_time(pr.created_on)), | |
123 | 'created_on_raw': h.datetime_to_time(pr.created_on), |
|
123 | 'created_on_raw': h.datetime_to_time(pr.created_on), | |
124 | 'state': pr.pull_request_state, |
|
124 | 'state': pr.pull_request_state, | |
125 | 'author': _render('pullrequest_author', |
|
125 | 'author': _render('pullrequest_author', | |
126 | pr.author.full_contact, ), |
|
126 | pr.author.full_contact, ), | |
127 | 'author_raw': pr.author.full_name, |
|
127 | 'author_raw': pr.author.full_name, | |
128 | 'comments': _render('pullrequest_comments', len(comments)), |
|
128 | 'comments': _render('pullrequest_comments', len(comments)), | |
129 | 'comments_raw': len(comments), |
|
129 | 'comments_raw': len(comments), | |
130 | 'closed': pr.is_closed(), |
|
130 | 'closed': pr.is_closed(), | |
131 | }) |
|
131 | }) | |
132 |
|
132 | |||
133 | data = ({ |
|
133 | data = ({ | |
134 | 'draw': draw, |
|
134 | 'draw': draw, | |
135 | 'data': data, |
|
135 | 'data': data, | |
136 | 'recordsTotal': pull_requests_total_count, |
|
136 | 'recordsTotal': pull_requests_total_count, | |
137 | 'recordsFiltered': pull_requests_total_count, |
|
137 | 'recordsFiltered': pull_requests_total_count, | |
138 | }) |
|
138 | }) | |
139 | return data |
|
139 | return data | |
140 |
|
140 | |||
141 | @LoginRequired() |
|
141 | @LoginRequired() | |
142 | @HasRepoPermissionAnyDecorator( |
|
142 | @HasRepoPermissionAnyDecorator( | |
143 | 'repository.read', 'repository.write', 'repository.admin') |
|
143 | 'repository.read', 'repository.write', 'repository.admin') | |
144 | @view_config( |
|
144 | @view_config( | |
145 | route_name='pullrequest_show_all', request_method='GET', |
|
145 | route_name='pullrequest_show_all', request_method='GET', | |
146 | renderer='rhodecode:templates/pullrequests/pullrequests.mako') |
|
146 | renderer='rhodecode:templates/pullrequests/pullrequests.mako') | |
147 | def pull_request_list(self): |
|
147 | def pull_request_list(self): | |
148 | c = self.load_default_context() |
|
148 | c = self.load_default_context() | |
149 |
|
149 | |||
150 | req_get = self.request.GET |
|
150 | req_get = self.request.GET | |
151 | c.source = str2bool(req_get.get('source')) |
|
151 | c.source = str2bool(req_get.get('source')) | |
152 | c.closed = str2bool(req_get.get('closed')) |
|
152 | c.closed = str2bool(req_get.get('closed')) | |
153 | c.my = str2bool(req_get.get('my')) |
|
153 | c.my = str2bool(req_get.get('my')) | |
154 | c.awaiting_review = str2bool(req_get.get('awaiting_review')) |
|
154 | c.awaiting_review = str2bool(req_get.get('awaiting_review')) | |
155 | c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) |
|
155 | c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) | |
156 |
|
156 | |||
157 | c.active = 'open' |
|
157 | c.active = 'open' | |
158 | if c.my: |
|
158 | if c.my: | |
159 | c.active = 'my' |
|
159 | c.active = 'my' | |
160 | if c.closed: |
|
160 | if c.closed: | |
161 | c.active = 'closed' |
|
161 | c.active = 'closed' | |
162 | if c.awaiting_review and not c.source: |
|
162 | if c.awaiting_review and not c.source: | |
163 | c.active = 'awaiting' |
|
163 | c.active = 'awaiting' | |
164 | if c.source and not c.awaiting_review: |
|
164 | if c.source and not c.awaiting_review: | |
165 | c.active = 'source' |
|
165 | c.active = 'source' | |
166 | if c.awaiting_my_review: |
|
166 | if c.awaiting_my_review: | |
167 | c.active = 'awaiting_my' |
|
167 | c.active = 'awaiting_my' | |
168 |
|
168 | |||
169 | return self._get_template_context(c) |
|
169 | return self._get_template_context(c) | |
170 |
|
170 | |||
171 | @LoginRequired() |
|
171 | @LoginRequired() | |
172 | @HasRepoPermissionAnyDecorator( |
|
172 | @HasRepoPermissionAnyDecorator( | |
173 | 'repository.read', 'repository.write', 'repository.admin') |
|
173 | 'repository.read', 'repository.write', 'repository.admin') | |
174 | @view_config( |
|
174 | @view_config( | |
175 | route_name='pullrequest_show_all_data', request_method='GET', |
|
175 | route_name='pullrequest_show_all_data', request_method='GET', | |
176 | renderer='json_ext', xhr=True) |
|
176 | renderer='json_ext', xhr=True) | |
177 | def pull_request_list_data(self): |
|
177 | def pull_request_list_data(self): | |
178 | self.load_default_context() |
|
178 | self.load_default_context() | |
179 |
|
179 | |||
180 | # additional filters |
|
180 | # additional filters | |
181 | req_get = self.request.GET |
|
181 | req_get = self.request.GET | |
182 | source = str2bool(req_get.get('source')) |
|
182 | source = str2bool(req_get.get('source')) | |
183 | closed = str2bool(req_get.get('closed')) |
|
183 | closed = str2bool(req_get.get('closed')) | |
184 | my = str2bool(req_get.get('my')) |
|
184 | my = str2bool(req_get.get('my')) | |
185 | awaiting_review = str2bool(req_get.get('awaiting_review')) |
|
185 | awaiting_review = str2bool(req_get.get('awaiting_review')) | |
186 | awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) |
|
186 | awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) | |
187 |
|
187 | |||
188 | filter_type = 'awaiting_review' if awaiting_review \ |
|
188 | filter_type = 'awaiting_review' if awaiting_review \ | |
189 | else 'awaiting_my_review' if awaiting_my_review \ |
|
189 | else 'awaiting_my_review' if awaiting_my_review \ | |
190 | else None |
|
190 | else None | |
191 |
|
191 | |||
192 | opened_by = None |
|
192 | opened_by = None | |
193 | if my: |
|
193 | if my: | |
194 | opened_by = [self._rhodecode_user.user_id] |
|
194 | opened_by = [self._rhodecode_user.user_id] | |
195 |
|
195 | |||
196 | statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN] |
|
196 | statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN] | |
197 | if closed: |
|
197 | if closed: | |
198 | statuses = [PullRequest.STATUS_CLOSED] |
|
198 | statuses = [PullRequest.STATUS_CLOSED] | |
199 |
|
199 | |||
200 | data = self._get_pull_requests_list( |
|
200 | data = self._get_pull_requests_list( | |
201 | repo_name=self.db_repo_name, source=source, |
|
201 | repo_name=self.db_repo_name, source=source, | |
202 | filter_type=filter_type, opened_by=opened_by, statuses=statuses) |
|
202 | filter_type=filter_type, opened_by=opened_by, statuses=statuses) | |
203 |
|
203 | |||
204 | return data |
|
204 | return data | |
205 |
|
205 | |||
206 | def _is_diff_cache_enabled(self, target_repo): |
|
206 | def _is_diff_cache_enabled(self, target_repo): | |
207 | caching_enabled = self._get_general_setting( |
|
207 | caching_enabled = self._get_general_setting( | |
208 | target_repo, 'rhodecode_diff_cache') |
|
208 | target_repo, 'rhodecode_diff_cache') | |
209 | log.debug('Diff caching enabled: %s', caching_enabled) |
|
209 | log.debug('Diff caching enabled: %s', caching_enabled) | |
210 | return caching_enabled |
|
210 | return caching_enabled | |
211 |
|
211 | |||
212 | def _get_diffset(self, source_repo_name, source_repo, |
|
212 | def _get_diffset(self, source_repo_name, source_repo, | |
213 | ancestor_commit, |
|
213 | ancestor_commit, | |
214 | source_ref_id, target_ref_id, |
|
214 | source_ref_id, target_ref_id, | |
215 | target_commit, source_commit, diff_limit, file_limit, |
|
215 | target_commit, source_commit, diff_limit, file_limit, | |
216 | fulldiff, hide_whitespace_changes, diff_context): |
|
216 | fulldiff, hide_whitespace_changes, diff_context): | |
217 |
|
217 | |||
218 | target_ref_id = ancestor_commit.raw_id |
|
218 | target_ref_id = ancestor_commit.raw_id | |
219 | vcs_diff = PullRequestModel().get_diff( |
|
219 | vcs_diff = PullRequestModel().get_diff( | |
220 | source_repo, source_ref_id, target_ref_id, |
|
220 | source_repo, source_ref_id, target_ref_id, | |
221 | hide_whitespace_changes, diff_context) |
|
221 | hide_whitespace_changes, diff_context) | |
222 |
|
222 | |||
223 | diff_processor = diffs.DiffProcessor( |
|
223 | diff_processor = diffs.DiffProcessor( | |
224 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
224 | vcs_diff, format='newdiff', diff_limit=diff_limit, | |
225 | file_limit=file_limit, show_full_diff=fulldiff) |
|
225 | file_limit=file_limit, show_full_diff=fulldiff) | |
226 |
|
226 | |||
227 | _parsed = diff_processor.prepare() |
|
227 | _parsed = diff_processor.prepare() | |
228 |
|
228 | |||
229 | diffset = codeblocks.DiffSet( |
|
229 | diffset = codeblocks.DiffSet( | |
230 | repo_name=self.db_repo_name, |
|
230 | repo_name=self.db_repo_name, | |
231 | source_repo_name=source_repo_name, |
|
231 | source_repo_name=source_repo_name, | |
232 | source_node_getter=codeblocks.diffset_node_getter(target_commit), |
|
232 | source_node_getter=codeblocks.diffset_node_getter(target_commit), | |
233 | target_node_getter=codeblocks.diffset_node_getter(source_commit), |
|
233 | target_node_getter=codeblocks.diffset_node_getter(source_commit), | |
234 | ) |
|
234 | ) | |
235 | diffset = self.path_filter.render_patchset_filtered( |
|
235 | diffset = self.path_filter.render_patchset_filtered( | |
236 | diffset, _parsed, target_commit.raw_id, source_commit.raw_id) |
|
236 | diffset, _parsed, target_commit.raw_id, source_commit.raw_id) | |
237 |
|
237 | |||
238 | return diffset |
|
238 | return diffset | |
239 |
|
239 | |||
240 | def _get_range_diffset(self, source_scm, source_repo, |
|
240 | def _get_range_diffset(self, source_scm, source_repo, | |
241 | commit1, commit2, diff_limit, file_limit, |
|
241 | commit1, commit2, diff_limit, file_limit, | |
242 | fulldiff, hide_whitespace_changes, diff_context): |
|
242 | fulldiff, hide_whitespace_changes, diff_context): | |
243 | vcs_diff = source_scm.get_diff( |
|
243 | vcs_diff = source_scm.get_diff( | |
244 | commit1, commit2, |
|
244 | commit1, commit2, | |
245 | ignore_whitespace=hide_whitespace_changes, |
|
245 | ignore_whitespace=hide_whitespace_changes, | |
246 | context=diff_context) |
|
246 | context=diff_context) | |
247 |
|
247 | |||
248 | diff_processor = diffs.DiffProcessor( |
|
248 | diff_processor = diffs.DiffProcessor( | |
249 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
249 | vcs_diff, format='newdiff', diff_limit=diff_limit, | |
250 | file_limit=file_limit, show_full_diff=fulldiff) |
|
250 | file_limit=file_limit, show_full_diff=fulldiff) | |
251 |
|
251 | |||
252 | _parsed = diff_processor.prepare() |
|
252 | _parsed = diff_processor.prepare() | |
253 |
|
253 | |||
254 | diffset = codeblocks.DiffSet( |
|
254 | diffset = codeblocks.DiffSet( | |
255 | repo_name=source_repo.repo_name, |
|
255 | repo_name=source_repo.repo_name, | |
256 | source_node_getter=codeblocks.diffset_node_getter(commit1), |
|
256 | source_node_getter=codeblocks.diffset_node_getter(commit1), | |
257 | target_node_getter=codeblocks.diffset_node_getter(commit2)) |
|
257 | target_node_getter=codeblocks.diffset_node_getter(commit2)) | |
258 |
|
258 | |||
259 | diffset = self.path_filter.render_patchset_filtered( |
|
259 | diffset = self.path_filter.render_patchset_filtered( | |
260 | diffset, _parsed, commit1.raw_id, commit2.raw_id) |
|
260 | diffset, _parsed, commit1.raw_id, commit2.raw_id) | |
261 |
|
261 | |||
262 | return diffset |
|
262 | return diffset | |
263 |
|
263 | |||
264 | @LoginRequired() |
|
264 | @LoginRequired() | |
265 | @HasRepoPermissionAnyDecorator( |
|
265 | @HasRepoPermissionAnyDecorator( | |
266 | 'repository.read', 'repository.write', 'repository.admin') |
|
266 | 'repository.read', 'repository.write', 'repository.admin') | |
267 | @view_config( |
|
267 | @view_config( | |
268 | route_name='pullrequest_show', request_method='GET', |
|
268 | route_name='pullrequest_show', request_method='GET', | |
269 | renderer='rhodecode:templates/pullrequests/pullrequest_show.mako') |
|
269 | renderer='rhodecode:templates/pullrequests/pullrequest_show.mako') | |
270 | def pull_request_show(self): |
|
270 | def pull_request_show(self): | |
271 | _ = self.request.translate |
|
271 | _ = self.request.translate | |
272 | c = self.load_default_context() |
|
272 | c = self.load_default_context() | |
273 |
|
273 | |||
274 | pull_request = PullRequest.get_or_404( |
|
274 | pull_request = PullRequest.get_or_404( | |
275 | self.request.matchdict['pull_request_id']) |
|
275 | self.request.matchdict['pull_request_id']) | |
276 | pull_request_id = pull_request.pull_request_id |
|
276 | pull_request_id = pull_request.pull_request_id | |
277 |
|
277 | |||
278 | c.state_progressing = pull_request.is_state_changing() |
|
278 | c.state_progressing = pull_request.is_state_changing() | |
279 |
|
279 | |||
280 | _new_state = { |
|
280 | _new_state = { | |
281 | 'created': PullRequest.STATE_CREATED, |
|
281 | 'created': PullRequest.STATE_CREATED, | |
282 | }.get(self.request.GET.get('force_state')) |
|
282 | }.get(self.request.GET.get('force_state')) | |
283 |
|
283 | |||
284 | if c.is_super_admin and _new_state: |
|
284 | if c.is_super_admin and _new_state: | |
285 | with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state): |
|
285 | with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state): | |
286 | h.flash( |
|
286 | h.flash( | |
287 | _('Pull Request state was force changed to `{}`').format(_new_state), |
|
287 | _('Pull Request state was force changed to `{}`').format(_new_state), | |
288 | category='success') |
|
288 | category='success') | |
289 | Session().commit() |
|
289 | Session().commit() | |
290 |
|
290 | |||
291 | raise HTTPFound(h.route_path( |
|
291 | raise HTTPFound(h.route_path( | |
292 | 'pullrequest_show', repo_name=self.db_repo_name, |
|
292 | 'pullrequest_show', repo_name=self.db_repo_name, | |
293 | pull_request_id=pull_request_id)) |
|
293 | pull_request_id=pull_request_id)) | |
294 |
|
294 | |||
295 | version = self.request.GET.get('version') |
|
295 | version = self.request.GET.get('version') | |
296 | from_version = self.request.GET.get('from_version') or version |
|
296 | from_version = self.request.GET.get('from_version') or version | |
297 | merge_checks = self.request.GET.get('merge_checks') |
|
297 | merge_checks = self.request.GET.get('merge_checks') | |
298 | c.fulldiff = str2bool(self.request.GET.get('fulldiff')) |
|
298 | c.fulldiff = str2bool(self.request.GET.get('fulldiff')) | |
299 |
|
299 | |||
300 | # fetch global flags of ignore ws or context lines |
|
300 | # fetch global flags of ignore ws or context lines | |
301 | diff_context = diffs.get_diff_context(self.request) |
|
301 | diff_context = diffs.get_diff_context(self.request) | |
302 | hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request) |
|
302 | hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request) | |
303 |
|
303 | |||
304 | force_refresh = str2bool(self.request.GET.get('force_refresh')) |
|
304 | force_refresh = str2bool(self.request.GET.get('force_refresh')) | |
305 |
|
305 | |||
306 | (pull_request_latest, |
|
306 | (pull_request_latest, | |
307 | pull_request_at_ver, |
|
307 | pull_request_at_ver, | |
308 | pull_request_display_obj, |
|
308 | pull_request_display_obj, | |
309 | at_version) = PullRequestModel().get_pr_version( |
|
309 | at_version) = PullRequestModel().get_pr_version( | |
310 | pull_request_id, version=version) |
|
310 | pull_request_id, version=version) | |
311 | pr_closed = pull_request_latest.is_closed() |
|
311 | pr_closed = pull_request_latest.is_closed() | |
312 |
|
312 | |||
313 | if pr_closed and (version or from_version): |
|
313 | if pr_closed and (version or from_version): | |
314 | # not allow to browse versions |
|
314 | # not allow to browse versions | |
315 | raise HTTPFound(h.route_path( |
|
315 | raise HTTPFound(h.route_path( | |
316 | 'pullrequest_show', repo_name=self.db_repo_name, |
|
316 | 'pullrequest_show', repo_name=self.db_repo_name, | |
317 | pull_request_id=pull_request_id)) |
|
317 | pull_request_id=pull_request_id)) | |
318 |
|
318 | |||
319 | versions = pull_request_display_obj.versions() |
|
319 | versions = pull_request_display_obj.versions() | |
320 | # used to store per-commit range diffs |
|
320 | # used to store per-commit range diffs | |
321 | c.changes = collections.OrderedDict() |
|
321 | c.changes = collections.OrderedDict() | |
322 | c.range_diff_on = self.request.GET.get('range-diff') == "1" |
|
322 | c.range_diff_on = self.request.GET.get('range-diff') == "1" | |
323 |
|
323 | |||
324 | c.at_version = at_version |
|
324 | c.at_version = at_version | |
325 | c.at_version_num = (at_version |
|
325 | c.at_version_num = (at_version | |
326 | if at_version and at_version != 'latest' |
|
326 | if at_version and at_version != 'latest' | |
327 | else None) |
|
327 | else None) | |
328 | c.at_version_pos = ChangesetComment.get_index_from_version( |
|
328 | c.at_version_pos = ChangesetComment.get_index_from_version( | |
329 | c.at_version_num, versions) |
|
329 | c.at_version_num, versions) | |
330 |
|
330 | |||
331 | (prev_pull_request_latest, |
|
331 | (prev_pull_request_latest, | |
332 | prev_pull_request_at_ver, |
|
332 | prev_pull_request_at_ver, | |
333 | prev_pull_request_display_obj, |
|
333 | prev_pull_request_display_obj, | |
334 | prev_at_version) = PullRequestModel().get_pr_version( |
|
334 | prev_at_version) = PullRequestModel().get_pr_version( | |
335 | pull_request_id, version=from_version) |
|
335 | pull_request_id, version=from_version) | |
336 |
|
336 | |||
337 | c.from_version = prev_at_version |
|
337 | c.from_version = prev_at_version | |
338 | c.from_version_num = (prev_at_version |
|
338 | c.from_version_num = (prev_at_version | |
339 | if prev_at_version and prev_at_version != 'latest' |
|
339 | if prev_at_version and prev_at_version != 'latest' | |
340 | else None) |
|
340 | else None) | |
341 | c.from_version_pos = ChangesetComment.get_index_from_version( |
|
341 | c.from_version_pos = ChangesetComment.get_index_from_version( | |
342 | c.from_version_num, versions) |
|
342 | c.from_version_num, versions) | |
343 |
|
343 | |||
344 | # define if we're in COMPARE mode or VIEW at version mode |
|
344 | # define if we're in COMPARE mode or VIEW at version mode | |
345 | compare = at_version != prev_at_version |
|
345 | compare = at_version != prev_at_version | |
346 |
|
346 | |||
347 | # pull_requests repo_name we opened it against |
|
347 | # pull_requests repo_name we opened it against | |
348 | # ie. target_repo must match |
|
348 | # ie. target_repo must match | |
349 | if self.db_repo_name != pull_request_at_ver.target_repo.repo_name: |
|
349 | if self.db_repo_name != pull_request_at_ver.target_repo.repo_name: | |
350 | raise HTTPNotFound() |
|
350 | raise HTTPNotFound() | |
351 |
|
351 | |||
352 | c.shadow_clone_url = PullRequestModel().get_shadow_clone_url( |
|
352 | c.shadow_clone_url = PullRequestModel().get_shadow_clone_url( | |
353 | pull_request_at_ver) |
|
353 | pull_request_at_ver) | |
354 |
|
354 | |||
355 | c.pull_request = pull_request_display_obj |
|
355 | c.pull_request = pull_request_display_obj | |
356 | c.renderer = pull_request_at_ver.description_renderer or c.renderer |
|
356 | c.renderer = pull_request_at_ver.description_renderer or c.renderer | |
357 | c.pull_request_latest = pull_request_latest |
|
357 | c.pull_request_latest = pull_request_latest | |
358 |
|
358 | |||
359 | if compare or (at_version and not at_version == 'latest'): |
|
359 | if compare or (at_version and not at_version == 'latest'): | |
360 | c.allowed_to_change_status = False |
|
360 | c.allowed_to_change_status = False | |
361 | c.allowed_to_update = False |
|
361 | c.allowed_to_update = False | |
362 | c.allowed_to_merge = False |
|
362 | c.allowed_to_merge = False | |
363 | c.allowed_to_delete = False |
|
363 | c.allowed_to_delete = False | |
364 | c.allowed_to_comment = False |
|
364 | c.allowed_to_comment = False | |
365 | c.allowed_to_close = False |
|
365 | c.allowed_to_close = False | |
366 | else: |
|
366 | else: | |
367 | can_change_status = PullRequestModel().check_user_change_status( |
|
367 | can_change_status = PullRequestModel().check_user_change_status( | |
368 | pull_request_at_ver, self._rhodecode_user) |
|
368 | pull_request_at_ver, self._rhodecode_user) | |
369 | c.allowed_to_change_status = can_change_status and not pr_closed |
|
369 | c.allowed_to_change_status = can_change_status and not pr_closed | |
370 |
|
370 | |||
371 | c.allowed_to_update = PullRequestModel().check_user_update( |
|
371 | c.allowed_to_update = PullRequestModel().check_user_update( | |
372 | pull_request_latest, self._rhodecode_user) and not pr_closed |
|
372 | pull_request_latest, self._rhodecode_user) and not pr_closed | |
373 | c.allowed_to_merge = PullRequestModel().check_user_merge( |
|
373 | c.allowed_to_merge = PullRequestModel().check_user_merge( | |
374 | pull_request_latest, self._rhodecode_user) and not pr_closed |
|
374 | pull_request_latest, self._rhodecode_user) and not pr_closed | |
375 | c.allowed_to_delete = PullRequestModel().check_user_delete( |
|
375 | c.allowed_to_delete = PullRequestModel().check_user_delete( | |
376 | pull_request_latest, self._rhodecode_user) and not pr_closed |
|
376 | pull_request_latest, self._rhodecode_user) and not pr_closed | |
377 | c.allowed_to_comment = not pr_closed |
|
377 | c.allowed_to_comment = not pr_closed | |
378 | c.allowed_to_close = c.allowed_to_merge and not pr_closed |
|
378 | c.allowed_to_close = c.allowed_to_merge and not pr_closed | |
379 |
|
379 | |||
380 | c.forbid_adding_reviewers = False |
|
380 | c.forbid_adding_reviewers = False | |
381 | c.forbid_author_to_review = False |
|
381 | c.forbid_author_to_review = False | |
382 | c.forbid_commit_author_to_review = False |
|
382 | c.forbid_commit_author_to_review = False | |
383 |
|
383 | |||
384 | if pull_request_latest.reviewer_data and \ |
|
384 | if pull_request_latest.reviewer_data and \ | |
385 | 'rules' in pull_request_latest.reviewer_data: |
|
385 | 'rules' in pull_request_latest.reviewer_data: | |
386 | rules = pull_request_latest.reviewer_data['rules'] or {} |
|
386 | rules = pull_request_latest.reviewer_data['rules'] or {} | |
387 | try: |
|
387 | try: | |
388 | c.forbid_adding_reviewers = rules.get( |
|
388 | c.forbid_adding_reviewers = rules.get( | |
389 | 'forbid_adding_reviewers') |
|
389 | 'forbid_adding_reviewers') | |
390 | c.forbid_author_to_review = rules.get( |
|
390 | c.forbid_author_to_review = rules.get( | |
391 | 'forbid_author_to_review') |
|
391 | 'forbid_author_to_review') | |
392 | c.forbid_commit_author_to_review = rules.get( |
|
392 | c.forbid_commit_author_to_review = rules.get( | |
393 | 'forbid_commit_author_to_review') |
|
393 | 'forbid_commit_author_to_review') | |
394 | except Exception: |
|
394 | except Exception: | |
395 | pass |
|
395 | pass | |
396 |
|
396 | |||
397 | # check merge capabilities |
|
397 | # check merge capabilities | |
398 | _merge_check = MergeCheck.validate( |
|
398 | _merge_check = MergeCheck.validate( | |
399 | pull_request_latest, auth_user=self._rhodecode_user, |
|
399 | pull_request_latest, auth_user=self._rhodecode_user, | |
400 | translator=self.request.translate, |
|
400 | translator=self.request.translate, | |
401 | force_shadow_repo_refresh=force_refresh) |
|
401 | force_shadow_repo_refresh=force_refresh) | |
402 |
|
402 | |||
403 | c.pr_merge_errors = _merge_check.error_details |
|
403 | c.pr_merge_errors = _merge_check.error_details | |
404 | c.pr_merge_possible = not _merge_check.failed |
|
404 | c.pr_merge_possible = not _merge_check.failed | |
405 | c.pr_merge_message = _merge_check.merge_msg |
|
405 | c.pr_merge_message = _merge_check.merge_msg | |
406 | c.pr_merge_source_commit = _merge_check.source_commit |
|
406 | c.pr_merge_source_commit = _merge_check.source_commit | |
407 | c.pr_merge_target_commit = _merge_check.target_commit |
|
407 | c.pr_merge_target_commit = _merge_check.target_commit | |
408 |
|
408 | |||
409 | c.pr_merge_info = MergeCheck.get_merge_conditions( |
|
409 | c.pr_merge_info = MergeCheck.get_merge_conditions( | |
410 | pull_request_latest, translator=self.request.translate) |
|
410 | pull_request_latest, translator=self.request.translate) | |
411 |
|
411 | |||
412 | c.pull_request_review_status = _merge_check.review_status |
|
412 | c.pull_request_review_status = _merge_check.review_status | |
413 | if merge_checks: |
|
413 | if merge_checks: | |
414 | self.request.override_renderer = \ |
|
414 | self.request.override_renderer = \ | |
415 | 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako' |
|
415 | 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako' | |
416 | return self._get_template_context(c) |
|
416 | return self._get_template_context(c) | |
417 |
|
417 | |||
418 | comments_model = CommentsModel() |
|
418 | comments_model = CommentsModel() | |
419 |
|
419 | |||
420 | # reviewers and statuses |
|
420 | # reviewers and statuses | |
421 | c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses() |
|
421 | c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses() | |
422 | allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers] |
|
422 | allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers] | |
423 |
|
423 | |||
424 | # GENERAL COMMENTS with versions # |
|
424 | # GENERAL COMMENTS with versions # | |
425 | q = comments_model._all_general_comments_of_pull_request(pull_request_latest) |
|
425 | q = comments_model._all_general_comments_of_pull_request(pull_request_latest) | |
426 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
426 | q = q.order_by(ChangesetComment.comment_id.asc()) | |
427 | general_comments = q |
|
427 | general_comments = q | |
428 |
|
428 | |||
429 | # pick comments we want to render at current version |
|
429 | # pick comments we want to render at current version | |
430 | c.comment_versions = comments_model.aggregate_comments( |
|
430 | c.comment_versions = comments_model.aggregate_comments( | |
431 | general_comments, versions, c.at_version_num) |
|
431 | general_comments, versions, c.at_version_num) | |
432 | c.comments = c.comment_versions[c.at_version_num]['until'] |
|
432 | c.comments = c.comment_versions[c.at_version_num]['until'] | |
433 |
|
433 | |||
434 | # INLINE COMMENTS with versions # |
|
434 | # INLINE COMMENTS with versions # | |
435 | q = comments_model._all_inline_comments_of_pull_request(pull_request_latest) |
|
435 | q = comments_model._all_inline_comments_of_pull_request(pull_request_latest) | |
436 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
436 | q = q.order_by(ChangesetComment.comment_id.asc()) | |
437 | inline_comments = q |
|
437 | inline_comments = q | |
438 |
|
438 | |||
439 | c.inline_versions = comments_model.aggregate_comments( |
|
439 | c.inline_versions = comments_model.aggregate_comments( | |
440 | inline_comments, versions, c.at_version_num, inline=True) |
|
440 | inline_comments, versions, c.at_version_num, inline=True) | |
441 |
|
441 | |||
442 | # TODOs |
|
442 | # TODOs | |
443 | c.unresolved_comments = CommentsModel() \ |
|
443 | c.unresolved_comments = CommentsModel() \ | |
444 | .get_pull_request_unresolved_todos(pull_request) |
|
444 | .get_pull_request_unresolved_todos(pull_request) | |
445 | c.resolved_comments = CommentsModel() \ |
|
445 | c.resolved_comments = CommentsModel() \ | |
446 | .get_pull_request_resolved_todos(pull_request) |
|
446 | .get_pull_request_resolved_todos(pull_request) | |
447 |
|
447 | |||
448 | # inject latest version |
|
448 | # inject latest version | |
449 | latest_ver = PullRequest.get_pr_display_object( |
|
449 | latest_ver = PullRequest.get_pr_display_object( | |
450 | pull_request_latest, pull_request_latest) |
|
450 | pull_request_latest, pull_request_latest) | |
451 |
|
451 | |||
452 | c.versions = versions + [latest_ver] |
|
452 | c.versions = versions + [latest_ver] | |
453 |
|
453 | |||
454 | # if we use version, then do not show later comments |
|
454 | # if we use version, then do not show later comments | |
455 | # than current version |
|
455 | # than current version | |
456 | display_inline_comments = collections.defaultdict( |
|
456 | display_inline_comments = collections.defaultdict( | |
457 | lambda: collections.defaultdict(list)) |
|
457 | lambda: collections.defaultdict(list)) | |
458 | for co in inline_comments: |
|
458 | for co in inline_comments: | |
459 | if c.at_version_num: |
|
459 | if c.at_version_num: | |
460 | # pick comments that are at least UPTO given version, so we |
|
460 | # pick comments that are at least UPTO given version, so we | |
461 | # don't render comments for higher version |
|
461 | # don't render comments for higher version | |
462 | should_render = co.pull_request_version_id and \ |
|
462 | should_render = co.pull_request_version_id and \ | |
463 | co.pull_request_version_id <= c.at_version_num |
|
463 | co.pull_request_version_id <= c.at_version_num | |
464 | else: |
|
464 | else: | |
465 | # showing all, for 'latest' |
|
465 | # showing all, for 'latest' | |
466 | should_render = True |
|
466 | should_render = True | |
467 |
|
467 | |||
468 | if should_render: |
|
468 | if should_render: | |
469 | display_inline_comments[co.f_path][co.line_no].append(co) |
|
469 | display_inline_comments[co.f_path][co.line_no].append(co) | |
470 |
|
470 | |||
471 | # load diff data into template context, if we use compare mode then |
|
471 | # load diff data into template context, if we use compare mode then | |
472 | # diff is calculated based on changes between versions of PR |
|
472 | # diff is calculated based on changes between versions of PR | |
473 |
|
473 | |||
474 | source_repo = pull_request_at_ver.source_repo |
|
474 | source_repo = pull_request_at_ver.source_repo | |
475 | source_ref_id = pull_request_at_ver.source_ref_parts.commit_id |
|
475 | source_ref_id = pull_request_at_ver.source_ref_parts.commit_id | |
476 |
|
476 | |||
477 | target_repo = pull_request_at_ver.target_repo |
|
477 | target_repo = pull_request_at_ver.target_repo | |
478 | target_ref_id = pull_request_at_ver.target_ref_parts.commit_id |
|
478 | target_ref_id = pull_request_at_ver.target_ref_parts.commit_id | |
479 |
|
479 | |||
480 | if compare: |
|
480 | if compare: | |
481 | # in compare switch the diff base to latest commit from prev version |
|
481 | # in compare switch the diff base to latest commit from prev version | |
482 | target_ref_id = prev_pull_request_display_obj.revisions[0] |
|
482 | target_ref_id = prev_pull_request_display_obj.revisions[0] | |
483 |
|
483 | |||
484 | # despite opening commits for bookmarks/branches/tags, we always |
|
484 | # despite opening commits for bookmarks/branches/tags, we always | |
485 | # convert this to rev to prevent changes after bookmark or branch change |
|
485 | # convert this to rev to prevent changes after bookmark or branch change | |
486 | c.source_ref_type = 'rev' |
|
486 | c.source_ref_type = 'rev' | |
487 | c.source_ref = source_ref_id |
|
487 | c.source_ref = source_ref_id | |
488 |
|
488 | |||
489 | c.target_ref_type = 'rev' |
|
489 | c.target_ref_type = 'rev' | |
490 | c.target_ref = target_ref_id |
|
490 | c.target_ref = target_ref_id | |
491 |
|
491 | |||
492 | c.source_repo = source_repo |
|
492 | c.source_repo = source_repo | |
493 | c.target_repo = target_repo |
|
493 | c.target_repo = target_repo | |
494 |
|
494 | |||
495 | c.commit_ranges = [] |
|
495 | c.commit_ranges = [] | |
496 | source_commit = EmptyCommit() |
|
496 | source_commit = EmptyCommit() | |
497 | target_commit = EmptyCommit() |
|
497 | target_commit = EmptyCommit() | |
498 | c.missing_requirements = False |
|
498 | c.missing_requirements = False | |
499 |
|
499 | |||
500 | source_scm = source_repo.scm_instance() |
|
500 | source_scm = source_repo.scm_instance() | |
501 | target_scm = target_repo.scm_instance() |
|
501 | target_scm = target_repo.scm_instance() | |
502 |
|
502 | |||
503 | shadow_scm = None |
|
503 | shadow_scm = None | |
504 | try: |
|
504 | try: | |
505 | shadow_scm = pull_request_latest.get_shadow_repo() |
|
505 | shadow_scm = pull_request_latest.get_shadow_repo() | |
506 | except Exception: |
|
506 | except Exception: | |
507 | log.debug('Failed to get shadow repo', exc_info=True) |
|
507 | log.debug('Failed to get shadow repo', exc_info=True) | |
508 | # try first the existing source_repo, and then shadow |
|
508 | # try first the existing source_repo, and then shadow | |
509 | # repo if we can obtain one |
|
509 | # repo if we can obtain one | |
510 | commits_source_repo = source_scm |
|
510 | commits_source_repo = source_scm | |
511 | if shadow_scm: |
|
511 | if shadow_scm: | |
512 | commits_source_repo = shadow_scm |
|
512 | commits_source_repo = shadow_scm | |
513 |
|
513 | |||
514 | c.commits_source_repo = commits_source_repo |
|
514 | c.commits_source_repo = commits_source_repo | |
515 | c.ancestor = None # set it to None, to hide it from PR view |
|
515 | c.ancestor = None # set it to None, to hide it from PR view | |
516 |
|
516 | |||
517 | # empty version means latest, so we keep this to prevent |
|
517 | # empty version means latest, so we keep this to prevent | |
518 | # double caching |
|
518 | # double caching | |
519 | version_normalized = version or 'latest' |
|
519 | version_normalized = version or 'latest' | |
520 | from_version_normalized = from_version or 'latest' |
|
520 | from_version_normalized = from_version or 'latest' | |
521 |
|
521 | |||
522 | cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo) |
|
522 | cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo) | |
523 | cache_file_path = diff_cache_exist( |
|
523 | cache_file_path = diff_cache_exist( | |
524 | cache_path, 'pull_request', pull_request_id, version_normalized, |
|
524 | cache_path, 'pull_request', pull_request_id, version_normalized, | |
525 | from_version_normalized, source_ref_id, target_ref_id, |
|
525 | from_version_normalized, source_ref_id, target_ref_id, | |
526 | hide_whitespace_changes, diff_context, c.fulldiff) |
|
526 | hide_whitespace_changes, diff_context, c.fulldiff) | |
527 |
|
527 | |||
528 | caching_enabled = self._is_diff_cache_enabled(c.target_repo) |
|
528 | caching_enabled = self._is_diff_cache_enabled(c.target_repo) | |
529 | force_recache = self.get_recache_flag() |
|
529 | force_recache = self.get_recache_flag() | |
530 |
|
530 | |||
531 | cached_diff = None |
|
531 | cached_diff = None | |
532 | if caching_enabled: |
|
532 | if caching_enabled: | |
533 | cached_diff = load_cached_diff(cache_file_path) |
|
533 | cached_diff = load_cached_diff(cache_file_path) | |
534 |
|
534 | |||
535 | has_proper_commit_cache = ( |
|
535 | has_proper_commit_cache = ( | |
536 | cached_diff and cached_diff.get('commits') |
|
536 | cached_diff and cached_diff.get('commits') | |
537 | and len(cached_diff.get('commits', [])) == 5 |
|
537 | and len(cached_diff.get('commits', [])) == 5 | |
538 | and cached_diff.get('commits')[0] |
|
538 | and cached_diff.get('commits')[0] | |
539 | and cached_diff.get('commits')[3]) |
|
539 | and cached_diff.get('commits')[3]) | |
540 |
|
540 | |||
541 | if not force_recache and not c.range_diff_on and has_proper_commit_cache: |
|
541 | if not force_recache and not c.range_diff_on and has_proper_commit_cache: | |
542 | diff_commit_cache = \ |
|
542 | diff_commit_cache = \ | |
543 | (ancestor_commit, commit_cache, missing_requirements, |
|
543 | (ancestor_commit, commit_cache, missing_requirements, | |
544 | source_commit, target_commit) = cached_diff['commits'] |
|
544 | source_commit, target_commit) = cached_diff['commits'] | |
545 | else: |
|
545 | else: | |
546 | # NOTE(marcink): we reach potentially unreachable errors when a PR has |
|
546 | # NOTE(marcink): we reach potentially unreachable errors when a PR has | |
547 | # merge errors resulting in potentially hidden commits in the shadow repo. |
|
547 | # merge errors resulting in potentially hidden commits in the shadow repo. | |
548 | maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \ |
|
548 | maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \ | |
549 | and _merge_check.merge_response |
|
549 | and _merge_check.merge_response | |
550 | maybe_unreachable = maybe_unreachable \ |
|
550 | maybe_unreachable = maybe_unreachable \ | |
551 | and _merge_check.merge_response.metadata.get('unresolved_files') |
|
551 | and _merge_check.merge_response.metadata.get('unresolved_files') | |
552 | log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation") |
|
552 | log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation") | |
553 | diff_commit_cache = \ |
|
553 | diff_commit_cache = \ | |
554 | (ancestor_commit, commit_cache, missing_requirements, |
|
554 | (ancestor_commit, commit_cache, missing_requirements, | |
555 | source_commit, target_commit) = self.get_commits( |
|
555 | source_commit, target_commit) = self.get_commits( | |
556 | commits_source_repo, |
|
556 | commits_source_repo, | |
557 | pull_request_at_ver, |
|
557 | pull_request_at_ver, | |
558 | source_commit, |
|
558 | source_commit, | |
559 | source_ref_id, |
|
559 | source_ref_id, | |
560 | source_scm, |
|
560 | source_scm, | |
561 | target_commit, |
|
561 | target_commit, | |
562 | target_ref_id, |
|
562 | target_ref_id, | |
563 | target_scm, |
|
563 | target_scm, | |
564 | maybe_unreachable=maybe_unreachable) |
|
564 | maybe_unreachable=maybe_unreachable) | |
565 |
|
565 | |||
566 | # register our commit range |
|
566 | # register our commit range | |
567 | for comm in commit_cache.values(): |
|
567 | for comm in commit_cache.values(): | |
568 | c.commit_ranges.append(comm) |
|
568 | c.commit_ranges.append(comm) | |
569 |
|
569 | |||
570 | c.missing_requirements = missing_requirements |
|
570 | c.missing_requirements = missing_requirements | |
571 | c.ancestor_commit = ancestor_commit |
|
571 | c.ancestor_commit = ancestor_commit | |
572 | c.statuses = source_repo.statuses( |
|
572 | c.statuses = source_repo.statuses( | |
573 | [x.raw_id for x in c.commit_ranges]) |
|
573 | [x.raw_id for x in c.commit_ranges]) | |
574 |
|
574 | |||
575 | # auto collapse if we have more than limit |
|
575 | # auto collapse if we have more than limit | |
576 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
576 | collapse_limit = diffs.DiffProcessor._collapse_commits_over | |
577 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
577 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit | |
578 | c.compare_mode = compare |
|
578 | c.compare_mode = compare | |
579 |
|
579 | |||
580 | # diff_limit is the old behavior, will cut off the whole diff |
|
580 | # diff_limit is the old behavior, will cut off the whole diff | |
581 | # if the limit is applied otherwise will just hide the |
|
581 | # if the limit is applied otherwise will just hide the | |
582 | # big files from the front-end |
|
582 | # big files from the front-end | |
583 | diff_limit = c.visual.cut_off_limit_diff |
|
583 | diff_limit = c.visual.cut_off_limit_diff | |
584 | file_limit = c.visual.cut_off_limit_file |
|
584 | file_limit = c.visual.cut_off_limit_file | |
585 |
|
585 | |||
586 | c.missing_commits = False |
|
586 | c.missing_commits = False | |
587 | if (c.missing_requirements |
|
587 | if (c.missing_requirements | |
588 | or isinstance(source_commit, EmptyCommit) |
|
588 | or isinstance(source_commit, EmptyCommit) | |
589 | or source_commit == target_commit): |
|
589 | or source_commit == target_commit): | |
590 |
|
590 | |||
591 | c.missing_commits = True |
|
591 | c.missing_commits = True | |
592 | else: |
|
592 | else: | |
593 | c.inline_comments = display_inline_comments |
|
593 | c.inline_comments = display_inline_comments | |
594 |
|
594 | |||
595 | has_proper_diff_cache = cached_diff and cached_diff.get('commits') |
|
595 | has_proper_diff_cache = cached_diff and cached_diff.get('commits') | |
596 | if not force_recache and has_proper_diff_cache: |
|
596 | if not force_recache and has_proper_diff_cache: | |
597 | c.diffset = cached_diff['diff'] |
|
597 | c.diffset = cached_diff['diff'] | |
598 | else: |
|
598 | else: | |
599 | c.diffset = self._get_diffset( |
|
599 | c.diffset = self._get_diffset( | |
600 | c.source_repo.repo_name, commits_source_repo, |
|
600 | c.source_repo.repo_name, commits_source_repo, | |
601 | c.ancestor_commit, |
|
601 | c.ancestor_commit, | |
602 | source_ref_id, target_ref_id, |
|
602 | source_ref_id, target_ref_id, | |
603 | target_commit, source_commit, |
|
603 | target_commit, source_commit, | |
604 | diff_limit, file_limit, c.fulldiff, |
|
604 | diff_limit, file_limit, c.fulldiff, | |
605 | hide_whitespace_changes, diff_context) |
|
605 | hide_whitespace_changes, diff_context) | |
606 |
|
606 | |||
607 | # save cached diff |
|
607 | # save cached diff | |
608 | if caching_enabled: |
|
608 | if caching_enabled: | |
609 | cache_diff(cache_file_path, c.diffset, diff_commit_cache) |
|
609 | cache_diff(cache_file_path, c.diffset, diff_commit_cache) | |
610 |
|
610 | |||
611 | c.limited_diff = c.diffset.limited_diff |
|
611 | c.limited_diff = c.diffset.limited_diff | |
612 |
|
612 | |||
613 | # calculate removed files that are bound to comments |
|
613 | # calculate removed files that are bound to comments | |
614 | comment_deleted_files = [ |
|
614 | comment_deleted_files = [ | |
615 | fname for fname in display_inline_comments |
|
615 | fname for fname in display_inline_comments | |
616 | if fname not in c.diffset.file_stats] |
|
616 | if fname not in c.diffset.file_stats] | |
617 |
|
617 | |||
618 | c.deleted_files_comments = collections.defaultdict(dict) |
|
618 | c.deleted_files_comments = collections.defaultdict(dict) | |
619 | for fname, per_line_comments in display_inline_comments.items(): |
|
619 | for fname, per_line_comments in display_inline_comments.items(): | |
620 | if fname in comment_deleted_files: |
|
620 | if fname in comment_deleted_files: | |
621 | c.deleted_files_comments[fname]['stats'] = 0 |
|
621 | c.deleted_files_comments[fname]['stats'] = 0 | |
622 | c.deleted_files_comments[fname]['comments'] = list() |
|
622 | c.deleted_files_comments[fname]['comments'] = list() | |
623 | for lno, comments in per_line_comments.items(): |
|
623 | for lno, comments in per_line_comments.items(): | |
624 | c.deleted_files_comments[fname]['comments'].extend(comments) |
|
624 | c.deleted_files_comments[fname]['comments'].extend(comments) | |
625 |
|
625 | |||
626 | # maybe calculate the range diff |
|
626 | # maybe calculate the range diff | |
627 | if c.range_diff_on: |
|
627 | if c.range_diff_on: | |
628 | # TODO(marcink): set whitespace/context |
|
628 | # TODO(marcink): set whitespace/context | |
629 | context_lcl = 3 |
|
629 | context_lcl = 3 | |
630 | ign_whitespace_lcl = False |
|
630 | ign_whitespace_lcl = False | |
631 |
|
631 | |||
632 | for commit in c.commit_ranges: |
|
632 | for commit in c.commit_ranges: | |
633 | commit2 = commit |
|
633 | commit2 = commit | |
634 | commit1 = commit.first_parent |
|
634 | commit1 = commit.first_parent | |
635 |
|
635 | |||
636 | range_diff_cache_file_path = diff_cache_exist( |
|
636 | range_diff_cache_file_path = diff_cache_exist( | |
637 | cache_path, 'diff', commit.raw_id, |
|
637 | cache_path, 'diff', commit.raw_id, | |
638 | ign_whitespace_lcl, context_lcl, c.fulldiff) |
|
638 | ign_whitespace_lcl, context_lcl, c.fulldiff) | |
639 |
|
639 | |||
640 | cached_diff = None |
|
640 | cached_diff = None | |
641 | if caching_enabled: |
|
641 | if caching_enabled: | |
642 | cached_diff = load_cached_diff(range_diff_cache_file_path) |
|
642 | cached_diff = load_cached_diff(range_diff_cache_file_path) | |
643 |
|
643 | |||
644 | has_proper_diff_cache = cached_diff and cached_diff.get('diff') |
|
644 | has_proper_diff_cache = cached_diff and cached_diff.get('diff') | |
645 | if not force_recache and has_proper_diff_cache: |
|
645 | if not force_recache and has_proper_diff_cache: | |
646 | diffset = cached_diff['diff'] |
|
646 | diffset = cached_diff['diff'] | |
647 | else: |
|
647 | else: | |
648 | diffset = self._get_range_diffset( |
|
648 | diffset = self._get_range_diffset( | |
649 | commits_source_repo, source_repo, |
|
649 | commits_source_repo, source_repo, | |
650 | commit1, commit2, diff_limit, file_limit, |
|
650 | commit1, commit2, diff_limit, file_limit, | |
651 | c.fulldiff, ign_whitespace_lcl, context_lcl |
|
651 | c.fulldiff, ign_whitespace_lcl, context_lcl | |
652 | ) |
|
652 | ) | |
653 |
|
653 | |||
654 | # save cached diff |
|
654 | # save cached diff | |
655 | if caching_enabled: |
|
655 | if caching_enabled: | |
656 | cache_diff(range_diff_cache_file_path, diffset, None) |
|
656 | cache_diff(range_diff_cache_file_path, diffset, None) | |
657 |
|
657 | |||
658 | c.changes[commit.raw_id] = diffset |
|
658 | c.changes[commit.raw_id] = diffset | |
659 |
|
659 | |||
660 | # this is a hack to properly display links, when creating PR, the |
|
660 | # this is a hack to properly display links, when creating PR, the | |
661 | # compare view and others uses different notation, and |
|
661 | # compare view and others uses different notation, and | |
662 | # compare_commits.mako renders links based on the target_repo. |
|
662 | # compare_commits.mako renders links based on the target_repo. | |
663 | # We need to swap that here to generate it properly on the html side |
|
663 | # We need to swap that here to generate it properly on the html side | |
664 | c.target_repo = c.source_repo |
|
664 | c.target_repo = c.source_repo | |
665 |
|
665 | |||
666 | c.commit_statuses = ChangesetStatus.STATUSES |
|
666 | c.commit_statuses = ChangesetStatus.STATUSES | |
667 |
|
667 | |||
668 | c.show_version_changes = not pr_closed |
|
668 | c.show_version_changes = not pr_closed | |
669 | if c.show_version_changes: |
|
669 | if c.show_version_changes: | |
670 | cur_obj = pull_request_at_ver |
|
670 | cur_obj = pull_request_at_ver | |
671 | prev_obj = prev_pull_request_at_ver |
|
671 | prev_obj = prev_pull_request_at_ver | |
672 |
|
672 | |||
673 | old_commit_ids = prev_obj.revisions |
|
673 | old_commit_ids = prev_obj.revisions | |
674 | new_commit_ids = cur_obj.revisions |
|
674 | new_commit_ids = cur_obj.revisions | |
675 | commit_changes = PullRequestModel()._calculate_commit_id_changes( |
|
675 | commit_changes = PullRequestModel()._calculate_commit_id_changes( | |
676 | old_commit_ids, new_commit_ids) |
|
676 | old_commit_ids, new_commit_ids) | |
677 | c.commit_changes_summary = commit_changes |
|
677 | c.commit_changes_summary = commit_changes | |
678 |
|
678 | |||
679 | # calculate the diff for commits between versions |
|
679 | # calculate the diff for commits between versions | |
680 | c.commit_changes = [] |
|
680 | c.commit_changes = [] | |
681 |
|
681 | |||
682 | def mark(cs, fw): |
|
682 | def mark(cs, fw): | |
683 | return list(h.itertools.izip_longest([], cs, fillvalue=fw)) |
|
683 | return list(h.itertools.izip_longest([], cs, fillvalue=fw)) | |
684 |
|
684 | |||
685 | for c_type, raw_id in mark(commit_changes.added, 'a') \ |
|
685 | for c_type, raw_id in mark(commit_changes.added, 'a') \ | |
686 | + mark(commit_changes.removed, 'r') \ |
|
686 | + mark(commit_changes.removed, 'r') \ | |
687 | + mark(commit_changes.common, 'c'): |
|
687 | + mark(commit_changes.common, 'c'): | |
688 |
|
688 | |||
689 | if raw_id in commit_cache: |
|
689 | if raw_id in commit_cache: | |
690 | commit = commit_cache[raw_id] |
|
690 | commit = commit_cache[raw_id] | |
691 | else: |
|
691 | else: | |
692 | try: |
|
692 | try: | |
693 | commit = commits_source_repo.get_commit(raw_id) |
|
693 | commit = commits_source_repo.get_commit(raw_id) | |
694 | except CommitDoesNotExistError: |
|
694 | except CommitDoesNotExistError: | |
695 | # in case we fail extracting still use "dummy" commit |
|
695 | # in case we fail extracting still use "dummy" commit | |
696 | # for display in commit diff |
|
696 | # for display in commit diff | |
697 | commit = h.AttributeDict( |
|
697 | commit = h.AttributeDict( | |
698 | {'raw_id': raw_id, |
|
698 | {'raw_id': raw_id, | |
699 | 'message': 'EMPTY or MISSING COMMIT'}) |
|
699 | 'message': 'EMPTY or MISSING COMMIT'}) | |
700 | c.commit_changes.append([c_type, commit]) |
|
700 | c.commit_changes.append([c_type, commit]) | |
701 |
|
701 | |||
702 | # current user review statuses for each version |
|
702 | # current user review statuses for each version | |
703 | c.review_versions = {} |
|
703 | c.review_versions = {} | |
704 | if self._rhodecode_user.user_id in allowed_reviewers: |
|
704 | if self._rhodecode_user.user_id in allowed_reviewers: | |
705 | for co in general_comments: |
|
705 | for co in general_comments: | |
706 | if co.author.user_id == self._rhodecode_user.user_id: |
|
706 | if co.author.user_id == self._rhodecode_user.user_id: | |
707 | status = co.status_change |
|
707 | status = co.status_change | |
708 | if status: |
|
708 | if status: | |
709 | _ver_pr = status[0].comment.pull_request_version_id |
|
709 | _ver_pr = status[0].comment.pull_request_version_id | |
710 | c.review_versions[_ver_pr] = status[0] |
|
710 | c.review_versions[_ver_pr] = status[0] | |
711 |
|
711 | |||
712 | return self._get_template_context(c) |
|
712 | return self._get_template_context(c) | |
713 |
|
713 | |||
714 | def get_commits( |
|
714 | def get_commits( | |
715 | self, commits_source_repo, pull_request_at_ver, source_commit, |
|
715 | self, commits_source_repo, pull_request_at_ver, source_commit, | |
716 | source_ref_id, source_scm, target_commit, target_ref_id, target_scm, |
|
716 | source_ref_id, source_scm, target_commit, target_ref_id, target_scm, | |
717 | maybe_unreachable=False): |
|
717 | maybe_unreachable=False): | |
718 |
|
718 | |||
719 | commit_cache = collections.OrderedDict() |
|
719 | commit_cache = collections.OrderedDict() | |
720 | missing_requirements = False |
|
720 | missing_requirements = False | |
721 |
|
721 | |||
722 | try: |
|
722 | try: | |
723 | pre_load = ["author", "date", "message", "branch", "parents"] |
|
723 | pre_load = ["author", "date", "message", "branch", "parents"] | |
724 |
|
724 | |||
725 | pull_request_commits = pull_request_at_ver.revisions |
|
725 | pull_request_commits = pull_request_at_ver.revisions | |
726 | log.debug('Loading %s commits from %s', |
|
726 | log.debug('Loading %s commits from %s', | |
727 | len(pull_request_commits), commits_source_repo) |
|
727 | len(pull_request_commits), commits_source_repo) | |
728 |
|
728 | |||
729 | for rev in pull_request_commits: |
|
729 | for rev in pull_request_commits: | |
730 | comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load, |
|
730 | comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load, | |
731 | maybe_unreachable=maybe_unreachable) |
|
731 | maybe_unreachable=maybe_unreachable) | |
732 | commit_cache[comm.raw_id] = comm |
|
732 | commit_cache[comm.raw_id] = comm | |
733 |
|
733 | |||
734 | # Order here matters, we first need to get target, and then |
|
734 | # Order here matters, we first need to get target, and then | |
735 | # the source |
|
735 | # the source | |
736 | target_commit = commits_source_repo.get_commit( |
|
736 | target_commit = commits_source_repo.get_commit( | |
737 | commit_id=safe_str(target_ref_id)) |
|
737 | commit_id=safe_str(target_ref_id)) | |
738 |
|
738 | |||
739 | source_commit = commits_source_repo.get_commit( |
|
739 | source_commit = commits_source_repo.get_commit( | |
740 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) |
|
740 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) | |
741 | except CommitDoesNotExistError: |
|
741 | except CommitDoesNotExistError: | |
742 | log.warning('Failed to get commit from `{}` repo'.format( |
|
742 | log.warning('Failed to get commit from `{}` repo'.format( | |
743 | commits_source_repo), exc_info=True) |
|
743 | commits_source_repo), exc_info=True) | |
744 | except RepositoryRequirementError: |
|
744 | except RepositoryRequirementError: | |
745 | log.warning('Failed to get all required data from repo', exc_info=True) |
|
745 | log.warning('Failed to get all required data from repo', exc_info=True) | |
746 | missing_requirements = True |
|
746 | missing_requirements = True | |
747 |
|
747 | |||
748 | pr_ancestor_id = pull_request_at_ver.common_ancestor_id |
|
748 | pr_ancestor_id = pull_request_at_ver.common_ancestor_id | |
749 |
|
749 | |||
750 | try: |
|
750 | try: | |
751 | ancestor_commit = source_scm.get_commit(pr_ancestor_id) |
|
751 | ancestor_commit = source_scm.get_commit(pr_ancestor_id) | |
752 | except Exception: |
|
752 | except Exception: | |
753 | ancestor_commit = None |
|
753 | ancestor_commit = None | |
754 |
|
754 | |||
755 | return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit |
|
755 | return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit | |
756 |
|
756 | |||
757 | def assure_not_empty_repo(self): |
|
757 | def assure_not_empty_repo(self): | |
758 | _ = self.request.translate |
|
758 | _ = self.request.translate | |
759 |
|
759 | |||
760 | try: |
|
760 | try: | |
761 | self.db_repo.scm_instance().get_commit() |
|
761 | self.db_repo.scm_instance().get_commit() | |
762 | except EmptyRepositoryError: |
|
762 | except EmptyRepositoryError: | |
763 | h.flash(h.literal(_('There are no commits yet')), |
|
763 | h.flash(h.literal(_('There are no commits yet')), | |
764 | category='warning') |
|
764 | category='warning') | |
765 | raise HTTPFound( |
|
765 | raise HTTPFound( | |
766 | h.route_path('repo_summary', repo_name=self.db_repo.repo_name)) |
|
766 | h.route_path('repo_summary', repo_name=self.db_repo.repo_name)) | |
767 |
|
767 | |||
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='pullrequest_new', request_method='GET',
        renderer='rhodecode:templates/pullrequests/pullrequest.mako')
    def pull_request_new(self):
        """Render the "new pull request" form.

        Pre-computes the source/target repository ref data used by the
        template's repo/ref selectors. The source is always the current
        repository; the default target is the fork parent when one exists,
        is readable, and is non-empty, otherwise the repository itself.

        Redirects back to this form with a flash error when the ``commit``
        GET argument points at a commit that does not exist.
        """
        _ = self.request.translate
        c = self.load_default_context()

        self.assure_not_empty_repo()
        source_repo = self.db_repo

        # optional pre-selection of the source ref, passed in via GET args
        commit_id = self.request.GET.get('commit')
        branch_ref = self.request.GET.get('branch')
        bookmark_ref = self.request.GET.get('bookmark')

        try:
            source_repo_data = PullRequestModel().generate_repo_data(
                source_repo, commit_id=commit_id,
                branch=branch_ref, bookmark=bookmark_ref,
                translator=self.request.translate)
        except CommitDoesNotExistError as e:
            log.exception(e)
            h.flash(_('Commit does not exist'), 'error')
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=source_repo.repo_name))

        default_target_repo = source_repo

        if source_repo.parent and c.has_origin_repo_read_perm:
            parent_vcs_obj = source_repo.parent.scm_instance()
            if parent_vcs_obj and not parent_vcs_obj.is_empty():
                # change default if we have a parent repo
                default_target_repo = source_repo.parent

        target_repo_data = PullRequestModel().generate_repo_data(
            default_target_repo, translator=self.request.translate)

        selected_source_ref = source_repo_data['refs']['selected_ref']
        title_source_ref = ''
        if selected_source_ref:
            # selected_ref is 'type:name:commit_id'; the middle part is the
            # human-readable ref name used for the default title
            title_source_ref = selected_source_ref.split(':', 2)[1]
        c.default_title = PullRequestModel().generate_pullrequest_title(
            source=source_repo.repo_name,
            source_ref=title_source_ref,
            target=default_target_repo.repo_name
        )

        # JSON blobs consumed by the client-side ref selector widgets
        c.default_repo_data = {
            'source_repo_name': source_repo.repo_name,
            'source_refs_json': json.dumps(source_repo_data),
            'target_repo_name': default_target_repo.repo_name,
            'target_refs_json': json.dumps(target_repo_data),
        }
        c.default_source_ref = selected_source_ref

        return self._get_template_context(c)
828 | @LoginRequired() |
|
828 | @LoginRequired() | |
829 | @NotAnonymous() |
|
829 | @NotAnonymous() | |
830 | @HasRepoPermissionAnyDecorator( |
|
830 | @HasRepoPermissionAnyDecorator( | |
831 | 'repository.read', 'repository.write', 'repository.admin') |
|
831 | 'repository.read', 'repository.write', 'repository.admin') | |
832 | @view_config( |
|
832 | @view_config( | |
833 | route_name='pullrequest_repo_refs', request_method='GET', |
|
833 | route_name='pullrequest_repo_refs', request_method='GET', | |
834 | renderer='json_ext', xhr=True) |
|
834 | renderer='json_ext', xhr=True) | |
835 | def pull_request_repo_refs(self): |
|
835 | def pull_request_repo_refs(self): | |
836 | self.load_default_context() |
|
836 | self.load_default_context() | |
837 | target_repo_name = self.request.matchdict['target_repo_name'] |
|
837 | target_repo_name = self.request.matchdict['target_repo_name'] | |
838 | repo = Repository.get_by_repo_name(target_repo_name) |
|
838 | repo = Repository.get_by_repo_name(target_repo_name) | |
839 | if not repo: |
|
839 | if not repo: | |
840 | raise HTTPNotFound() |
|
840 | raise HTTPNotFound() | |
841 |
|
841 | |||
842 | target_perm = HasRepoPermissionAny( |
|
842 | target_perm = HasRepoPermissionAny( | |
843 | 'repository.read', 'repository.write', 'repository.admin')( |
|
843 | 'repository.read', 'repository.write', 'repository.admin')( | |
844 | target_repo_name) |
|
844 | target_repo_name) | |
845 | if not target_perm: |
|
845 | if not target_perm: | |
846 | raise HTTPNotFound() |
|
846 | raise HTTPNotFound() | |
847 |
|
847 | |||
848 | return PullRequestModel().generate_repo_data( |
|
848 | return PullRequestModel().generate_repo_data( | |
849 | repo, translator=self.request.translate) |
|
849 | repo, translator=self.request.translate) | |
850 |
|
850 | |||
851 | @LoginRequired() |
|
851 | @LoginRequired() | |
852 | @NotAnonymous() |
|
852 | @NotAnonymous() | |
853 | @HasRepoPermissionAnyDecorator( |
|
853 | @HasRepoPermissionAnyDecorator( | |
854 | 'repository.read', 'repository.write', 'repository.admin') |
|
854 | 'repository.read', 'repository.write', 'repository.admin') | |
855 | @view_config( |
|
855 | @view_config( | |
856 | route_name='pullrequest_repo_targets', request_method='GET', |
|
856 | route_name='pullrequest_repo_targets', request_method='GET', | |
857 | renderer='json_ext', xhr=True) |
|
857 | renderer='json_ext', xhr=True) | |
858 | def pullrequest_repo_targets(self): |
|
858 | def pullrequest_repo_targets(self): | |
859 | _ = self.request.translate |
|
859 | _ = self.request.translate | |
860 | filter_query = self.request.GET.get('query') |
|
860 | filter_query = self.request.GET.get('query') | |
861 |
|
861 | |||
862 | # get the parents |
|
862 | # get the parents | |
863 | parent_target_repos = [] |
|
863 | parent_target_repos = [] | |
864 | if self.db_repo.parent: |
|
864 | if self.db_repo.parent: | |
865 | parents_query = Repository.query() \ |
|
865 | parents_query = Repository.query() \ | |
866 | .order_by(func.length(Repository.repo_name)) \ |
|
866 | .order_by(func.length(Repository.repo_name)) \ | |
867 | .filter(Repository.fork_id == self.db_repo.parent.repo_id) |
|
867 | .filter(Repository.fork_id == self.db_repo.parent.repo_id) | |
868 |
|
868 | |||
869 | if filter_query: |
|
869 | if filter_query: | |
870 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
870 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) | |
871 | parents_query = parents_query.filter( |
|
871 | parents_query = parents_query.filter( | |
872 | Repository.repo_name.ilike(ilike_expression)) |
|
872 | Repository.repo_name.ilike(ilike_expression)) | |
873 | parents = parents_query.limit(20).all() |
|
873 | parents = parents_query.limit(20).all() | |
874 |
|
874 | |||
875 | for parent in parents: |
|
875 | for parent in parents: | |
876 | parent_vcs_obj = parent.scm_instance() |
|
876 | parent_vcs_obj = parent.scm_instance() | |
877 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
877 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): | |
878 | parent_target_repos.append(parent) |
|
878 | parent_target_repos.append(parent) | |
879 |
|
879 | |||
880 | # get other forks, and repo itself |
|
880 | # get other forks, and repo itself | |
881 | query = Repository.query() \ |
|
881 | query = Repository.query() \ | |
882 | .order_by(func.length(Repository.repo_name)) \ |
|
882 | .order_by(func.length(Repository.repo_name)) \ | |
883 | .filter( |
|
883 | .filter( | |
884 | or_(Repository.repo_id == self.db_repo.repo_id, # repo itself |
|
884 | or_(Repository.repo_id == self.db_repo.repo_id, # repo itself | |
885 | Repository.fork_id == self.db_repo.repo_id) # forks of this repo |
|
885 | Repository.fork_id == self.db_repo.repo_id) # forks of this repo | |
886 | ) \ |
|
886 | ) \ | |
887 | .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos])) |
|
887 | .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos])) | |
888 |
|
888 | |||
889 | if filter_query: |
|
889 | if filter_query: | |
890 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
890 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) | |
891 | query = query.filter(Repository.repo_name.ilike(ilike_expression)) |
|
891 | query = query.filter(Repository.repo_name.ilike(ilike_expression)) | |
892 |
|
892 | |||
893 | limit = max(20 - len(parent_target_repos), 5) # not less then 5 |
|
893 | limit = max(20 - len(parent_target_repos), 5) # not less then 5 | |
894 | target_repos = query.limit(limit).all() |
|
894 | target_repos = query.limit(limit).all() | |
895 |
|
895 | |||
896 | all_target_repos = target_repos + parent_target_repos |
|
896 | all_target_repos = target_repos + parent_target_repos | |
897 |
|
897 | |||
898 | repos = [] |
|
898 | repos = [] | |
899 | # This checks permissions to the repositories |
|
899 | # This checks permissions to the repositories | |
900 | for obj in ScmModel().get_repos(all_target_repos): |
|
900 | for obj in ScmModel().get_repos(all_target_repos): | |
901 | repos.append({ |
|
901 | repos.append({ | |
902 | 'id': obj['name'], |
|
902 | 'id': obj['name'], | |
903 | 'text': obj['name'], |
|
903 | 'text': obj['name'], | |
904 | 'type': 'repo', |
|
904 | 'type': 'repo', | |
905 | 'repo_id': obj['dbrepo']['repo_id'], |
|
905 | 'repo_id': obj['dbrepo']['repo_id'], | |
906 | 'repo_type': obj['dbrepo']['repo_type'], |
|
906 | 'repo_type': obj['dbrepo']['repo_type'], | |
907 | 'private': obj['dbrepo']['private'], |
|
907 | 'private': obj['dbrepo']['private'], | |
908 |
|
908 | |||
909 | }) |
|
909 | }) | |
910 |
|
910 | |||
911 | data = { |
|
911 | data = { | |
912 | 'more': False, |
|
912 | 'more': False, | |
913 | 'results': [{ |
|
913 | 'results': [{ | |
914 | 'text': _('Repositories'), |
|
914 | 'text': _('Repositories'), | |
915 | 'children': repos |
|
915 | 'children': repos | |
916 | }] if repos else [] |
|
916 | }] if repos else [] | |
917 | } |
|
917 | } | |
918 | return data |
|
918 | return data | |
919 |
|
919 | |||
920 | @LoginRequired() |
|
920 | @LoginRequired() | |
921 | @NotAnonymous() |
|
921 | @NotAnonymous() | |
922 | @HasRepoPermissionAnyDecorator( |
|
922 | @HasRepoPermissionAnyDecorator( | |
923 | 'repository.read', 'repository.write', 'repository.admin') |
|
923 | 'repository.read', 'repository.write', 'repository.admin') | |
924 | @CSRFRequired() |
|
924 | @CSRFRequired() | |
925 | @view_config( |
|
925 | @view_config( | |
926 | route_name='pullrequest_create', request_method='POST', |
|
926 | route_name='pullrequest_create', request_method='POST', | |
927 | renderer=None) |
|
927 | renderer=None) | |
928 | def pull_request_create(self): |
|
928 | def pull_request_create(self): | |
929 | _ = self.request.translate |
|
929 | _ = self.request.translate | |
930 | self.assure_not_empty_repo() |
|
930 | self.assure_not_empty_repo() | |
931 | self.load_default_context() |
|
931 | self.load_default_context() | |
932 |
|
932 | |||
933 | controls = peppercorn.parse(self.request.POST.items()) |
|
933 | controls = peppercorn.parse(self.request.POST.items()) | |
934 |
|
934 | |||
935 | try: |
|
935 | try: | |
936 | form = PullRequestForm( |
|
936 | form = PullRequestForm( | |
937 | self.request.translate, self.db_repo.repo_id)() |
|
937 | self.request.translate, self.db_repo.repo_id)() | |
938 | _form = form.to_python(controls) |
|
938 | _form = form.to_python(controls) | |
939 | except formencode.Invalid as errors: |
|
939 | except formencode.Invalid as errors: | |
940 | if errors.error_dict.get('revisions'): |
|
940 | if errors.error_dict.get('revisions'): | |
941 | msg = 'Revisions: %s' % errors.error_dict['revisions'] |
|
941 | msg = 'Revisions: %s' % errors.error_dict['revisions'] | |
942 | elif errors.error_dict.get('pullrequest_title'): |
|
942 | elif errors.error_dict.get('pullrequest_title'): | |
943 | msg = errors.error_dict.get('pullrequest_title') |
|
943 | msg = errors.error_dict.get('pullrequest_title') | |
944 | else: |
|
944 | else: | |
945 | msg = _('Error creating pull request: {}').format(errors) |
|
945 | msg = _('Error creating pull request: {}').format(errors) | |
946 | log.exception(msg) |
|
946 | log.exception(msg) | |
947 | h.flash(msg, 'error') |
|
947 | h.flash(msg, 'error') | |
948 |
|
948 | |||
949 | # would rather just go back to form ... |
|
949 | # would rather just go back to form ... | |
950 | raise HTTPFound( |
|
950 | raise HTTPFound( | |
951 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) |
|
951 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) | |
952 |
|
952 | |||
953 | source_repo = _form['source_repo'] |
|
953 | source_repo = _form['source_repo'] | |
954 | source_ref = _form['source_ref'] |
|
954 | source_ref = _form['source_ref'] | |
955 | target_repo = _form['target_repo'] |
|
955 | target_repo = _form['target_repo'] | |
956 | target_ref = _form['target_ref'] |
|
956 | target_ref = _form['target_ref'] | |
957 | commit_ids = _form['revisions'][::-1] |
|
957 | commit_ids = _form['revisions'][::-1] | |
958 | common_ancestor_id = _form['common_ancestor'] |
|
958 | common_ancestor_id = _form['common_ancestor'] | |
959 |
|
959 | |||
960 | # find the ancestor for this pr |
|
960 | # find the ancestor for this pr | |
961 | source_db_repo = Repository.get_by_repo_name(_form['source_repo']) |
|
961 | source_db_repo = Repository.get_by_repo_name(_form['source_repo']) | |
962 | target_db_repo = Repository.get_by_repo_name(_form['target_repo']) |
|
962 | target_db_repo = Repository.get_by_repo_name(_form['target_repo']) | |
963 |
|
963 | |||
964 | if not (source_db_repo or target_db_repo): |
|
964 | if not (source_db_repo or target_db_repo): | |
965 | h.flash(_('source_repo or target repo not found'), category='error') |
|
965 | h.flash(_('source_repo or target repo not found'), category='error') | |
966 | raise HTTPFound( |
|
966 | raise HTTPFound( | |
967 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) |
|
967 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) | |
968 |
|
968 | |||
969 | # re-check permissions again here |
|
969 | # re-check permissions again here | |
970 | # source_repo we must have read permissions |
|
970 | # source_repo we must have read permissions | |
971 |
|
971 | |||
972 | source_perm = HasRepoPermissionAny( |
|
972 | source_perm = HasRepoPermissionAny( | |
973 | 'repository.read', 'repository.write', 'repository.admin')( |
|
973 | 'repository.read', 'repository.write', 'repository.admin')( | |
974 | source_db_repo.repo_name) |
|
974 | source_db_repo.repo_name) | |
975 | if not source_perm: |
|
975 | if not source_perm: | |
976 | msg = _('Not Enough permissions to source repo `{}`.'.format( |
|
976 | msg = _('Not Enough permissions to source repo `{}`.'.format( | |
977 | source_db_repo.repo_name)) |
|
977 | source_db_repo.repo_name)) | |
978 | h.flash(msg, category='error') |
|
978 | h.flash(msg, category='error') | |
979 | # copy the args back to redirect |
|
979 | # copy the args back to redirect | |
980 | org_query = self.request.GET.mixed() |
|
980 | org_query = self.request.GET.mixed() | |
981 | raise HTTPFound( |
|
981 | raise HTTPFound( | |
982 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
982 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, | |
983 | _query=org_query)) |
|
983 | _query=org_query)) | |
984 |
|
984 | |||
985 | # target repo we must have read permissions, and also later on |
|
985 | # target repo we must have read permissions, and also later on | |
986 | # we want to check branch permissions here |
|
986 | # we want to check branch permissions here | |
987 | target_perm = HasRepoPermissionAny( |
|
987 | target_perm = HasRepoPermissionAny( | |
988 | 'repository.read', 'repository.write', 'repository.admin')( |
|
988 | 'repository.read', 'repository.write', 'repository.admin')( | |
989 | target_db_repo.repo_name) |
|
989 | target_db_repo.repo_name) | |
990 | if not target_perm: |
|
990 | if not target_perm: | |
991 | msg = _('Not Enough permissions to target repo `{}`.'.format( |
|
991 | msg = _('Not Enough permissions to target repo `{}`.'.format( | |
992 | target_db_repo.repo_name)) |
|
992 | target_db_repo.repo_name)) | |
993 | h.flash(msg, category='error') |
|
993 | h.flash(msg, category='error') | |
994 | # copy the args back to redirect |
|
994 | # copy the args back to redirect | |
995 | org_query = self.request.GET.mixed() |
|
995 | org_query = self.request.GET.mixed() | |
996 | raise HTTPFound( |
|
996 | raise HTTPFound( | |
997 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
997 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, | |
998 | _query=org_query)) |
|
998 | _query=org_query)) | |
999 |
|
999 | |||
1000 | source_scm = source_db_repo.scm_instance() |
|
1000 | source_scm = source_db_repo.scm_instance() | |
1001 | target_scm = target_db_repo.scm_instance() |
|
1001 | target_scm = target_db_repo.scm_instance() | |
1002 |
|
1002 | |||
1003 | source_commit = source_scm.get_commit(source_ref.split(':')[-1]) |
|
1003 | source_commit = source_scm.get_commit(source_ref.split(':')[-1]) | |
1004 | target_commit = target_scm.get_commit(target_ref.split(':')[-1]) |
|
1004 | target_commit = target_scm.get_commit(target_ref.split(':')[-1]) | |
1005 |
|
1005 | |||
1006 | ancestor = source_scm.get_common_ancestor( |
|
1006 | ancestor = source_scm.get_common_ancestor( | |
1007 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
1007 | source_commit.raw_id, target_commit.raw_id, target_scm) | |
1008 |
|
1008 | |||
1009 | # recalculate target ref based on ancestor |
|
1009 | # recalculate target ref based on ancestor | |
1010 | target_ref_type, target_ref_name, __ = _form['target_ref'].split(':') |
|
1010 | target_ref_type, target_ref_name, __ = _form['target_ref'].split(':') | |
1011 | target_ref = ':'.join((target_ref_type, target_ref_name, ancestor)) |
|
1011 | target_ref = ':'.join((target_ref_type, target_ref_name, ancestor)) | |
1012 |
|
1012 | |||
1013 | get_default_reviewers_data, validate_default_reviewers = \ |
|
1013 | get_default_reviewers_data, validate_default_reviewers = \ | |
1014 | PullRequestModel().get_reviewer_functions() |
|
1014 | PullRequestModel().get_reviewer_functions() | |
1015 |
|
1015 | |||
1016 | # recalculate reviewers logic, to make sure we can validate this |
|
1016 | # recalculate reviewers logic, to make sure we can validate this | |
1017 | reviewer_rules = get_default_reviewers_data( |
|
1017 | reviewer_rules = get_default_reviewers_data( | |
1018 | self._rhodecode_db_user, source_db_repo, |
|
1018 | self._rhodecode_db_user, source_db_repo, | |
1019 | source_commit, target_db_repo, target_commit) |
|
1019 | source_commit, target_db_repo, target_commit) | |
1020 |
|
1020 | |||
1021 | given_reviewers = _form['review_members'] |
|
1021 | given_reviewers = _form['review_members'] | |
1022 | reviewers = validate_default_reviewers( |
|
1022 | reviewers = validate_default_reviewers( | |
1023 | given_reviewers, reviewer_rules) |
|
1023 | given_reviewers, reviewer_rules) | |
1024 |
|
1024 | |||
1025 | pullrequest_title = _form['pullrequest_title'] |
|
1025 | pullrequest_title = _form['pullrequest_title'] | |
1026 | title_source_ref = source_ref.split(':', 2)[1] |
|
1026 | title_source_ref = source_ref.split(':', 2)[1] | |
1027 | if not pullrequest_title: |
|
1027 | if not pullrequest_title: | |
1028 | pullrequest_title = PullRequestModel().generate_pullrequest_title( |
|
1028 | pullrequest_title = PullRequestModel().generate_pullrequest_title( | |
1029 | source=source_repo, |
|
1029 | source=source_repo, | |
1030 | source_ref=title_source_ref, |
|
1030 | source_ref=title_source_ref, | |
1031 | target=target_repo |
|
1031 | target=target_repo | |
1032 | ) |
|
1032 | ) | |
1033 |
|
1033 | |||
1034 | description = _form['pullrequest_desc'] |
|
1034 | description = _form['pullrequest_desc'] | |
1035 | description_renderer = _form['description_renderer'] |
|
1035 | description_renderer = _form['description_renderer'] | |
1036 |
|
1036 | |||
1037 | try: |
|
1037 | try: | |
1038 | pull_request = PullRequestModel().create( |
|
1038 | pull_request = PullRequestModel().create( | |
1039 | created_by=self._rhodecode_user.user_id, |
|
1039 | created_by=self._rhodecode_user.user_id, | |
1040 | source_repo=source_repo, |
|
1040 | source_repo=source_repo, | |
1041 | source_ref=source_ref, |
|
1041 | source_ref=source_ref, | |
1042 | target_repo=target_repo, |
|
1042 | target_repo=target_repo, | |
1043 | target_ref=target_ref, |
|
1043 | target_ref=target_ref, | |
1044 | revisions=commit_ids, |
|
1044 | revisions=commit_ids, | |
1045 | common_ancestor_id=common_ancestor_id, |
|
1045 | common_ancestor_id=common_ancestor_id, | |
1046 | reviewers=reviewers, |
|
1046 | reviewers=reviewers, | |
1047 | title=pullrequest_title, |
|
1047 | title=pullrequest_title, | |
1048 | description=description, |
|
1048 | description=description, | |
1049 | description_renderer=description_renderer, |
|
1049 | description_renderer=description_renderer, | |
1050 | reviewer_data=reviewer_rules, |
|
1050 | reviewer_data=reviewer_rules, | |
1051 | auth_user=self._rhodecode_user |
|
1051 | auth_user=self._rhodecode_user | |
1052 | ) |
|
1052 | ) | |
1053 | Session().commit() |
|
1053 | Session().commit() | |
1054 |
|
1054 | |||
1055 | h.flash(_('Successfully opened new pull request'), |
|
1055 | h.flash(_('Successfully opened new pull request'), | |
1056 | category='success') |
|
1056 | category='success') | |
1057 | except Exception: |
|
1057 | except Exception: | |
1058 | msg = _('Error occurred during creation of this pull request.') |
|
1058 | msg = _('Error occurred during creation of this pull request.') | |
1059 | log.exception(msg) |
|
1059 | log.exception(msg) | |
1060 | h.flash(msg, category='error') |
|
1060 | h.flash(msg, category='error') | |
1061 |
|
1061 | |||
1062 | # copy the args back to redirect |
|
1062 | # copy the args back to redirect | |
1063 | org_query = self.request.GET.mixed() |
|
1063 | org_query = self.request.GET.mixed() | |
1064 | raise HTTPFound( |
|
1064 | raise HTTPFound( | |
1065 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
1065 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, | |
1066 | _query=org_query)) |
|
1066 | _query=org_query)) | |
1067 |
|
1067 | |||
1068 | raise HTTPFound( |
|
1068 | raise HTTPFound( | |
1069 | h.route_path('pullrequest_show', repo_name=target_repo, |
|
1069 | h.route_path('pullrequest_show', repo_name=target_repo, | |
1070 | pull_request_id=pull_request.pull_request_id)) |
|
1070 | pull_request_id=pull_request.pull_request_id)) | |
1071 |
|
1071 | |||
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='pullrequest_update', request_method='POST',
        renderer='json_ext')
    def pull_request_update(self):
        """Dispatch a pull request update posted from the PR page.

        Exactly one of three mutually exclusive operations is performed,
        selected from the POST data in this priority order:

        1. ``review_members`` present -> update the reviewer list;
        2. ``update_commits`` truthy -> refresh the PR's commit range
           (rejected while the PR is changing state);
        3. ``edit_pull_request`` truthy -> update title/description.

        Returns a JSON dict ``{'response': True, 'redirect_url': ...}``;
        ``redirect_url`` is only set when a commits update was done with
        ``force_refresh``. Raises HTTPBadRequest when no operation matched
        and HTTPForbidden when the user may not update this PR.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        _ = self.request.translate

        self.load_default_context()
        redirect_url = None

        # closed PRs are immutable
        if pull_request.is_closed():
            log.debug('update: forbidden because pull request is closed')
            msg = _(u'Cannot update closed pull requests.')
            h.flash(msg, category='error')
            return {'response': True,
                    'redirect_url': redirect_url}

        is_state_changing = pull_request.is_state_changing()

        # only owner or admin can update it
        allowed_to_update = PullRequestModel().check_user_update(
            pull_request, self._rhodecode_user)
        if allowed_to_update:
            controls = peppercorn.parse(self.request.POST.items())
            force_refresh = str2bool(self.request.POST.get('force_refresh'))

            if 'review_members' in controls:
                self._update_reviewers(
                    pull_request, controls['review_members'],
                    pull_request.reviewer_data)
            elif str2bool(self.request.POST.get('update_commits', 'false')):
                # a commits refresh is only allowed while no other state
                # transition is in progress
                if is_state_changing:
                    log.debug('commits update: forbidden because pull request is in state %s',
                              pull_request.pull_request_state)
                    msg = _(u'Cannot update pull requests commits in state other than `{}`. '
                            u'Current state is: `{}`').format(
                        PullRequest.STATE_CREATED, pull_request.pull_request_state)
                    h.flash(msg, category='error')
                    return {'response': True,
                            'redirect_url': redirect_url}

                self._update_commits(pull_request)
                if force_refresh:
                    # send the client back to the PR page with a cache-busting
                    # query argument
                    redirect_url = h.route_path(
                        'pullrequest_show', repo_name=self.db_repo_name,
                        pull_request_id=pull_request.pull_request_id,
                        _query={"force_refresh": 1})
            elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
                self._edit_pull_request(pull_request)
            else:
                raise HTTPBadRequest()

            return {'response': True,
                    'redirect_url': redirect_url}
        raise HTTPForbidden()
1134 | def _edit_pull_request(self, pull_request): |
|
1134 | def _edit_pull_request(self, pull_request): | |
1135 | _ = self.request.translate |
|
1135 | _ = self.request.translate | |
1136 |
|
1136 | |||
1137 | try: |
|
1137 | try: | |
1138 | PullRequestModel().edit( |
|
1138 | PullRequestModel().edit( | |
1139 | pull_request, |
|
1139 | pull_request, | |
1140 | self.request.POST.get('title'), |
|
1140 | self.request.POST.get('title'), | |
1141 | self.request.POST.get('description'), |
|
1141 | self.request.POST.get('description'), | |
1142 | self.request.POST.get('description_renderer'), |
|
1142 | self.request.POST.get('description_renderer'), | |
1143 | self._rhodecode_user) |
|
1143 | self._rhodecode_user) | |
1144 | except ValueError: |
|
1144 | except ValueError: | |
1145 | msg = _(u'Cannot update closed pull requests.') |
|
1145 | msg = _(u'Cannot update closed pull requests.') | |
1146 | h.flash(msg, category='error') |
|
1146 | h.flash(msg, category='error') | |
1147 | return |
|
1147 | return | |
1148 | else: |
|
1148 | else: | |
1149 | Session().commit() |
|
1149 | Session().commit() | |
1150 |
|
1150 | |||
1151 | msg = _(u'Pull request title & description updated.') |
|
1151 | msg = _(u'Pull request title & description updated.') | |
1152 | h.flash(msg, category='success') |
|
1152 | h.flash(msg, category='success') | |
1153 | return |
|
1153 | return | |
1154 |
|
1154 | |||
1155 | def _update_commits(self, pull_request): |
|
1155 | def _update_commits(self, pull_request): | |
1156 | _ = self.request.translate |
|
1156 | _ = self.request.translate | |
1157 |
|
1157 | |||
1158 | with pull_request.set_state(PullRequest.STATE_UPDATING): |
|
1158 | with pull_request.set_state(PullRequest.STATE_UPDATING): | |
1159 | resp = PullRequestModel().update_commits( |
|
1159 | resp = PullRequestModel().update_commits( | |
1160 | pull_request, self._rhodecode_db_user) |
|
1160 | pull_request, self._rhodecode_db_user) | |
1161 |
|
1161 | |||
1162 | if resp.executed: |
|
1162 | if resp.executed: | |
1163 |
|
1163 | |||
1164 | if resp.target_changed and resp.source_changed: |
|
1164 | if resp.target_changed and resp.source_changed: | |
1165 | changed = 'target and source repositories' |
|
1165 | changed = 'target and source repositories' | |
1166 | elif resp.target_changed and not resp.source_changed: |
|
1166 | elif resp.target_changed and not resp.source_changed: | |
1167 | changed = 'target repository' |
|
1167 | changed = 'target repository' | |
1168 | elif not resp.target_changed and resp.source_changed: |
|
1168 | elif not resp.target_changed and resp.source_changed: | |
1169 | changed = 'source repository' |
|
1169 | changed = 'source repository' | |
1170 | else: |
|
1170 | else: | |
1171 | changed = 'nothing' |
|
1171 | changed = 'nothing' | |
1172 |
|
1172 | |||
1173 | msg = _(u'Pull request updated to "{source_commit_id}" with ' |
|
1173 | msg = _(u'Pull request updated to "{source_commit_id}" with ' | |
1174 | u'{count_added} added, {count_removed} removed commits. ' |
|
1174 | u'{count_added} added, {count_removed} removed commits. ' | |
1175 | u'Source of changes: {change_source}') |
|
1175 | u'Source of changes: {change_source}') | |
1176 | msg = msg.format( |
|
1176 | msg = msg.format( | |
1177 | source_commit_id=pull_request.source_ref_parts.commit_id, |
|
1177 | source_commit_id=pull_request.source_ref_parts.commit_id, | |
1178 | count_added=len(resp.changes.added), |
|
1178 | count_added=len(resp.changes.added), | |
1179 | count_removed=len(resp.changes.removed), |
|
1179 | count_removed=len(resp.changes.removed), | |
1180 | change_source=changed) |
|
1180 | change_source=changed) | |
1181 | h.flash(msg, category='success') |
|
1181 | h.flash(msg, category='success') | |
1182 |
|
1182 | |||
1183 | channel = '/repo${}$/pr/{}'.format( |
|
1183 | channel = '/repo${}$/pr/{}'.format( | |
1184 | pull_request.target_repo.repo_name, pull_request.pull_request_id) |
|
1184 | pull_request.target_repo.repo_name, pull_request.pull_request_id) | |
1185 | message = msg + ( |
|
1185 | message = msg + ( | |
1186 | ' - <a onclick="window.location.reload()">' |
|
1186 | ' - <a onclick="window.location.reload()">' | |
1187 | '<strong>{}</strong></a>'.format(_('Reload page'))) |
|
1187 | '<strong>{}</strong></a>'.format(_('Reload page'))) | |
1188 | channelstream.post_message( |
|
1188 | channelstream.post_message( | |
1189 | channel, message, self._rhodecode_user.username, |
|
1189 | channel, message, self._rhodecode_user.username, | |
1190 | registry=self.request.registry) |
|
1190 | registry=self.request.registry) | |
1191 | else: |
|
1191 | else: | |
1192 | msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason] |
|
1192 | msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason] | |
1193 | warning_reasons = [ |
|
1193 | warning_reasons = [ | |
1194 | UpdateFailureReason.NO_CHANGE, |
|
1194 | UpdateFailureReason.NO_CHANGE, | |
1195 | UpdateFailureReason.WRONG_REF_TYPE, |
|
1195 | UpdateFailureReason.WRONG_REF_TYPE, | |
1196 | ] |
|
1196 | ] | |
1197 | category = 'warning' if resp.reason in warning_reasons else 'error' |
|
1197 | category = 'warning' if resp.reason in warning_reasons else 'error' | |
1198 | h.flash(msg, category=category) |
|
1198 | h.flash(msg, category=category) | |
1199 |
|
1199 | |||
1200 | @LoginRequired() |
|
1200 | @LoginRequired() | |
1201 | @NotAnonymous() |
|
1201 | @NotAnonymous() | |
1202 | @HasRepoPermissionAnyDecorator( |
|
1202 | @HasRepoPermissionAnyDecorator( | |
1203 | 'repository.read', 'repository.write', 'repository.admin') |
|
1203 | 'repository.read', 'repository.write', 'repository.admin') | |
1204 | @CSRFRequired() |
|
1204 | @CSRFRequired() | |
1205 | @view_config( |
|
1205 | @view_config( | |
1206 | route_name='pullrequest_merge', request_method='POST', |
|
1206 | route_name='pullrequest_merge', request_method='POST', | |
1207 | renderer='json_ext') |
|
1207 | renderer='json_ext') | |
1208 | def pull_request_merge(self): |
|
1208 | def pull_request_merge(self): | |
1209 | """ |
|
1209 | """ | |
1210 | Merge will perform a server-side merge of the specified |
|
1210 | Merge will perform a server-side merge of the specified | |
1211 | pull request, if the pull request is approved and mergeable. |
|
1211 | pull request, if the pull request is approved and mergeable. | |
1212 | After successful merging, the pull request is automatically |
|
1212 | After successful merging, the pull request is automatically | |
1213 | closed, with a relevant comment. |
|
1213 | closed, with a relevant comment. | |
1214 | """ |
|
1214 | """ | |
1215 | pull_request = PullRequest.get_or_404( |
|
1215 | pull_request = PullRequest.get_or_404( | |
1216 | self.request.matchdict['pull_request_id']) |
|
1216 | self.request.matchdict['pull_request_id']) | |
1217 | _ = self.request.translate |
|
1217 | _ = self.request.translate | |
1218 |
|
1218 | |||
1219 | if pull_request.is_state_changing(): |
|
1219 | if pull_request.is_state_changing(): | |
1220 | log.debug('show: forbidden because pull request is in state %s', |
|
1220 | log.debug('show: forbidden because pull request is in state %s', | |
1221 | pull_request.pull_request_state) |
|
1221 | pull_request.pull_request_state) | |
1222 | msg = _(u'Cannot merge pull requests in state other than `{}`. ' |
|
1222 | msg = _(u'Cannot merge pull requests in state other than `{}`. ' | |
1223 | u'Current state is: `{}`').format(PullRequest.STATE_CREATED, |
|
1223 | u'Current state is: `{}`').format(PullRequest.STATE_CREATED, | |
1224 | pull_request.pull_request_state) |
|
1224 | pull_request.pull_request_state) | |
1225 | h.flash(msg, category='error') |
|
1225 | h.flash(msg, category='error') | |
1226 | raise HTTPFound( |
|
1226 | raise HTTPFound( | |
1227 | h.route_path('pullrequest_show', |
|
1227 | h.route_path('pullrequest_show', | |
1228 | repo_name=pull_request.target_repo.repo_name, |
|
1228 | repo_name=pull_request.target_repo.repo_name, | |
1229 | pull_request_id=pull_request.pull_request_id)) |
|
1229 | pull_request_id=pull_request.pull_request_id)) | |
1230 |
|
1230 | |||
1231 | self.load_default_context() |
|
1231 | self.load_default_context() | |
1232 |
|
1232 | |||
1233 | with pull_request.set_state(PullRequest.STATE_UPDATING): |
|
1233 | with pull_request.set_state(PullRequest.STATE_UPDATING): | |
1234 | check = MergeCheck.validate( |
|
1234 | check = MergeCheck.validate( | |
1235 | pull_request, auth_user=self._rhodecode_user, |
|
1235 | pull_request, auth_user=self._rhodecode_user, | |
1236 | translator=self.request.translate) |
|
1236 | translator=self.request.translate) | |
1237 | merge_possible = not check.failed |
|
1237 | merge_possible = not check.failed | |
1238 |
|
1238 | |||
1239 | for err_type, error_msg in check.errors: |
|
1239 | for err_type, error_msg in check.errors: | |
1240 | h.flash(error_msg, category=err_type) |
|
1240 | h.flash(error_msg, category=err_type) | |
1241 |
|
1241 | |||
1242 | if merge_possible: |
|
1242 | if merge_possible: | |
1243 | log.debug("Pre-conditions checked, trying to merge.") |
|
1243 | log.debug("Pre-conditions checked, trying to merge.") | |
1244 | extras = vcs_operation_context( |
|
1244 | extras = vcs_operation_context( | |
1245 | self.request.environ, repo_name=pull_request.target_repo.repo_name, |
|
1245 | self.request.environ, repo_name=pull_request.target_repo.repo_name, | |
1246 | username=self._rhodecode_db_user.username, action='push', |
|
1246 | username=self._rhodecode_db_user.username, action='push', | |
1247 | scm=pull_request.target_repo.repo_type) |
|
1247 | scm=pull_request.target_repo.repo_type) | |
1248 | with pull_request.set_state(PullRequest.STATE_UPDATING): |
|
1248 | with pull_request.set_state(PullRequest.STATE_UPDATING): | |
1249 | self._merge_pull_request( |
|
1249 | self._merge_pull_request( | |
1250 | pull_request, self._rhodecode_db_user, extras) |
|
1250 | pull_request, self._rhodecode_db_user, extras) | |
1251 | else: |
|
1251 | else: | |
1252 | log.debug("Pre-conditions failed, NOT merging.") |
|
1252 | log.debug("Pre-conditions failed, NOT merging.") | |
1253 |
|
1253 | |||
1254 | raise HTTPFound( |
|
1254 | raise HTTPFound( | |
1255 | h.route_path('pullrequest_show', |
|
1255 | h.route_path('pullrequest_show', | |
1256 | repo_name=pull_request.target_repo.repo_name, |
|
1256 | repo_name=pull_request.target_repo.repo_name, | |
1257 | pull_request_id=pull_request.pull_request_id)) |
|
1257 | pull_request_id=pull_request.pull_request_id)) | |
1258 |
|
1258 | |||
1259 | def _merge_pull_request(self, pull_request, user, extras): |
|
1259 | def _merge_pull_request(self, pull_request, user, extras): | |
1260 | _ = self.request.translate |
|
1260 | _ = self.request.translate | |
1261 | merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras) |
|
1261 | merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras) | |
1262 |
|
1262 | |||
1263 | if merge_resp.executed: |
|
1263 | if merge_resp.executed: | |
1264 | log.debug("The merge was successful, closing the pull request.") |
|
1264 | log.debug("The merge was successful, closing the pull request.") | |
1265 | PullRequestModel().close_pull_request( |
|
1265 | PullRequestModel().close_pull_request( | |
1266 | pull_request.pull_request_id, user) |
|
1266 | pull_request.pull_request_id, user) | |
1267 | Session().commit() |
|
1267 | Session().commit() | |
1268 | msg = _('Pull request was successfully merged and closed.') |
|
1268 | msg = _('Pull request was successfully merged and closed.') | |
1269 | h.flash(msg, category='success') |
|
1269 | h.flash(msg, category='success') | |
1270 | else: |
|
1270 | else: | |
1271 | log.debug( |
|
1271 | log.debug( | |
1272 | "The merge was not successful. Merge response: %s", merge_resp) |
|
1272 | "The merge was not successful. Merge response: %s", merge_resp) | |
1273 | msg = merge_resp.merge_status_message |
|
1273 | msg = merge_resp.merge_status_message | |
1274 | h.flash(msg, category='error') |
|
1274 | h.flash(msg, category='error') | |
1275 |
|
1275 | |||
1276 | def _update_reviewers(self, pull_request, review_members, reviewer_rules): |
|
1276 | def _update_reviewers(self, pull_request, review_members, reviewer_rules): | |
1277 | _ = self.request.translate |
|
1277 | _ = self.request.translate | |
1278 |
|
1278 | |||
1279 | get_default_reviewers_data, validate_default_reviewers = \ |
|
1279 | get_default_reviewers_data, validate_default_reviewers = \ | |
1280 | PullRequestModel().get_reviewer_functions() |
|
1280 | PullRequestModel().get_reviewer_functions() | |
1281 |
|
1281 | |||
1282 | try: |
|
1282 | try: | |
1283 | reviewers = validate_default_reviewers(review_members, reviewer_rules) |
|
1283 | reviewers = validate_default_reviewers(review_members, reviewer_rules) | |
1284 | except ValueError as e: |
|
1284 | except ValueError as e: | |
1285 | log.error('Reviewers Validation: {}'.format(e)) |
|
1285 | log.error('Reviewers Validation: {}'.format(e)) | |
1286 | h.flash(e, category='error') |
|
1286 | h.flash(e, category='error') | |
1287 | return |
|
1287 | return | |
1288 |
|
1288 | |||
1289 | old_calculated_status = pull_request.calculated_review_status() |
|
1289 | old_calculated_status = pull_request.calculated_review_status() | |
1290 | PullRequestModel().update_reviewers( |
|
1290 | PullRequestModel().update_reviewers( | |
1291 | pull_request, reviewers, self._rhodecode_user) |
|
1291 | pull_request, reviewers, self._rhodecode_user) | |
1292 | h.flash(_('Pull request reviewers updated.'), category='success') |
|
1292 | h.flash(_('Pull request reviewers updated.'), category='success') | |
1293 | Session().commit() |
|
1293 | Session().commit() | |
1294 |
|
1294 | |||
1295 | # trigger status changed if change in reviewers changes the status |
|
1295 | # trigger status changed if change in reviewers changes the status | |
1296 | calculated_status = pull_request.calculated_review_status() |
|
1296 | calculated_status = pull_request.calculated_review_status() | |
1297 | if old_calculated_status != calculated_status: |
|
1297 | if old_calculated_status != calculated_status: | |
1298 | PullRequestModel().trigger_pull_request_hook( |
|
1298 | PullRequestModel().trigger_pull_request_hook( | |
1299 | pull_request, self._rhodecode_user, 'review_status_change', |
|
1299 | pull_request, self._rhodecode_user, 'review_status_change', | |
1300 | data={'status': calculated_status}) |
|
1300 | data={'status': calculated_status}) | |
1301 |
|
1301 | |||
1302 | @LoginRequired() |
|
1302 | @LoginRequired() | |
1303 | @NotAnonymous() |
|
1303 | @NotAnonymous() | |
1304 | @HasRepoPermissionAnyDecorator( |
|
1304 | @HasRepoPermissionAnyDecorator( | |
1305 | 'repository.read', 'repository.write', 'repository.admin') |
|
1305 | 'repository.read', 'repository.write', 'repository.admin') | |
1306 | @CSRFRequired() |
|
1306 | @CSRFRequired() | |
1307 | @view_config( |
|
1307 | @view_config( | |
1308 | route_name='pullrequest_delete', request_method='POST', |
|
1308 | route_name='pullrequest_delete', request_method='POST', | |
1309 | renderer='json_ext') |
|
1309 | renderer='json_ext') | |
1310 | def pull_request_delete(self): |
|
1310 | def pull_request_delete(self): | |
1311 | _ = self.request.translate |
|
1311 | _ = self.request.translate | |
1312 |
|
1312 | |||
1313 | pull_request = PullRequest.get_or_404( |
|
1313 | pull_request = PullRequest.get_or_404( | |
1314 | self.request.matchdict['pull_request_id']) |
|
1314 | self.request.matchdict['pull_request_id']) | |
1315 | self.load_default_context() |
|
1315 | self.load_default_context() | |
1316 |
|
1316 | |||
1317 | pr_closed = pull_request.is_closed() |
|
1317 | pr_closed = pull_request.is_closed() | |
1318 | allowed_to_delete = PullRequestModel().check_user_delete( |
|
1318 | allowed_to_delete = PullRequestModel().check_user_delete( | |
1319 | pull_request, self._rhodecode_user) and not pr_closed |
|
1319 | pull_request, self._rhodecode_user) and not pr_closed | |
1320 |
|
1320 | |||
1321 | # only owner can delete it ! |
|
1321 | # only owner can delete it ! | |
1322 | if allowed_to_delete: |
|
1322 | if allowed_to_delete: | |
1323 | PullRequestModel().delete(pull_request, self._rhodecode_user) |
|
1323 | PullRequestModel().delete(pull_request, self._rhodecode_user) | |
1324 | Session().commit() |
|
1324 | Session().commit() | |
1325 | h.flash(_('Successfully deleted pull request'), |
|
1325 | h.flash(_('Successfully deleted pull request'), | |
1326 | category='success') |
|
1326 | category='success') | |
1327 | raise HTTPFound(h.route_path('pullrequest_show_all', |
|
1327 | raise HTTPFound(h.route_path('pullrequest_show_all', | |
1328 | repo_name=self.db_repo_name)) |
|
1328 | repo_name=self.db_repo_name)) | |
1329 |
|
1329 | |||
1330 | log.warning('user %s tried to delete pull request without access', |
|
1330 | log.warning('user %s tried to delete pull request without access', | |
1331 | self._rhodecode_user) |
|
1331 | self._rhodecode_user) | |
1332 | raise HTTPNotFound() |
|
1332 | raise HTTPNotFound() | |
1333 |
|
1333 | |||
1334 | @LoginRequired() |
|
1334 | @LoginRequired() | |
1335 | @NotAnonymous() |
|
1335 | @NotAnonymous() | |
1336 | @HasRepoPermissionAnyDecorator( |
|
1336 | @HasRepoPermissionAnyDecorator( | |
1337 | 'repository.read', 'repository.write', 'repository.admin') |
|
1337 | 'repository.read', 'repository.write', 'repository.admin') | |
1338 | @CSRFRequired() |
|
1338 | @CSRFRequired() | |
1339 | @view_config( |
|
1339 | @view_config( | |
1340 | route_name='pullrequest_comment_create', request_method='POST', |
|
1340 | route_name='pullrequest_comment_create', request_method='POST', | |
1341 | renderer='json_ext') |
|
1341 | renderer='json_ext') | |
1342 | def pull_request_comment_create(self): |
|
1342 | def pull_request_comment_create(self): | |
1343 | _ = self.request.translate |
|
1343 | _ = self.request.translate | |
1344 |
|
1344 | |||
1345 | pull_request = PullRequest.get_or_404( |
|
1345 | pull_request = PullRequest.get_or_404( | |
1346 | self.request.matchdict['pull_request_id']) |
|
1346 | self.request.matchdict['pull_request_id']) | |
1347 | pull_request_id = pull_request.pull_request_id |
|
1347 | pull_request_id = pull_request.pull_request_id | |
1348 |
|
1348 | |||
1349 | if pull_request.is_closed(): |
|
1349 | if pull_request.is_closed(): | |
1350 | log.debug('comment: forbidden because pull request is closed') |
|
1350 | log.debug('comment: forbidden because pull request is closed') | |
1351 | raise HTTPForbidden() |
|
1351 | raise HTTPForbidden() | |
1352 |
|
1352 | |||
1353 | allowed_to_comment = PullRequestModel().check_user_comment( |
|
1353 | allowed_to_comment = PullRequestModel().check_user_comment( | |
1354 | pull_request, self._rhodecode_user) |
|
1354 | pull_request, self._rhodecode_user) | |
1355 | if not allowed_to_comment: |
|
1355 | if not allowed_to_comment: | |
1356 | log.debug( |
|
1356 | log.debug( | |
1357 | 'comment: forbidden because pull request is from forbidden repo') |
|
1357 | 'comment: forbidden because pull request is from forbidden repo') | |
1358 | raise HTTPForbidden() |
|
1358 | raise HTTPForbidden() | |
1359 |
|
1359 | |||
1360 | c = self.load_default_context() |
|
1360 | c = self.load_default_context() | |
1361 |
|
1361 | |||
1362 | status = self.request.POST.get('changeset_status', None) |
|
1362 | status = self.request.POST.get('changeset_status', None) | |
1363 | text = self.request.POST.get('text') |
|
1363 | text = self.request.POST.get('text') | |
1364 | comment_type = self.request.POST.get('comment_type') |
|
1364 | comment_type = self.request.POST.get('comment_type') | |
1365 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) |
|
1365 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) | |
1366 | close_pull_request = self.request.POST.get('close_pull_request') |
|
1366 | close_pull_request = self.request.POST.get('close_pull_request') | |
1367 |
|
1367 | |||
1368 | # the logic here should work like following, if we submit close |
|
1368 | # the logic here should work like following, if we submit close | |
1369 | # pr comment, use `close_pull_request_with_comment` function |
|
1369 | # pr comment, use `close_pull_request_with_comment` function | |
1370 | # else handle regular comment logic |
|
1370 | # else handle regular comment logic | |
1371 |
|
1371 | |||
1372 | if close_pull_request: |
|
1372 | if close_pull_request: | |
1373 | # only owner or admin or person with write permissions |
|
1373 | # only owner or admin or person with write permissions | |
1374 | allowed_to_close = PullRequestModel().check_user_update( |
|
1374 | allowed_to_close = PullRequestModel().check_user_update( | |
1375 | pull_request, self._rhodecode_user) |
|
1375 | pull_request, self._rhodecode_user) | |
1376 | if not allowed_to_close: |
|
1376 | if not allowed_to_close: | |
1377 | log.debug('comment: forbidden because not allowed to close ' |
|
1377 | log.debug('comment: forbidden because not allowed to close ' | |
1378 | 'pull request %s', pull_request_id) |
|
1378 | 'pull request %s', pull_request_id) | |
1379 | raise HTTPForbidden() |
|
1379 | raise HTTPForbidden() | |
1380 |
|
1380 | |||
1381 | # This also triggers `review_status_change` |
|
1381 | # This also triggers `review_status_change` | |
1382 | comment, status = PullRequestModel().close_pull_request_with_comment( |
|
1382 | comment, status = PullRequestModel().close_pull_request_with_comment( | |
1383 | pull_request, self._rhodecode_user, self.db_repo, message=text, |
|
1383 | pull_request, self._rhodecode_user, self.db_repo, message=text, | |
1384 | auth_user=self._rhodecode_user) |
|
1384 | auth_user=self._rhodecode_user) | |
1385 | Session().flush() |
|
1385 | Session().flush() | |
1386 |
|
1386 | |||
1387 | PullRequestModel().trigger_pull_request_hook( |
|
1387 | PullRequestModel().trigger_pull_request_hook( | |
1388 | pull_request, self._rhodecode_user, 'comment', |
|
1388 | pull_request, self._rhodecode_user, 'comment', | |
1389 | data={'comment': comment}) |
|
1389 | data={'comment': comment}) | |
1390 |
|
1390 | |||
1391 | else: |
|
1391 | else: | |
1392 | # regular comment case, could be inline, or one with status. |
|
1392 | # regular comment case, could be inline, or one with status. | |
1393 | # for that one we check also permissions |
|
1393 | # for that one we check also permissions | |
1394 |
|
1394 | |||
1395 | allowed_to_change_status = PullRequestModel().check_user_change_status( |
|
1395 | allowed_to_change_status = PullRequestModel().check_user_change_status( | |
1396 | pull_request, self._rhodecode_user) |
|
1396 | pull_request, self._rhodecode_user) | |
1397 |
|
1397 | |||
1398 | if status and allowed_to_change_status: |
|
1398 | if status and allowed_to_change_status: | |
1399 | message = (_('Status change %(transition_icon)s %(status)s') |
|
1399 | message = (_('Status change %(transition_icon)s %(status)s') | |
1400 | % {'transition_icon': '>', |
|
1400 | % {'transition_icon': '>', | |
1401 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
1401 | 'status': ChangesetStatus.get_status_lbl(status)}) | |
1402 | text = text or message |
|
1402 | text = text or message | |
1403 |
|
1403 | |||
1404 | comment = CommentsModel().create( |
|
1404 | comment = CommentsModel().create( | |
1405 | text=text, |
|
1405 | text=text, | |
1406 | repo=self.db_repo.repo_id, |
|
1406 | repo=self.db_repo.repo_id, | |
1407 | user=self._rhodecode_user.user_id, |
|
1407 | user=self._rhodecode_user.user_id, | |
1408 | pull_request=pull_request, |
|
1408 | pull_request=pull_request, | |
1409 | f_path=self.request.POST.get('f_path'), |
|
1409 | f_path=self.request.POST.get('f_path'), | |
1410 | line_no=self.request.POST.get('line'), |
|
1410 | line_no=self.request.POST.get('line'), | |
1411 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
1411 | status_change=(ChangesetStatus.get_status_lbl(status) | |
1412 | if status and allowed_to_change_status else None), |
|
1412 | if status and allowed_to_change_status else None), | |
1413 | status_change_type=(status |
|
1413 | status_change_type=(status | |
1414 | if status and allowed_to_change_status else None), |
|
1414 | if status and allowed_to_change_status else None), | |
1415 | comment_type=comment_type, |
|
1415 | comment_type=comment_type, | |
1416 | resolves_comment_id=resolves_comment_id, |
|
1416 | resolves_comment_id=resolves_comment_id, | |
1417 | auth_user=self._rhodecode_user |
|
1417 | auth_user=self._rhodecode_user | |
1418 | ) |
|
1418 | ) | |
1419 |
|
1419 | |||
1420 | if allowed_to_change_status: |
|
1420 | if allowed_to_change_status: | |
1421 | # calculate old status before we change it |
|
1421 | # calculate old status before we change it | |
1422 | old_calculated_status = pull_request.calculated_review_status() |
|
1422 | old_calculated_status = pull_request.calculated_review_status() | |
1423 |
|
1423 | |||
1424 | # get status if set ! |
|
1424 | # get status if set ! | |
1425 | if status: |
|
1425 | if status: | |
1426 | ChangesetStatusModel().set_status( |
|
1426 | ChangesetStatusModel().set_status( | |
1427 | self.db_repo.repo_id, |
|
1427 | self.db_repo.repo_id, | |
1428 | status, |
|
1428 | status, | |
1429 | self._rhodecode_user.user_id, |
|
1429 | self._rhodecode_user.user_id, | |
1430 | comment, |
|
1430 | comment, | |
1431 | pull_request=pull_request |
|
1431 | pull_request=pull_request | |
1432 | ) |
|
1432 | ) | |
1433 |
|
1433 | |||
1434 | Session().flush() |
|
1434 | Session().flush() | |
1435 | # this is somehow required to get access to some relationship |
|
1435 | # this is somehow required to get access to some relationship | |
1436 | # loaded on comment |
|
1436 | # loaded on comment | |
1437 | Session().refresh(comment) |
|
1437 | Session().refresh(comment) | |
1438 |
|
1438 | |||
1439 | PullRequestModel().trigger_pull_request_hook( |
|
1439 | PullRequestModel().trigger_pull_request_hook( | |
1440 | pull_request, self._rhodecode_user, 'comment', |
|
1440 | pull_request, self._rhodecode_user, 'comment', | |
1441 | data={'comment': comment}) |
|
1441 | data={'comment': comment}) | |
1442 |
|
1442 | |||
1443 | # we now calculate the status of pull request, and based on that |
|
1443 | # we now calculate the status of pull request, and based on that | |
1444 | # calculation we set the commits status |
|
1444 | # calculation we set the commits status | |
1445 | calculated_status = pull_request.calculated_review_status() |
|
1445 | calculated_status = pull_request.calculated_review_status() | |
1446 | if old_calculated_status != calculated_status: |
|
1446 | if old_calculated_status != calculated_status: | |
1447 | PullRequestModel().trigger_pull_request_hook( |
|
1447 | PullRequestModel().trigger_pull_request_hook( | |
1448 | pull_request, self._rhodecode_user, 'review_status_change', |
|
1448 | pull_request, self._rhodecode_user, 'review_status_change', | |
1449 | data={'status': calculated_status}) |
|
1449 | data={'status': calculated_status}) | |
1450 |
|
1450 | |||
1451 | Session().commit() |
|
1451 | Session().commit() | |
1452 |
|
1452 | |||
1453 | data = { |
|
1453 | data = { | |
1454 | 'target_id': h.safeid(h.safe_unicode( |
|
1454 | 'target_id': h.safeid(h.safe_unicode( | |
1455 | self.request.POST.get('f_path'))), |
|
1455 | self.request.POST.get('f_path'))), | |
1456 | } |
|
1456 | } | |
1457 | if comment: |
|
1457 | if comment: | |
1458 | c.co = comment |
|
1458 | c.co = comment | |
1459 | rendered_comment = render( |
|
1459 | rendered_comment = render( | |
1460 | 'rhodecode:templates/changeset/changeset_comment_block.mako', |
|
1460 | 'rhodecode:templates/changeset/changeset_comment_block.mako', | |
1461 | self._get_template_context(c), self.request) |
|
1461 | self._get_template_context(c), self.request) | |
1462 |
|
1462 | |||
1463 | data.update(comment.get_dict()) |
|
1463 | data.update(comment.get_dict()) | |
1464 | data.update({'rendered_text': rendered_comment}) |
|
1464 | data.update({'rendered_text': rendered_comment}) | |
1465 |
|
1465 | |||
1466 | return data |
|
1466 | return data | |
1467 |
|
1467 | |||
1468 | @LoginRequired() |
|
1468 | @LoginRequired() | |
1469 | @NotAnonymous() |
|
1469 | @NotAnonymous() | |
1470 | @HasRepoPermissionAnyDecorator( |
|
1470 | @HasRepoPermissionAnyDecorator( | |
1471 | 'repository.read', 'repository.write', 'repository.admin') |
|
1471 | 'repository.read', 'repository.write', 'repository.admin') | |
1472 | @CSRFRequired() |
|
1472 | @CSRFRequired() | |
1473 | @view_config( |
|
1473 | @view_config( | |
1474 | route_name='pullrequest_comment_delete', request_method='POST', |
|
1474 | route_name='pullrequest_comment_delete', request_method='POST', | |
1475 | renderer='json_ext') |
|
1475 | renderer='json_ext') | |
1476 | def pull_request_comment_delete(self): |
|
1476 | def pull_request_comment_delete(self): | |
1477 | pull_request = PullRequest.get_or_404( |
|
1477 | pull_request = PullRequest.get_or_404( | |
1478 | self.request.matchdict['pull_request_id']) |
|
1478 | self.request.matchdict['pull_request_id']) | |
1479 |
|
1479 | |||
1480 | comment = ChangesetComment.get_or_404( |
|
1480 | comment = ChangesetComment.get_or_404( | |
1481 | self.request.matchdict['comment_id']) |
|
1481 | self.request.matchdict['comment_id']) | |
1482 | comment_id = comment.comment_id |
|
1482 | comment_id = comment.comment_id | |
1483 |
|
1483 | |||
1484 | if comment.immutable: |
|
1484 | if comment.immutable: | |
1485 | # don't allow deleting comments that are immutable |
|
1485 | # don't allow deleting comments that are immutable | |
1486 | raise HTTPForbidden() |
|
1486 | raise HTTPForbidden() | |
1487 |
|
1487 | |||
1488 | if pull_request.is_closed(): |
|
1488 | if pull_request.is_closed(): | |
1489 | log.debug('comment: forbidden because pull request is closed') |
|
1489 | log.debug('comment: forbidden because pull request is closed') | |
1490 | raise HTTPForbidden() |
|
1490 | raise HTTPForbidden() | |
1491 |
|
1491 | |||
1492 | if not comment: |
|
1492 | if not comment: | |
1493 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
1493 | log.debug('Comment with id:%s not found, skipping', comment_id) | |
1494 | # comment already deleted in another call probably |
|
1494 | # comment already deleted in another call probably | |
1495 | return True |
|
1495 | return True | |
1496 |
|
1496 | |||
1497 | if comment.pull_request.is_closed(): |
|
1497 | if comment.pull_request.is_closed(): | |
1498 | # don't allow deleting comments on closed pull request |
|
1498 | # don't allow deleting comments on closed pull request | |
1499 | raise HTTPForbidden() |
|
1499 | raise HTTPForbidden() | |
1500 |
|
1500 | |||
1501 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
1501 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) | |
1502 | super_admin = h.HasPermissionAny('hg.admin')() |
|
1502 | super_admin = h.HasPermissionAny('hg.admin')() | |
1503 | comment_owner = comment.author.user_id == self._rhodecode_user.user_id |
|
1503 | comment_owner = comment.author.user_id == self._rhodecode_user.user_id | |
1504 | is_repo_comment = comment.repo.repo_name == self.db_repo_name |
|
1504 | is_repo_comment = comment.repo.repo_name == self.db_repo_name | |
1505 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
1505 | comment_repo_admin = is_repo_admin and is_repo_comment | |
1506 |
|
1506 | |||
1507 | if super_admin or comment_owner or comment_repo_admin: |
|
1507 | if super_admin or comment_owner or comment_repo_admin: | |
1508 | old_calculated_status = comment.pull_request.calculated_review_status() |
|
1508 | old_calculated_status = comment.pull_request.calculated_review_status() | |
1509 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) |
|
1509 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) | |
1510 | Session().commit() |
|
1510 | Session().commit() | |
1511 | calculated_status = comment.pull_request.calculated_review_status() |
|
1511 | calculated_status = comment.pull_request.calculated_review_status() | |
1512 | if old_calculated_status != calculated_status: |
|
1512 | if old_calculated_status != calculated_status: | |
1513 | PullRequestModel().trigger_pull_request_hook( |
|
1513 | PullRequestModel().trigger_pull_request_hook( | |
1514 | comment.pull_request, self._rhodecode_user, 'review_status_change', |
|
1514 | comment.pull_request, self._rhodecode_user, 'review_status_change', | |
1515 | data={'status': calculated_status}) |
|
1515 | data={'status': calculated_status}) | |
1516 | return True |
|
1516 | return True | |
1517 | else: |
|
1517 | else: | |
1518 | log.warning('No permissions for user %s to delete comment_id: %s', |
|
1518 | log.warning('No permissions for user %s to delete comment_id: %s', | |
1519 | self._rhodecode_db_user, comment_id) |
|
1519 | self._rhodecode_db_user, comment_id) | |
1520 | raise HTTPNotFound() |
|
1520 | raise HTTPNotFound() | |
|
1521 | ||||
|
1522 | @LoginRequired() | |||
|
1523 | @NotAnonymous() | |||
|
1524 | @HasRepoPermissionAnyDecorator( | |||
|
1525 | 'repository.read', 'repository.write', 'repository.admin') | |||
|
1526 | @CSRFRequired() | |||
|
1527 | @view_config( | |||
|
1528 | route_name='pullrequest_comment_edit', request_method='POST', | |||
|
1529 | renderer='json_ext') | |||
|
1530 | def pull_request_comment_edit(self): | |||
|
1531 | pull_request = PullRequest.get_or_404( | |||
|
1532 | self.request.matchdict['pull_request_id'] | |||
|
1533 | ) | |||
|
1534 | comment = ChangesetComment.get_or_404( | |||
|
1535 | self.request.matchdict['comment_id'] | |||
|
1536 | ) | |||
|
1537 | comment_id = comment.comment_id | |||
|
1538 | ||||
|
1539 | if comment.immutable: | |||
|
1540 | # don't allow deleting comments that are immutable | |||
|
1541 | raise HTTPForbidden() | |||
|
1542 | ||||
|
1543 | if pull_request.is_closed(): | |||
|
1544 | log.debug('comment: forbidden because pull request is closed') | |||
|
1545 | raise HTTPForbidden() | |||
|
1546 | ||||
|
1547 | if not comment: | |||
|
1548 | log.debug('Comment with id:%s not found, skipping', comment_id) | |||
|
1549 | # comment already deleted in another call probably | |||
|
1550 | return True | |||
|
1551 | ||||
|
1552 | if comment.pull_request.is_closed(): | |||
|
1553 | # don't allow deleting comments on closed pull request | |||
|
1554 | raise HTTPForbidden() | |||
|
1555 | ||||
|
1556 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) | |||
|
1557 | super_admin = h.HasPermissionAny('hg.admin')() | |||
|
1558 | comment_owner = comment.author.user_id == self._rhodecode_user.user_id | |||
|
1559 | is_repo_comment = comment.repo.repo_name == self.db_repo_name | |||
|
1560 | comment_repo_admin = is_repo_admin and is_repo_comment | |||
|
1561 | ||||
|
1562 | if super_admin or comment_owner or comment_repo_admin: | |||
|
1563 | text = self.request.POST.get('text') | |||
|
1564 | version = self.request.POST.get('version') | |||
|
1565 | if text == comment.text: | |||
|
1566 | log.warning( | |||
|
1567 | 'Comment(PR): ' | |||
|
1568 | 'Trying to create new version ' | |||
|
1569 | 'of existing comment {}'.format( | |||
|
1570 | comment_id, | |||
|
1571 | ) | |||
|
1572 | ) | |||
|
1573 | raise HTTPNotFound() | |||
|
1574 | if version.isdigit(): | |||
|
1575 | version = int(version) | |||
|
1576 | else: | |||
|
1577 | log.warning( | |||
|
1578 | 'Comment(PR): Wrong version type {} {} ' | |||
|
1579 | 'for comment {}'.format( | |||
|
1580 | version, | |||
|
1581 | type(version), | |||
|
1582 | comment_id, | |||
|
1583 | ) | |||
|
1584 | ) | |||
|
1585 | raise HTTPNotFound() | |||
|
1586 | ||||
|
1587 | comment_history = CommentsModel().edit( | |||
|
1588 | comment_id=comment_id, | |||
|
1589 | text=text, | |||
|
1590 | auth_user=self._rhodecode_user, | |||
|
1591 | version=version, | |||
|
1592 | ) | |||
|
1593 | if not comment_history: | |||
|
1594 | raise HTTPNotFound() | |||
|
1595 | Session().commit() | |||
|
1596 | return { | |||
|
1597 | 'comment_history_id': comment_history.comment_history_id, | |||
|
1598 | 'comment_id': comment.comment_id, | |||
|
1599 | 'comment_version': comment_history.version, | |||
|
1600 | } | |||
|
1601 | else: | |||
|
1602 | log.warning( | |||
|
1603 | 'No permissions for user {} to edit comment_id: {}'.format( | |||
|
1604 | self._rhodecode_db_user, comment_id | |||
|
1605 | ) | |||
|
1606 | ) | |||
|
1607 | raise HTTPNotFound() |
@@ -1,293 +1,295 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2017-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2017-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 | import datetime |
|
22 | import datetime | |
23 |
|
23 | |||
24 | from rhodecode.lib.jsonalchemy import JsonRaw |
|
24 | from rhodecode.lib.jsonalchemy import JsonRaw | |
25 | from rhodecode.model import meta |
|
25 | from rhodecode.model import meta | |
26 | from rhodecode.model.db import User, UserLog, Repository |
|
26 | from rhodecode.model.db import User, UserLog, Repository | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | log = logging.getLogger(__name__) |
|
29 | log = logging.getLogger(__name__) | |
30 |
|
30 | |||
31 | # action as key, and expected action_data as value |
|
31 | # action as key, and expected action_data as value | |
32 | ACTIONS_V1 = { |
|
32 | ACTIONS_V1 = { | |
33 | 'user.login.success': {'user_agent': ''}, |
|
33 | 'user.login.success': {'user_agent': ''}, | |
34 | 'user.login.failure': {'user_agent': ''}, |
|
34 | 'user.login.failure': {'user_agent': ''}, | |
35 | 'user.logout': {'user_agent': ''}, |
|
35 | 'user.logout': {'user_agent': ''}, | |
36 | 'user.register': {}, |
|
36 | 'user.register': {}, | |
37 | 'user.password.reset_request': {}, |
|
37 | 'user.password.reset_request': {}, | |
38 | 'user.push': {'user_agent': '', 'commit_ids': []}, |
|
38 | 'user.push': {'user_agent': '', 'commit_ids': []}, | |
39 | 'user.pull': {'user_agent': ''}, |
|
39 | 'user.pull': {'user_agent': ''}, | |
40 |
|
40 | |||
41 | 'user.create': {'data': {}}, |
|
41 | 'user.create': {'data': {}}, | |
42 | 'user.delete': {'old_data': {}}, |
|
42 | 'user.delete': {'old_data': {}}, | |
43 | 'user.edit': {'old_data': {}}, |
|
43 | 'user.edit': {'old_data': {}}, | |
44 | 'user.edit.permissions': {}, |
|
44 | 'user.edit.permissions': {}, | |
45 | 'user.edit.ip.add': {'ip': {}, 'user': {}}, |
|
45 | 'user.edit.ip.add': {'ip': {}, 'user': {}}, | |
46 | 'user.edit.ip.delete': {'ip': {}, 'user': {}}, |
|
46 | 'user.edit.ip.delete': {'ip': {}, 'user': {}}, | |
47 | 'user.edit.token.add': {'token': {}, 'user': {}}, |
|
47 | 'user.edit.token.add': {'token': {}, 'user': {}}, | |
48 | 'user.edit.token.delete': {'token': {}, 'user': {}}, |
|
48 | 'user.edit.token.delete': {'token': {}, 'user': {}}, | |
49 | 'user.edit.email.add': {'email': ''}, |
|
49 | 'user.edit.email.add': {'email': ''}, | |
50 | 'user.edit.email.delete': {'email': ''}, |
|
50 | 'user.edit.email.delete': {'email': ''}, | |
51 | 'user.edit.ssh_key.add': {'token': {}, 'user': {}}, |
|
51 | 'user.edit.ssh_key.add': {'token': {}, 'user': {}}, | |
52 | 'user.edit.ssh_key.delete': {'token': {}, 'user': {}}, |
|
52 | 'user.edit.ssh_key.delete': {'token': {}, 'user': {}}, | |
53 | 'user.edit.password_reset.enabled': {}, |
|
53 | 'user.edit.password_reset.enabled': {}, | |
54 | 'user.edit.password_reset.disabled': {}, |
|
54 | 'user.edit.password_reset.disabled': {}, | |
55 |
|
55 | |||
56 | 'user_group.create': {'data': {}}, |
|
56 | 'user_group.create': {'data': {}}, | |
57 | 'user_group.delete': {'old_data': {}}, |
|
57 | 'user_group.delete': {'old_data': {}}, | |
58 | 'user_group.edit': {'old_data': {}}, |
|
58 | 'user_group.edit': {'old_data': {}}, | |
59 | 'user_group.edit.permissions': {}, |
|
59 | 'user_group.edit.permissions': {}, | |
60 | 'user_group.edit.member.add': {'user': {}}, |
|
60 | 'user_group.edit.member.add': {'user': {}}, | |
61 | 'user_group.edit.member.delete': {'user': {}}, |
|
61 | 'user_group.edit.member.delete': {'user': {}}, | |
62 |
|
62 | |||
63 | 'repo.create': {'data': {}}, |
|
63 | 'repo.create': {'data': {}}, | |
64 | 'repo.fork': {'data': {}}, |
|
64 | 'repo.fork': {'data': {}}, | |
65 | 'repo.edit': {'old_data': {}}, |
|
65 | 'repo.edit': {'old_data': {}}, | |
66 | 'repo.edit.permissions': {}, |
|
66 | 'repo.edit.permissions': {}, | |
67 | 'repo.edit.permissions.branch': {}, |
|
67 | 'repo.edit.permissions.branch': {}, | |
68 | 'repo.archive': {'old_data': {}}, |
|
68 | 'repo.archive': {'old_data': {}}, | |
69 | 'repo.delete': {'old_data': {}}, |
|
69 | 'repo.delete': {'old_data': {}}, | |
70 |
|
70 | |||
71 | 'repo.archive.download': {'user_agent': '', 'archive_name': '', |
|
71 | 'repo.archive.download': {'user_agent': '', 'archive_name': '', | |
72 | 'archive_spec': '', 'archive_cached': ''}, |
|
72 | 'archive_spec': '', 'archive_cached': ''}, | |
73 |
|
73 | |||
74 | 'repo.permissions.branch_rule.create': {}, |
|
74 | 'repo.permissions.branch_rule.create': {}, | |
75 | 'repo.permissions.branch_rule.edit': {}, |
|
75 | 'repo.permissions.branch_rule.edit': {}, | |
76 | 'repo.permissions.branch_rule.delete': {}, |
|
76 | 'repo.permissions.branch_rule.delete': {}, | |
77 |
|
77 | |||
78 | 'repo.pull_request.create': '', |
|
78 | 'repo.pull_request.create': '', | |
79 | 'repo.pull_request.edit': '', |
|
79 | 'repo.pull_request.edit': '', | |
80 | 'repo.pull_request.delete': '', |
|
80 | 'repo.pull_request.delete': '', | |
81 | 'repo.pull_request.close': '', |
|
81 | 'repo.pull_request.close': '', | |
82 | 'repo.pull_request.merge': '', |
|
82 | 'repo.pull_request.merge': '', | |
83 | 'repo.pull_request.vote': '', |
|
83 | 'repo.pull_request.vote': '', | |
84 | 'repo.pull_request.comment.create': '', |
|
84 | 'repo.pull_request.comment.create': '', | |
|
85 | 'repo.pull_request.comment.edit': '', | |||
85 | 'repo.pull_request.comment.delete': '', |
|
86 | 'repo.pull_request.comment.delete': '', | |
86 |
|
87 | |||
87 | 'repo.pull_request.reviewer.add': '', |
|
88 | 'repo.pull_request.reviewer.add': '', | |
88 | 'repo.pull_request.reviewer.delete': '', |
|
89 | 'repo.pull_request.reviewer.delete': '', | |
89 |
|
90 | |||
90 | 'repo.commit.strip': {'commit_id': ''}, |
|
91 | 'repo.commit.strip': {'commit_id': ''}, | |
91 | 'repo.commit.comment.create': {'data': {}}, |
|
92 | 'repo.commit.comment.create': {'data': {}}, | |
92 | 'repo.commit.comment.delete': {'data': {}}, |
|
93 | 'repo.commit.comment.delete': {'data': {}}, | |
|
94 | 'repo.commit.comment.edit': {'data': {}}, | |||
93 | 'repo.commit.vote': '', |
|
95 | 'repo.commit.vote': '', | |
94 |
|
96 | |||
95 | 'repo.artifact.add': '', |
|
97 | 'repo.artifact.add': '', | |
96 | 'repo.artifact.delete': '', |
|
98 | 'repo.artifact.delete': '', | |
97 |
|
99 | |||
98 | 'repo_group.create': {'data': {}}, |
|
100 | 'repo_group.create': {'data': {}}, | |
99 | 'repo_group.edit': {'old_data': {}}, |
|
101 | 'repo_group.edit': {'old_data': {}}, | |
100 | 'repo_group.edit.permissions': {}, |
|
102 | 'repo_group.edit.permissions': {}, | |
101 | 'repo_group.delete': {'old_data': {}}, |
|
103 | 'repo_group.delete': {'old_data': {}}, | |
102 | } |
|
104 | } | |
103 |
|
105 | |||
104 | ACTIONS = ACTIONS_V1 |
|
106 | ACTIONS = ACTIONS_V1 | |
105 |
|
107 | |||
106 | SOURCE_WEB = 'source_web' |
|
108 | SOURCE_WEB = 'source_web' | |
107 | SOURCE_API = 'source_api' |
|
109 | SOURCE_API = 'source_api' | |
108 |
|
110 | |||
109 |
|
111 | |||
110 | class UserWrap(object): |
|
112 | class UserWrap(object): | |
111 | """ |
|
113 | """ | |
112 | Fake object used to imitate AuthUser |
|
114 | Fake object used to imitate AuthUser | |
113 | """ |
|
115 | """ | |
114 |
|
116 | |||
115 | def __init__(self, user_id=None, username=None, ip_addr=None): |
|
117 | def __init__(self, user_id=None, username=None, ip_addr=None): | |
116 | self.user_id = user_id |
|
118 | self.user_id = user_id | |
117 | self.username = username |
|
119 | self.username = username | |
118 | self.ip_addr = ip_addr |
|
120 | self.ip_addr = ip_addr | |
119 |
|
121 | |||
120 |
|
122 | |||
121 | class RepoWrap(object): |
|
123 | class RepoWrap(object): | |
122 | """ |
|
124 | """ | |
123 | Fake object used to imitate RepoObject that audit logger requires |
|
125 | Fake object used to imitate RepoObject that audit logger requires | |
124 | """ |
|
126 | """ | |
125 |
|
127 | |||
126 | def __init__(self, repo_id=None, repo_name=None): |
|
128 | def __init__(self, repo_id=None, repo_name=None): | |
127 | self.repo_id = repo_id |
|
129 | self.repo_id = repo_id | |
128 | self.repo_name = repo_name |
|
130 | self.repo_name = repo_name | |
129 |
|
131 | |||
130 |
|
132 | |||
131 | def _store_log(action_name, action_data, user_id, username, user_data, |
|
133 | def _store_log(action_name, action_data, user_id, username, user_data, | |
132 | ip_address, repository_id, repository_name): |
|
134 | ip_address, repository_id, repository_name): | |
133 | user_log = UserLog() |
|
135 | user_log = UserLog() | |
134 | user_log.version = UserLog.VERSION_2 |
|
136 | user_log.version = UserLog.VERSION_2 | |
135 |
|
137 | |||
136 | user_log.action = action_name |
|
138 | user_log.action = action_name | |
137 | user_log.action_data = action_data or JsonRaw(u'{}') |
|
139 | user_log.action_data = action_data or JsonRaw(u'{}') | |
138 |
|
140 | |||
139 | user_log.user_ip = ip_address |
|
141 | user_log.user_ip = ip_address | |
140 |
|
142 | |||
141 | user_log.user_id = user_id |
|
143 | user_log.user_id = user_id | |
142 | user_log.username = username |
|
144 | user_log.username = username | |
143 | user_log.user_data = user_data or JsonRaw(u'{}') |
|
145 | user_log.user_data = user_data or JsonRaw(u'{}') | |
144 |
|
146 | |||
145 | user_log.repository_id = repository_id |
|
147 | user_log.repository_id = repository_id | |
146 | user_log.repository_name = repository_name |
|
148 | user_log.repository_name = repository_name | |
147 |
|
149 | |||
148 | user_log.action_date = datetime.datetime.now() |
|
150 | user_log.action_date = datetime.datetime.now() | |
149 |
|
151 | |||
150 | return user_log |
|
152 | return user_log | |
151 |
|
153 | |||
152 |
|
154 | |||
153 | def store_web(*args, **kwargs): |
|
155 | def store_web(*args, **kwargs): | |
154 | action_data = {} |
|
156 | action_data = {} | |
155 | org_action_data = kwargs.pop('action_data', {}) |
|
157 | org_action_data = kwargs.pop('action_data', {}) | |
156 | action_data.update(org_action_data) |
|
158 | action_data.update(org_action_data) | |
157 | action_data['source'] = SOURCE_WEB |
|
159 | action_data['source'] = SOURCE_WEB | |
158 | kwargs['action_data'] = action_data |
|
160 | kwargs['action_data'] = action_data | |
159 |
|
161 | |||
160 | return store(*args, **kwargs) |
|
162 | return store(*args, **kwargs) | |
161 |
|
163 | |||
162 |
|
164 | |||
163 | def store_api(*args, **kwargs): |
|
165 | def store_api(*args, **kwargs): | |
164 | action_data = {} |
|
166 | action_data = {} | |
165 | org_action_data = kwargs.pop('action_data', {}) |
|
167 | org_action_data = kwargs.pop('action_data', {}) | |
166 | action_data.update(org_action_data) |
|
168 | action_data.update(org_action_data) | |
167 | action_data['source'] = SOURCE_API |
|
169 | action_data['source'] = SOURCE_API | |
168 | kwargs['action_data'] = action_data |
|
170 | kwargs['action_data'] = action_data | |
169 |
|
171 | |||
170 | return store(*args, **kwargs) |
|
172 | return store(*args, **kwargs) | |
171 |
|
173 | |||
172 |
|
174 | |||
173 | def store(action, user, action_data=None, user_data=None, ip_addr=None, |
|
175 | def store(action, user, action_data=None, user_data=None, ip_addr=None, | |
174 | repo=None, sa_session=None, commit=False): |
|
176 | repo=None, sa_session=None, commit=False): | |
175 | """ |
|
177 | """ | |
176 | Audit logger for various actions made by users, typically this |
|
178 | Audit logger for various actions made by users, typically this | |
177 | results in a call such:: |
|
179 | results in a call such:: | |
178 |
|
180 | |||
179 | from rhodecode.lib import audit_logger |
|
181 | from rhodecode.lib import audit_logger | |
180 |
|
182 | |||
181 | audit_logger.store( |
|
183 | audit_logger.store( | |
182 | 'repo.edit', user=self._rhodecode_user) |
|
184 | 'repo.edit', user=self._rhodecode_user) | |
183 | audit_logger.store( |
|
185 | audit_logger.store( | |
184 | 'repo.delete', action_data={'data': repo_data}, |
|
186 | 'repo.delete', action_data={'data': repo_data}, | |
185 | user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8')) |
|
187 | user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8')) | |
186 |
|
188 | |||
187 | # repo action |
|
189 | # repo action | |
188 | audit_logger.store( |
|
190 | audit_logger.store( | |
189 | 'repo.delete', |
|
191 | 'repo.delete', | |
190 | user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'), |
|
192 | user=audit_logger.UserWrap(username='itried-login', ip_addr='8.8.8.8'), | |
191 | repo=audit_logger.RepoWrap(repo_name='some-repo')) |
|
193 | repo=audit_logger.RepoWrap(repo_name='some-repo')) | |
192 |
|
194 | |||
193 | # repo action, when we know and have the repository object already |
|
195 | # repo action, when we know and have the repository object already | |
194 | audit_logger.store( |
|
196 | audit_logger.store( | |
195 | 'repo.delete', action_data={'source': audit_logger.SOURCE_WEB, }, |
|
197 | 'repo.delete', action_data={'source': audit_logger.SOURCE_WEB, }, | |
196 | user=self._rhodecode_user, |
|
198 | user=self._rhodecode_user, | |
197 | repo=repo_object) |
|
199 | repo=repo_object) | |
198 |
|
200 | |||
199 | # alternative wrapper to the above |
|
201 | # alternative wrapper to the above | |
200 | audit_logger.store_web( |
|
202 | audit_logger.store_web( | |
201 | 'repo.delete', action_data={}, |
|
203 | 'repo.delete', action_data={}, | |
202 | user=self._rhodecode_user, |
|
204 | user=self._rhodecode_user, | |
203 | repo=repo_object) |
|
205 | repo=repo_object) | |
204 |
|
206 | |||
205 | # without an user ? |
|
207 | # without an user ? | |
206 | audit_logger.store( |
|
208 | audit_logger.store( | |
207 | 'user.login.failure', |
|
209 | 'user.login.failure', | |
208 | user=audit_logger.UserWrap( |
|
210 | user=audit_logger.UserWrap( | |
209 | username=self.request.params.get('username'), |
|
211 | username=self.request.params.get('username'), | |
210 | ip_addr=self.request.remote_addr)) |
|
212 | ip_addr=self.request.remote_addr)) | |
211 |
|
213 | |||
212 | """ |
|
214 | """ | |
213 | from rhodecode.lib.utils2 import safe_unicode |
|
215 | from rhodecode.lib.utils2 import safe_unicode | |
214 | from rhodecode.lib.auth import AuthUser |
|
216 | from rhodecode.lib.auth import AuthUser | |
215 |
|
217 | |||
216 | action_spec = ACTIONS.get(action, None) |
|
218 | action_spec = ACTIONS.get(action, None) | |
217 | if action_spec is None: |
|
219 | if action_spec is None: | |
218 | raise ValueError('Action `{}` is not supported'.format(action)) |
|
220 | raise ValueError('Action `{}` is not supported'.format(action)) | |
219 |
|
221 | |||
220 | if not sa_session: |
|
222 | if not sa_session: | |
221 | sa_session = meta.Session() |
|
223 | sa_session = meta.Session() | |
222 |
|
224 | |||
223 | try: |
|
225 | try: | |
224 | username = getattr(user, 'username', None) |
|
226 | username = getattr(user, 'username', None) | |
225 | if not username: |
|
227 | if not username: | |
226 | pass |
|
228 | pass | |
227 |
|
229 | |||
228 | user_id = getattr(user, 'user_id', None) |
|
230 | user_id = getattr(user, 'user_id', None) | |
229 | if not user_id: |
|
231 | if not user_id: | |
230 | # maybe we have username ? Try to figure user_id from username |
|
232 | # maybe we have username ? Try to figure user_id from username | |
231 | if username: |
|
233 | if username: | |
232 | user_id = getattr( |
|
234 | user_id = getattr( | |
233 | User.get_by_username(username), 'user_id', None) |
|
235 | User.get_by_username(username), 'user_id', None) | |
234 |
|
236 | |||
235 | ip_addr = ip_addr or getattr(user, 'ip_addr', None) |
|
237 | ip_addr = ip_addr or getattr(user, 'ip_addr', None) | |
236 | if not ip_addr: |
|
238 | if not ip_addr: | |
237 | pass |
|
239 | pass | |
238 |
|
240 | |||
239 | if not user_data: |
|
241 | if not user_data: | |
240 | # try to get this from the auth user |
|
242 | # try to get this from the auth user | |
241 | if isinstance(user, AuthUser): |
|
243 | if isinstance(user, AuthUser): | |
242 | user_data = { |
|
244 | user_data = { | |
243 | 'username': user.username, |
|
245 | 'username': user.username, | |
244 | 'email': user.email, |
|
246 | 'email': user.email, | |
245 | } |
|
247 | } | |
246 |
|
248 | |||
247 | repository_name = getattr(repo, 'repo_name', None) |
|
249 | repository_name = getattr(repo, 'repo_name', None) | |
248 | repository_id = getattr(repo, 'repo_id', None) |
|
250 | repository_id = getattr(repo, 'repo_id', None) | |
249 | if not repository_id: |
|
251 | if not repository_id: | |
250 | # maybe we have repo_name ? Try to figure repo_id from repo_name |
|
252 | # maybe we have repo_name ? Try to figure repo_id from repo_name | |
251 | if repository_name: |
|
253 | if repository_name: | |
252 | repository_id = getattr( |
|
254 | repository_id = getattr( | |
253 | Repository.get_by_repo_name(repository_name), 'repo_id', None) |
|
255 | Repository.get_by_repo_name(repository_name), 'repo_id', None) | |
254 |
|
256 | |||
255 | action_name = safe_unicode(action) |
|
257 | action_name = safe_unicode(action) | |
256 | ip_address = safe_unicode(ip_addr) |
|
258 | ip_address = safe_unicode(ip_addr) | |
257 |
|
259 | |||
258 | with sa_session.no_autoflush: |
|
260 | with sa_session.no_autoflush: | |
259 | update_user_last_activity(sa_session, user_id) |
|
261 | update_user_last_activity(sa_session, user_id) | |
260 |
|
262 | |||
261 | user_log = _store_log( |
|
263 | user_log = _store_log( | |
262 | action_name=action_name, |
|
264 | action_name=action_name, | |
263 | action_data=action_data or {}, |
|
265 | action_data=action_data or {}, | |
264 | user_id=user_id, |
|
266 | user_id=user_id, | |
265 | username=username, |
|
267 | username=username, | |
266 | user_data=user_data or {}, |
|
268 | user_data=user_data or {}, | |
267 | ip_address=ip_address, |
|
269 | ip_address=ip_address, | |
268 | repository_id=repository_id, |
|
270 | repository_id=repository_id, | |
269 | repository_name=repository_name |
|
271 | repository_name=repository_name | |
270 | ) |
|
272 | ) | |
271 |
|
273 | |||
272 | sa_session.add(user_log) |
|
274 | sa_session.add(user_log) | |
273 |
|
275 | |||
274 | if commit: |
|
276 | if commit: | |
275 | sa_session.commit() |
|
277 | sa_session.commit() | |
276 |
|
278 | |||
277 | entry_id = user_log.entry_id or '' |
|
279 | entry_id = user_log.entry_id or '' | |
278 | log.info('AUDIT[%s]: Logging action: `%s` by user:id:%s[%s] ip:%s', |
|
280 | log.info('AUDIT[%s]: Logging action: `%s` by user:id:%s[%s] ip:%s', | |
279 | entry_id, action_name, user_id, username, ip_address) |
|
281 | entry_id, action_name, user_id, username, ip_address) | |
280 |
|
282 | |||
281 | except Exception: |
|
283 | except Exception: | |
282 | log.exception('AUDIT: failed to store audit log') |
|
284 | log.exception('AUDIT: failed to store audit log') | |
283 |
|
285 | |||
284 |
|
286 | |||
285 | def update_user_last_activity(sa_session, user_id): |
|
287 | def update_user_last_activity(sa_session, user_id): | |
286 | _last_activity = datetime.datetime.now() |
|
288 | _last_activity = datetime.datetime.now() | |
287 | try: |
|
289 | try: | |
288 | sa_session.query(User).filter(User.user_id == user_id).update( |
|
290 | sa_session.query(User).filter(User.user_id == user_id).update( | |
289 | {"last_activity": _last_activity}) |
|
291 | {"last_activity": _last_activity}) | |
290 | log.debug( |
|
292 | log.debug( | |
291 | 'updated user `%s` last activity to:%s', user_id, _last_activity) |
|
293 | 'updated user `%s` last activity to:%s', user_id, _last_activity) | |
292 | except Exception: |
|
294 | except Exception: | |
293 | log.exception("Failed last activity update") |
|
295 | log.exception("Failed last activity update") |
@@ -1,774 +1,829 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | comments model for RhodeCode |
|
22 | comments model for RhodeCode | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import logging |
|
25 | import logging | |
26 | import traceback |
|
26 | import traceback | |
27 | import collections |
|
27 | import collections | |
28 |
|
28 | |||
29 | from pyramid.threadlocal import get_current_registry, get_current_request |
|
29 | from pyramid.threadlocal import get_current_registry, get_current_request | |
30 | from sqlalchemy.sql.expression import null |
|
30 | from sqlalchemy.sql.expression import null | |
31 | from sqlalchemy.sql.functions import coalesce |
|
31 | from sqlalchemy.sql.functions import coalesce | |
32 |
|
32 | |||
33 | from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils |
|
33 | from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils | |
34 | from rhodecode.lib import audit_logger |
|
34 | from rhodecode.lib import audit_logger | |
35 | from rhodecode.lib.utils2 import extract_mentioned_users, safe_str |
|
35 | from rhodecode.lib.utils2 import extract_mentioned_users, safe_str | |
36 | from rhodecode.model import BaseModel |
|
36 | from rhodecode.model import BaseModel | |
37 | from rhodecode.model.db import ( |
|
37 | from rhodecode.model.db import ( | |
38 | ChangesetComment, User, Notification, PullRequest, AttributeDict) |
|
38 | ChangesetComment, | |
|
39 | User, | |||
|
40 | Notification, | |||
|
41 | PullRequest, | |||
|
42 | AttributeDict, | |||
|
43 | ChangesetCommentHistory, | |||
|
44 | ) | |||
39 | from rhodecode.model.notification import NotificationModel |
|
45 | from rhodecode.model.notification import NotificationModel | |
40 | from rhodecode.model.meta import Session |
|
46 | from rhodecode.model.meta import Session | |
41 | from rhodecode.model.settings import VcsSettingsModel |
|
47 | from rhodecode.model.settings import VcsSettingsModel | |
42 | from rhodecode.model.notification import EmailNotificationModel |
|
48 | from rhodecode.model.notification import EmailNotificationModel | |
43 | from rhodecode.model.validation_schema.schemas import comment_schema |
|
49 | from rhodecode.model.validation_schema.schemas import comment_schema | |
44 |
|
50 | |||
45 |
|
51 | |||
46 | log = logging.getLogger(__name__) |
|
52 | log = logging.getLogger(__name__) | |
47 |
|
53 | |||
48 |
|
54 | |||
49 | class CommentsModel(BaseModel): |
|
55 | class CommentsModel(BaseModel): | |
50 |
|
56 | |||
51 | cls = ChangesetComment |
|
57 | cls = ChangesetComment | |
52 |
|
58 | |||
53 | DIFF_CONTEXT_BEFORE = 3 |
|
59 | DIFF_CONTEXT_BEFORE = 3 | |
54 | DIFF_CONTEXT_AFTER = 3 |
|
60 | DIFF_CONTEXT_AFTER = 3 | |
55 |
|
61 | |||
56 | def __get_commit_comment(self, changeset_comment): |
|
62 | def __get_commit_comment(self, changeset_comment): | |
57 | return self._get_instance(ChangesetComment, changeset_comment) |
|
63 | return self._get_instance(ChangesetComment, changeset_comment) | |
58 |
|
64 | |||
59 | def __get_pull_request(self, pull_request): |
|
65 | def __get_pull_request(self, pull_request): | |
60 | return self._get_instance(PullRequest, pull_request) |
|
66 | return self._get_instance(PullRequest, pull_request) | |
61 |
|
67 | |||
62 | def _extract_mentions(self, s): |
|
68 | def _extract_mentions(self, s): | |
63 | user_objects = [] |
|
69 | user_objects = [] | |
64 | for username in extract_mentioned_users(s): |
|
70 | for username in extract_mentioned_users(s): | |
65 | user_obj = User.get_by_username(username, case_insensitive=True) |
|
71 | user_obj = User.get_by_username(username, case_insensitive=True) | |
66 | if user_obj: |
|
72 | if user_obj: | |
67 | user_objects.append(user_obj) |
|
73 | user_objects.append(user_obj) | |
68 | return user_objects |
|
74 | return user_objects | |
69 |
|
75 | |||
70 | def _get_renderer(self, global_renderer='rst', request=None): |
|
76 | def _get_renderer(self, global_renderer='rst', request=None): | |
71 | request = request or get_current_request() |
|
77 | request = request or get_current_request() | |
72 |
|
78 | |||
73 | try: |
|
79 | try: | |
74 | global_renderer = request.call_context.visual.default_renderer |
|
80 | global_renderer = request.call_context.visual.default_renderer | |
75 | except AttributeError: |
|
81 | except AttributeError: | |
76 | log.debug("Renderer not set, falling back " |
|
82 | log.debug("Renderer not set, falling back " | |
77 | "to default renderer '%s'", global_renderer) |
|
83 | "to default renderer '%s'", global_renderer) | |
78 | except Exception: |
|
84 | except Exception: | |
79 | log.error(traceback.format_exc()) |
|
85 | log.error(traceback.format_exc()) | |
80 | return global_renderer |
|
86 | return global_renderer | |
81 |
|
87 | |||
82 | def aggregate_comments(self, comments, versions, show_version, inline=False): |
|
88 | def aggregate_comments(self, comments, versions, show_version, inline=False): | |
83 | # group by versions, and count until, and display objects |
|
89 | # group by versions, and count until, and display objects | |
84 |
|
90 | |||
85 | comment_groups = collections.defaultdict(list) |
|
91 | comment_groups = collections.defaultdict(list) | |
86 | [comment_groups[ |
|
92 | [comment_groups[ | |
87 | _co.pull_request_version_id].append(_co) for _co in comments] |
|
93 | _co.pull_request_version_id].append(_co) for _co in comments] | |
88 |
|
94 | |||
89 | def yield_comments(pos): |
|
95 | def yield_comments(pos): | |
90 | for co in comment_groups[pos]: |
|
96 | for co in comment_groups[pos]: | |
91 | yield co |
|
97 | yield co | |
92 |
|
98 | |||
93 | comment_versions = collections.defaultdict( |
|
99 | comment_versions = collections.defaultdict( | |
94 | lambda: collections.defaultdict(list)) |
|
100 | lambda: collections.defaultdict(list)) | |
95 | prev_prvid = -1 |
|
101 | prev_prvid = -1 | |
96 | # fake last entry with None, to aggregate on "latest" version which |
|
102 | # fake last entry with None, to aggregate on "latest" version which | |
97 | # doesn't have an pull_request_version_id |
|
103 | # doesn't have an pull_request_version_id | |
98 | for ver in versions + [AttributeDict({'pull_request_version_id': None})]: |
|
104 | for ver in versions + [AttributeDict({'pull_request_version_id': None})]: | |
99 | prvid = ver.pull_request_version_id |
|
105 | prvid = ver.pull_request_version_id | |
100 | if prev_prvid == -1: |
|
106 | if prev_prvid == -1: | |
101 | prev_prvid = prvid |
|
107 | prev_prvid = prvid | |
102 |
|
108 | |||
103 | for co in yield_comments(prvid): |
|
109 | for co in yield_comments(prvid): | |
104 | comment_versions[prvid]['at'].append(co) |
|
110 | comment_versions[prvid]['at'].append(co) | |
105 |
|
111 | |||
106 | # save until |
|
112 | # save until | |
107 | current = comment_versions[prvid]['at'] |
|
113 | current = comment_versions[prvid]['at'] | |
108 | prev_until = comment_versions[prev_prvid]['until'] |
|
114 | prev_until = comment_versions[prev_prvid]['until'] | |
109 | cur_until = prev_until + current |
|
115 | cur_until = prev_until + current | |
110 | comment_versions[prvid]['until'].extend(cur_until) |
|
116 | comment_versions[prvid]['until'].extend(cur_until) | |
111 |
|
117 | |||
112 | # save outdated |
|
118 | # save outdated | |
113 | if inline: |
|
119 | if inline: | |
114 | outdated = [x for x in cur_until |
|
120 | outdated = [x for x in cur_until | |
115 | if x.outdated_at_version(show_version)] |
|
121 | if x.outdated_at_version(show_version)] | |
116 | else: |
|
122 | else: | |
117 | outdated = [x for x in cur_until |
|
123 | outdated = [x for x in cur_until | |
118 | if x.older_than_version(show_version)] |
|
124 | if x.older_than_version(show_version)] | |
119 | display = [x for x in cur_until if x not in outdated] |
|
125 | display = [x for x in cur_until if x not in outdated] | |
120 |
|
126 | |||
121 | comment_versions[prvid]['outdated'] = outdated |
|
127 | comment_versions[prvid]['outdated'] = outdated | |
122 | comment_versions[prvid]['display'] = display |
|
128 | comment_versions[prvid]['display'] = display | |
123 |
|
129 | |||
124 | prev_prvid = prvid |
|
130 | prev_prvid = prvid | |
125 |
|
131 | |||
126 | return comment_versions |
|
132 | return comment_versions | |
127 |
|
133 | |||
128 | def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None): |
|
134 | def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None): | |
129 | qry = Session().query(ChangesetComment) \ |
|
135 | qry = Session().query(ChangesetComment) \ | |
130 | .filter(ChangesetComment.repo == repo) |
|
136 | .filter(ChangesetComment.repo == repo) | |
131 |
|
137 | |||
132 | if comment_type and comment_type in ChangesetComment.COMMENT_TYPES: |
|
138 | if comment_type and comment_type in ChangesetComment.COMMENT_TYPES: | |
133 | qry = qry.filter(ChangesetComment.comment_type == comment_type) |
|
139 | qry = qry.filter(ChangesetComment.comment_type == comment_type) | |
134 |
|
140 | |||
135 | if user: |
|
141 | if user: | |
136 | user = self._get_user(user) |
|
142 | user = self._get_user(user) | |
137 | if user: |
|
143 | if user: | |
138 | qry = qry.filter(ChangesetComment.user_id == user.user_id) |
|
144 | qry = qry.filter(ChangesetComment.user_id == user.user_id) | |
139 |
|
145 | |||
140 | if commit_id: |
|
146 | if commit_id: | |
141 | qry = qry.filter(ChangesetComment.revision == commit_id) |
|
147 | qry = qry.filter(ChangesetComment.revision == commit_id) | |
142 |
|
148 | |||
143 | qry = qry.order_by(ChangesetComment.created_on) |
|
149 | qry = qry.order_by(ChangesetComment.created_on) | |
144 | return qry.all() |
|
150 | return qry.all() | |
145 |
|
151 | |||
146 | def get_repository_unresolved_todos(self, repo): |
|
152 | def get_repository_unresolved_todos(self, repo): | |
147 | todos = Session().query(ChangesetComment) \ |
|
153 | todos = Session().query(ChangesetComment) \ | |
148 | .filter(ChangesetComment.repo == repo) \ |
|
154 | .filter(ChangesetComment.repo == repo) \ | |
149 | .filter(ChangesetComment.resolved_by == None) \ |
|
155 | .filter(ChangesetComment.resolved_by == None) \ | |
150 | .filter(ChangesetComment.comment_type |
|
156 | .filter(ChangesetComment.comment_type | |
151 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
157 | == ChangesetComment.COMMENT_TYPE_TODO) | |
152 | todos = todos.all() |
|
158 | todos = todos.all() | |
153 |
|
159 | |||
154 | return todos |
|
160 | return todos | |
155 |
|
161 | |||
156 | def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True): |
|
162 | def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True): | |
157 |
|
163 | |||
158 | todos = Session().query(ChangesetComment) \ |
|
164 | todos = Session().query(ChangesetComment) \ | |
159 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
165 | .filter(ChangesetComment.pull_request == pull_request) \ | |
160 | .filter(ChangesetComment.resolved_by == None) \ |
|
166 | .filter(ChangesetComment.resolved_by == None) \ | |
161 | .filter(ChangesetComment.comment_type |
|
167 | .filter(ChangesetComment.comment_type | |
162 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
168 | == ChangesetComment.COMMENT_TYPE_TODO) | |
163 |
|
169 | |||
164 | if not show_outdated: |
|
170 | if not show_outdated: | |
165 | todos = todos.filter( |
|
171 | todos = todos.filter( | |
166 | coalesce(ChangesetComment.display_state, '') != |
|
172 | coalesce(ChangesetComment.display_state, '') != | |
167 | ChangesetComment.COMMENT_OUTDATED) |
|
173 | ChangesetComment.COMMENT_OUTDATED) | |
168 |
|
174 | |||
169 | todos = todos.all() |
|
175 | todos = todos.all() | |
170 |
|
176 | |||
171 | return todos |
|
177 | return todos | |
172 |
|
178 | |||
173 | def get_pull_request_resolved_todos(self, pull_request, show_outdated=True): |
|
179 | def get_pull_request_resolved_todos(self, pull_request, show_outdated=True): | |
174 |
|
180 | |||
175 | todos = Session().query(ChangesetComment) \ |
|
181 | todos = Session().query(ChangesetComment) \ | |
176 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
182 | .filter(ChangesetComment.pull_request == pull_request) \ | |
177 | .filter(ChangesetComment.resolved_by != None) \ |
|
183 | .filter(ChangesetComment.resolved_by != None) \ | |
178 | .filter(ChangesetComment.comment_type |
|
184 | .filter(ChangesetComment.comment_type | |
179 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
185 | == ChangesetComment.COMMENT_TYPE_TODO) | |
180 |
|
186 | |||
181 | if not show_outdated: |
|
187 | if not show_outdated: | |
182 | todos = todos.filter( |
|
188 | todos = todos.filter( | |
183 | coalesce(ChangesetComment.display_state, '') != |
|
189 | coalesce(ChangesetComment.display_state, '') != | |
184 | ChangesetComment.COMMENT_OUTDATED) |
|
190 | ChangesetComment.COMMENT_OUTDATED) | |
185 |
|
191 | |||
186 | todos = todos.all() |
|
192 | todos = todos.all() | |
187 |
|
193 | |||
188 | return todos |
|
194 | return todos | |
189 |
|
195 | |||
190 | def get_commit_unresolved_todos(self, commit_id, show_outdated=True): |
|
196 | def get_commit_unresolved_todos(self, commit_id, show_outdated=True): | |
191 |
|
197 | |||
192 | todos = Session().query(ChangesetComment) \ |
|
198 | todos = Session().query(ChangesetComment) \ | |
193 | .filter(ChangesetComment.revision == commit_id) \ |
|
199 | .filter(ChangesetComment.revision == commit_id) \ | |
194 | .filter(ChangesetComment.resolved_by == None) \ |
|
200 | .filter(ChangesetComment.resolved_by == None) \ | |
195 | .filter(ChangesetComment.comment_type |
|
201 | .filter(ChangesetComment.comment_type | |
196 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
202 | == ChangesetComment.COMMENT_TYPE_TODO) | |
197 |
|
203 | |||
198 | if not show_outdated: |
|
204 | if not show_outdated: | |
199 | todos = todos.filter( |
|
205 | todos = todos.filter( | |
200 | coalesce(ChangesetComment.display_state, '') != |
|
206 | coalesce(ChangesetComment.display_state, '') != | |
201 | ChangesetComment.COMMENT_OUTDATED) |
|
207 | ChangesetComment.COMMENT_OUTDATED) | |
202 |
|
208 | |||
203 | todos = todos.all() |
|
209 | todos = todos.all() | |
204 |
|
210 | |||
205 | return todos |
|
211 | return todos | |
206 |
|
212 | |||
207 | def get_commit_resolved_todos(self, commit_id, show_outdated=True): |
|
213 | def get_commit_resolved_todos(self, commit_id, show_outdated=True): | |
208 |
|
214 | |||
209 | todos = Session().query(ChangesetComment) \ |
|
215 | todos = Session().query(ChangesetComment) \ | |
210 | .filter(ChangesetComment.revision == commit_id) \ |
|
216 | .filter(ChangesetComment.revision == commit_id) \ | |
211 | .filter(ChangesetComment.resolved_by != None) \ |
|
217 | .filter(ChangesetComment.resolved_by != None) \ | |
212 | .filter(ChangesetComment.comment_type |
|
218 | .filter(ChangesetComment.comment_type | |
213 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
219 | == ChangesetComment.COMMENT_TYPE_TODO) | |
214 |
|
220 | |||
215 | if not show_outdated: |
|
221 | if not show_outdated: | |
216 | todos = todos.filter( |
|
222 | todos = todos.filter( | |
217 | coalesce(ChangesetComment.display_state, '') != |
|
223 | coalesce(ChangesetComment.display_state, '') != | |
218 | ChangesetComment.COMMENT_OUTDATED) |
|
224 | ChangesetComment.COMMENT_OUTDATED) | |
219 |
|
225 | |||
220 | todos = todos.all() |
|
226 | todos = todos.all() | |
221 |
|
227 | |||
222 | return todos |
|
228 | return todos | |
223 |
|
229 | |||
224 | def _log_audit_action(self, action, action_data, auth_user, comment): |
|
230 | def _log_audit_action(self, action, action_data, auth_user, comment): | |
225 | audit_logger.store( |
|
231 | audit_logger.store( | |
226 | action=action, |
|
232 | action=action, | |
227 | action_data=action_data, |
|
233 | action_data=action_data, | |
228 | user=auth_user, |
|
234 | user=auth_user, | |
229 | repo=comment.repo) |
|
235 | repo=comment.repo) | |
230 |
|
236 | |||
231 | def create(self, text, repo, user, commit_id=None, pull_request=None, |
|
237 | def create(self, text, repo, user, commit_id=None, pull_request=None, | |
232 | f_path=None, line_no=None, status_change=None, |
|
238 | f_path=None, line_no=None, status_change=None, | |
233 | status_change_type=None, comment_type=None, |
|
239 | status_change_type=None, comment_type=None, | |
234 | resolves_comment_id=None, closing_pr=False, send_email=True, |
|
240 | resolves_comment_id=None, closing_pr=False, send_email=True, | |
235 | renderer=None, auth_user=None, extra_recipients=None): |
|
241 | renderer=None, auth_user=None, extra_recipients=None): | |
236 | """ |
|
242 | """ | |
237 | Creates new comment for commit or pull request. |
|
243 | Creates new comment for commit or pull request. | |
238 | IF status_change is not none this comment is associated with a |
|
244 | IF status_change is not none this comment is associated with a | |
239 | status change of commit or commit associated with pull request |
|
245 | status change of commit or commit associated with pull request | |
240 |
|
246 | |||
241 | :param text: |
|
247 | :param text: | |
242 | :param repo: |
|
248 | :param repo: | |
243 | :param user: |
|
249 | :param user: | |
244 | :param commit_id: |
|
250 | :param commit_id: | |
245 | :param pull_request: |
|
251 | :param pull_request: | |
246 | :param f_path: |
|
252 | :param f_path: | |
247 | :param line_no: |
|
253 | :param line_no: | |
248 | :param status_change: Label for status change |
|
254 | :param status_change: Label for status change | |
249 | :param comment_type: Type of comment |
|
255 | :param comment_type: Type of comment | |
250 | :param resolves_comment_id: id of comment which this one will resolve |
|
256 | :param resolves_comment_id: id of comment which this one will resolve | |
251 | :param status_change_type: type of status change |
|
257 | :param status_change_type: type of status change | |
252 | :param closing_pr: |
|
258 | :param closing_pr: | |
253 | :param send_email: |
|
259 | :param send_email: | |
254 | :param renderer: pick renderer for this comment |
|
260 | :param renderer: pick renderer for this comment | |
255 | :param auth_user: current authenticated user calling this method |
|
261 | :param auth_user: current authenticated user calling this method | |
256 | :param extra_recipients: list of extra users to be added to recipients |
|
262 | :param extra_recipients: list of extra users to be added to recipients | |
257 | """ |
|
263 | """ | |
258 |
|
264 | |||
259 | if not text: |
|
265 | if not text: | |
260 | log.warning('Missing text for comment, skipping...') |
|
266 | log.warning('Missing text for comment, skipping...') | |
261 | return |
|
267 | return | |
262 | request = get_current_request() |
|
268 | request = get_current_request() | |
263 | _ = request.translate |
|
269 | _ = request.translate | |
264 |
|
270 | |||
265 | if not renderer: |
|
271 | if not renderer: | |
266 | renderer = self._get_renderer(request=request) |
|
272 | renderer = self._get_renderer(request=request) | |
267 |
|
273 | |||
268 | repo = self._get_repo(repo) |
|
274 | repo = self._get_repo(repo) | |
269 | user = self._get_user(user) |
|
275 | user = self._get_user(user) | |
270 | auth_user = auth_user or user |
|
276 | auth_user = auth_user or user | |
271 |
|
277 | |||
272 | schema = comment_schema.CommentSchema() |
|
278 | schema = comment_schema.CommentSchema() | |
273 | validated_kwargs = schema.deserialize(dict( |
|
279 | validated_kwargs = schema.deserialize(dict( | |
274 | comment_body=text, |
|
280 | comment_body=text, | |
275 | comment_type=comment_type, |
|
281 | comment_type=comment_type, | |
276 | comment_file=f_path, |
|
282 | comment_file=f_path, | |
277 | comment_line=line_no, |
|
283 | comment_line=line_no, | |
278 | renderer_type=renderer, |
|
284 | renderer_type=renderer, | |
279 | status_change=status_change_type, |
|
285 | status_change=status_change_type, | |
280 | resolves_comment_id=resolves_comment_id, |
|
286 | resolves_comment_id=resolves_comment_id, | |
281 | repo=repo.repo_id, |
|
287 | repo=repo.repo_id, | |
282 | user=user.user_id, |
|
288 | user=user.user_id, | |
283 | )) |
|
289 | )) | |
284 |
|
290 | |||
285 | comment = ChangesetComment() |
|
291 | comment = ChangesetComment() | |
286 | comment.renderer = validated_kwargs['renderer_type'] |
|
292 | comment.renderer = validated_kwargs['renderer_type'] | |
287 | comment.text = validated_kwargs['comment_body'] |
|
293 | comment.text = validated_kwargs['comment_body'] | |
288 | comment.f_path = validated_kwargs['comment_file'] |
|
294 | comment.f_path = validated_kwargs['comment_file'] | |
289 | comment.line_no = validated_kwargs['comment_line'] |
|
295 | comment.line_no = validated_kwargs['comment_line'] | |
290 | comment.comment_type = validated_kwargs['comment_type'] |
|
296 | comment.comment_type = validated_kwargs['comment_type'] | |
291 |
|
297 | |||
292 | comment.repo = repo |
|
298 | comment.repo = repo | |
293 | comment.author = user |
|
299 | comment.author = user | |
294 | resolved_comment = self.__get_commit_comment( |
|
300 | resolved_comment = self.__get_commit_comment( | |
295 | validated_kwargs['resolves_comment_id']) |
|
301 | validated_kwargs['resolves_comment_id']) | |
296 | # check if the comment actually belongs to this PR |
|
302 | # check if the comment actually belongs to this PR | |
297 | if resolved_comment and resolved_comment.pull_request and \ |
|
303 | if resolved_comment and resolved_comment.pull_request and \ | |
298 | resolved_comment.pull_request != pull_request: |
|
304 | resolved_comment.pull_request != pull_request: | |
299 | log.warning('Comment tried to resolved unrelated todo comment: %s', |
|
305 | log.warning('Comment tried to resolved unrelated todo comment: %s', | |
300 | resolved_comment) |
|
306 | resolved_comment) | |
301 | # comment not bound to this pull request, forbid |
|
307 | # comment not bound to this pull request, forbid | |
302 | resolved_comment = None |
|
308 | resolved_comment = None | |
303 |
|
309 | |||
304 | elif resolved_comment and resolved_comment.repo and \ |
|
310 | elif resolved_comment and resolved_comment.repo and \ | |
305 | resolved_comment.repo != repo: |
|
311 | resolved_comment.repo != repo: | |
306 | log.warning('Comment tried to resolved unrelated todo comment: %s', |
|
312 | log.warning('Comment tried to resolved unrelated todo comment: %s', | |
307 | resolved_comment) |
|
313 | resolved_comment) | |
308 | # comment not bound to this repo, forbid |
|
314 | # comment not bound to this repo, forbid | |
309 | resolved_comment = None |
|
315 | resolved_comment = None | |
310 |
|
316 | |||
311 | comment.resolved_comment = resolved_comment |
|
317 | comment.resolved_comment = resolved_comment | |
312 |
|
318 | |||
313 | pull_request_id = pull_request |
|
319 | pull_request_id = pull_request | |
314 |
|
320 | |||
315 | commit_obj = None |
|
321 | commit_obj = None | |
316 | pull_request_obj = None |
|
322 | pull_request_obj = None | |
317 |
|
323 | |||
318 | if commit_id: |
|
324 | if commit_id: | |
319 | notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT |
|
325 | notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT | |
320 | # do a lookup, so we don't pass something bad here |
|
326 | # do a lookup, so we don't pass something bad here | |
321 | commit_obj = repo.scm_instance().get_commit(commit_id=commit_id) |
|
327 | commit_obj = repo.scm_instance().get_commit(commit_id=commit_id) | |
322 | comment.revision = commit_obj.raw_id |
|
328 | comment.revision = commit_obj.raw_id | |
323 |
|
329 | |||
324 | elif pull_request_id: |
|
330 | elif pull_request_id: | |
325 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
331 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT | |
326 | pull_request_obj = self.__get_pull_request(pull_request_id) |
|
332 | pull_request_obj = self.__get_pull_request(pull_request_id) | |
327 | comment.pull_request = pull_request_obj |
|
333 | comment.pull_request = pull_request_obj | |
328 | else: |
|
334 | else: | |
329 | raise Exception('Please specify commit or pull_request_id') |
|
335 | raise Exception('Please specify commit or pull_request_id') | |
330 |
|
336 | |||
331 | Session().add(comment) |
|
337 | Session().add(comment) | |
332 | Session().flush() |
|
338 | Session().flush() | |
333 | kwargs = { |
|
339 | kwargs = { | |
334 | 'user': user, |
|
340 | 'user': user, | |
335 | 'renderer_type': renderer, |
|
341 | 'renderer_type': renderer, | |
336 | 'repo_name': repo.repo_name, |
|
342 | 'repo_name': repo.repo_name, | |
337 | 'status_change': status_change, |
|
343 | 'status_change': status_change, | |
338 | 'status_change_type': status_change_type, |
|
344 | 'status_change_type': status_change_type, | |
339 | 'comment_body': text, |
|
345 | 'comment_body': text, | |
340 | 'comment_file': f_path, |
|
346 | 'comment_file': f_path, | |
341 | 'comment_line': line_no, |
|
347 | 'comment_line': line_no, | |
342 | 'comment_type': comment_type or 'note', |
|
348 | 'comment_type': comment_type or 'note', | |
343 | 'comment_id': comment.comment_id |
|
349 | 'comment_id': comment.comment_id | |
344 | } |
|
350 | } | |
345 |
|
351 | |||
346 | if commit_obj: |
|
352 | if commit_obj: | |
347 | recipients = ChangesetComment.get_users( |
|
353 | recipients = ChangesetComment.get_users( | |
348 | revision=commit_obj.raw_id) |
|
354 | revision=commit_obj.raw_id) | |
349 | # add commit author if it's in RhodeCode system |
|
355 | # add commit author if it's in RhodeCode system | |
350 | cs_author = User.get_from_cs_author(commit_obj.author) |
|
356 | cs_author = User.get_from_cs_author(commit_obj.author) | |
351 | if not cs_author: |
|
357 | if not cs_author: | |
352 | # use repo owner if we cannot extract the author correctly |
|
358 | # use repo owner if we cannot extract the author correctly | |
353 | cs_author = repo.user |
|
359 | cs_author = repo.user | |
354 | recipients += [cs_author] |
|
360 | recipients += [cs_author] | |
355 |
|
361 | |||
356 | commit_comment_url = self.get_url(comment, request=request) |
|
362 | commit_comment_url = self.get_url(comment, request=request) | |
357 | commit_comment_reply_url = self.get_url( |
|
363 | commit_comment_reply_url = self.get_url( | |
358 | comment, request=request, |
|
364 | comment, request=request, | |
359 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) |
|
365 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) | |
360 |
|
366 | |||
361 | target_repo_url = h.link_to( |
|
367 | target_repo_url = h.link_to( | |
362 | repo.repo_name, |
|
368 | repo.repo_name, | |
363 | h.route_url('repo_summary', repo_name=repo.repo_name)) |
|
369 | h.route_url('repo_summary', repo_name=repo.repo_name)) | |
364 |
|
370 | |||
365 | # commit specifics |
|
371 | # commit specifics | |
366 | kwargs.update({ |
|
372 | kwargs.update({ | |
367 | 'commit': commit_obj, |
|
373 | 'commit': commit_obj, | |
368 | 'commit_message': commit_obj.message, |
|
374 | 'commit_message': commit_obj.message, | |
369 | 'commit_target_repo_url': target_repo_url, |
|
375 | 'commit_target_repo_url': target_repo_url, | |
370 | 'commit_comment_url': commit_comment_url, |
|
376 | 'commit_comment_url': commit_comment_url, | |
371 | 'commit_comment_reply_url': commit_comment_reply_url |
|
377 | 'commit_comment_reply_url': commit_comment_reply_url | |
372 | }) |
|
378 | }) | |
373 |
|
379 | |||
374 | elif pull_request_obj: |
|
380 | elif pull_request_obj: | |
375 | # get the current participants of this pull request |
|
381 | # get the current participants of this pull request | |
376 | recipients = ChangesetComment.get_users( |
|
382 | recipients = ChangesetComment.get_users( | |
377 | pull_request_id=pull_request_obj.pull_request_id) |
|
383 | pull_request_id=pull_request_obj.pull_request_id) | |
378 | # add pull request author |
|
384 | # add pull request author | |
379 | recipients += [pull_request_obj.author] |
|
385 | recipients += [pull_request_obj.author] | |
380 |
|
386 | |||
381 | # add the reviewers to notification |
|
387 | # add the reviewers to notification | |
382 | recipients += [x.user for x in pull_request_obj.reviewers] |
|
388 | recipients += [x.user for x in pull_request_obj.reviewers] | |
383 |
|
389 | |||
384 | pr_target_repo = pull_request_obj.target_repo |
|
390 | pr_target_repo = pull_request_obj.target_repo | |
385 | pr_source_repo = pull_request_obj.source_repo |
|
391 | pr_source_repo = pull_request_obj.source_repo | |
386 |
|
392 | |||
387 | pr_comment_url = self.get_url(comment, request=request) |
|
393 | pr_comment_url = self.get_url(comment, request=request) | |
388 | pr_comment_reply_url = self.get_url( |
|
394 | pr_comment_reply_url = self.get_url( | |
389 | comment, request=request, |
|
395 | comment, request=request, | |
390 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) |
|
396 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) | |
391 |
|
397 | |||
392 | pr_url = h.route_url( |
|
398 | pr_url = h.route_url( | |
393 | 'pullrequest_show', |
|
399 | 'pullrequest_show', | |
394 | repo_name=pr_target_repo.repo_name, |
|
400 | repo_name=pr_target_repo.repo_name, | |
395 | pull_request_id=pull_request_obj.pull_request_id, ) |
|
401 | pull_request_id=pull_request_obj.pull_request_id, ) | |
396 |
|
402 | |||
397 | # set some variables for email notification |
|
403 | # set some variables for email notification | |
398 | pr_target_repo_url = h.route_url( |
|
404 | pr_target_repo_url = h.route_url( | |
399 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
405 | 'repo_summary', repo_name=pr_target_repo.repo_name) | |
400 |
|
406 | |||
401 | pr_source_repo_url = h.route_url( |
|
407 | pr_source_repo_url = h.route_url( | |
402 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
408 | 'repo_summary', repo_name=pr_source_repo.repo_name) | |
403 |
|
409 | |||
404 | # pull request specifics |
|
410 | # pull request specifics | |
405 | kwargs.update({ |
|
411 | kwargs.update({ | |
406 | 'pull_request': pull_request_obj, |
|
412 | 'pull_request': pull_request_obj, | |
407 | 'pr_id': pull_request_obj.pull_request_id, |
|
413 | 'pr_id': pull_request_obj.pull_request_id, | |
408 | 'pull_request_url': pr_url, |
|
414 | 'pull_request_url': pr_url, | |
409 | 'pull_request_target_repo': pr_target_repo, |
|
415 | 'pull_request_target_repo': pr_target_repo, | |
410 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
416 | 'pull_request_target_repo_url': pr_target_repo_url, | |
411 | 'pull_request_source_repo': pr_source_repo, |
|
417 | 'pull_request_source_repo': pr_source_repo, | |
412 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
418 | 'pull_request_source_repo_url': pr_source_repo_url, | |
413 | 'pr_comment_url': pr_comment_url, |
|
419 | 'pr_comment_url': pr_comment_url, | |
414 | 'pr_comment_reply_url': pr_comment_reply_url, |
|
420 | 'pr_comment_reply_url': pr_comment_reply_url, | |
415 | 'pr_closing': closing_pr, |
|
421 | 'pr_closing': closing_pr, | |
416 | }) |
|
422 | }) | |
417 |
|
423 | |||
418 | recipients += [self._get_user(u) for u in (extra_recipients or [])] |
|
424 | recipients += [self._get_user(u) for u in (extra_recipients or [])] | |
419 |
|
425 | |||
420 | if send_email: |
|
426 | if send_email: | |
421 | # pre-generate the subject for notification itself |
|
427 | # pre-generate the subject for notification itself | |
422 | (subject, |
|
428 | (subject, | |
423 | _h, _e, # we don't care about those |
|
429 | _h, _e, # we don't care about those | |
424 | body_plaintext) = EmailNotificationModel().render_email( |
|
430 | body_plaintext) = EmailNotificationModel().render_email( | |
425 | notification_type, **kwargs) |
|
431 | notification_type, **kwargs) | |
426 |
|
432 | |||
427 | mention_recipients = set( |
|
433 | mention_recipients = set( | |
428 | self._extract_mentions(text)).difference(recipients) |
|
434 | self._extract_mentions(text)).difference(recipients) | |
429 |
|
435 | |||
430 | # create notification objects, and emails |
|
436 | # create notification objects, and emails | |
431 | NotificationModel().create( |
|
437 | NotificationModel().create( | |
432 | created_by=user, |
|
438 | created_by=user, | |
433 | notification_subject=subject, |
|
439 | notification_subject=subject, | |
434 | notification_body=body_plaintext, |
|
440 | notification_body=body_plaintext, | |
435 | notification_type=notification_type, |
|
441 | notification_type=notification_type, | |
436 | recipients=recipients, |
|
442 | recipients=recipients, | |
437 | mention_recipients=mention_recipients, |
|
443 | mention_recipients=mention_recipients, | |
438 | email_kwargs=kwargs, |
|
444 | email_kwargs=kwargs, | |
439 | ) |
|
445 | ) | |
440 |
|
446 | |||
441 | Session().flush() |
|
447 | Session().flush() | |
442 | if comment.pull_request: |
|
448 | if comment.pull_request: | |
443 | action = 'repo.pull_request.comment.create' |
|
449 | action = 'repo.pull_request.comment.create' | |
444 | else: |
|
450 | else: | |
445 | action = 'repo.commit.comment.create' |
|
451 | action = 'repo.commit.comment.create' | |
446 |
|
452 | |||
447 | comment_data = comment.get_api_data() |
|
453 | comment_data = comment.get_api_data() | |
448 | self._log_audit_action( |
|
454 | self._log_audit_action( | |
449 | action, {'data': comment_data}, auth_user, comment) |
|
455 | action, {'data': comment_data}, auth_user, comment) | |
450 |
|
456 | |||
451 | msg_url = '' |
|
457 | msg_url = '' | |
452 | channel = None |
|
458 | channel = None | |
453 | if commit_obj: |
|
459 | if commit_obj: | |
454 | msg_url = commit_comment_url |
|
460 | msg_url = commit_comment_url | |
455 | repo_name = repo.repo_name |
|
461 | repo_name = repo.repo_name | |
456 | channel = u'/repo${}$/commit/{}'.format( |
|
462 | channel = u'/repo${}$/commit/{}'.format( | |
457 | repo_name, |
|
463 | repo_name, | |
458 | commit_obj.raw_id |
|
464 | commit_obj.raw_id | |
459 | ) |
|
465 | ) | |
460 | elif pull_request_obj: |
|
466 | elif pull_request_obj: | |
461 | msg_url = pr_comment_url |
|
467 | msg_url = pr_comment_url | |
462 | repo_name = pr_target_repo.repo_name |
|
468 | repo_name = pr_target_repo.repo_name | |
463 | channel = u'/repo${}$/pr/{}'.format( |
|
469 | channel = u'/repo${}$/pr/{}'.format( | |
464 | repo_name, |
|
470 | repo_name, | |
465 | pull_request_id |
|
471 | pull_request_id | |
466 | ) |
|
472 | ) | |
467 |
|
473 | |||
468 | message = '<strong>{}</strong> {} - ' \ |
|
474 | message = '<strong>{}</strong> {} - ' \ | |
469 | '<a onclick="window.location=\'{}\';' \ |
|
475 | '<a onclick="window.location=\'{}\';' \ | |
470 | 'window.location.reload()">' \ |
|
476 | 'window.location.reload()">' \ | |
471 | '<strong>{}</strong></a>' |
|
477 | '<strong>{}</strong></a>' | |
472 | message = message.format( |
|
478 | message = message.format( | |
473 | user.username, _('made a comment'), msg_url, |
|
479 | user.username, _('made a comment'), msg_url, | |
474 | _('Show it now')) |
|
480 | _('Show it now')) | |
475 |
|
481 | |||
476 | channelstream.post_message( |
|
482 | channelstream.post_message( | |
477 | channel, message, user.username, |
|
483 | channel, message, user.username, | |
478 | registry=get_current_registry()) |
|
484 | registry=get_current_registry()) | |
479 |
|
485 | |||
480 | return comment |
|
486 | return comment | |
481 |
|
487 | |||
|
488 | def edit(self, comment_id, text, auth_user, version): | |||
|
489 | """ | |||
|
490 | Change existing comment for commit or pull request. | |||
|
491 | ||||
|
492 | :param comment_id: | |||
|
493 | :param text: | |||
|
494 | :param auth_user: current authenticated user calling this method | |||
|
495 | :param version: last comment version | |||
|
496 | """ | |||
|
497 | if not text: | |||
|
498 | log.warning('Missing text for comment, skipping...') | |||
|
499 | return | |||
|
500 | ||||
|
501 | comment = ChangesetComment.get(comment_id) | |||
|
502 | old_comment_text = comment.text | |||
|
503 | comment.text = text | |||
|
504 | comment_version = ChangesetCommentHistory.get_version(comment_id) | |||
|
505 | if (comment_version - version) != 1: | |||
|
506 | log.warning( | |||
|
507 | 'Version mismatch, skipping... ' | |||
|
508 | 'version {} but should be {}'.format( | |||
|
509 | (version - 1), | |||
|
510 | comment_version, | |||
|
511 | ) | |||
|
512 | ) | |||
|
513 | return | |||
|
514 | comment_history = ChangesetCommentHistory() | |||
|
515 | comment_history.comment_id = comment_id | |||
|
516 | comment_history.version = comment_version | |||
|
517 | comment_history.created_by_user_id = auth_user.user_id | |||
|
518 | comment_history.text = old_comment_text | |||
|
519 | # TODO add email notification | |||
|
520 | Session().add(comment_history) | |||
|
521 | Session().add(comment) | |||
|
522 | Session().flush() | |||
|
523 | ||||
|
524 | if comment.pull_request: | |||
|
525 | action = 'repo.pull_request.comment.edit' | |||
|
526 | else: | |||
|
527 | action = 'repo.commit.comment.edit' | |||
|
528 | ||||
|
529 | comment_data = comment.get_api_data() | |||
|
530 | comment_data['old_comment_text'] = old_comment_text | |||
|
531 | self._log_audit_action( | |||
|
532 | action, {'data': comment_data}, auth_user, comment) | |||
|
533 | ||||
|
534 | return comment_history | |||
|
535 | ||||
482 | def delete(self, comment, auth_user): |
|
536 | def delete(self, comment, auth_user): | |
483 | """ |
|
537 | """ | |
484 | Deletes given comment |
|
538 | Deletes given comment | |
485 | """ |
|
539 | """ | |
486 | comment = self.__get_commit_comment(comment) |
|
540 | comment = self.__get_commit_comment(comment) | |
487 | old_data = comment.get_api_data() |
|
541 | old_data = comment.get_api_data() | |
488 | Session().delete(comment) |
|
542 | Session().delete(comment) | |
489 |
|
543 | |||
490 | if comment.pull_request: |
|
544 | if comment.pull_request: | |
491 | action = 'repo.pull_request.comment.delete' |
|
545 | action = 'repo.pull_request.comment.delete' | |
492 | else: |
|
546 | else: | |
493 | action = 'repo.commit.comment.delete' |
|
547 | action = 'repo.commit.comment.delete' | |
494 |
|
548 | |||
495 | self._log_audit_action( |
|
549 | self._log_audit_action( | |
496 | action, {'old_data': old_data}, auth_user, comment) |
|
550 | action, {'old_data': old_data}, auth_user, comment) | |
497 |
|
551 | |||
498 | return comment |
|
552 | return comment | |
499 |
|
553 | |||
500 | def get_all_comments(self, repo_id, revision=None, pull_request=None): |
|
554 | def get_all_comments(self, repo_id, revision=None, pull_request=None): | |
501 | q = ChangesetComment.query()\ |
|
555 | q = ChangesetComment.query()\ | |
502 | .filter(ChangesetComment.repo_id == repo_id) |
|
556 | .filter(ChangesetComment.repo_id == repo_id) | |
503 | if revision: |
|
557 | if revision: | |
504 | q = q.filter(ChangesetComment.revision == revision) |
|
558 | q = q.filter(ChangesetComment.revision == revision) | |
505 | elif pull_request: |
|
559 | elif pull_request: | |
506 | pull_request = self.__get_pull_request(pull_request) |
|
560 | pull_request = self.__get_pull_request(pull_request) | |
507 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
561 | q = q.filter(ChangesetComment.pull_request == pull_request) | |
508 | else: |
|
562 | else: | |
509 | raise Exception('Please specify commit or pull_request') |
|
563 | raise Exception('Please specify commit or pull_request') | |
510 | q = q.order_by(ChangesetComment.created_on) |
|
564 | q = q.order_by(ChangesetComment.created_on) | |
511 | return q.all() |
|
565 | return q.all() | |
512 |
|
566 | |||
513 | def get_url(self, comment, request=None, permalink=False, anchor=None): |
|
567 | def get_url(self, comment, request=None, permalink=False, anchor=None): | |
514 | if not request: |
|
568 | if not request: | |
515 | request = get_current_request() |
|
569 | request = get_current_request() | |
516 |
|
570 | |||
517 | comment = self.__get_commit_comment(comment) |
|
571 | comment = self.__get_commit_comment(comment) | |
518 | if anchor is None: |
|
572 | if anchor is None: | |
519 | anchor = 'comment-{}'.format(comment.comment_id) |
|
573 | anchor = 'comment-{}'.format(comment.comment_id) | |
520 |
|
574 | |||
521 | if comment.pull_request: |
|
575 | if comment.pull_request: | |
522 | pull_request = comment.pull_request |
|
576 | pull_request = comment.pull_request | |
523 | if permalink: |
|
577 | if permalink: | |
524 | return request.route_url( |
|
578 | return request.route_url( | |
525 | 'pull_requests_global', |
|
579 | 'pull_requests_global', | |
526 | pull_request_id=pull_request.pull_request_id, |
|
580 | pull_request_id=pull_request.pull_request_id, | |
527 | _anchor=anchor) |
|
581 | _anchor=anchor) | |
528 | else: |
|
582 | else: | |
529 | return request.route_url( |
|
583 | return request.route_url( | |
530 | 'pullrequest_show', |
|
584 | 'pullrequest_show', | |
531 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
585 | repo_name=safe_str(pull_request.target_repo.repo_name), | |
532 | pull_request_id=pull_request.pull_request_id, |
|
586 | pull_request_id=pull_request.pull_request_id, | |
533 | _anchor=anchor) |
|
587 | _anchor=anchor) | |
534 |
|
588 | |||
535 | else: |
|
589 | else: | |
536 | repo = comment.repo |
|
590 | repo = comment.repo | |
537 | commit_id = comment.revision |
|
591 | commit_id = comment.revision | |
538 |
|
592 | |||
539 | if permalink: |
|
593 | if permalink: | |
540 | return request.route_url( |
|
594 | return request.route_url( | |
541 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
595 | 'repo_commit', repo_name=safe_str(repo.repo_id), | |
542 | commit_id=commit_id, |
|
596 | commit_id=commit_id, | |
543 | _anchor=anchor) |
|
597 | _anchor=anchor) | |
544 |
|
598 | |||
545 | else: |
|
599 | else: | |
546 | return request.route_url( |
|
600 | return request.route_url( | |
547 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
601 | 'repo_commit', repo_name=safe_str(repo.repo_name), | |
548 | commit_id=commit_id, |
|
602 | commit_id=commit_id, | |
549 | _anchor=anchor) |
|
603 | _anchor=anchor) | |
550 |
|
604 | |||
551 | def get_comments(self, repo_id, revision=None, pull_request=None): |
|
605 | def get_comments(self, repo_id, revision=None, pull_request=None): | |
552 | """ |
|
606 | """ | |
553 | Gets main comments based on revision or pull_request_id |
|
607 | Gets main comments based on revision or pull_request_id | |
554 |
|
608 | |||
555 | :param repo_id: |
|
609 | :param repo_id: | |
556 | :param revision: |
|
610 | :param revision: | |
557 | :param pull_request: |
|
611 | :param pull_request: | |
558 | """ |
|
612 | """ | |
559 |
|
613 | |||
560 | q = ChangesetComment.query()\ |
|
614 | q = ChangesetComment.query()\ | |
561 | .filter(ChangesetComment.repo_id == repo_id)\ |
|
615 | .filter(ChangesetComment.repo_id == repo_id)\ | |
562 | .filter(ChangesetComment.line_no == None)\ |
|
616 | .filter(ChangesetComment.line_no == None)\ | |
563 | .filter(ChangesetComment.f_path == None) |
|
617 | .filter(ChangesetComment.f_path == None) | |
564 | if revision: |
|
618 | if revision: | |
565 | q = q.filter(ChangesetComment.revision == revision) |
|
619 | q = q.filter(ChangesetComment.revision == revision) | |
566 | elif pull_request: |
|
620 | elif pull_request: | |
567 | pull_request = self.__get_pull_request(pull_request) |
|
621 | pull_request = self.__get_pull_request(pull_request) | |
568 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
622 | q = q.filter(ChangesetComment.pull_request == pull_request) | |
569 | else: |
|
623 | else: | |
570 | raise Exception('Please specify commit or pull_request') |
|
624 | raise Exception('Please specify commit or pull_request') | |
571 | q = q.order_by(ChangesetComment.created_on) |
|
625 | q = q.order_by(ChangesetComment.created_on) | |
572 | return q.all() |
|
626 | return q.all() | |
573 |
|
627 | |||
574 | def get_inline_comments(self, repo_id, revision=None, pull_request=None): |
|
628 | def get_inline_comments(self, repo_id, revision=None, pull_request=None): | |
575 | q = self._get_inline_comments_query(repo_id, revision, pull_request) |
|
629 | q = self._get_inline_comments_query(repo_id, revision, pull_request) | |
576 | return self._group_comments_by_path_and_line_number(q) |
|
630 | return self._group_comments_by_path_and_line_number(q) | |
577 |
|
631 | |||
578 | def get_inline_comments_count(self, inline_comments, skip_outdated=True, |
|
632 | def get_inline_comments_count(self, inline_comments, skip_outdated=True, | |
579 | version=None): |
|
633 | version=None): | |
580 | inline_cnt = 0 |
|
634 | inline_cnt = 0 | |
581 | for fname, per_line_comments in inline_comments.iteritems(): |
|
635 | for fname, per_line_comments in inline_comments.iteritems(): | |
582 | for lno, comments in per_line_comments.iteritems(): |
|
636 | for lno, comments in per_line_comments.iteritems(): | |
583 | for comm in comments: |
|
637 | for comm in comments: | |
584 | if not comm.outdated_at_version(version) and skip_outdated: |
|
638 | if not comm.outdated_at_version(version) and skip_outdated: | |
585 | inline_cnt += 1 |
|
639 | inline_cnt += 1 | |
586 |
|
640 | |||
587 | return inline_cnt |
|
641 | return inline_cnt | |
588 |
|
642 | |||
589 | def get_outdated_comments(self, repo_id, pull_request): |
|
643 | def get_outdated_comments(self, repo_id, pull_request): | |
590 | # TODO: johbo: Remove `repo_id`, it is not needed to find the comments |
|
644 | # TODO: johbo: Remove `repo_id`, it is not needed to find the comments | |
591 | # of a pull request. |
|
645 | # of a pull request. | |
592 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
646 | q = self._all_inline_comments_of_pull_request(pull_request) | |
593 | q = q.filter( |
|
647 | q = q.filter( | |
594 | ChangesetComment.display_state == |
|
648 | ChangesetComment.display_state == | |
595 | ChangesetComment.COMMENT_OUTDATED |
|
649 | ChangesetComment.COMMENT_OUTDATED | |
596 | ).order_by(ChangesetComment.comment_id.asc()) |
|
650 | ).order_by(ChangesetComment.comment_id.asc()) | |
597 |
|
651 | |||
598 | return self._group_comments_by_path_and_line_number(q) |
|
652 | return self._group_comments_by_path_and_line_number(q) | |
599 |
|
653 | |||
600 | def _get_inline_comments_query(self, repo_id, revision, pull_request): |
|
654 | def _get_inline_comments_query(self, repo_id, revision, pull_request): | |
601 | # TODO: johbo: Split this into two methods: One for PR and one for |
|
655 | # TODO: johbo: Split this into two methods: One for PR and one for | |
602 | # commit. |
|
656 | # commit. | |
603 | if revision: |
|
657 | if revision: | |
604 | q = Session().query(ChangesetComment).filter( |
|
658 | q = Session().query(ChangesetComment).filter( | |
605 | ChangesetComment.repo_id == repo_id, |
|
659 | ChangesetComment.repo_id == repo_id, | |
606 | ChangesetComment.line_no != null(), |
|
660 | ChangesetComment.line_no != null(), | |
607 | ChangesetComment.f_path != null(), |
|
661 | ChangesetComment.f_path != null(), | |
608 | ChangesetComment.revision == revision) |
|
662 | ChangesetComment.revision == revision) | |
609 |
|
663 | |||
610 | elif pull_request: |
|
664 | elif pull_request: | |
611 | pull_request = self.__get_pull_request(pull_request) |
|
665 | pull_request = self.__get_pull_request(pull_request) | |
612 | if not CommentsModel.use_outdated_comments(pull_request): |
|
666 | if not CommentsModel.use_outdated_comments(pull_request): | |
613 | q = self._visible_inline_comments_of_pull_request(pull_request) |
|
667 | q = self._visible_inline_comments_of_pull_request(pull_request) | |
614 | else: |
|
668 | else: | |
615 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
669 | q = self._all_inline_comments_of_pull_request(pull_request) | |
616 |
|
670 | |||
617 | else: |
|
671 | else: | |
618 | raise Exception('Please specify commit or pull_request_id') |
|
672 | raise Exception('Please specify commit or pull_request_id') | |
619 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
673 | q = q.order_by(ChangesetComment.comment_id.asc()) | |
620 | return q |
|
674 | return q | |
621 |
|
675 | |||
622 | def _group_comments_by_path_and_line_number(self, q): |
|
676 | def _group_comments_by_path_and_line_number(self, q): | |
623 | comments = q.all() |
|
677 | comments = q.all() | |
624 | paths = collections.defaultdict(lambda: collections.defaultdict(list)) |
|
678 | paths = collections.defaultdict(lambda: collections.defaultdict(list)) | |
625 | for co in comments: |
|
679 | for co in comments: | |
626 | paths[co.f_path][co.line_no].append(co) |
|
680 | paths[co.f_path][co.line_no].append(co) | |
627 | return paths |
|
681 | return paths | |
628 |
|
682 | |||
629 | @classmethod |
|
683 | @classmethod | |
630 | def needed_extra_diff_context(cls): |
|
684 | def needed_extra_diff_context(cls): | |
631 | return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER) |
|
685 | return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER) | |
632 |
|
686 | |||
633 | def outdate_comments(self, pull_request, old_diff_data, new_diff_data): |
|
687 | def outdate_comments(self, pull_request, old_diff_data, new_diff_data): | |
634 | if not CommentsModel.use_outdated_comments(pull_request): |
|
688 | if not CommentsModel.use_outdated_comments(pull_request): | |
635 | return |
|
689 | return | |
636 |
|
690 | |||
637 | comments = self._visible_inline_comments_of_pull_request(pull_request) |
|
691 | comments = self._visible_inline_comments_of_pull_request(pull_request) | |
638 | comments_to_outdate = comments.all() |
|
692 | comments_to_outdate = comments.all() | |
639 |
|
693 | |||
640 | for comment in comments_to_outdate: |
|
694 | for comment in comments_to_outdate: | |
641 | self._outdate_one_comment(comment, old_diff_data, new_diff_data) |
|
695 | self._outdate_one_comment(comment, old_diff_data, new_diff_data) | |
642 |
|
696 | |||
643 | def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc): |
|
697 | def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc): | |
644 | diff_line = _parse_comment_line_number(comment.line_no) |
|
698 | diff_line = _parse_comment_line_number(comment.line_no) | |
645 |
|
699 | |||
646 | try: |
|
700 | try: | |
647 | old_context = old_diff_proc.get_context_of_line( |
|
701 | old_context = old_diff_proc.get_context_of_line( | |
648 | path=comment.f_path, diff_line=diff_line) |
|
702 | path=comment.f_path, diff_line=diff_line) | |
649 | new_context = new_diff_proc.get_context_of_line( |
|
703 | new_context = new_diff_proc.get_context_of_line( | |
650 | path=comment.f_path, diff_line=diff_line) |
|
704 | path=comment.f_path, diff_line=diff_line) | |
651 | except (diffs.LineNotInDiffException, |
|
705 | except (diffs.LineNotInDiffException, | |
652 | diffs.FileNotInDiffException): |
|
706 | diffs.FileNotInDiffException): | |
653 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
707 | comment.display_state = ChangesetComment.COMMENT_OUTDATED | |
654 | return |
|
708 | return | |
655 |
|
709 | |||
656 | if old_context == new_context: |
|
710 | if old_context == new_context: | |
657 | return |
|
711 | return | |
658 |
|
712 | |||
659 | if self._should_relocate_diff_line(diff_line): |
|
713 | if self._should_relocate_diff_line(diff_line): | |
660 | new_diff_lines = new_diff_proc.find_context( |
|
714 | new_diff_lines = new_diff_proc.find_context( | |
661 | path=comment.f_path, context=old_context, |
|
715 | path=comment.f_path, context=old_context, | |
662 | offset=self.DIFF_CONTEXT_BEFORE) |
|
716 | offset=self.DIFF_CONTEXT_BEFORE) | |
663 | if not new_diff_lines: |
|
717 | if not new_diff_lines: | |
664 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
718 | comment.display_state = ChangesetComment.COMMENT_OUTDATED | |
665 | else: |
|
719 | else: | |
666 | new_diff_line = self._choose_closest_diff_line( |
|
720 | new_diff_line = self._choose_closest_diff_line( | |
667 | diff_line, new_diff_lines) |
|
721 | diff_line, new_diff_lines) | |
668 | comment.line_no = _diff_to_comment_line_number(new_diff_line) |
|
722 | comment.line_no = _diff_to_comment_line_number(new_diff_line) | |
669 | else: |
|
723 | else: | |
670 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
724 | comment.display_state = ChangesetComment.COMMENT_OUTDATED | |
671 |
|
725 | |||
672 | def _should_relocate_diff_line(self, diff_line): |
|
726 | def _should_relocate_diff_line(self, diff_line): | |
673 | """ |
|
727 | """ | |
674 | Checks if relocation shall be tried for the given `diff_line`. |
|
728 | Checks if relocation shall be tried for the given `diff_line`. | |
675 |
|
729 | |||
676 | If a comment points into the first lines, then we can have a situation |
|
730 | If a comment points into the first lines, then we can have a situation | |
677 | that after an update another line has been added on top. In this case |
|
731 | that after an update another line has been added on top. In this case | |
678 | we would find the context still and move the comment around. This |
|
732 | we would find the context still and move the comment around. This | |
679 | would be wrong. |
|
733 | would be wrong. | |
680 | """ |
|
734 | """ | |
681 | should_relocate = ( |
|
735 | should_relocate = ( | |
682 | (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or |
|
736 | (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or | |
683 | (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE)) |
|
737 | (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE)) | |
684 | return should_relocate |
|
738 | return should_relocate | |
685 |
|
739 | |||
686 | def _choose_closest_diff_line(self, diff_line, new_diff_lines): |
|
740 | def _choose_closest_diff_line(self, diff_line, new_diff_lines): | |
687 | candidate = new_diff_lines[0] |
|
741 | candidate = new_diff_lines[0] | |
688 | best_delta = _diff_line_delta(diff_line, candidate) |
|
742 | best_delta = _diff_line_delta(diff_line, candidate) | |
689 | for new_diff_line in new_diff_lines[1:]: |
|
743 | for new_diff_line in new_diff_lines[1:]: | |
690 | delta = _diff_line_delta(diff_line, new_diff_line) |
|
744 | delta = _diff_line_delta(diff_line, new_diff_line) | |
691 | if delta < best_delta: |
|
745 | if delta < best_delta: | |
692 | candidate = new_diff_line |
|
746 | candidate = new_diff_line | |
693 | best_delta = delta |
|
747 | best_delta = delta | |
694 | return candidate |
|
748 | return candidate | |
695 |
|
749 | |||
696 | def _visible_inline_comments_of_pull_request(self, pull_request): |
|
750 | def _visible_inline_comments_of_pull_request(self, pull_request): | |
697 | comments = self._all_inline_comments_of_pull_request(pull_request) |
|
751 | comments = self._all_inline_comments_of_pull_request(pull_request) | |
698 | comments = comments.filter( |
|
752 | comments = comments.filter( | |
699 | coalesce(ChangesetComment.display_state, '') != |
|
753 | coalesce(ChangesetComment.display_state, '') != | |
700 | ChangesetComment.COMMENT_OUTDATED) |
|
754 | ChangesetComment.COMMENT_OUTDATED) | |
701 | return comments |
|
755 | return comments | |
702 |
|
756 | |||
703 | def _all_inline_comments_of_pull_request(self, pull_request): |
|
757 | def _all_inline_comments_of_pull_request(self, pull_request): | |
704 | comments = Session().query(ChangesetComment)\ |
|
758 | comments = Session().query(ChangesetComment)\ | |
705 | .filter(ChangesetComment.line_no != None)\ |
|
759 | .filter(ChangesetComment.line_no != None)\ | |
706 | .filter(ChangesetComment.f_path != None)\ |
|
760 | .filter(ChangesetComment.f_path != None)\ | |
707 | .filter(ChangesetComment.pull_request == pull_request) |
|
761 | .filter(ChangesetComment.pull_request == pull_request) | |
708 | return comments |
|
762 | return comments | |
709 |
|
763 | |||
710 | def _all_general_comments_of_pull_request(self, pull_request): |
|
764 | def _all_general_comments_of_pull_request(self, pull_request): | |
711 | comments = Session().query(ChangesetComment)\ |
|
765 | comments = Session().query(ChangesetComment)\ | |
712 | .filter(ChangesetComment.line_no == None)\ |
|
766 | .filter(ChangesetComment.line_no == None)\ | |
713 | .filter(ChangesetComment.f_path == None)\ |
|
767 | .filter(ChangesetComment.f_path == None)\ | |
714 | .filter(ChangesetComment.pull_request == pull_request) |
|
768 | .filter(ChangesetComment.pull_request == pull_request) | |
|
769 | ||||
715 | return comments |
|
770 | return comments | |
716 |
|
771 | |||
717 | @staticmethod |
|
772 | @staticmethod | |
718 | def use_outdated_comments(pull_request): |
|
773 | def use_outdated_comments(pull_request): | |
719 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
774 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) | |
720 | settings = settings_model.get_general_settings() |
|
775 | settings = settings_model.get_general_settings() | |
721 | return settings.get('rhodecode_use_outdated_comments', False) |
|
776 | return settings.get('rhodecode_use_outdated_comments', False) | |
722 |
|
777 | |||
723 | def trigger_commit_comment_hook(self, repo, user, action, data=None): |
|
778 | def trigger_commit_comment_hook(self, repo, user, action, data=None): | |
724 | repo = self._get_repo(repo) |
|
779 | repo = self._get_repo(repo) | |
725 | target_scm = repo.scm_instance() |
|
780 | target_scm = repo.scm_instance() | |
726 | if action == 'create': |
|
781 | if action == 'create': | |
727 | trigger_hook = hooks_utils.trigger_comment_commit_hooks |
|
782 | trigger_hook = hooks_utils.trigger_comment_commit_hooks | |
728 | elif action == 'edit': |
|
783 | elif action == 'edit': | |
729 | # TODO(dan): when this is supported we trigger edit hook too |
|
784 | # TODO(dan): when this is supported we trigger edit hook too | |
730 | return |
|
785 | return | |
731 | else: |
|
786 | else: | |
732 | return |
|
787 | return | |
733 |
|
788 | |||
734 | log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s', |
|
789 | log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s', | |
735 | repo, action, trigger_hook) |
|
790 | repo, action, trigger_hook) | |
736 | trigger_hook( |
|
791 | trigger_hook( | |
737 | username=user.username, |
|
792 | username=user.username, | |
738 | repo_name=repo.repo_name, |
|
793 | repo_name=repo.repo_name, | |
739 | repo_type=target_scm.alias, |
|
794 | repo_type=target_scm.alias, | |
740 | repo=repo, |
|
795 | repo=repo, | |
741 | data=data) |
|
796 | data=data) | |
742 |
|
797 | |||
743 |
|
798 | |||
744 | def _parse_comment_line_number(line_no): |
|
799 | def _parse_comment_line_number(line_no): | |
745 | """ |
|
800 | """ | |
746 | Parses line numbers of the form "(o|n)\d+" and returns them in a tuple. |
|
801 | Parses line numbers of the form "(o|n)\d+" and returns them in a tuple. | |
747 | """ |
|
802 | """ | |
748 | old_line = None |
|
803 | old_line = None | |
749 | new_line = None |
|
804 | new_line = None | |
750 | if line_no.startswith('o'): |
|
805 | if line_no.startswith('o'): | |
751 | old_line = int(line_no[1:]) |
|
806 | old_line = int(line_no[1:]) | |
752 | elif line_no.startswith('n'): |
|
807 | elif line_no.startswith('n'): | |
753 | new_line = int(line_no[1:]) |
|
808 | new_line = int(line_no[1:]) | |
754 | else: |
|
809 | else: | |
755 | raise ValueError("Comment lines have to start with either 'o' or 'n'.") |
|
810 | raise ValueError("Comment lines have to start with either 'o' or 'n'.") | |
756 | return diffs.DiffLineNumber(old_line, new_line) |
|
811 | return diffs.DiffLineNumber(old_line, new_line) | |
757 |
|
812 | |||
758 |
|
813 | |||
759 | def _diff_to_comment_line_number(diff_line): |
|
814 | def _diff_to_comment_line_number(diff_line): | |
760 | if diff_line.new is not None: |
|
815 | if diff_line.new is not None: | |
761 | return u'n{}'.format(diff_line.new) |
|
816 | return u'n{}'.format(diff_line.new) | |
762 | elif diff_line.old is not None: |
|
817 | elif diff_line.old is not None: | |
763 | return u'o{}'.format(diff_line.old) |
|
818 | return u'o{}'.format(diff_line.old) | |
764 | return u'' |
|
819 | return u'' | |
765 |
|
820 | |||
766 |
|
821 | |||
767 | def _diff_line_delta(a, b): |
|
822 | def _diff_line_delta(a, b): | |
768 | if None not in (a.new, b.new): |
|
823 | if None not in (a.new, b.new): | |
769 | return abs(a.new - b.new) |
|
824 | return abs(a.new - b.new) | |
770 | elif None not in (a.old, b.old): |
|
825 | elif None not in (a.old, b.old): | |
771 | return abs(a.old - b.old) |
|
826 | return abs(a.old - b.old) | |
772 | else: |
|
827 | else: | |
773 | raise ValueError( |
|
828 | raise ValueError( | |
774 | "Cannot compute delta between {} and {}".format(a, b)) |
|
829 | "Cannot compute delta between {} and {}".format(a, b)) |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
General Comments 0
You need to be logged in to leave comments.
Login now