release: Merge default into stable for release preparation
super-admin
r4729:f8c5eac8 merge stable
@@ -0,0 +1,55 @@
+|RCE| 4.26.0 |RNS|
+------------------
+
+Release Date
+^^^^^^^^^^^^
+
+- 2021-08-06
+
+
+New Features
+^^^^^^^^^^^^
+
+
+
+General
+^^^^^^^
+
+- Caches: introduced invalidation as a safer way to expire keys; deleting them is more problematic.
+- Caches: improved locking problems with the new distributed-lock cache backend.
+- Pull requests: optimized DB transaction logic.
+  This should prevent potential problems with locking of pull requests that have a lot of reviewers.
+- Pull requests: updates now use retry logic in case an update is locked or fails due to concurrency issues.
+- Pull requests: allow forced state changes for repo admins too.
+- SSH: handle subrepos better when using SSH communication.
+
+
+Security
+^^^^^^^^
+
+- Draft comments: don't allow viewing history for anyone but the owner.
+- Validators: apply a username validator to prevent bad values from being searched in the DB, and potential XSS payloads sent via validators.
+
+
+Performance
+^^^^^^^^^^^
+
+- SSH: use pre-compiled backends for faster VCS detection matching.
+- Routing: don't check channelstream connections, for faster handling of this route.
+- Routing: skip VCS detection for ops views so they are not checked against VCS operations.
+
+
+Fixes
+^^^^^
+
+- Permissions: flush all users' permissions when creating a new user group.
+- Repos: recover properly from a bad extraction of repo_id from the URL and DB calls.
+- Comments history: fixed fetching of history for comments.
+- Pull requests: fix a potential crash on providing a wrong order-by type column.
+- Caches: report a damaged DB on key iteration too, not only on the GET call.
+- API: added proper full permission flush on API calls when creating repos and repo groups.
+
+Upgrade notes
+^^^^^^^^^^^^^
+
+- Scheduled release 4.26.0.
@@ -1,6 +1,5 @@
 [bumpversion]
-current_version = 4.25.2
+current_version = 4.26.0
 message = release: Bump version {current_version} to {new_version}
 
 [bumpversion:file:rhodecode/VERSION]
-
@@ -5,25 +5,20 @@ done = false
 done = true
 
 [task:rc_tools_pinned]
-done = true
 
 [task:fixes_on_stable]
-done = true
 
 [task:pip2nix_generated]
-done = true
 
 [task:changelog_updated]
-done = true
 
 [task:generate_api_docs]
-done = true
 
+[task:updated_translation]
+
 [release]
-state = prepared
-version = 4.25.2
+state = in_progress
+version = 4.26.0
 
-[task:updated_translation]
-
 [task:generate_js_routes]
 
@@ -391,6 +391,8 @@ rc_cache.cache_perms.expiration_time = 3
 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
 #rc_cache.cache_perms.arguments.distributed_lock = true
 
+; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
+#rc_cache.cache_perms.arguments.lock_auto_renewal = true
 
 ; ***************************************************
 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
@@ -414,6 +416,8 @@ rc_cache.cache_repo.expiration_time = 25
 ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends
 #rc_cache.cache_repo.arguments.distributed_lock = true
 
+; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
+#rc_cache.cache_repo.arguments.lock_auto_renewal = true
 
 ; ##############
 ; BEAKER SESSION
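The two ini hunks above only add commented-out defaults; `lock_auto_renewal` takes effect when uncommented together with `distributed_lock`. As a rough sketch (not RhodeCode's actual settings loader; the helper name is hypothetical), such `rc_cache.<region>.arguments.*` keys end up collected into the `arguments` dict that dogpile hands to the cache backend:

```python
def collect_region_arguments(settings, region='cache_perms'):
    """Gather 'rc_cache.<region>.arguments.*' ini keys into a dict.

    Hypothetical helper for illustration only; RhodeCode ships its own
    configuration loader for rc_cache regions.
    """
    prefix = 'rc_cache.{}.arguments.'.format(region)
    return {key[len(prefix):]: value
            for key, value in settings.items()
            if key.startswith(prefix)}


settings = {
    'rc_cache.cache_perms.arguments.distributed_lock': 'true',
    'rc_cache.cache_perms.arguments.lock_auto_renewal': 'true',
}
print(collect_region_arguments(settings))
# {'distributed_lock': 'true', 'lock_auto_renewal': 'true'}
```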
@@ -9,6 +9,7 @@ Release Notes
 .. toctree::
    :maxdepth: 1
 
+   release-notes-4.26.0.rst
    release-notes-4.25.2.rst
    release-notes-4.25.1.rst
    release-notes-4.25.0.rst
@@ -1883,7 +1883,7 @@ self: super: {
     };
   };
   "rhodecode-enterprise-ce" = super.buildPythonPackage {
-    name = "rhodecode-enterprise-ce-4.25.2";
+    name = "rhodecode-enterprise-ce-4.26.0";
     buildInputs = [
       self."pytest"
       self."py"
@@ -1,1 +1,1 @@
-4.25.2
\ No newline at end of file
+4.26.0
\ No newline at end of file
@@ -41,13 +41,14 @@ def trigger_user_permission_flush(event)
     automatic flush of permission caches, so the users affected receive new permissions
     Right Away
     """
-
+    invalidate = True
     affected_user_ids = set(event.user_ids)
     for user_id in affected_user_ids:
        for cache_namespace_uid_tmpl in cache_namespaces:
            cache_namespace_uid = cache_namespace_uid_tmpl.format(user_id)
-            del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid)
-            log.debug('Deleted %s cache keys for user_id: %s and namespace %s',
+            del_keys = rc_cache.clear_cache_namespace(
+                'cache_perms', cache_namespace_uid, invalidate=invalidate)
+            log.debug('Invalidated %s cache keys for user_id: %s and namespace %s',
                       del_keys, user_id, cache_namespace_uid)
 
 
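The handler above now asks `clear_cache_namespace` to invalidate rather than delete. A minimal standalone sketch of the same per-user namespace loop, with hypothetical template values and a stub standing in for `rc_cache.clear_cache_namespace`:

```python
# Hypothetical per-user namespace templates, shaped like the ones the
# handler iterates over via `cache_namespaces`.
cache_namespaces = [
    'cache_user_auth.{}',
    'cache_user_repo_acl_ids.{}',
]


def flush_user_permission_caches(user_ids, clear_namespace):
    """Invalidate every per-user cache namespace for the given users."""
    for user_id in set(user_ids):
        for tmpl in cache_namespaces:
            namespace = tmpl.format(user_id)
            deleted = clear_namespace('cache_perms', namespace, invalidate=True)
            print('Invalidated %s cache keys in %s' % (deleted, namespace))


# Stub in place of rc_cache.clear_cache_namespace:
flush_user_permission_caches([2, 3], lambda region, ns, invalidate: 0)
```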
@@ -247,8 +247,7 @@ class AdminUserGroupsView(BaseAppView, D
                 % user_group_name, category='error')
             raise HTTPFound(h.route_path('user_groups_new'))
 
-        affected_user_ids = [self._rhodecode_user.user_id]
-        PermissionModel().trigger_permission_flush(affected_user_ids)
+        PermissionModel().trigger_permission_flush()
 
         raise HTTPFound(
             h.route_path('edit_user_group', user_group_id=user_group_id))
@@ -173,7 +173,7 @@ def includeme(config):
 
     config.add_route(
         name='repo_commit_comment_history_view',
-        pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_history_id}/history_view', repo_route=True)
+        pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/history_view/{comment_history_id}', repo_route=True)
     config.add_view(
         RepoCommitsView,
         attr='repo_commit_comment_history_view',
@@ -72,7 +72,7 @@ class RepoChangelogView(RepoAppView):
             h.flash(msg, category='error')
             raise HTTPNotFound()
         except RepositoryError as e:
-            h.flash(safe_str(h.escape(e)), category='error')
+            h.flash(h.escape(safe_str(e)), category='error')
             raise HTTPNotFound()
 
     def _graph(self, repo, commits, prev_data=None, next_data=None):
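This `safe_str(h.escape(e))` to `h.escape(safe_str(e))` swap, repeated across the view hunks below, fixes the order of operations: the exception is first coerced to a plain string and only then HTML-escaped, so escaping is the last transformation applied before the message is flashed. A sketch with `markupsafe.escape` as a stand-in for `h.escape` and a simplified `safe_str` (RhodeCode's real helpers are more involved):

```python
from markupsafe import escape  # stand-in for h.escape


def safe_str(value):
    """Simplified stand-in for RhodeCode's safe_str helper."""
    return value if isinstance(value, str) else str(value)


class RepositoryError(Exception):
    pass


e = RepositoryError('commit <img src=x onerror=alert(1)> not found')

# Escaping the fully stringified message keeps the markup inert:
print(escape(safe_str(e)))
# commit &lt;img src=x onerror=alert(1)&gt; not found
```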
@@ -238,14 +238,14 @@ class RepoChangelogView(RepoAppView):
                 f_path=f_path, commit_id=commit_id)
 
         except EmptyRepositoryError as e:
-            h.flash(safe_str(h.escape(e)), category='warning')
+            h.flash(h.escape(safe_str(e)), category='warning')
             raise HTTPFound(
                 h.route_path('repo_summary', repo_name=self.db_repo_name))
         except HTTPFound:
             raise
         except (RepositoryError, CommitDoesNotExistError, Exception) as e:
             log.exception(safe_str(e))
-            h.flash(safe_str(h.escape(e)), category='error')
+            h.flash(h.escape(safe_str(e)), category='error')
 
         if commit_id:
             # from single commit page, we redirect to main commits
@@ -539,9 +539,10 @@ class RepoCommitsView(RepoAppView):
     @CSRFRequired()
     def repo_commit_comment_history_view(self):
         c = self.load_default_context()
+        comment_id = self.request.matchdict['comment_id']
         comment_history_id = self.request.matchdict['comment_history_id']
 
-        comment = ChangesetComment.get_or_404(comment_history_id)
+        comment = ChangesetComment.get_or_404(comment_id)
         comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
         if comment.draft and not comment_owner:
             # if we see draft comments history, we only allow this for owner
@@ -70,7 +70,7 @@ class RepoCompareView(RepoAppView):
 
         except RepositoryError as e:
             log.exception(safe_str(e))
-            h.flash(safe_str(h.escape(e)), category='warning')
+            h.flash(h.escape(safe_str(e)), category='warning')
             if not partial:
                 raise HTTPFound(
                     h.route_path('repo_summary', repo_name=repo.repo_name))
@@ -186,7 +186,7 @@ class RepoFilesView(RepoAppView):
             h.flash(msg, category='error')
             raise HTTPNotFound()
         except RepositoryError as e:
-            h.flash(safe_str(h.escape(e)), category='error')
+            h.flash(h.escape(safe_str(e)), category='error')
             raise HTTPNotFound()
 
     def _get_filenode_or_redirect(self, commit_obj, path):
@@ -206,7 +206,7 @@ class RepoFilesView(RepoAppView):
             raise HTTPNotFound()
         except RepositoryError as e:
             log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
-            h.flash(safe_str(h.escape(e)), category='error')
+            h.flash(h.escape(safe_str(e)), category='error')
             raise HTTPNotFound()
 
         return file_node
@@ -733,7 +733,7 @@ class RepoFilesView(RepoAppView):
                 c.commit.raw_id, f_path)
 
         except RepositoryError as e:
-            h.flash(safe_str(h.escape(e)), category='error')
+            h.flash(h.escape(safe_str(e)), category='error')
             raise HTTPNotFound()
 
         if self.request.environ.get('HTTP_X_PJAX'):
@@ -927,7 +927,7 @@ class RepoFilesView(RepoAppView):
             _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
         except (RepositoryError, CommitDoesNotExistError, Exception) as e:
             log.exception(safe_str(e))
-            h.flash(safe_str(h.escape(e)), category='error')
+            h.flash(h.escape(safe_str(e)), category='error')
             raise HTTPFound(h.route_path(
                 'repo_files', repo_name=self.db_repo_name,
                 commit_id='tip', f_path='/'))
@@ -1444,7 +1444,7 @@ class RepoFilesView(RepoAppView):
                 'contain .. in the path'), category='warning')
             raise HTTPFound(default_redirect_url)
         except (NodeError, NodeAlreadyExistsError) as e:
-            h.flash(_(h.escape(e)), category='error')
+            h.flash(h.escape(safe_str(e)), category='error')
         except Exception:
             log.exception('Error occurred during commit')
             h.flash(_('Error occurred during commit'), category='error')
@@ -10,7 +10,7 @@ from redis import StrictRedis
 __version__ = '3.7.0'
 
 loggers = {
-    k: getLogger("rhodecode" + ".".join((__name__, k)))
+    k: getLogger("rhodecode." + ".".join((__name__, k)))
     for k in [
         "acquire",
         "refresh.thread.start",
@@ -221,10 +221,11 @@ class Lock(object):
         """
         logger = loggers["acquire"]
 
-        logger.debug("Getting %r ...", self._name)
+        logger.debug("Getting acquire on %r ...", self._name)
 
         if self._held:
-            raise AlreadyAcquired("Already acquired from this Lock instance.")
+            owner_id = self.get_owner_id()
+            raise AlreadyAcquired("Already acquired from this Lock instance. Lock id: {}".format(owner_id))
 
         if not blocking and timeout is not None:
             raise TimeoutNotUsable("Timeout cannot be used if blocking=False")
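The enriched `AlreadyAcquired` message surfaces the lock owner id, which helps when debugging stuck distributed locks. A usage sketch against the upstream python-redis-lock API (RhodeCode vendors a patched copy of this module; a reachable Redis on localhost is assumed):

```python
import redis
import redis_lock  # python-redis-lock; RhodeCode vendors a patched copy

client = redis.StrictRedis()  # assumes Redis on localhost:6379

lock = redis_lock.Lock(client, name='example-lock', expire=10, strict=True)
if lock.acquire(blocking=True):
    try:
        # Calling lock.acquire() again on this same instance would raise
        # AlreadyAcquired; with the patch above, the message now also
        # carries the owner id for easier debugging.
        pass
    finally:
        lock.release()
```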
@@ -166,6 +166,9 @@ def detect_vcs_request(environ, backends
         # static files no detection
         '_static',
 
+        # skip ops ping
+        '_admin/ops/ping',
+
         # full channelstream connect should be VCS skipped
         '_admin/channelstream/connect',
     ]
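Adding `_admin/ops/ping` to the skip list means monitoring pings no longer pay the cost of VCS protocol detection. A condensed sketch of the skip check; the prefix-matching strategy here is an assumption, since the full `detect_vcs_request()` carries more backend logic:

```python
def should_skip_vcs_detection(path_info, skip_prefixes):
    """Return True when a request path should bypass VCS detection."""
    path = path_info.lstrip('/')
    return any(path.startswith(prefix) for prefix in skip_prefixes)


skip_prefixes = [
    '_static',
    '_admin/ops/ping',
    '_admin/channelstream/connect',
]
assert should_skip_vcs_detection('/_admin/ops/ping', skip_prefixes)
assert not should_skip_vcs_detection('/myrepo/info/refs', skip_prefixes)
```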
@@ -33,6 +33,8 @@ from dogpile.cache.backends import redis
 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
 from dogpile.cache.util import memoized_property
 
+from pyramid.settings import asbool
+
 from rhodecode.lib.memory_lru_dict import LRUDict, LRUDictDebug
 
 
@@ -224,6 +226,16 @@ class FileNamespaceBackend(PickleSeriali
 
 
 class BaseRedisBackend(redis_backend.RedisBackend):
+    key_prefix = ''
+
+    def __init__(self, arguments):
+        super(BaseRedisBackend, self).__init__(arguments)
+        self._lock_timeout = self.lock_timeout
+        self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))
+
+        if self._lock_auto_renewal and not self._lock_timeout:
+            # set default timeout for auto_renewal
+            self._lock_timeout = 30
 
     def _create_client(self):
         args = {}
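The new constructor reads `lock_auto_renewal` with pyramid's `asbool` (hence the import added above) and forces a finite lock timeout when auto-renewal is on, since renewal needs an expiry to extend. The same logic, lifted into a standalone function for illustration:

```python
from pyramid.settings import asbool


def resolve_lock_settings(arguments, lock_timeout=None):
    """Mirror the BaseRedisBackend.__init__ logic above in isolation."""
    auto_renewal = asbool(arguments.pop('lock_auto_renewal', True))
    if auto_renewal and not lock_timeout:
        # auto-renewal needs a finite expire time it can keep extending
        lock_timeout = 30
    return lock_timeout, auto_renewal


print(resolve_lock_settings({'lock_auto_renewal': 'true'}))   # (30, True)
print(resolve_lock_settings({'lock_auto_renewal': 'false'}))  # (None, False)
```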
@@ -287,17 +299,10 @@ class BaseRedisBackend(redis_backend.Red
 
     def get_mutex(self, key):
         if self.distributed_lock:
-            import redis_lock
             lock_key = redis_backend.u('_lock_{0}').format(key)
             log.debug('Trying to acquire Redis lock for key %s', lock_key)
-            lock = redis_lock.Lock(
-                redis_client=self.client,
-                name=lock_key,
-                expire=self.lock_timeout,
-                auto_renewal=False,
-                strict=True,
-            )
-            return lock
+            return get_mutex_lock(self.client, lock_key, self._lock_timeout,
+                                  auto_renewal=self._lock_auto_renewal)
         else:
             return None
 
@@ -310,3 +315,40 @@ class RedisPickleBackend(PickleSerialize
 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
     key_prefix = 'redis_msgpack_backend'
     pass
+
+
+def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
+    import redis_lock
+
+    class _RedisLockWrapper(object):
+        """LockWrapper for redis_lock"""
+
+        @classmethod
+        def get_lock(cls):
+            return redis_lock.Lock(
+                redis_client=client,
+                name=lock_key,
+                expire=lock_timeout,
+                auto_renewal=auto_renewal,
+                strict=True,
+            )
+
+        def __init__(self):
+            self.lock = self.get_lock()
+
+        def acquire(self, wait=True):
+            try:
+                return self.lock.acquire(wait)
+            except redis_lock.AlreadyAcquired:
+                return False
+            except redis_lock.AlreadyStarted:
+                # refresh thread exists, but it also means we acquired the lock
+                return True
+
+        def release(self):
+            try:
+                self.lock.release()
+            except redis_lock.NotAcquired:
+                pass
+
+    return _RedisLockWrapper()
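dogpile.cache drives a mutex through `acquire(wait)`/`release()`, which is exactly the surface `_RedisLockWrapper` exposes; swallowing `AlreadyAcquired`/`NotAcquired` and treating `AlreadyStarted` as success keeps concurrent cache regenerations from crashing. A usage sketch, assuming a reachable Redis (the key name is illustrative):

```python
import redis

client = redis.StrictRedis()  # assumes Redis on localhost:6379

# get_mutex_lock() is the helper added in the hunk above.
mutex = get_mutex_lock(client, '_lock_example_key',
                       lock_timeout=30, auto_renewal=True)
if mutex.acquire(wait=True):
    try:
        pass  # regenerate and store the cached value here
    finally:
        mutex.release()
```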
@@ -261,12 +261,15 @@ def get_or_create_region(region_name, re
     return region_obj
 
 
-def clear_cache_namespace(cache_region, cache_namespace_uid):
+def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
     region = get_or_create_region(cache_region, cache_namespace_uid)
     cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
     num_delete_keys = len(cache_keys)
-    if num_delete_keys:
-        region.delete_multi(cache_keys)
+    if invalidate:
+        region.invalidate(hard=False)
+    else:
+        if num_delete_keys:
+            region.delete_multi(cache_keys)
     return num_delete_keys
 
 
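With the new flag, callers choose between soft invalidation and hard deletion: invalidation marks the region stale so readers recompute lazily, avoiding a mass DELETE against Redis, which is the safer behavior the release notes describe. A usage sketch (the import path matches the files in this changeset, and the namespace value is illustrative):

```python
from rhodecode.lib import rc_cache

# Soft: keys stay in the backend but are treated as expired; readers
# recompute lazily. This is what the permission-flush and repo-delete
# call sites in this changeset switched to.
rc_cache.clear_cache_namespace('cache_repo', 'cache_repo.42', invalidate=True)

# Hard (previous behavior): physically delete every key in the
# namespace, which can be slow and lock-heavy on large namespaces.
rc_cache.clear_cache_namespace('cache_repo', 'cache_repo.42')
```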
@@ -345,14 +345,14 @@ class PullRequestModel(BaseModel):
         if only_created:
             q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
 
-        if order_by:
-            order_map = {
-                'name_raw': PullRequest.pull_request_id,
-                'id': PullRequest.pull_request_id,
-                'title': PullRequest.title,
-                'updated_on_raw': PullRequest.updated_on,
-                'target_repo': PullRequest.target_repo_id
-            }
+        order_map = {
+            'name_raw': PullRequest.pull_request_id,
+            'id': PullRequest.pull_request_id,
+            'title': PullRequest.title,
+            'updated_on_raw': PullRequest.updated_on,
+            'target_repo': PullRequest.target_repo_id
+        }
+        if order_by and order_by in order_map:
             if order_dir == 'asc':
                 q = q.order_by(order_map[order_by].asc())
             else:
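This hunk (and the three matching ones below) builds `order_map` unconditionally and guards ordering with `order_by and order_by in order_map`, so an unknown order-by column is ignored instead of raising `KeyError`: the "potential crash on providing a wrong order-by type column" fix from the release notes. The pattern, condensed:

```python
def apply_ordering(q, order_by, order_dir, order_map):
    """Order a query only when the requested column is known."""
    if order_by and order_by in order_map:
        column = order_map[order_by]
        q = q.order_by(column.asc() if order_dir == 'asc' else column.desc())
    # unknown columns leave the query unordered instead of crashing
    return q
```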
@@ -499,13 +499,13 @@ class PullRequestModel(BaseModel):
                 pull_request_alias.description.ilike(like_expression),
             ))
 
-        if order_by:
-            order_map = {
-                'name_raw': pull_request_alias.pull_request_id,
-                'title': pull_request_alias.title,
-                'updated_on_raw': pull_request_alias.updated_on,
-                'target_repo': pull_request_alias.target_repo_id
-            }
+        order_map = {
+            'name_raw': pull_request_alias.pull_request_id,
+            'title': pull_request_alias.title,
+            'updated_on_raw': pull_request_alias.updated_on,
+            'target_repo': pull_request_alias.target_repo_id
+        }
+        if order_by and order_by in order_map:
             if order_dir == 'asc':
                 q = q.order_by(order_map[order_by].asc())
             else:
@@ -585,13 +585,14 @@ class PullRequestModel(BaseModel):
                 PullRequest.title.ilike(like_expression),
                 PullRequest.description.ilike(like_expression),
             ))
-        if order_by:
-            order_map = {
-                'name_raw': PullRequest.pull_request_id,
-                'title': PullRequest.title,
-                'updated_on_raw': PullRequest.updated_on,
-                'target_repo': PullRequest.target_repo_id
-            }
+
+        order_map = {
+            'name_raw': PullRequest.pull_request_id,
+            'title': PullRequest.title,
+            'updated_on_raw': PullRequest.updated_on,
+            'target_repo': PullRequest.target_repo_id
+        }
+        if order_by and order_by in order_map:
             if order_dir == 'asc':
                 q = q.order_by(order_map[order_by].asc())
             else:
@@ -665,13 +666,13 @@ class PullRequestModel(BaseModel):
                 pull_request_alias.description.ilike(like_expression),
             ))
 
-        if order_by:
-            order_map = {
-                'name_raw': pull_request_alias.pull_request_id,
-                'title': pull_request_alias.title,
-                'updated_on_raw': pull_request_alias.updated_on,
-                'target_repo': pull_request_alias.target_repo_id
-            }
+        order_map = {
+            'name_raw': pull_request_alias.pull_request_id,
+            'title': pull_request_alias.title,
+            'updated_on_raw': pull_request_alias.updated_on,
+            'target_repo': pull_request_alias.target_repo_id
+        }
+        if order_by and order_by in order_map:
             if order_dir == 'asc':
                 q = q.order_by(order_map[order_by].asc())
             else:
@@ -125,13 +125,15 @@ class RepoModel(BaseModel):
         :param repo_name:
         :return: repo object if matched else None
         """
-
+        _repo_id = None
         try:
             _repo_id = self._extract_id_from_repo_name(repo_name)
             if _repo_id:
                 return self.get(_repo_id)
         except Exception:
             log.exception('Failed to extract repo_name from URL')
+            if _repo_id:
+                Session().rollback()
 
         return None
 
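Two things changed here: `_repo_id` is bound before the `try` block, so the `except` handler can inspect it without risking `NameError`, and when a repo id was extracted but the DB call failed, the session is rolled back so it stays usable. The control flow in isolation, with the collaborators injected as stand-ins:

```python
def get_repo_by_id(repo_name, extract_id, get_repo, rollback):
    """Stand-in for RepoModel.get_repo_by_id(); collaborators injected."""
    _repo_id = None  # bound up front so `except` can inspect it safely
    try:
        _repo_id = extract_id(repo_name)
        if _repo_id:
            return get_repo(_repo_id)
    except Exception:
        if _repo_id:
            # extraction succeeded, so the DB call failed: roll back to
            # leave the session in a usable state
            rollback()
    return None


print(get_repo_by_id('_5', lambda name: 5,
                     lambda rid: 'repo-%s' % rid, lambda: None))  # repo-5
```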
@@ -285,7 +285,8 @@ class ScmModel(BaseModel):
         repo.update_commit_cache(config=config, cs_cache=None)
         if delete:
             cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
-            rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
+            rc_cache.clear_cache_namespace(
+                'cache_repo', cache_namespace_uid, invalidate=True)
 
     def toggle_following_repo(self, follow_repo_id, user_id):
 
@@ -289,7 +289,7 @@ function registerRCRoutes() {
     pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']);
     pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']);
     pyroutes.register('repo_commit_comment_edit', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/edit', ['repo_name', 'commit_id', 'comment_id']);
-    pyroutes.register('repo_commit_comment_history_view', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_history_id)s/history_view', ['repo_name', 'commit_id', 'comment_history_id']);
+    pyroutes.register('repo_commit_comment_history_view', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/history_view/%(comment_history_id)s', ['repo_name', 'commit_id', 'comment_id', 'comment_history_id']);
     pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']);
     pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']);
     pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']);
@@ -572,7 +572,8 @@ var CommentsController = function() {
             'repo_commit_comment_history_view',
             {
                 'repo_name': templateContext.repo_name,
-                'commit_id': comment_id,
+                'commit_id': null, // We don't need to check the commit data here...
+                'comment_id': comment_id,
                 'comment_history_id': comment_history_id,
             }
         );