feat(remap and rescan): added a more resilient remap and removal option, and split the logic into separate add and cleanup actions
super-admin
r5619:c9e499e7 default
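In short, the previous `rescan_repos(remove_obsolete=...)` behaviour is split into two super-admin API methods: `rescan_repos` now only registers repositories found on disk and reports `added`/`errors`, while the new `cleanup_repos` removes database entries for repositories and repository groups that no longer exist on the filesystem and reports `removed`/`errors`. A minimal usage sketch follows; the endpoint URL and auth token are placeholders, and the request envelope is assumed to match the standard RhodeCode JSON-RPC format (the method names and result shapes come from the diff below).

```python
# Hedged sketch: calling the two API methods introduced by this commit.
# API_URL and AUTH_TOKEN are placeholders, not values from the diff.
import requests

API_URL = "https://rhodecode.example.com/_admin/api"  # assumed JSON-RPC endpoint
AUTH_TOKEN = "<super-admin auth token>"               # placeholder

def api_call(method, args=None):
    # Assumed RhodeCode JSON-RPC envelope: id, auth_token, method, args
    payload = {"id": 1, "auth_token": AUTH_TOKEN, "method": method, "args": args or {}}
    return requests.post(API_URL, json=payload, timeout=60).json()

# 1) Scan storage and register repositories missing from the database.
scan = api_call("rescan_repos")
print(scan["result"])     # e.g. {'added': [...], 'errors': [...]}

# 2) Separately, drop database entries whose repos/groups are gone from disk.
cleanup = api_call("cleanup_repos")
print(cleanup["result"])  # e.g. {'removed': [...], 'errors': [...]}
```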
@@ -0,0 +1,44 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import mock
20 import pytest
21
22 from rhodecode.model.scm import ScmModel
23 from rhodecode.api.tests.utils import (
24 build_data, api_call, assert_ok, assert_error, crash)
25
26
27 @pytest.mark.usefixtures("testuser_api", "app")
28 class TestCleanupRepos(object):
29 def test_api_cleanup_repos(self):
30 id_, params = build_data(self.apikey, 'cleanup_repos')
31 response = api_call(self.app, params)
32
33 expected = {'removed': [], 'errors': []}
34 assert_ok(id_, expected, given=response.body)
35
36 def test_api_cleanup_repos_error(self):
37
38 id_, params = build_data(self.apikey, 'cleanup_repos', )
39
40 with mock.patch('rhodecode.lib.utils.repo2db_cleanup', side_effect=crash):
41 response = api_call(self.app, params)
42
43 expected = 'Error occurred during repo storage cleanup action'
44 assert_error(id_, expected, given=response.body)
@@ -19,7 +19,6 b''
19 import mock
19 import mock
20 import pytest
20 import pytest
21
21
22 from rhodecode.model.scm import ScmModel
23 from rhodecode.api.tests.utils import (
22 from rhodecode.api.tests.utils import (
24 build_data, api_call, assert_ok, assert_error, crash)
23 build_data, api_call, assert_ok, assert_error, crash)
25
24
@@ -30,13 +29,14 b' class TestRescanRepos(object):'
30 id_, params = build_data(self.apikey, 'rescan_repos')
29 id_, params = build_data(self.apikey, 'rescan_repos')
31 response = api_call(self.app, params)
30 response = api_call(self.app, params)
32
31
33 expected = {'added': [], 'removed': []}
32 expected = {'added': [], 'errors': []}
34 assert_ok(id_, expected, given=response.body)
33 assert_ok(id_, expected, given=response.body)
35
34
36 @mock.patch.object(ScmModel, 'repo_scan', crash)
35 def test_api_rescan_repos_error(self):
37 def test_api_rescann_error(self):
38 id_, params = build_data(self.apikey, 'rescan_repos', )
36 id_, params = build_data(self.apikey, 'rescan_repos', )
39 response = api_call(self.app, params)
37
38 with mock.patch('rhodecode.lib.utils.repo2db_mapper', side_effect=crash):
39 response = api_call(self.app, params)
40
40
41 expected = 'Error occurred during rescan repositories action'
41 expected = 'Error occurred during rescan repositories action'
42 assert_error(id_, expected, given=response.body)
42 assert_error(id_, expected, given=response.body)
@@ -18,14 +18,13 b''
18
18
19 import logging
19 import logging
20 import itertools
20 import itertools
21 import base64
22
21
23 from rhodecode.api import (
22 from rhodecode.api import (
24 jsonrpc_method, JSONRPCError, JSONRPCForbidden, find_methods)
23 jsonrpc_method, JSONRPCError, JSONRPCForbidden, find_methods)
25
24
26 from rhodecode.api.utils import (
25 from rhodecode.api.utils import (
27 Optional, OAttr, has_superadmin_permission, get_user_or_error)
26 Optional, OAttr, has_superadmin_permission, get_user_or_error)
28 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
27 from rhodecode.lib.utils import get_rhodecode_repo_store_path
29 from rhodecode.lib import system_info
28 from rhodecode.lib import system_info
30 from rhodecode.lib import user_sessions
29 from rhodecode.lib import user_sessions
31 from rhodecode.lib import exc_tracking
30 from rhodecode.lib import exc_tracking
@@ -33,9 +32,7 b' from rhodecode.lib.ext_json import json'
33 from rhodecode.lib.utils2 import safe_int
32 from rhodecode.lib.utils2 import safe_int
34 from rhodecode.model.db import UserIpMap
33 from rhodecode.model.db import UserIpMap
35 from rhodecode.model.scm import ScmModel
34 from rhodecode.model.scm import ScmModel
36 from rhodecode.apps.file_store import utils as store_utils
35
37 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
38 FileOverSizeException
39
36
40 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
41
38
@@ -158,13 +155,10 b' def get_ip(request, apiuser, userid=Opti'
158
155
159
156
160 @jsonrpc_method()
157 @jsonrpc_method()
161 def rescan_repos(request, apiuser, remove_obsolete=Optional(False)):
158 def rescan_repos(request, apiuser):
162 """
159 """
163 Triggers a rescan of the specified repositories.
160 Triggers a rescan of the specified repositories.
164
161 It returns a list of added repositories and any errors encountered during the scan.
165 * If the ``remove_obsolete`` option is set, it also deletes repositories
166 that are found in the database but not on the file system, so called
167 "clean zombies".
168
162
169 This command can only be run using an |authtoken| with admin rights to
163 This command can only be run using an |authtoken| with admin rights to
170 the specified repository.
164 the specified repository.
@@ -173,9 +167,6 b' def rescan_repos(request, apiuser, remov'
173
167
174 :param apiuser: This is filled automatically from the |authtoken|.
168 :param apiuser: This is filled automatically from the |authtoken|.
175 :type apiuser: AuthUser
169 :type apiuser: AuthUser
176 :param remove_obsolete: Deletes repositories from the database that
177 are not found on the filesystem.
178 :type remove_obsolete: Optional(``True`` | ``False``)
179
170
180 Example output:
171 Example output:
181
172
@@ -184,7 +175,7 b' def rescan_repos(request, apiuser, remov'
184 id : <id_given_in_input>
175 id : <id_given_in_input>
185 result : {
176 result : {
186 'added': [<added repository name>,...]
177 'added': [<added repository name>,...]
187 'removed': [<removed repository name>,...]
178 'errors': [<error_list>,...]
188 }
179 }
189 error : null
180 error : null
190
181
@@ -199,28 +190,24 b' def rescan_repos(request, apiuser, remov'
199 }
190 }
200
191
201 """
192 """
193 from rhodecode.lib.utils import repo2db_mapper # re-import for testing patches
194
202 if not has_superadmin_permission(apiuser):
195 if not has_superadmin_permission(apiuser):
203 raise JSONRPCForbidden()
196 raise JSONRPCForbidden()
204
197
205 try:
198 try:
206 rm_obsolete = Optional.extract(remove_obsolete)
199 added, errors = repo2db_mapper(ScmModel().repo_scan(), force_hooks_rebuild=True)
207 added, removed = repo2db_mapper(ScmModel().repo_scan(),
200 return {'added': added, 'errors': errors}
208 remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
209 return {'added': added, 'removed': removed}
210 except Exception:
201 except Exception:
211 log.exception('Failed to run repo rescann')
202 log.exception('Failed to run repo rescan')
212 raise JSONRPCError(
203 raise JSONRPCError(
213 'Error occurred during rescan repositories action'
204 'Error occurred during rescan repositories action'
214 )
205 )
215
206
216 @jsonrpc_method()
207 @jsonrpc_method()
217 def cleanup_repos(request, apiuser, remove_obsolete=Optional(False)):
208 def cleanup_repos(request, apiuser):
218 """
209 """
219 Triggers a rescan of the specified repositories.
210 Triggers a cleanup of repositories or repository groups that exist in the database but are no longer present on the filesystem.
220
221 * If the ``remove_obsolete`` option is set, it also deletes repositories
222 that are found in the database but not on the file system, so called
223 "clean zombies".
224
211
225 This command can only be run using an |authtoken| with admin rights to
212 This command can only be run using an |authtoken| with admin rights to
226 the specified repository.
213 the specified repository.
@@ -229,9 +216,6 b' def cleanup_repos(request, apiuser, remo'
229
216
230 :param apiuser: This is filled automatically from the |authtoken|.
217 :param apiuser: This is filled automatically from the |authtoken|.
231 :type apiuser: AuthUser
218 :type apiuser: AuthUser
232 :param remove_obsolete: Deletes repositories from the database that
233 are not found on the filesystem.
234 :type remove_obsolete: Optional(``True`` | ``False``)
235
219
236 Example output:
220 Example output:
237
221
@@ -239,8 +223,8 b' def cleanup_repos(request, apiuser, remo'
239
223
240 id : <id_given_in_input>
224 id : <id_given_in_input>
241 result : {
225 result : {
242 'added': [<added repository name>,...]
226 'removed': [<removed repository name or repository group name>,...]
243 'removed': [<removed repository name>,...]
227 'errors': [<error list of failures to remove>,...]
244 }
228 }
245 error : null
229 error : null
246
230
@@ -251,22 +235,22 b' def cleanup_repos(request, apiuser, remo'
251 id : <id_given_in_input>
235 id : <id_given_in_input>
252 result : null
236 result : null
253 error : {
237 error : {
254 'Error occurred during rescan repositories action'
238 'Error occurred during repo storage cleanup action'
255 }
239 }
256
240
257 """
241 """
242 from rhodecode.lib.utils import repo2db_cleanup # re-import for testing patches
243
258 if not has_superadmin_permission(apiuser):
244 if not has_superadmin_permission(apiuser):
259 raise JSONRPCForbidden()
245 raise JSONRPCForbidden()
260
246
261 try:
247 try:
262 rm_obsolete = Optional.extract(remove_obsolete)
248 removed, errors = repo2db_cleanup()
263 added, removed = repo2db_mapper(ScmModel().repo_scan(),
249 return {'removed': removed, 'errors': errors}
264 remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
265 return {'added': added, 'removed': removed}
266 except Exception:
250 except Exception:
267 log.exception('Failed to run repo rescann')
251 log.exception('Failed to run repo storage cleanup')
268 raise JSONRPCError(
252 raise JSONRPCError(
269 'Error occurred during rescan repositories action'
253 'Error occurred during repo storage cleanup action'
270 )
254 )
271
255
272
256
@@ -361,12 +361,21 b' def admin_routes(config):'
361 renderer='rhodecode:templates/admin/settings/settings.mako')
361 renderer='rhodecode:templates/admin/settings/settings.mako')
362
362
363 config.add_route(
363 config.add_route(
364 name='admin_settings_mapping_update',
364 name='admin_settings_mapping_create',
365 pattern='/settings/mapping/update')
365 pattern='/settings/mapping/create')
366 config.add_view(
366 config.add_view(
367 AdminSettingsView,
367 AdminSettingsView,
368 attr='settings_mapping_update',
368 attr='settings_mapping_create',
369 route_name='admin_settings_mapping_update', request_method='POST',
369 route_name='admin_settings_mapping_create', request_method='POST',
370 renderer='rhodecode:templates/admin/settings/settings.mako')
371
372 config.add_route(
373 name='admin_settings_mapping_cleanup',
374 pattern='/settings/mapping/cleanup')
375 config.add_view(
376 AdminSettingsView,
377 attr='settings_mapping_cleanup',
378 route_name='admin_settings_mapping_cleanup', request_method='POST',
370 renderer='rhodecode:templates/admin/settings/settings.mako')
379 renderer='rhodecode:templates/admin/settings/settings.mako')
371
380
372 config.add_route(
381 config.add_route(
@@ -110,9 +110,11 b' class TestAdminRepos(object):'
110 repo_type=backend.alias,
110 repo_type=backend.alias,
111 repo_description=description,
111 repo_description=description,
112 csrf_token=csrf_token))
112 csrf_token=csrf_token))
113
113 try:
114 self.assert_repository_is_created_correctly(
114 self.assert_repository_is_created_correctly(repo_name, description, backend)
115 repo_name, description, backend)
115 finally:
116 RepoModel().delete(numeric_repo)
117 Session().commit()
116
118
117 @pytest.mark.parametrize("suffix", ['', '_ąćę'], ids=['', 'non-ascii'])
119 @pytest.mark.parametrize("suffix", ['', '_ąćę'], ids=['', 'non-ascii'])
118 def test_create_in_group(
120 def test_create_in_group(
@@ -38,7 +38,7 b' from rhodecode.lib.auth import ('
38 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
38 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
39 from rhodecode.lib.celerylib import tasks, run_task
39 from rhodecode.lib.celerylib import tasks, run_task
40 from rhodecode.lib.str_utils import safe_str
40 from rhodecode.lib.str_utils import safe_str
41 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
41 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path, repo2db_cleanup
42 from rhodecode.lib.utils2 import str2bool, AttributeDict
42 from rhodecode.lib.utils2 import str2bool, AttributeDict
43 from rhodecode.lib.index import searcher_from_config
43 from rhodecode.lib.index import searcher_from_config
44
44
@@ -233,13 +233,12 b' class AdminSettingsView(BaseAppView):'
233 @LoginRequired()
233 @LoginRequired()
234 @HasPermissionAllDecorator('hg.admin')
234 @HasPermissionAllDecorator('hg.admin')
235 @CSRFRequired()
235 @CSRFRequired()
236 def settings_mapping_update(self):
236 def settings_mapping_create(self):
237 _ = self.request.translate
237 _ = self.request.translate
238 c = self.load_default_context()
238 c = self.load_default_context()
239 c.active = 'mapping'
239 c.active = 'mapping'
240 rm_obsolete = self.request.POST.get('destroy', False)
241 invalidate_cache = self.request.POST.get('invalidate', False)
240 invalidate_cache = self.request.POST.get('invalidate', False)
242 log.debug('rescanning repo location with destroy obsolete=%s', rm_obsolete)
241 log.debug('rescanning repo location')
243
242
244 if invalidate_cache:
243 if invalidate_cache:
245 log.debug('invalidating all repositories cache')
244 log.debug('invalidating all repositories cache')
@@ -247,16 +246,34 b' class AdminSettingsView(BaseAppView):'
247 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
246 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
248
247
249 filesystem_repos = ScmModel().repo_scan()
248 filesystem_repos = ScmModel().repo_scan()
250 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, force_hooks_rebuild=True)
249 added, errors = repo2db_mapper(filesystem_repos, force_hooks_rebuild=True)
251 PermissionModel().trigger_permission_flush()
250 PermissionModel().trigger_permission_flush()
252
251
253 def _repr(rm_repo):
252 def _repr(rm_repo):
254 return ', '.join(map(safe_str, rm_repo)) or '-'
253 return ', '.join(map(safe_str, rm_repo)) or '-'
255
254
256 h.flash(_('Repositories successfully '
255 if errors:
257 'rescanned added: %s ; removed: %s') %
256 h.flash(_('Errors during scan: {}').format(_repr(errors), ), category='error')
258 (_repr(added), _repr(removed)),
257
259 category='success')
258 h.flash(_('Repositories successfully scanned: Added: {}').format(_repr(added)), category='success')
259 raise HTTPFound(h.route_path('admin_settings_mapping'))
260
261 @LoginRequired()
262 @HasPermissionAllDecorator('hg.admin')
263 @CSRFRequired()
264 def settings_mapping_cleanup(self):
265 _ = self.request.translate
266 c = self.load_default_context()
267 c.active = 'mapping'
268 log.debug('cleaning up repositories and repo groups removed from the filesystem')
269
270 removed, errors = repo2db_cleanup()
271 PermissionModel().trigger_permission_flush()
272
273 def _repr(rm_repo):
274 return ', '.join(map(safe_str, rm_repo)) or '-'
275
276 h.flash(_('Repository cleanup finished: Errors: {}, Removed: {}').format(_repr(errors), _repr(removed)), category='success')
260 raise HTTPFound(h.route_path('admin_settings_mapping'))
277 raise HTTPFound(h.route_path('admin_settings_mapping'))
261
278
262 @LoginRequired()
279 @LoginRequired()
@@ -582,23 +582,19 b' def map_groups(path):'
582 return group
582 return group
583
583
584
584
585 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
585 def repo2db_mapper(initial_repo_list, force_hooks_rebuild=False):
586 """
586 """
587 maps all repos given in initial_repo_list, non existing repositories
587 maps all repos given in initial_repo_list, non-existing repositories
588 are created, if remove_obsolete is True it also checks for db entries
588 are created
589 that are not in initial_repo_list and removes them.
590
591 :param initial_repo_list: list of repositories found by scanning methods
592 :param remove_obsolete: check for obsolete entries in database
593 """
589 """
594 from rhodecode.model.repo import RepoModel
590 from rhodecode.model.repo import RepoModel
595 from rhodecode.model.repo_group import RepoGroupModel
596 from rhodecode.model.settings import SettingsModel
591 from rhodecode.model.settings import SettingsModel
597
592
598 sa = meta.Session()
593 sa = meta.Session()
599 repo_model = RepoModel()
594 repo_model = RepoModel()
600 user = User.get_first_super_admin()
595 user = User.get_first_super_admin()
601 added = []
596 added = []
597 errors = []
602
598
603 # creation defaults
599 # creation defaults
604 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
600 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
@@ -616,9 +612,7 b' def repo2db_mapper(initial_repo_list, re'
616 if not db_repo:
612 if not db_repo:
617 log.info('repository `%s` not found in the database, creating now', name)
613 log.info('repository `%s` not found in the database, creating now', name)
618 added.append(name)
614 added.append(name)
619 desc = (repo.description
615 desc = repo.description if repo.description != 'unknown' else f'{name} repository'
620 if repo.description != 'unknown'
621 else '%s repository' % name)
622
616
623 db_repo = repo_model._create_repo(
617 db_repo = repo_model._create_repo(
624 repo_name=name,
618 repo_name=name,
@@ -633,76 +627,115 b' def repo2db_mapper(initial_repo_list, re'
633 state=Repository.STATE_CREATED
627 state=Repository.STATE_CREATED
634 )
628 )
635 sa.commit()
629 sa.commit()
630
631 try:
632 config = db_repo._config
633 config.set('extensions', 'largefiles', '')
634 scm_repo = db_repo.scm_instance(config=config)
635 except Exception:
636 log.error(traceback.format_exc())
637 errors.append(f'getting vcs instance for {name} failed')
638 continue
639
640 try:
641 db_repo.update_commit_cache(recursive=False)
642 except Exception:
643 log.error(traceback.format_exc())
644 errors.append(f'update_commit_cache for {name} failed')
645 continue
646
647 try:
648 scm_repo.install_hooks(force=force_hooks_rebuild)
649 except Exception:
650 log.error(traceback.format_exc())
651 errors.append(f'install_hooks for {name} failed')
652 continue
653
654 try:
636 # we added that repo just now, and make sure we updated server info
655 # we added that repo just now, and make sure we updated server info
637 if db_repo.repo_type == 'git':
656 if db_repo.repo_type == 'git':
638 git_repo = db_repo.scm_instance()
639 # update repository server-info
657 # update repository server-info
640 log.debug('Running update server info')
658 log.debug('Running update server info')
641 git_repo._update_server_info(force=True)
659 scm_repo._update_server_info(force=True)
642
660 except Exception:
643 db_repo.update_commit_cache(recursive=False)
661 log.error(traceback.format_exc())
662 errors.append(f'update_server_info for {name} failed')
663 continue
644
664
645 config = db_repo._config
665 return added, errors
646 config.set('extensions', 'largefiles', '')
647 repo = db_repo.scm_instance(config=config)
648 repo.install_hooks(force=force_hooks_rebuild)
649
666
667 def repo2db_cleanup(skip_repos=None, skip_groups=None):
668 from rhodecode.model.repo import RepoModel
669 from rhodecode.model.repo_group import RepoGroupModel
670
671 sa = meta.Session()
650 removed = []
672 removed = []
651 if remove_obsolete:
673 errors = []
652 # remove from database those repositories that are not in the filesystem
674
653 for repo in sa.query(Repository).all():
675
654 if repo.repo_name not in list(initial_repo_list.keys()):
676 all_repos = Repository.execute(
655 log.debug("Removing non-existing repository found in db `%s`",
677 Repository.select(Repository)\
656 repo.repo_name)
678 .order_by(Repository.repo_name)
657 try:
679 ).scalars()
658 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
659 sa.commit()
660 removed.append(repo.repo_name)
661 except Exception:
662 # don't hold further removals on error
663 log.error(traceback.format_exc())
664 sa.rollback()
665
680
666 def splitter(full_repo_name):
681 # remove from database those repositories that are not in the filesystem
667 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
682 for db_repo in all_repos:
668 gr_name = None
683 db_repo_name = db_repo.repo_name
669 if len(_parts) == 2:
684 if skip_repos and db_repo_name in skip_repos:
670 gr_name = _parts[0]
685 log.debug('Skipping repo `%s`', db_repo_name)
671 return gr_name
686 continue
687 try:
688 instance = db_repo.scm_instance()
689 except Exception:
690 instance = None
672
691
673 initial_repo_group_list = [splitter(x) for x in
692 if not instance:
674 list(initial_repo_list.keys()) if splitter(x)]
693 log.debug("Removing non-existing repository found in db `%s`", db_repo_name)
694 try:
695 RepoModel(sa).delete(db_repo, forks='detach', fs_remove=False, call_events=False)
696 sa.commit()
697 removed.append(db_repo_name)
698 except Exception:
699 # don't hold further removals on error
700 log.error(traceback.format_exc())
701 sa.rollback()
702 errors.append(db_repo_name)
675
703
676 # remove from database those repository groups that are not in the
704 # remove from database those repository groups that are not in the
677 # filesystem due to parent child relationships we need to delete them
705 # filesystem due to parent child relationships we need to delete them
678 # in a specific order of most nested first
706 # in a specific order of most nested first
679 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
707 all_groups = RepoGroup.execute(
680 def nested_sort(gr):
708 RepoGroup.select(RepoGroup.group_name)\
681 return len(gr.split('/'))
709 .order_by(RepoGroup.group_name)
682 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
710 ).scalars().all()
683 if group_name not in initial_repo_group_list:
711
684 repo_group = RepoGroup.get_by_group_name(group_name)
712 def nested_sort(gr):
685 if (repo_group.children.all() or
713 return len(gr.split('/'))
686 not RepoGroupModel().check_exist_filesystem(
687 group_name=group_name, exc_on_failure=False)):
688 continue
689
714
690 log.info(
715 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
691 'Removing non-existing repository group found in db `%s`',
716 if skip_groups and group_name in skip_groups:
692 group_name)
717 log.debug('Skipping repo group `%s`', group_name)
693 try:
718 continue
694 RepoGroupModel(sa).delete(group_name, fs_remove=False)
719
695 sa.commit()
720 repo_group = RepoGroup.get_by_group_name(group_name)
696 removed.append(group_name)
721
697 except Exception:
722 if repo_group.children.all() or not RepoGroupModel().check_exist_filesystem(group_name=group_name, exc_on_failure=False):
698 # don't hold further removals on error
723 continue
699 log.exception(
724
700 'Unable to remove repository group `%s`',
725 log.info('Removing non-existing repository group found in db `%s`', group_name)
701 group_name)
702 sa.rollback()
703 raise
704
726
705 return added, removed
727 try:
728 RepoGroupModel(sa).delete(group_name, fs_remove=False, call_events=False)
729 sa.commit()
730 removed.append(group_name)
731 except Exception:
732 # don't hold further removals on error
733 log.exception('Unable to remove repository group `%s`',group_name)
734 sa.rollback()
735 errors.append(group_name)
736
737 return removed, errors
738
706
739
707 def deep_reload_package(package_name):
740 def deep_reload_package(package_name):
708 """
741 """
@@ -829,7 +862,7 b' def create_test_repositories(test_path, '
829 raise ImportError('Failed to import rc_testdata, '
862 raise ImportError('Failed to import rc_testdata, '
830 'please make sure this package is installed from requirements_test.txt')
863 'please make sure this package is installed from requirements_test.txt')
831
864
832 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
865 from rhodecode.bootstrap import HG_REPO, GIT_REPO, SVN_REPO
833
866
834 log.debug('making test vcs repositories at %s', test_path)
867 log.debug('making test vcs repositories at %s', test_path)
835
868
@@ -58,9 +58,10 b' def get_vcs_instance(repo_path, *args, *'
58 raise VCSError(f"Given path {repo_path} is not a directory")
58 raise VCSError(f"Given path {repo_path} is not a directory")
59 except VCSError:
59 except VCSError:
60 log.exception(
60 log.exception(
61 'Perhaps this repository is in db and not in '
61 'Perhaps this repository is in db and not in filesystem. '
62 'filesystem run rescan repositories with '
62 'Run cleanup filesystem option from admin settings under Remap and rescan'
63 '"destroy old data" option from admin panel')
63 )
64
64 return None
65 return None
65
66
66 return backend(repo_path=repo_path, *args, **kwargs)
67 return backend(repo_path=repo_path, *args, **kwargs)
@@ -745,7 +745,7 b' class RepoModel(BaseModel):'
745 log.error(traceback.format_exc())
745 log.error(traceback.format_exc())
746 raise
746 raise
747
747
748 def delete(self, repo, forks=None, pull_requests=None, artifacts=None, fs_remove=True, cur_user=None):
748 def delete(self, repo, forks=None, pull_requests=None, artifacts=None, fs_remove=True, cur_user=None, call_events=True):
749 """
749 """
750 Delete given repository, forks parameter defines what do do with
750 Delete given repository, forks parameter defines what do do with
751 attached forks. Throws AttachedForksError if deleted repo has attached
751 attached forks. Throws AttachedForksError if deleted repo has attached
@@ -760,47 +760,54 b' class RepoModel(BaseModel):'
760 if not cur_user:
760 if not cur_user:
761 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
761 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
762 repo = self._get_repo(repo)
762 repo = self._get_repo(repo)
763 if repo:
763 if not repo:
764 if forks == 'detach':
764 return False
765 for r in repo.forks:
766 r.fork = None
767 self.sa.add(r)
768 elif forks == 'delete':
769 for r in repo.forks:
770 self.delete(r, forks='delete')
771 elif [f for f in repo.forks]:
772 raise AttachedForksError()
773
765
774 # check for pull requests
766 if forks == 'detach':
775 pr_sources = repo.pull_requests_source
767 for r in repo.forks:
776 pr_targets = repo.pull_requests_target
768 r.fork = None
777 if pull_requests != 'delete' and (pr_sources or pr_targets):
769 self.sa.add(r)
778 raise AttachedPullRequestsError()
770 elif forks == 'delete':
771 for r in repo.forks:
772 self.delete(r, forks='delete')
773 elif [f for f in repo.forks]:
774 raise AttachedForksError()
775
776 # check for pull requests
777 pr_sources = repo.pull_requests_source
778 pr_targets = repo.pull_requests_target
779 if pull_requests != 'delete' and (pr_sources or pr_targets):
780 raise AttachedPullRequestsError()
779
781
780 artifacts_objs = repo.artifacts
782 artifacts_objs = repo.artifacts
781 if artifacts == 'delete':
783 if artifacts == 'delete':
782 for a in artifacts_objs:
784 for a in artifacts_objs:
783 self.sa.delete(a)
785 self.sa.delete(a)
784 elif [a for a in artifacts_objs]:
786 elif [a for a in artifacts_objs]:
785 raise AttachedArtifactsError()
787 raise AttachedArtifactsError()
786
788
787 old_repo_dict = repo.get_dict()
789 old_repo_dict = repo.get_dict()
790 if call_events:
788 events.trigger(events.RepoPreDeleteEvent(repo))
791 events.trigger(events.RepoPreDeleteEvent(repo))
789 try:
792
790 self.sa.delete(repo)
793 try:
791 if fs_remove:
794 self.sa.delete(repo)
792 self._delete_filesystem_repo(repo)
795 if fs_remove:
793 else:
796 self._delete_filesystem_repo(repo)
794 log.debug('skipping removal from filesystem')
797 else:
795 old_repo_dict.update({
798 log.debug('skipping removal from filesystem')
796 'deleted_by': cur_user,
799 old_repo_dict.update({
797 'deleted_on': time.time(),
800 'deleted_by': cur_user,
798 })
801 'deleted_on': time.time(),
802 })
803 if call_events:
799 hooks_base.delete_repository(**old_repo_dict)
804 hooks_base.delete_repository(**old_repo_dict)
800 events.trigger(events.RepoDeleteEvent(repo))
805 events.trigger(events.RepoDeleteEvent(repo))
801 except Exception:
806 except Exception:
802 log.error(traceback.format_exc())
807 log.error(traceback.format_exc())
803 raise
808 raise
809
810 return True
804
811
805 def grant_user_permission(self, repo, user, perm):
812 def grant_user_permission(self, repo, user, perm):
806 """
813 """
@@ -156,7 +156,7 b' class RepoGroupModel(BaseModel):'
156
156
157 def check_exist_filesystem(self, group_name, exc_on_failure=True):
157 def check_exist_filesystem(self, group_name, exc_on_failure=True):
158 create_path = os.path.join(self.repos_path, group_name)
158 create_path = os.path.join(self.repos_path, group_name)
159 log.debug('creating new group in %s', create_path)
159 log.debug('checking FS presence for repo group in %s', create_path)
160
160
161 if os.path.isdir(create_path):
161 if os.path.isdir(create_path):
162 if exc_on_failure:
162 if exc_on_failure:
@@ -573,10 +573,11 b' class RepoGroupModel(BaseModel):'
573 log.error(traceback.format_exc())
573 log.error(traceback.format_exc())
574 raise
574 raise
575
575
576 def delete(self, repo_group, force_delete=False, fs_remove=True):
576 def delete(self, repo_group, force_delete=False, fs_remove=True, call_events=True):
577 repo_group = self._get_repo_group(repo_group)
577 repo_group = self._get_repo_group(repo_group)
578 if not repo_group:
578 if not repo_group:
579 return False
579 return False
580 repo_group_name = repo_group.group_name
580 try:
581 try:
581 self.sa.delete(repo_group)
582 self.sa.delete(repo_group)
582 if fs_remove:
583 if fs_remove:
@@ -585,13 +586,15 b' class RepoGroupModel(BaseModel):'
585 log.debug('skipping removal from filesystem')
586 log.debug('skipping removal from filesystem')
586
587
587 # Trigger delete event.
588 # Trigger delete event.
588 events.trigger(events.RepoGroupDeleteEvent(repo_group))
589 if call_events:
589 return True
590 events.trigger(events.RepoGroupDeleteEvent(repo_group))
590
591
591 except Exception:
592 except Exception:
592 log.error('Error removing repo_group %s', repo_group)
593 log.error('Error removing repo_group %s', repo_group_name)
593 raise
594 raise
594
595
596 return True
597
595 def grant_user_permission(self, repo_group, user, perm):
598 def grant_user_permission(self, repo_group, user, perm):
596 """
599 """
597 Grant permission for user on given repository group, or update
600 Grant permission for user on given repository group, or update
@@ -121,7 +121,7 b' def scan_repositories_if_enabled(event):'
121 from rhodecode.lib.utils import repo2db_mapper
121 from rhodecode.lib.utils import repo2db_mapper
122 scm = ScmModel()
122 scm = ScmModel()
123 repositories = scm.repo_scan(scm.repos_path)
123 repositories = scm.repo_scan(scm.repos_path)
124 repo2db_mapper(repositories, remove_obsolete=False)
124 repo2db_mapper(repositories)
125
125
126
126
127 def write_metadata_if_needed(event):
127 def write_metadata_if_needed(event):
@@ -1,33 +1,45 b''
1 ${h.secure_form(h.route_path('admin_settings_mapping_update'), request=request)}
1
2
2
3 <div class="panel panel-default">
3 <div class="panel panel-default">
4 <div class="panel-heading">
4 <div class="panel-heading">
5 <h3 class="panel-title">${_('Import New Groups or Repositories')}</h3>
5 <h3 class="panel-title">${_('Import new repository groups and repositories')}</h3>
6 </div>
6 </div>
7 <div class="panel-body">
7 <div class="panel-body">
8
8 ${h.secure_form(h.route_path('admin_settings_mapping_create'), request=request)}
9 <p>
9 <p>
10 ${_('This function will scann all data under the current storage path location at')} <code>${c.storage_path}</code>
10 ${_('This function will scan all data under the current storage path location at')} <code>${c.storage_path}</code><br/>
11 ${_('Each folder will be imported as a new repository group, and each repository found will also be imported to the root level or to the corresponding repository group')}
11 </p>
12 </p>
12
13
13 <div class="checkbox">
14 <div class="checkbox">
14 ${h.checkbox('destroy',True)}
15 <label for="destroy">${_('Destroy old data')}</label>
16 </div>
17 <span class="help-block">${_('In case a repository or a group was deleted from the filesystem and it still exists in the database, check this option to remove obsolete data from the database.')}</span>
18
19 <div class="checkbox">
20 ${h.checkbox('invalidate',True)}
15 ${h.checkbox('invalidate',True)}
21 <label for="invalidate"> ${_('Invalidate cache for all repositories')}</label>
16 <label for="invalidate"> ${_('Invalidate cache for all repositories')}</label>
22 </div>
17 </div>
23 <span class="help-block">${_('Each cache data for repositories will be cleaned with this option selected. Use this to reload data and clear cache keys.')}</span>
18 <span class="help-block">${_('Each cache data for repositories will be cleaned with this option selected. Use this to reload data and clear cache keys.')}</span>
24
19
25 <div class="buttons">
20 <div class="buttons">
26 ${h.submit('rescan',_('Rescan Filesystem'),class_="btn")}
21 ${h.submit('rescan',_('Scan filesystem'),class_="btn")}
27 </div>
22 </div>
28
23 ${h.end_form()}
29 </div>
24 </div>
30 </div>
25 </div>
31
26
32
27
33 ${h.end_form()}
28 <div class="panel panel-default">
29 <div class="panel-heading">
30 <h3 class="panel-title">${_('Cleanup removed Repository Groups or Repositories')}</h3>
31 </div>
32 <div class="panel-body">
33 ${h.secure_form(h.route_path('admin_settings_mapping_cleanup'), request=request)}
34 <p>
35 ${_('This function will scan all data under the current storage path location at')} <code>${c.storage_path}</code>
36 ${_('Then it will remove all repository groups and repositories that are no longer present in the filesystem.')}
37 </p>
38
39 <div class="buttons">
40 ${h.submit('rescan',_('Cleanup filesystem'),class_="btn btn-danger")}
41 </div>
42 ${h.end_form()}
43 </div>
44 </div>
45
@@ -1558,6 +1558,7 b' def stub_integration_settings():'
1558
1558
1559 @pytest.fixture()
1559 @pytest.fixture()
1560 def repo_integration_stub(request, repo_stub, StubIntegrationType, stub_integration_settings):
1560 def repo_integration_stub(request, repo_stub, StubIntegrationType, stub_integration_settings):
1561 repo_id = repo_stub.repo_id
1561 integration = IntegrationModel().create(
1562 integration = IntegrationModel().create(
1562 StubIntegrationType,
1563 StubIntegrationType,
1563 settings=stub_integration_settings,
1564 settings=stub_integration_settings,
@@ -1571,6 +1572,7 b' def repo_integration_stub(request, repo_'
1571 @request.addfinalizer
1572 @request.addfinalizer
1572 def cleanup():
1573 def cleanup():
1573 IntegrationModel().delete(integration)
1574 IntegrationModel().delete(integration)
1575 RepoModel().delete(repo_id)
1574
1576
1575 return integration
1577 return integration
1576
1578
@@ -20,23 +20,21 b' import time'
20 import pytest
20 import pytest
21
21
22 from rhodecode import events
22 from rhodecode import events
23 from rhodecode.model.repo import RepoModel
23 from rhodecode.tests.fixtures.rc_fixture import Fixture
24 from rhodecode.tests.fixtures.rc_fixture import Fixture
24 from rhodecode.model.db import Session, Integration
25 from rhodecode.model.db import Session, Integration
25 from rhodecode.model.integration import IntegrationModel
26 from rhodecode.model.integration import IntegrationModel
26
27
27
28
28 class TestDeleteScopesDeletesIntegrations(object):
29 class TestDeleteScopesDeletesIntegrations(object):
29 def test_delete_repo_with_integration_deletes_integration(
30 def test_delete_repo_with_integration_deletes_integration(self, repo_integration_stub):
30 self, repo_integration_stub):
31 RepoModel().delete(repo_integration_stub.repo)
31
32 Session().delete(repo_integration_stub.repo)
33 Session().commit()
32 Session().commit()
34 Session().expire_all()
33 Session().expire_all()
35 integration = Integration.get(repo_integration_stub.integration_id)
34 integration = Integration.get(repo_integration_stub.integration_id)
36 assert integration is None
35 assert integration is None
37
36
38 def test_delete_repo_group_with_integration_deletes_integration(
37 def test_delete_repo_group_with_integration_deletes_integration(self, repogroup_integration_stub):
39 self, repogroup_integration_stub):
40
38
41 Session().delete(repogroup_integration_stub.repo_group)
39 Session().delete(repogroup_integration_stub.repo_group)
42 Session().commit()
40 Session().commit()
@@ -52,7 +50,7 b' def counter():'
52 global count
50 global count
53 val = count
51 val = count
54 count += 1
52 count += 1
55 return '{}_{}'.format(val, time.time())
53 return f'{val}_{time.time()}'
56
54
57
55
58 @pytest.fixture()
56 @pytest.fixture()
@@ -18,20 +18,25 b''
18
18
19 import multiprocessing
19 import multiprocessing
20 import os
20 import os
21 import shutil
21
22
22 import mock
23 import mock
23 import py
24 import py
24 import pytest
25 import pytest
25
26
27 import rhodecode
26 from rhodecode.lib import caching_query
28 from rhodecode.lib import caching_query
27 from rhodecode.lib import utils
29 from rhodecode.lib import utils
28 from rhodecode.lib.str_utils import safe_bytes
30 from rhodecode.lib.str_utils import safe_bytes
29 from rhodecode.model import settings
31 from rhodecode.model import settings
30 from rhodecode.model import db
32 from rhodecode.model import db
31 from rhodecode.model import meta
33 from rhodecode.model import meta
34 from rhodecode.model.meta import Session
32 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.repo import RepoModel
33 from rhodecode.model.repo_group import RepoGroupModel
36 from rhodecode.model.repo_group import RepoGroupModel
37 from rhodecode.model.scm import ScmModel
34 from rhodecode.model.settings import UiSetting, SettingsModel
38 from rhodecode.model.settings import UiSetting, SettingsModel
39 from rhodecode.tests.fixtures.fixture_pyramid import rhodecode_factory
35 from rhodecode.tests.fixtures.rc_fixture import Fixture
40 from rhodecode.tests.fixtures.rc_fixture import Fixture
36 from rhodecode_tools.lib.hash_utils import md5_safe
41 from rhodecode_tools.lib.hash_utils import md5_safe
37 from rhodecode.lib.ext_json import json
42 from rhodecode.lib.ext_json import json
@@ -230,7 +235,7 b' def platform_encodes_filenames():'
230 return path_with_latin1 != read_path
235 return path_with_latin1 != read_path
231
236
232
237
233 def test_repo2db_mapper_groups(repo_groups):
238 def test_repo2db_cleaner_removes_zombie_groups(repo_groups):
234 session = meta.Session()
239 session = meta.Session()
235 zombie_group, parent_group, child_group = repo_groups
240 zombie_group, parent_group, child_group = repo_groups
236 zombie_path = os.path.join(
241 zombie_path = os.path.join(
@@ -238,10 +243,9 b' def test_repo2db_mapper_groups(repo_grou'
238 os.rmdir(zombie_path)
243 os.rmdir(zombie_path)
239
244
240 # Avoid removing test repos when calling repo2db_mapper
245 # Avoid removing test repos when calling repo2db_mapper
241 repo_list = {
246 repo_list = [repo.repo_name for repo in session.query(db.Repository).all()]
242 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
247
243 }
248 utils.repo2db_cleanup(skip_repos=repo_list)
244 utils.repo2db_mapper(repo_list, remove_obsolete=True)
245
249
246 groups_in_db = session.query(db.RepoGroup).all()
250 groups_in_db = session.query(db.RepoGroup).all()
247 assert child_group in groups_in_db
251 assert child_group in groups_in_db
@@ -249,20 +253,68 b' def test_repo2db_mapper_groups(repo_grou'
249 assert zombie_path not in groups_in_db
253 assert zombie_path not in groups_in_db
250
254
251
255
252 def test_repo2db_mapper_enables_largefiles(backend):
256
257 @pytest.mark.backends("hg", "git", "svn")
258 def test_repo2db_cleaner_removes_zombie_repos(backend):
253 repo = backend.create_repo()
259 repo = backend.create_repo()
254 repo_list = {repo.repo_name: 'test'}
260 zombie_path = repo.repo_full_path
255 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
261 shutil.rmtree(zombie_path)
256 utils.repo2db_mapper(repo_list, remove_obsolete=False)
262
257 _, kwargs = scm_mock.call_args
263 removed, errors = utils.repo2db_cleanup()
258 assert kwargs['config'].get('extensions', 'largefiles') == ''
264 assert len(removed) == 1
265 assert not errors
259
266
260
267
261 @pytest.mark.backends("git", "svn")
268 def test_repo2db_mapper_adds_new_repos(request, backend):
269 repo = backend.create_repo()
270 cleanup_repos = []
271 cleanup_groups = []
272 for num in range(5):
273 copy_repo_name = f'{repo.repo_name}-{num}'
274 copy_repo_path = f'{repo.repo_full_path}-{num}'
275
276 shutil.copytree(repo.repo_full_path, copy_repo_path)
277 cleanup_repos.append(copy_repo_name)
278
279 for gr_num in range(5):
280 gr_name = f'my_gr_{gr_num}'
281 dest_gr = os.path.join(os.path.dirname(repo.repo_full_path), gr_name)
282 os.makedirs(dest_gr, exist_ok=True)
283
284 copy_repo_name = f'{gr_name}/{repo.repo_name}-{gr_num}'
285 copy_repo_path = f'{dest_gr}/{repo.repo_name}-{gr_num}'
286
287 shutil.copytree(repo.repo_full_path, copy_repo_path)
288 cleanup_repos.append(copy_repo_name)
289 cleanup_groups.append(gr_name)
290
291 repo_list = ScmModel().repo_scan()
292
293 added, errors = utils.repo2db_mapper(repo_list)
294 Session().commit()
295 assert not errors
296
297 assert len(added) == 10
298
299 @request.addfinalizer
300 def cleanup():
301 for _repo in cleanup_repos:
302 del_result = RepoModel().delete(_repo, call_events=False)
303 Session().commit()
304 assert del_result is True
305
306 for _repo_group in cleanup_groups:
307 del_result = RepoGroupModel().delete(_repo_group, force_delete=True, call_events=False)
308 Session().commit()
309 assert del_result is True
310
311
262 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
312 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
263 repo = backend.create_repo()
313 repo = backend.create_repo()
264 repo_list = {repo.repo_name: 'test'}
314 repo_list = {repo.repo_name: 'test'}
265 utils.repo2db_mapper(repo_list, remove_obsolete=False)
315 added, errors = utils.repo2db_mapper(repo_list)
316 assert not errors
317 assert repo.scm_instance().get_hooks_info() == {'pre_version': rhodecode.__version__, 'post_version': rhodecode.__version__}
266
318
267
319
268 @pytest.mark.backends("git", "svn")
320 @pytest.mark.backends("git", "svn")
@@ -271,7 +323,9 b' def test_repo2db_mapper_installs_hooks_f'
271 RepoModel().delete(repo, fs_remove=False)
323 RepoModel().delete(repo, fs_remove=False)
272 meta.Session().commit()
324 meta.Session().commit()
273 repo_list = {repo.repo_name: repo.scm_instance()}
325 repo_list = {repo.repo_name: repo.scm_instance()}
274 utils.repo2db_mapper(repo_list, remove_obsolete=False)
326 added, errors = utils.repo2db_mapper(repo_list)
327 assert not errors
328 assert len(added) == 1
275
329
276
330
277 class TestPasswordChanged(object):
331 class TestPasswordChanged(object):
@@ -453,7 +507,7 b' class TestGetEnabledHooks(object):'
453
507
454 def test_obfuscate_url_pw():
508 def test_obfuscate_url_pw():
455 from rhodecode.lib.utils2 import obfuscate_url_pw
509 from rhodecode.lib.utils2 import obfuscate_url_pw
456 engine = u'/home/repos/malmö'
510 engine = '/home/repos/malmö'
457 assert obfuscate_url_pw(engine)
511 assert obfuscate_url_pw(engine)
458
512
459
513
@@ -182,7 +182,8 b' def get_url_defs():'
182 "admin_settings_vcs_svn_pattern_delete": ADMIN_PREFIX
182 "admin_settings_vcs_svn_pattern_delete": ADMIN_PREFIX
183 + "/settings/vcs/svn_pattern_delete",
183 + "/settings/vcs/svn_pattern_delete",
184 "admin_settings_mapping": ADMIN_PREFIX + "/settings/mapping",
184 "admin_settings_mapping": ADMIN_PREFIX + "/settings/mapping",
185 "admin_settings_mapping_update": ADMIN_PREFIX + "/settings/mapping/update",
185 "admin_settings_mapping_create": ADMIN_PREFIX + "/settings/mapping/create",
186 "admin_settings_mapping_cleanup": ADMIN_PREFIX + "/settings/mapping/cleanup",
186 "admin_settings_visual": ADMIN_PREFIX + "/settings/visual",
187 "admin_settings_visual": ADMIN_PREFIX + "/settings/visual",
187 "admin_settings_visual_update": ADMIN_PREFIX + "/settings/visual/update",
188 "admin_settings_visual_update": ADMIN_PREFIX + "/settings/visual/update",
188 "admin_settings_issuetracker": ADMIN_PREFIX + "/settings/issue-tracker",
189 "admin_settings_issuetracker": ADMIN_PREFIX + "/settings/issue-tracker",