feat(remap and rescan): added a more reliable remap and removal option, and split the logic into separate add and cleanup actions
super-admin
r5619:c9e499e7 default
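In short: the old `remove_obsolete` flag is gone, and remap/rescan is now two explicit super-admin API calls — `rescan_repos` (add only) and `cleanup_repos` (remove only) — each reporting its own `errors` list. Below is a minimal sketch of driving both over the JSON-RPC API; the instance URL and token are placeholders, and the envelope is assumed to follow the standard RhodeCode API format.

```python
import requests

API_URL = "https://rhodecode.example.com/_admin/api"  # placeholder instance URL
AUTH_TOKEN = "<super-admin auth token>"                # placeholder token

def api_call(method, args=None):
    # Standard RhodeCode JSON-RPC envelope (id / auth_token / method / args).
    payload = {"id": 1, "auth_token": AUTH_TOKEN, "method": method, "args": args or {}}
    return requests.post(API_URL, json=payload).json()

# Phase 1: map new filesystem repositories into the database.
scan = api_call("rescan_repos")
print(scan["result"])      # e.g. {'added': [...], 'errors': [...]}

# Phase 2: drop database entries whose storage no longer exists.
cleanup = api_call("cleanup_repos")
print(cleanup["result"])   # e.g. {'removed': [...], 'errors': [...]}
```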
@@ -0,0 +1,44 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import mock
20 import pytest
21
22 from rhodecode.model.scm import ScmModel
23 from rhodecode.api.tests.utils import (
24 build_data, api_call, assert_ok, assert_error, crash)
25
26
27 @pytest.mark.usefixtures("testuser_api", "app")
28 class TestCleanupRepos(object):
29 def test_api_cleanup_repos(self):
30 id_, params = build_data(self.apikey, 'cleanup_repos')
31 response = api_call(self.app, params)
32
33 expected = {'removed': [], 'errors': []}
34 assert_ok(id_, expected, given=response.body)
35
36 def test_api_cleanup_repos_error(self):
37
38 id_, params = build_data(self.apikey, 'cleanup_repos', )
39
40 with mock.patch('rhodecode.lib.utils.repo2db_cleanup', side_effect=crash):
41 response = api_call(self.app, params)
42
43 expected = 'Error occurred during repo storage cleanup action'
44 assert_error(id_, expected, given=response.body)
@@ -19,7 +19,6 b''
19 19 import mock
20 20 import pytest
21 21
22 from rhodecode.model.scm import ScmModel
23 22 from rhodecode.api.tests.utils import (
24 23 build_data, api_call, assert_ok, assert_error, crash)
25 24
@@ -30,13 +29,14 b' class TestRescanRepos(object):'
30 29 id_, params = build_data(self.apikey, 'rescan_repos')
31 30 response = api_call(self.app, params)
32 31
33 expected = {'added': [], 'removed': []}
32 expected = {'added': [], 'errors': []}
34 33 assert_ok(id_, expected, given=response.body)
35 34
36 @mock.patch.object(ScmModel, 'repo_scan', crash)
37 def test_api_rescann_error(self):
35 def test_api_rescan_repos_error(self):
38 36 id_, params = build_data(self.apikey, 'rescan_repos', )
39 response = api_call(self.app, params)
37
38 with mock.patch('rhodecode.lib.utils.repo2db_mapper', side_effect=crash):
39 response = api_call(self.app, params)
40 40
41 41 expected = 'Error occurred during rescan repositories action'
42 42 assert_error(id_, expected, given=response.body)
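The error tests no longer patch `ScmModel.repo_scan`; they patch the helper at its source module, which works together with the late `from rhodecode.lib.utils import ...` re-imports added inside the API methods below (see the `# re-import for testing patches` comments). A small sketch of that pattern, with a simplified stand-in for the endpoint and assuming the rhodecode package is importable in the test environment:

```python
import mock


def rescan_endpoint():
    # Late import: the name is resolved when the endpoint runs, so a patch
    # applied to rhodecode.lib.utils is the object that actually gets called.
    from rhodecode.lib.utils import repo2db_mapper
    return repo2db_mapper({}, force_hooks_rebuild=True)


def test_rescan_endpoint_error():
    with mock.patch('rhodecode.lib.utils.repo2db_mapper', side_effect=RuntimeError('crash')):
        try:
            rescan_endpoint()
        except RuntimeError:
            pass  # the real API method converts this into a JSONRPCError
```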
@@ -18,14 +18,13 b''
18 18
19 19 import logging
20 20 import itertools
21 import base64
22 21
23 22 from rhodecode.api import (
24 23 jsonrpc_method, JSONRPCError, JSONRPCForbidden, find_methods)
25 24
26 25 from rhodecode.api.utils import (
27 26 Optional, OAttr, has_superadmin_permission, get_user_or_error)
28 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
27 from rhodecode.lib.utils import get_rhodecode_repo_store_path
29 28 from rhodecode.lib import system_info
30 29 from rhodecode.lib import user_sessions
31 30 from rhodecode.lib import exc_tracking
@@ -33,9 +32,7 b' from rhodecode.lib.ext_json import json'
33 32 from rhodecode.lib.utils2 import safe_int
34 33 from rhodecode.model.db import UserIpMap
35 34 from rhodecode.model.scm import ScmModel
36 from rhodecode.apps.file_store import utils as store_utils
37 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
38 FileOverSizeException
35
39 36
40 37 log = logging.getLogger(__name__)
41 38
@@ -158,13 +155,10 b' def get_ip(request, apiuser, userid=Opti'
158 155
159 156
160 157 @jsonrpc_method()
161 def rescan_repos(request, apiuser, remove_obsolete=Optional(False)):
158 def rescan_repos(request, apiuser):
162 159 """
163 160 Triggers a rescan of the specified repositories.
164
165 * If the ``remove_obsolete`` option is set, it also deletes repositories
166 that are found in the database but not on the file system, so called
167 "clean zombies".
161 It returns a list of added repositories and any errors encountered during the scan.
168 162
169 163 This command can only be run using an |authtoken| with admin rights to
170 164 the specified repository.
@@ -173,9 +167,6 b' def rescan_repos(request, apiuser, remov'
173 167
174 168 :param apiuser: This is filled automatically from the |authtoken|.
175 169 :type apiuser: AuthUser
176 :param remove_obsolete: Deletes repositories from the database that
177 are not found on the filesystem.
178 :type remove_obsolete: Optional(``True`` | ``False``)
179 170
180 171 Example output:
181 172
@@ -184,7 +175,7 b' def rescan_repos(request, apiuser, remov'
184 175 id : <id_given_in_input>
185 176 result : {
186 177 'added': [<added repository name>,...]
187 'removed': [<removed repository name>,...]
178 'errors': [<error_list>,...]
188 179 }
189 180 error : null
190 181
@@ -199,28 +190,24 b' def rescan_repos(request, apiuser, remov'
199 190 }
200 191
201 192 """
193 from rhodecode.lib.utils import repo2db_mapper # re-import for testing patches
194
202 195 if not has_superadmin_permission(apiuser):
203 196 raise JSONRPCForbidden()
204 197
205 198 try:
206 rm_obsolete = Optional.extract(remove_obsolete)
207 added, removed = repo2db_mapper(ScmModel().repo_scan(),
208 remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
209 return {'added': added, 'removed': removed}
199 added, errors = repo2db_mapper(ScmModel().repo_scan(), force_hooks_rebuild=True)
200 return {'added': added, 'errors': errors}
210 201 except Exception:
211 log.exception('Failed to run repo rescann')
202 log.exception('Failed to run repo rescan')
212 203 raise JSONRPCError(
213 204 'Error occurred during rescan repositories action'
214 205 )
215 206
216 207 @jsonrpc_method()
217 def cleanup_repos(request, apiuser, remove_obsolete=Optional(False)):
208 def cleanup_repos(request, apiuser):
218 209 """
219 Triggers a rescan of the specified repositories.
220
221 * If the ``remove_obsolete`` option is set, it also deletes repositories
222 that are found in the database but not on the file system, so called
223 "clean zombies".
210 Triggers a cleanup of repositories and repository groups that exist in the database but are no longer present on the filesystem.
224 211
225 212 This command can only be run using an |authtoken| with admin rights to
226 213 the specified repository.
@@ -229,9 +216,6 b' def cleanup_repos(request, apiuser, remo'
229 216
230 217 :param apiuser: This is filled automatically from the |authtoken|.
231 218 :type apiuser: AuthUser
232 :param remove_obsolete: Deletes repositories from the database that
233 are not found on the filesystem.
234 :type remove_obsolete: Optional(``True`` | ``False``)
235 219
236 220 Example output:
237 221
@@ -239,8 +223,8 b' def cleanup_repos(request, apiuser, remo'
239 223
240 224 id : <id_given_in_input>
241 225 result : {
242 'added': [<added repository name>,...]
243 'removed': [<removed repository name>,...]
226 'removed': [<removed repository name or repository group name>,...]
227 'errors': [<error list of failures to remove>,...]
244 228 }
245 229 error : null
246 230
@@ -251,22 +235,22 b' def cleanup_repos(request, apiuser, remo'
251 235 id : <id_given_in_input>
252 236 result : null
253 237 error : {
254 'Error occurred during rescan repositories action'
238 'Error occurred during repo storage cleanup action'
255 239 }
256 240
257 241 """
242 from rhodecode.lib.utils import repo2db_cleanup # re-import for testing patches
243
258 244 if not has_superadmin_permission(apiuser):
259 245 raise JSONRPCForbidden()
260 246
261 247 try:
262 rm_obsolete = Optional.extract(remove_obsolete)
263 added, removed = repo2db_mapper(ScmModel().repo_scan(),
264 remove_obsolete=rm_obsolete, force_hooks_rebuild=True)
265 return {'added': added, 'removed': removed}
248 removed, errors = repo2db_cleanup()
249 return {'removed': removed, 'errors': errors}
266 250 except Exception:
267 log.exception('Failed to run repo rescann')
251 log.exception('Failed to run repo storage cleanup')
268 252 raise JSONRPCError(
269 'Error occurred during rescan repositories action'
253 'Error occurred during repo storage cleanup action'
270 254 )
271 255
272 256
@@ -361,12 +361,21 b' def admin_routes(config):'
361 361 renderer='rhodecode:templates/admin/settings/settings.mako')
362 362
363 363 config.add_route(
364 name='admin_settings_mapping_update',
365 pattern='/settings/mapping/update')
364 name='admin_settings_mapping_create',
365 pattern='/settings/mapping/create')
366 366 config.add_view(
367 367 AdminSettingsView,
368 attr='settings_mapping_update',
369 route_name='admin_settings_mapping_update', request_method='POST',
368 attr='settings_mapping_create',
369 route_name='admin_settings_mapping_create', request_method='POST',
370 renderer='rhodecode:templates/admin/settings/settings.mako')
371
372 config.add_route(
373 name='admin_settings_mapping_cleanup',
374 pattern='/settings/mapping/cleanup')
375 config.add_view(
376 AdminSettingsView,
377 attr='settings_mapping_cleanup',
378 route_name='admin_settings_mapping_cleanup', request_method='POST',
370 379 renderer='rhodecode:templates/admin/settings/settings.mako')
371 380
372 381 config.add_route(
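On the UI side, the single `admin_settings_mapping_update` route is replaced by two POST routes, one per action. A hedged sketch of exercising them from a functional test, assuming the usual `/_admin` prefix and the suite's `app` / `csrf_token` fixtures with an admin session already established:

```python
def test_mapping_create_then_cleanup(app, csrf_token):
    # "Scan filesystem" panel: imports new groups/repos, optionally invalidates caches.
    app.post('/_admin/settings/mapping/create',
             params={'invalidate': True, 'csrf_token': csrf_token})

    # "Cleanup filesystem" panel: removes DB entries with no backing storage.
    app.post('/_admin/settings/mapping/cleanup',
             params={'csrf_token': csrf_token})
```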
@@ -110,9 +110,11 b' class TestAdminRepos(object):'
110 110 repo_type=backend.alias,
111 111 repo_description=description,
112 112 csrf_token=csrf_token))
113
114 self.assert_repository_is_created_correctly(
115 repo_name, description, backend)
113 try:
114 self.assert_repository_is_created_correctly(repo_name, description, backend)
115 finally:
116 RepoModel().delete(numeric_repo)
117 Session().commit()
116 118
117 119 @pytest.mark.parametrize("suffix", ['', '_ąćę'], ids=['', 'non-ascii'])
118 120 def test_create_in_group(
@@ -38,7 +38,7 b' from rhodecode.lib.auth import ('
38 38 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
39 39 from rhodecode.lib.celerylib import tasks, run_task
40 40 from rhodecode.lib.str_utils import safe_str
41 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path
41 from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_repo_store_path, repo2db_cleanup
42 42 from rhodecode.lib.utils2 import str2bool, AttributeDict
43 43 from rhodecode.lib.index import searcher_from_config
44 44
@@ -233,13 +233,12 b' class AdminSettingsView(BaseAppView):'
233 233 @LoginRequired()
234 234 @HasPermissionAllDecorator('hg.admin')
235 235 @CSRFRequired()
236 def settings_mapping_update(self):
236 def settings_mapping_create(self):
237 237 _ = self.request.translate
238 238 c = self.load_default_context()
239 239 c.active = 'mapping'
240 rm_obsolete = self.request.POST.get('destroy', False)
241 240 invalidate_cache = self.request.POST.get('invalidate', False)
242 log.debug('rescanning repo location with destroy obsolete=%s', rm_obsolete)
241 log.debug('rescanning repo location')
243 242
244 243 if invalidate_cache:
245 244 log.debug('invalidating all repositories cache')
@@ -247,16 +246,34 b' class AdminSettingsView(BaseAppView):'
247 246 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
248 247
249 248 filesystem_repos = ScmModel().repo_scan()
250 added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, force_hooks_rebuild=True)
249 added, errors = repo2db_mapper(filesystem_repos, force_hooks_rebuild=True)
251 250 PermissionModel().trigger_permission_flush()
252 251
253 252 def _repr(rm_repo):
254 253 return ', '.join(map(safe_str, rm_repo)) or '-'
255 254
256 h.flash(_('Repositories successfully '
257 'rescanned added: %s ; removed: %s') %
258 (_repr(added), _repr(removed)),
259 category='success')
255 if errors:
256 h.flash(_('Errors during scan: {}').format(_repr(errors)), category='error')
257
258 h.flash(_('Repositories successfully scanned: Added: {}').format(_repr(added)), category='success')
259 raise HTTPFound(h.route_path('admin_settings_mapping'))
260
261 @LoginRequired()
262 @HasPermissionAllDecorator('hg.admin')
263 @CSRFRequired()
264 def settings_mapping_cleanup(self):
265 _ = self.request.translate
266 c = self.load_default_context()
267 c.active = 'mapping'
268 log.debug('cleaning up repositories and groups missing from the filesystem')
269
270 removed, errors = repo2db_cleanup()
271 PermissionModel().trigger_permission_flush()
272
273 def _repr(rm_repo):
274 return ', '.join(map(safe_str, rm_repo)) or '-'
275
276 h.flash(_('Repository cleanup finished: Errors: {}, Removed: {}').format(_repr(errors), _repr(removed)), category='success')
260 277 raise HTTPFound(h.route_path('admin_settings_mapping'))
261 278
262 279 @LoginRequired()
@@ -582,23 +582,19 b' def map_groups(path):'
582 582 return group
583 583
584 584
585 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
585 def repo2db_mapper(initial_repo_list, force_hooks_rebuild=False):
586 586 """
587 maps all repos given in initial_repo_list, non existing repositories
588 are created, if remove_obsolete is True it also checks for db entries
589 that are not in initial_repo_list and removes them.
590
591 :param initial_repo_list: list of repositories found by scanning methods
592 :param remove_obsolete: check for obsolete entries in database
587 maps all repos given in initial_repo_list, non-existing repositories
588 are created
593 589 """
594 590 from rhodecode.model.repo import RepoModel
595 from rhodecode.model.repo_group import RepoGroupModel
596 591 from rhodecode.model.settings import SettingsModel
597 592
598 593 sa = meta.Session()
599 594 repo_model = RepoModel()
600 595 user = User.get_first_super_admin()
601 596 added = []
597 errors = []
602 598
603 599 # creation defaults
604 600 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
@@ -616,9 +612,7 b' def repo2db_mapper(initial_repo_list, re'
616 612 if not db_repo:
617 613 log.info('repository `%s` not found in the database, creating now', name)
618 614 added.append(name)
619 desc = (repo.description
620 if repo.description != 'unknown'
621 else '%s repository' % name)
615 desc = repo.description if repo.description != 'unknown' else f'{name} repository'
622 616
623 617 db_repo = repo_model._create_repo(
624 618 repo_name=name,
@@ -633,76 +627,115 b' def repo2db_mapper(initial_repo_list, re'
633 627 state=Repository.STATE_CREATED
634 628 )
635 629 sa.commit()
630
631 try:
632 config = db_repo._config
633 config.set('extensions', 'largefiles', '')
634 scm_repo = db_repo.scm_instance(config=config)
635 except Exception:
636 log.error(traceback.format_exc())
637 errors.append(f'getting vcs instance for {name} failed')
638 continue
639
640 try:
641 db_repo.update_commit_cache(recursive=False)
642 except Exception:
643 log.error(traceback.format_exc())
644 errors.append(f'update_commit_cache for {name} failed')
645 continue
646
647 try:
648 scm_repo.install_hooks(force=force_hooks_rebuild)
649 except Exception:
650 log.error(traceback.format_exc())
651 errors.append(f'install_hooks for {name} failed')
652 continue
653
654 try:
636 655 # we added that repo just now, and make sure we updated server info
637 656 if db_repo.repo_type == 'git':
638 git_repo = db_repo.scm_instance()
639 657 # update repository server-info
640 658 log.debug('Running update server info')
641 git_repo._update_server_info(force=True)
642
643 db_repo.update_commit_cache(recursive=False)
659 scm_repo._update_server_info(force=True)
660 except Exception:
661 log.error(traceback.format_exc())
662 errors.append(f'update_server_info for {name} failed')
663 continue
644 664
645 config = db_repo._config
646 config.set('extensions', 'largefiles', '')
647 repo = db_repo.scm_instance(config=config)
648 repo.install_hooks(force=force_hooks_rebuild)
665 return added, errors
649 666
667 def repo2db_cleanup(skip_repos=None, skip_groups=None):
668 from rhodecode.model.repo import RepoModel
669 from rhodecode.model.repo_group import RepoGroupModel
670
671 sa = meta.Session()
650 672 removed = []
651 if remove_obsolete:
652 # remove from database those repositories that are not in the filesystem
653 for repo in sa.query(Repository).all():
654 if repo.repo_name not in list(initial_repo_list.keys()):
655 log.debug("Removing non-existing repository found in db `%s`",
656 repo.repo_name)
657 try:
658 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
659 sa.commit()
660 removed.append(repo.repo_name)
661 except Exception:
662 # don't hold further removals on error
663 log.error(traceback.format_exc())
664 sa.rollback()
673 errors = []
674
675
676 all_repos = Repository.execute(
677 Repository.select(Repository)\
678 .order_by(Repository.repo_name)
679 ).scalars()
665 680
666 def splitter(full_repo_name):
667 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
668 gr_name = None
669 if len(_parts) == 2:
670 gr_name = _parts[0]
671 return gr_name
681 # remove from database those repositories that are not in the filesystem
682 for db_repo in all_repos:
683 db_repo_name = db_repo.repo_name
684 if skip_repos and db_repo_name in skip_repos:
685 log.debug('Skipping repo `%s`', db_repo_name)
686 continue
687 try:
688 instance = db_repo.scm_instance()
689 except Exception:
690 instance = None
672 691
673 initial_repo_group_list = [splitter(x) for x in
674 list(initial_repo_list.keys()) if splitter(x)]
692 if not instance:
693 log.debug("Removing non-existing repository found in db `%s`", db_repo_name)
694 try:
695 RepoModel(sa).delete(db_repo, forks='detach', fs_remove=False, call_events=False)
696 sa.commit()
697 removed.append(db_repo_name)
698 except Exception:
699 # don't hold further removals on error
700 log.error(traceback.format_exc())
701 sa.rollback()
702 errors.append(db_repo_name)
675 703
676 # remove from database those repository groups that are not in the
677 # filesystem due to parent child relationships we need to delete them
678 # in a specific order of most nested first
679 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
680 def nested_sort(gr):
681 return len(gr.split('/'))
682 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
683 if group_name not in initial_repo_group_list:
684 repo_group = RepoGroup.get_by_group_name(group_name)
685 if (repo_group.children.all() or
686 not RepoGroupModel().check_exist_filesystem(
687 group_name=group_name, exc_on_failure=False)):
688 continue
704 # remove from database those repository groups that are not in the
705 # filesystem due to parent child relationships we need to delete them
706 # in a specific order of most nested first
707 all_groups = RepoGroup.execute(
708 RepoGroup.select(RepoGroup.group_name)\
709 .order_by(RepoGroup.group_name)
710 ).scalars().all()
711
712 def nested_sort(gr):
713 return len(gr.split('/'))
689 714
690 log.info(
691 'Removing non-existing repository group found in db `%s`',
692 group_name)
693 try:
694 RepoGroupModel(sa).delete(group_name, fs_remove=False)
695 sa.commit()
696 removed.append(group_name)
697 except Exception:
698 # don't hold further removals on error
699 log.exception(
700 'Unable to remove repository group `%s`',
701 group_name)
702 sa.rollback()
703 raise
715 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
716 if skip_groups and group_name in skip_groups:
717 log.debug('Skipping repo group `%s`', group_name)
718 continue
719
720 repo_group = RepoGroup.get_by_group_name(group_name)
721
722 if repo_group.children.all() or not RepoGroupModel().check_exist_filesystem(group_name=group_name, exc_on_failure=False):
723 continue
724
725 log.info('Removing non-existing repository group found in db `%s`', group_name)
704 726
705 return added, removed
727 try:
728 RepoGroupModel(sa).delete(group_name, fs_remove=False, call_events=False)
729 sa.commit()
730 removed.append(group_name)
731 except Exception:
732 # don't hold further removals on error
733 log.exception('Unable to remove repository group `%s`', group_name)
734 sa.rollback()
735 errors.append(group_name)
736
737 return removed, errors
738
706 739
707 740 def deep_reload_package(package_name):
708 741 """
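After the split, scanning and cleanup are independent phases; the admin view and the API call the same pair of helpers. A condensed sketch of the server-side flow, using only the functions shown in this diff:

```python
from rhodecode.lib.utils import repo2db_mapper, repo2db_cleanup
from rhodecode.model.scm import ScmModel

# Phase 1: add anything on disk that the database does not know about yet.
filesystem_repos = ScmModel().repo_scan()
added, scan_errors = repo2db_mapper(filesystem_repos, force_hooks_rebuild=True)

# Phase 2 (separate, destructive): drop DB records whose storage is gone.
# skip_repos / skip_groups can protect known-good entries, as the tests do.
removed, cleanup_errors = repo2db_cleanup()
```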
@@ -829,7 +862,7 b' def create_test_repositories(test_path, '
829 862 raise ImportError('Failed to import rc_testdata, '
830 863 'please make sure this package is installed from requirements_test.txt')
831 864
832 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
865 from rhodecode.bootstrap import HG_REPO, GIT_REPO, SVN_REPO
833 866
834 867 log.debug('making test vcs repositories at %s', test_path)
835 868
@@ -58,9 +58,10 b' def get_vcs_instance(repo_path, *args, *'
58 58 raise VCSError(f"Given path {repo_path} is not a directory")
59 59 except VCSError:
60 60 log.exception(
61 'Perhaps this repository is in db and not in '
62 'filesystem run rescan repositories with '
63 '"destroy old data" option from admin panel')
61 'Perhaps this repository is in db and not in filesystem. '
62 'Run cleanup filesystem option from admin settings under Remap and rescan'
63 )
64
64 65 return None
65 66
66 67 return backend(repo_path=repo_path, *args, **kwargs)
@@ -745,7 +745,7 b' class RepoModel(BaseModel):'
745 745 log.error(traceback.format_exc())
746 746 raise
747 747
748 def delete(self, repo, forks=None, pull_requests=None, artifacts=None, fs_remove=True, cur_user=None):
748 def delete(self, repo, forks=None, pull_requests=None, artifacts=None, fs_remove=True, cur_user=None, call_events=True):
749 749 """
750 750 Delete given repository, forks parameter defines what do do with
751 751 attached forks. Throws AttachedForksError if deleted repo has attached
@@ -760,47 +760,54 b' class RepoModel(BaseModel):'
760 760 if not cur_user:
761 761 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
762 762 repo = self._get_repo(repo)
763 if repo:
764 if forks == 'detach':
765 for r in repo.forks:
766 r.fork = None
767 self.sa.add(r)
768 elif forks == 'delete':
769 for r in repo.forks:
770 self.delete(r, forks='delete')
771 elif [f for f in repo.forks]:
772 raise AttachedForksError()
763 if not repo:
764 return False
773 765
774 # check for pull requests
775 pr_sources = repo.pull_requests_source
776 pr_targets = repo.pull_requests_target
777 if pull_requests != 'delete' and (pr_sources or pr_targets):
778 raise AttachedPullRequestsError()
766 if forks == 'detach':
767 for r in repo.forks:
768 r.fork = None
769 self.sa.add(r)
770 elif forks == 'delete':
771 for r in repo.forks:
772 self.delete(r, forks='delete')
773 elif [f for f in repo.forks]:
774 raise AttachedForksError()
775
776 # check for pull requests
777 pr_sources = repo.pull_requests_source
778 pr_targets = repo.pull_requests_target
779 if pull_requests != 'delete' and (pr_sources or pr_targets):
780 raise AttachedPullRequestsError()
779 781
780 artifacts_objs = repo.artifacts
781 if artifacts == 'delete':
782 for a in artifacts_objs:
783 self.sa.delete(a)
784 elif [a for a in artifacts_objs]:
785 raise AttachedArtifactsError()
782 artifacts_objs = repo.artifacts
783 if artifacts == 'delete':
784 for a in artifacts_objs:
785 self.sa.delete(a)
786 elif [a for a in artifacts_objs]:
787 raise AttachedArtifactsError()
786 788
787 old_repo_dict = repo.get_dict()
789 old_repo_dict = repo.get_dict()
790 if call_events:
788 791 events.trigger(events.RepoPreDeleteEvent(repo))
789 try:
790 self.sa.delete(repo)
791 if fs_remove:
792 self._delete_filesystem_repo(repo)
793 else:
794 log.debug('skipping removal from filesystem')
795 old_repo_dict.update({
796 'deleted_by': cur_user,
797 'deleted_on': time.time(),
798 })
792
793 try:
794 self.sa.delete(repo)
795 if fs_remove:
796 self._delete_filesystem_repo(repo)
797 else:
798 log.debug('skipping removal from filesystem')
799 old_repo_dict.update({
800 'deleted_by': cur_user,
801 'deleted_on': time.time(),
802 })
803 if call_events:
799 804 hooks_base.delete_repository(**old_repo_dict)
800 805 events.trigger(events.RepoDeleteEvent(repo))
801 except Exception:
802 log.error(traceback.format_exc())
803 raise
806 except Exception:
807 log.error(traceback.format_exc())
808 raise
809
810 return True
804 811
805 812 def grant_user_permission(self, repo, user, perm):
806 813 """
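The new `call_events` switch exists so bulk maintenance such as `repo2db_cleanup` can drop stale database rows without firing per-repository delete events and hooks. A minimal sketch of that call as the cleanup path uses it (`stale_repo` is a placeholder for a `Repository` whose storage is already gone):

```python
from rhodecode.model.meta import Session
from rhodecode.model.repo import RepoModel

# Detach forks, keep whatever is on disk, and skip delete events/hooks --
# only the database row is being removed.
RepoModel().delete(stale_repo, forks='detach', fs_remove=False, call_events=False)
Session().commit()
```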
@@ -156,7 +156,7 b' class RepoGroupModel(BaseModel):'
156 156
157 157 def check_exist_filesystem(self, group_name, exc_on_failure=True):
158 158 create_path = os.path.join(self.repos_path, group_name)
159 log.debug('creating new group in %s', create_path)
159 log.debug('checking FS presence for repo group in %s', create_path)
160 160
161 161 if os.path.isdir(create_path):
162 162 if exc_on_failure:
@@ -573,10 +573,11 b' class RepoGroupModel(BaseModel):'
573 573 log.error(traceback.format_exc())
574 574 raise
575 575
576 def delete(self, repo_group, force_delete=False, fs_remove=True):
576 def delete(self, repo_group, force_delete=False, fs_remove=True, call_events=True):
577 577 repo_group = self._get_repo_group(repo_group)
578 578 if not repo_group:
579 579 return False
580 repo_group_name = repo_group.group_name
580 581 try:
581 582 self.sa.delete(repo_group)
582 583 if fs_remove:
@@ -585,13 +586,15 b' class RepoGroupModel(BaseModel):'
585 586 log.debug('skipping removal from filesystem')
586 587
587 588 # Trigger delete event.
588 events.trigger(events.RepoGroupDeleteEvent(repo_group))
589 return True
589 if call_events:
590 events.trigger(events.RepoGroupDeleteEvent(repo_group))
590 591
591 592 except Exception:
592 log.error('Error removing repo_group %s', repo_group)
593 log.error('Error removing repo_group %s', repo_group_name)
593 594 raise
594 595
596 return True
597
595 598 def grant_user_permission(self, repo_group, user, perm):
596 599 """
597 600 Grant permission for user on given repository group, or update
@@ -121,7 +121,7 b' def scan_repositories_if_enabled(event):'
121 121 from rhodecode.lib.utils import repo2db_mapper
122 122 scm = ScmModel()
123 123 repositories = scm.repo_scan(scm.repos_path)
124 repo2db_mapper(repositories, remove_obsolete=False)
124 repo2db_mapper(repositories)
125 125
126 126
127 127 def write_metadata_if_needed(event):
@@ -1,33 +1,45 b''
1 ${h.secure_form(h.route_path('admin_settings_mapping_update'), request=request)}
1
2 2
3 3 <div class="panel panel-default">
4 4 <div class="panel-heading">
5 <h3 class="panel-title">${_('Import New Groups or Repositories')}</h3>
5 <h3 class="panel-title">${_('Import new repository groups and repositories')}</h3>
6 6 </div>
7 7 <div class="panel-body">
8
8 ${h.secure_form(h.route_path('admin_settings_mapping_create'), request=request)}
9 9 <p>
10 ${_('This function will scann all data under the current storage path location at')} <code>${c.storage_path}</code>
10 ${_('This function will scan all data under the current storage path location at')} <code>${c.storage_path}</code><br/>
11 ${_('Each folder will be imported as a new repository group, and each repository found will also be imported to the root level or its corresponding repository group')}
11 12 </p>
12 13
13 14 <div class="checkbox">
14 ${h.checkbox('destroy',True)}
15 <label for="destroy">${_('Destroy old data')}</label>
16 </div>
17 <span class="help-block">${_('In case a repository or a group was deleted from the filesystem and it still exists in the database, check this option to remove obsolete data from the database.')}</span>
18
19 <div class="checkbox">
20 15 ${h.checkbox('invalidate',True)}
21 16 <label for="invalidate"> ${_('Invalidate cache for all repositories')}</label>
22 17 </div>
23 18 <span class="help-block">${_('Each cache data for repositories will be cleaned with this option selected. Use this to reload data and clear cache keys.')}</span>
24 19
25 20 <div class="buttons">
26 ${h.submit('rescan',_('Rescan Filesystem'),class_="btn")}
21 ${h.submit('rescan',_('Scan filesystem'),class_="btn")}
27 22 </div>
28
23 ${h.end_form()}
29 24 </div>
30 25 </div>
31 26
32 27
33 ${h.end_form()}
28 <div class="panel panel-default">
29 <div class="panel-heading">
30 <h3 class="panel-title">${_('Cleanup removed Repository Groups or Repositories')}</h3>
31 </div>
32 <div class="panel-body">
33 ${h.secure_form(h.route_path('admin_settings_mapping_cleanup'), request=request)}
34 <p>
35 ${_('This function will scan all data under the current storage path location at')} <code>${c.storage_path}</code>
36 ${_('Then it will remove all repository groups and repositories that are no longer present in the filesystem.')}
37 </p>
38
39 <div class="buttons">
40 ${h.submit('rescan',_('Cleanup filesystem'),class_="btn btn-danger")}
41 </div>
42 ${h.end_form()}
43 </div>
44 </div>
45
@@ -1558,6 +1558,7 b' def stub_integration_settings():'
1558 1558
1559 1559 @pytest.fixture()
1560 1560 def repo_integration_stub(request, repo_stub, StubIntegrationType, stub_integration_settings):
1561 repo_id = repo_stub.repo_id
1561 1562 integration = IntegrationModel().create(
1562 1563 StubIntegrationType,
1563 1564 settings=stub_integration_settings,
@@ -1571,6 +1572,7 b' def repo_integration_stub(request, repo_'
1571 1572 @request.addfinalizer
1572 1573 def cleanup():
1573 1574 IntegrationModel().delete(integration)
1575 RepoModel().delete(repo_id)
1574 1576
1575 1577 return integration
1576 1578
@@ -20,23 +20,21 b' import time'
20 20 import pytest
21 21
22 22 from rhodecode import events
23 from rhodecode.model.repo import RepoModel
23 24 from rhodecode.tests.fixtures.rc_fixture import Fixture
24 25 from rhodecode.model.db import Session, Integration
25 26 from rhodecode.model.integration import IntegrationModel
26 27
27 28
28 29 class TestDeleteScopesDeletesIntegrations(object):
29 def test_delete_repo_with_integration_deletes_integration(
30 self, repo_integration_stub):
31
32 Session().delete(repo_integration_stub.repo)
30 def test_delete_repo_with_integration_deletes_integration(self, repo_integration_stub):
31 RepoModel().delete(repo_integration_stub.repo)
33 32 Session().commit()
34 33 Session().expire_all()
35 34 integration = Integration.get(repo_integration_stub.integration_id)
36 35 assert integration is None
37 36
38 def test_delete_repo_group_with_integration_deletes_integration(
39 self, repogroup_integration_stub):
37 def test_delete_repo_group_with_integration_deletes_integration(self, repogroup_integration_stub):
40 38
41 39 Session().delete(repogroup_integration_stub.repo_group)
42 40 Session().commit()
@@ -52,7 +50,7 b' def counter():'
52 50 global count
53 51 val = count
54 52 count += 1
55 return '{}_{}'.format(val, time.time())
53 return f'{val}_{time.time()}'
56 54
57 55
58 56 @pytest.fixture()
@@ -18,20 +18,25 b''
18 18
19 19 import multiprocessing
20 20 import os
21 import shutil
21 22
22 23 import mock
23 24 import py
24 25 import pytest
25 26
27 import rhodecode
26 28 from rhodecode.lib import caching_query
27 29 from rhodecode.lib import utils
28 30 from rhodecode.lib.str_utils import safe_bytes
29 31 from rhodecode.model import settings
30 32 from rhodecode.model import db
31 33 from rhodecode.model import meta
34 from rhodecode.model.meta import Session
32 35 from rhodecode.model.repo import RepoModel
33 36 from rhodecode.model.repo_group import RepoGroupModel
37 from rhodecode.model.scm import ScmModel
34 38 from rhodecode.model.settings import UiSetting, SettingsModel
39 from rhodecode.tests.fixtures.fixture_pyramid import rhodecode_factory
35 40 from rhodecode.tests.fixtures.rc_fixture import Fixture
36 41 from rhodecode_tools.lib.hash_utils import md5_safe
37 42 from rhodecode.lib.ext_json import json
@@ -230,7 +235,7 b' def platform_encodes_filenames():'
230 235 return path_with_latin1 != read_path
231 236
232 237
233 def test_repo2db_mapper_groups(repo_groups):
238 def test_repo2db_cleaner_removes_zombie_groups(repo_groups):
234 239 session = meta.Session()
235 240 zombie_group, parent_group, child_group = repo_groups
236 241 zombie_path = os.path.join(
@@ -238,10 +243,9 b' def test_repo2db_mapper_groups(repo_grou'
238 243 os.rmdir(zombie_path)
239 244
240 245 # Avoid removing test repos when calling repo2db_mapper
241 repo_list = {
242 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
243 }
244 utils.repo2db_mapper(repo_list, remove_obsolete=True)
246 repo_list = [repo.repo_name for repo in session.query(db.Repository).all()]
247
248 utils.repo2db_cleanup(skip_repos=repo_list)
245 249
246 250 groups_in_db = session.query(db.RepoGroup).all()
247 251 assert child_group in groups_in_db
@@ -249,20 +253,68 b' def test_repo2db_mapper_groups(repo_grou'
249 253 assert zombie_path not in groups_in_db
250 254
251 255
252 def test_repo2db_mapper_enables_largefiles(backend):
256
257 @pytest.mark.backends("hg", "git", "svn")
258 def test_repo2db_cleaner_removes_zombie_repos(backend):
253 259 repo = backend.create_repo()
254 repo_list = {repo.repo_name: 'test'}
255 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
256 utils.repo2db_mapper(repo_list, remove_obsolete=False)
257 _, kwargs = scm_mock.call_args
258 assert kwargs['config'].get('extensions', 'largefiles') == ''
260 zombie_path = repo.repo_full_path
261 shutil.rmtree(zombie_path)
262
263 removed, errors = utils.repo2db_cleanup()
264 assert len(removed) == 1
265 assert not errors
259 266
260 267
261 @pytest.mark.backends("git", "svn")
268 def test_repo2db_mapper_adds_new_repos(request, backend):
269 repo = backend.create_repo()
270 cleanup_repos = []
271 cleanup_groups = []
272 for num in range(5):
273 copy_repo_name = f'{repo.repo_name}-{num}'
274 copy_repo_path = f'{repo.repo_full_path}-{num}'
275
276 shutil.copytree(repo.repo_full_path, copy_repo_path)
277 cleanup_repos.append(copy_repo_name)
278
279 for gr_num in range(5):
280 gr_name = f'my_gr_{gr_num}'
281 dest_gr = os.path.join(os.path.dirname(repo.repo_full_path), gr_name)
282 os.makedirs(dest_gr, exist_ok=True)
283
284 copy_repo_name = f'{gr_name}/{repo.repo_name}-{gr_num}'
285 copy_repo_path = f'{dest_gr}/{repo.repo_name}-{gr_num}'
286
287 shutil.copytree(repo.repo_full_path, copy_repo_path)
288 cleanup_repos.append(copy_repo_name)
289 cleanup_groups.append(gr_name)
290
291 repo_list = ScmModel().repo_scan()
292
293 added, errors = utils.repo2db_mapper(repo_list)
294 Session().commit()
295 assert not errors
296
297 assert len(added) == 10
298
299 @request.addfinalizer
300 def cleanup():
301 for _repo in cleanup_repos:
302 del_result = RepoModel().delete(_repo, call_events=False)
303 Session().commit()
304 assert del_result is True
305
306 for _repo_group in cleanup_groups:
307 del_result = RepoGroupModel().delete(_repo_group, force_delete=True, call_events=False)
308 Session().commit()
309 assert del_result is True
310
311
262 312 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
263 313 repo = backend.create_repo()
264 314 repo_list = {repo.repo_name: 'test'}
265 utils.repo2db_mapper(repo_list, remove_obsolete=False)
315 added, errors = utils.repo2db_mapper(repo_list)
316 assert not errors
317 assert repo.scm_instance().get_hooks_info() == {'pre_version': rhodecode.__version__, 'post_version': rhodecode.__version__}
266 318
267 319
268 320 @pytest.mark.backends("git", "svn")
@@ -271,7 +323,9 b' def test_repo2db_mapper_installs_hooks_f'
271 323 RepoModel().delete(repo, fs_remove=False)
272 324 meta.Session().commit()
273 325 repo_list = {repo.repo_name: repo.scm_instance()}
274 utils.repo2db_mapper(repo_list, remove_obsolete=False)
326 added, errors = utils.repo2db_mapper(repo_list)
327 assert not errors
328 assert len(added) == 1
275 329
276 330
277 331 class TestPasswordChanged(object):
@@ -453,7 +507,7 b' class TestGetEnabledHooks(object):'
453 507
454 508 def test_obfuscate_url_pw():
455 509 from rhodecode.lib.utils2 import obfuscate_url_pw
456 engine = u'/home/repos/malmö'
510 engine = '/home/repos/malmö'
457 511 assert obfuscate_url_pw(engine)
458 512
459 513
@@ -182,7 +182,8 b' def get_url_defs():'
182 182 "admin_settings_vcs_svn_pattern_delete": ADMIN_PREFIX
183 183 + "/settings/vcs/svn_pattern_delete",
184 184 "admin_settings_mapping": ADMIN_PREFIX + "/settings/mapping",
185 "admin_settings_mapping_update": ADMIN_PREFIX + "/settings/mapping/update",
185 "admin_settings_mapping_create": ADMIN_PREFIX + "/settings/mapping/create",
186 "admin_settings_mapping_cleanup": ADMIN_PREFIX + "/settings/mapping/cleanup",
186 187 "admin_settings_visual": ADMIN_PREFIX + "/settings/visual",
187 188 "admin_settings_visual_update": ADMIN_PREFIX + "/settings/visual/update",
188 189 "admin_settings_issuetracker": ADMIN_PREFIX + "/settings/issue-tracker",