diff --git a/conftest.py b/conftest.py --- a/conftest.py +++ b/conftest.py @@ -27,8 +27,11 @@ from rhodecode.tests.conftest_common imp pytest_plugins = [ - "rhodecode.tests.fixture_mods.fixture_pyramid", - "rhodecode.tests.fixture_mods.fixture_utils", + "rhodecode.tests.fixtures.fixture_pyramid", + "rhodecode.tests.fixtures.fixture_utils", + "rhodecode.tests.fixtures.function_scoped_baseapp", + "rhodecode.tests.fixtures.module_scoped_baseapp", + "rhodecode.tests.fixtures.rcextensions_fixtures", ] diff --git a/pyproject.toml b/pyproject.toml --- a/pyproject.toml +++ b/pyproject.toml @@ -65,8 +65,7 @@ dependencies = {file = ["requirements.tx optional-dependencies.tests = {file = ["requirements_test.txt"]} [tool.ruff] - -select = [ +lint.select = [ # Pyflakes "F", # Pycodestyle @@ -75,16 +74,13 @@ select = [ # isort "I001" ] - -ignore = [ +lint.ignore = [ "E501", # line too long, handled by black ] - # Same as Black. line-length = 120 -[tool.ruff.isort] - +[tool.ruff.lint.isort] known-first-party = ["rhodecode"] [tool.ruff.format] diff --git a/pytest.ini b/pytest.ini --- a/pytest.ini +++ b/pytest.ini @@ -4,8 +4,10 @@ norecursedirs = rhodecode/public rhodeco cache_dir = /tmp/.pytest_cache pyramid_config = rhodecode/tests/rhodecode.ini -vcsserver_protocol = http -vcsserver_config_http = rhodecode/tests/vcsserver_http.ini + +vcsserver_config = rhodecode/tests/vcsserver_http.ini +rhodecode_config = rhodecode/tests/rhodecode.ini +celery_config = rhodecode/tests/rhodecode.ini addopts = --pdbcls=IPython.terminal.debugger:TerminalPdb diff --git a/rhodecode/api/tests/test_add_field_to_repo.py b/rhodecode/api/tests/test_add_field_to_repo.py --- a/rhodecode/api/tests/test_add_field_to_repo.py +++ b/rhodecode/api/tests/test_add_field_to_repo.py @@ -1,5 +1,4 @@ - -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License, 
version 3 diff --git a/rhodecode/api/tests/test_create_gist.py b/rhodecode/api/tests/test_create_gist.py --- a/rhodecode/api/tests/test_create_gist.py +++ b/rhodecode/api/tests/test_create_gist.py @@ -24,7 +24,7 @@ from rhodecode.model.db import Gist from rhodecode.model.gist import GistModel from rhodecode.api.tests.utils import ( build_data, api_call, assert_error, assert_ok, crash) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture @pytest.mark.usefixtures("testuser_api", "app") diff --git a/rhodecode/api/tests/test_create_repo.py b/rhodecode/api/tests/test_create_repo.py --- a/rhodecode/api/tests/test_create_repo.py +++ b/rhodecode/api/tests/test_create_repo.py @@ -27,7 +27,7 @@ from rhodecode.model.user import UserMod from rhodecode.tests import TEST_USER_ADMIN_LOGIN from rhodecode.api.tests.utils import ( build_data, api_call, assert_ok, assert_error, crash) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.lib.ext_json import json from rhodecode.lib.str_utils import safe_str diff --git a/rhodecode/api/tests/test_create_repo_group.py b/rhodecode/api/tests/test_create_repo_group.py --- a/rhodecode/api/tests/test_create_repo_group.py +++ b/rhodecode/api/tests/test_create_repo_group.py @@ -26,7 +26,7 @@ from rhodecode.model.user import UserMod from rhodecode.tests import TEST_USER_ADMIN_LOGIN from rhodecode.api.tests.utils import ( build_data, api_call, assert_ok, assert_error, crash) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture fixture = Fixture() diff --git a/rhodecode/api/tests/test_create_user.py b/rhodecode/api/tests/test_create_user.py --- a/rhodecode/api/tests/test_create_user.py +++ b/rhodecode/api/tests/test_create_user.py @@ -26,7 +26,7 @@ from rhodecode.tests import ( TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_EMAIL) from rhodecode.api.tests.utils import ( build_data, api_call, 
assert_ok, assert_error, jsonify, crash) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.model.db import RepoGroup diff --git a/rhodecode/api/tests/test_create_user_group.py b/rhodecode/api/tests/test_create_user_group.py --- a/rhodecode/api/tests/test_create_user_group.py +++ b/rhodecode/api/tests/test_create_user_group.py @@ -25,7 +25,7 @@ from rhodecode.model.user import UserMod from rhodecode.model.user_group import UserGroupModel from rhodecode.api.tests.utils import ( build_data, api_call, assert_error, assert_ok, crash, jsonify) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture @pytest.mark.usefixtures("testuser_api", "app") diff --git a/rhodecode/api/tests/test_fork_repo.py b/rhodecode/api/tests/test_fork_repo.py --- a/rhodecode/api/tests/test_fork_repo.py +++ b/rhodecode/api/tests/test_fork_repo.py @@ -28,7 +28,7 @@ from rhodecode.model.user import UserMod from rhodecode.tests import TEST_USER_ADMIN_LOGIN from rhodecode.api.tests.utils import ( build_data, api_call, assert_error, assert_ok, crash) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture fixture = Fixture() diff --git a/rhodecode/api/tests/test_update_repo.py b/rhodecode/api/tests/test_update_repo.py --- a/rhodecode/api/tests/test_update_repo.py +++ b/rhodecode/api/tests/test_update_repo.py @@ -25,8 +25,8 @@ from rhodecode.model.scm import ScmModel from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN from rhodecode.api.tests.utils import ( build_data, api_call, assert_error, assert_ok, crash, jsonify) -from rhodecode.tests.fixture import Fixture -from rhodecode.tests.fixture_mods.fixture_utils import plain_http_host_only_stub +from rhodecode.tests.fixtures.rc_fixture import Fixture +from rhodecode.tests.fixtures.fixture_utils import plain_http_host_only_stub fixture = Fixture() diff --git 
a/rhodecode/apps/admin/tests/test_admin_audit_logs.py b/rhodecode/apps/admin/tests/test_admin_audit_logs.py --- a/rhodecode/apps/admin/tests/test_admin_audit_logs.py +++ b/rhodecode/apps/admin/tests/test_admin_audit_logs.py @@ -26,7 +26,7 @@ import pytest from rhodecode.lib.str_utils import safe_str from rhodecode.tests import * from rhodecode.tests.routes import route_path -from rhodecode.tests.fixture import FIXTURES +from rhodecode.tests.fixtures.rc_fixture import FIXTURES from rhodecode.model.db import UserLog from rhodecode.model.meta import Session diff --git a/rhodecode/apps/admin/tests/test_admin_main_views.py b/rhodecode/apps/admin/tests/test_admin_main_views.py --- a/rhodecode/apps/admin/tests/test_admin_main_views.py +++ b/rhodecode/apps/admin/tests/test_admin_main_views.py @@ -20,7 +20,7 @@ import pytest from rhodecode.tests import TestController -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/admin/tests/test_admin_repos.py b/rhodecode/apps/admin/tests/test_admin_repos.py --- a/rhodecode/apps/admin/tests/test_admin_repos.py +++ b/rhodecode/apps/admin/tests/test_admin_repos.py @@ -37,7 +37,7 @@ from rhodecode.model.user import UserMod from rhodecode.tests import ( login_user_session, assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) -from rhodecode.tests.fixture import Fixture, error_function +from rhodecode.tests.fixtures.rc_fixture import Fixture, error_function from rhodecode.tests.utils import repo_on_filesystem from rhodecode.tests.routes import route_path diff --git a/rhodecode/apps/admin/tests/test_admin_repository_groups.py b/rhodecode/apps/admin/tests/test_admin_repository_groups.py --- a/rhodecode/apps/admin/tests/test_admin_repository_groups.py +++ b/rhodecode/apps/admin/tests/test_admin_repository_groups.py @@ -27,7 +27,7 @@ from rhodecode.model.meta 
import Session from rhodecode.model.repo_group import RepoGroupModel from rhodecode.tests import ( assert_session_flash, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path diff --git a/rhodecode/apps/admin/tests/test_admin_user_groups.py b/rhodecode/apps/admin/tests/test_admin_user_groups.py --- a/rhodecode/apps/admin/tests/test_admin_user_groups.py +++ b/rhodecode/apps/admin/tests/test_admin_user_groups.py @@ -24,7 +24,7 @@ from rhodecode.model.meta import Session from rhodecode.tests import ( TestController, assert_session_flash) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/admin/tests/test_admin_users.py b/rhodecode/apps/admin/tests/test_admin_users.py --- a/rhodecode/apps/admin/tests/test_admin_users.py +++ b/rhodecode/apps/admin/tests/test_admin_users.py @@ -28,7 +28,7 @@ from rhodecode.model.user import UserMod from rhodecode.tests import ( TestController, TEST_USER_REGULAR_LOGIN, assert_session_flash) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/admin/tests/test_admin_users_ssh_keys.py b/rhodecode/apps/admin/tests/test_admin_users_ssh_keys.py --- a/rhodecode/apps/admin/tests/test_admin_users_ssh_keys.py +++ b/rhodecode/apps/admin/tests/test_admin_users_ssh_keys.py @@ -22,7 +22,7 @@ import pytest from rhodecode.model.db import User, UserSshKeys from rhodecode.tests import TestController, assert_session_flash -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git 
a/rhodecode/apps/home/tests/test_get_goto_switched_data.py b/rhodecode/apps/home/tests/test_get_goto_switched_data.py --- a/rhodecode/apps/home/tests/test_get_goto_switched_data.py +++ b/rhodecode/apps/home/tests/test_get_goto_switched_data.py @@ -27,7 +27,7 @@ from rhodecode.model.repo_group import R from rhodecode.model.db import Session, Repository, RepoGroup from rhodecode.tests import TestController, TEST_USER_ADMIN_LOGIN -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/home/tests/test_get_repo_list_data.py b/rhodecode/apps/home/tests/test_get_repo_list_data.py --- a/rhodecode/apps/home/tests/test_get_repo_list_data.py +++ b/rhodecode/apps/home/tests/test_get_repo_list_data.py @@ -22,7 +22,7 @@ from rhodecode.model.db import Repositor from rhodecode.lib.ext_json import json from rhodecode.tests import TestController -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/home/tests/test_get_user_data.py b/rhodecode/apps/home/tests/test_get_user_data.py --- a/rhodecode/apps/home/tests/test_get_user_data.py +++ b/rhodecode/apps/home/tests/test_get_user_data.py @@ -20,7 +20,7 @@ import pytest from rhodecode.lib.ext_json import json from rhodecode.tests import TestController -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/home/tests/test_get_user_group_data.py b/rhodecode/apps/home/tests/test_get_user_group_data.py --- a/rhodecode/apps/home/tests/test_get_user_group_data.py +++ b/rhodecode/apps/home/tests/test_get_user_group_data.py @@ -40,7 +40,7 @@ import pytest from rhodecode.lib.ext_json import json from rhodecode.tests import 
TestController -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/home/tests/test_home.py b/rhodecode/apps/home/tests/test_home.py --- a/rhodecode/apps/home/tests/test_home.py +++ b/rhodecode/apps/home/tests/test_home.py @@ -24,7 +24,7 @@ from rhodecode.model.db import Repositor from rhodecode.model.meta import Session from rhodecode.model.settings import SettingsModel from rhodecode.tests import TestController -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path diff --git a/rhodecode/apps/login/tests/test_2fa.py b/rhodecode/apps/login/tests/test_2fa.py --- a/rhodecode/apps/login/tests/test_2fa.py +++ b/rhodecode/apps/login/tests/test_2fa.py @@ -3,7 +3,7 @@ import mock from rhodecode.lib.type_utils import AttributeDict from rhodecode.model.meta import Session -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path from rhodecode.model.settings import SettingsModel diff --git a/rhodecode/apps/login/tests/test_login.py b/rhodecode/apps/login/tests/test_login.py --- a/rhodecode/apps/login/tests/test_login.py +++ b/rhodecode/apps/login/tests/test_login.py @@ -31,7 +31,7 @@ from rhodecode.model.meta import Session from rhodecode.tests import ( assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN, no_newline_id_generator) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/login/tests/test_password_reset.py b/rhodecode/apps/login/tests/test_password_reset.py --- a/rhodecode/apps/login/tests/test_password_reset.py +++ b/rhodecode/apps/login/tests/test_password_reset.py @@ -22,7 +22,7 @@ from 
rhodecode.lib import helpers as h from rhodecode.tests import ( TestController, clear_cache_regions, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.utils import AssertResponse from rhodecode.tests.routes import route_path diff --git a/rhodecode/apps/my_account/tests/test_my_account_auth_tokens.py b/rhodecode/apps/my_account/tests/test_my_account_auth_tokens.py --- a/rhodecode/apps/my_account/tests/test_my_account_auth_tokens.py +++ b/rhodecode/apps/my_account/tests/test_my_account_auth_tokens.py @@ -22,7 +22,7 @@ from rhodecode.apps._base import ADMIN_P from rhodecode.model.db import User from rhodecode.tests import ( TestController, assert_session_flash) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path diff --git a/rhodecode/apps/my_account/tests/test_my_account_emails.py b/rhodecode/apps/my_account/tests/test_my_account_emails.py --- a/rhodecode/apps/my_account/tests/test_my_account_emails.py +++ b/rhodecode/apps/my_account/tests/test_my_account_emails.py @@ -23,7 +23,7 @@ from rhodecode.model.db import User, Use from rhodecode.tests import ( TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_EMAIL, assert_session_flash, TEST_USER_REGULAR_PASS) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path diff --git a/rhodecode/apps/my_account/tests/test_my_account_notifications.py b/rhodecode/apps/my_account/tests/test_my_account_notifications.py --- a/rhodecode/apps/my_account/tests/test_my_account_notifications.py +++ b/rhodecode/apps/my_account/tests/test_my_account_notifications.py @@ -21,7 +21,7 @@ import pytest from rhodecode.tests import ( TestController, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS) 
-from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path from rhodecode.model.db import Notification, User diff --git a/rhodecode/apps/my_account/tests/test_my_account_password.py b/rhodecode/apps/my_account/tests/test_my_account_password.py --- a/rhodecode/apps/my_account/tests/test_my_account_password.py +++ b/rhodecode/apps/my_account/tests/test_my_account_password.py @@ -24,7 +24,7 @@ from rhodecode.lib.auth import check_pas from rhodecode.model.meta import Session from rhodecode.model.user import UserModel from rhodecode.tests import assert_session_flash, TestController -from rhodecode.tests.fixture import Fixture, error_function +from rhodecode.tests.fixtures.rc_fixture import Fixture, error_function from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/my_account/tests/test_my_account_profile.py b/rhodecode/apps/my_account/tests/test_my_account_profile.py --- a/rhodecode/apps/my_account/tests/test_my_account_profile.py +++ b/rhodecode/apps/my_account/tests/test_my_account_profile.py @@ -20,7 +20,7 @@ from rhodecode.tests import ( TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/my_account/tests/test_my_account_simple_views.py b/rhodecode/apps/my_account/tests/test_my_account_simple_views.py --- a/rhodecode/apps/my_account/tests/test_my_account_simple_views.py +++ b/rhodecode/apps/my_account/tests/test_my_account_simple_views.py @@ -19,7 +19,7 @@ from rhodecode.model.db import User, Repository, UserFollowing from rhodecode.tests import TestController, TEST_USER_ADMIN_LOGIN -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes 
import route_path fixture = Fixture() diff --git a/rhodecode/apps/my_account/tests/test_my_account_ssh_keys.py b/rhodecode/apps/my_account/tests/test_my_account_ssh_keys.py --- a/rhodecode/apps/my_account/tests/test_my_account_ssh_keys.py +++ b/rhodecode/apps/my_account/tests/test_my_account_ssh_keys.py @@ -21,7 +21,7 @@ from rhodecode.model.db import User, UserSshKeys from rhodecode.tests import TestController, assert_session_flash -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/repository/tests/test_repo_compare_on_single_file.py b/rhodecode/apps/repository/tests/test_repo_compare_on_single_file.py --- a/rhodecode/apps/repository/tests/test_repo_compare_on_single_file.py +++ b/rhodecode/apps/repository/tests/test_repo_compare_on_single_file.py @@ -22,7 +22,7 @@ import pytest from rhodecode.apps.repository.tests.test_repo_compare import ComparePage from rhodecode.lib.vcs import nodes from rhodecode.lib.vcs.backends.base import EmptyCommit -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.utils import commit_change from rhodecode.tests.routes import route_path @@ -166,14 +166,15 @@ class TestSideBySideDiff(object): response.mustcontain('Collapse 2 commits') response.mustcontain('123 file changed') - response.mustcontain( - 'r%s:%s...r%s:%s' % ( - commit1.idx, commit1.short_id, commit2.idx, commit2.short_id)) + response.mustcontain(f'r{commit1.idx}:{commit1.short_id}...r{commit2.idx}:{commit2.short_id}') response.mustcontain(f_path) - @pytest.mark.xfail(reason='GIT does not handle empty commit compare correct (missing 1 commit)') + #@pytest.mark.xfail(reason='GIT does not handle empty commit compare correct (missing 1 commit)') def test_diff_side_by_side_from_0_commit_with_file_filter(self, app, backend, backend_stub): + if backend.alias == 'git': + 
pytest.skip('GIT does not handle empty commit compare correct (missing 1 commit)') + f_path = b'test_sidebyside_file.py' commit1_content = b'content-25d7e49c18b159446c\n' commit2_content = b'content-603d6c72c46d953420\n' @@ -200,9 +201,7 @@ class TestSideBySideDiff(object): response.mustcontain('Collapse 2 commits') response.mustcontain('1 file changed') - response.mustcontain( - 'r%s:%s...r%s:%s' % ( - commit1.idx, commit1.short_id, commit2.idx, commit2.short_id)) + response.mustcontain(f'r{commit1.idx}:{commit1.short_id}...r{commit2.idx}:{commit2.short_id}') response.mustcontain(f_path) diff --git a/rhodecode/apps/repository/tests/test_repo_files.py b/rhodecode/apps/repository/tests/test_repo_files.py --- a/rhodecode/apps/repository/tests/test_repo_files.py +++ b/rhodecode/apps/repository/tests/test_repo_files.py @@ -33,7 +33,7 @@ from rhodecode.lib.vcs.conf import setti from rhodecode.model.db import Session, Repository from rhodecode.tests import assert_session_flash -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path diff --git a/rhodecode/apps/repository/tests/test_repo_forks.py b/rhodecode/apps/repository/tests/test_repo_forks.py --- a/rhodecode/apps/repository/tests/test_repo_forks.py +++ b/rhodecode/apps/repository/tests/test_repo_forks.py @@ -21,7 +21,7 @@ import pytest from rhodecode.tests import TestController, assert_session_flash, HG_FORK, GIT_FORK -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.lib import helpers as h from rhodecode.model.db import Repository diff --git a/rhodecode/apps/repository/tests/test_repo_maintainance.py b/rhodecode/apps/repository/tests/test_repo_maintainance.py --- a/rhodecode/apps/repository/tests/test_repo_maintainance.py +++ b/rhodecode/apps/repository/tests/test_repo_maintainance.py @@ -21,7 +21,7 @@ import pytest from rhodecode.model.db import Repository, 
UserRepoToPerm, Permission, User -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/repository/tests/test_repo_pullrequests.py b/rhodecode/apps/repository/tests/test_repo_pullrequests.py --- a/rhodecode/apps/repository/tests/test_repo_pullrequests.py +++ b/rhodecode/apps/repository/tests/test_repo_pullrequests.py @@ -15,6 +15,9 @@ # This program is dual-licensed. If you wish to learn more about the # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ +import logging +import os + import mock import pytest @@ -41,7 +44,7 @@ from rhodecode.tests import ( TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, ) -from rhodecode.tests.fixture_mods.fixture_utils import PRTestUtility +from rhodecode.tests.fixtures.fixture_utils import PRTestUtility from rhodecode.tests.routes import route_path @@ -1050,7 +1053,6 @@ class TestPullrequestsView(object): ) assert len(notifications.all()) == 2 - @pytest.mark.xfail(reason="unable to fix this test after python3 migration") def test_create_pull_request_stores_ancestor_commit_id(self, backend, csrf_token): commits = [ { @@ -1125,20 +1127,38 @@ class TestPullrequestsView(object): response.mustcontain(no=["content_of_ancestor-child"]) response.mustcontain("content_of_change") - def test_merge_pull_request_enabled(self, pr_util, csrf_token): - # Clear any previous calls to rcextensions - rhodecode.EXTENSIONS.calls.clear() + def test_merge_pull_request_enabled(self, pr_util, csrf_token, rcextensions_modification): pull_request = pr_util.create_pull_request(approved=True, mergeable=True) pull_request_id = pull_request.pull_request_id - repo_name = (pull_request.target_repo.scm_instance().name,) + repo_name = pull_request.target_repo.scm_instance().name url = route_path( "pullrequest_merge", - 
repo_name=str(repo_name[0]), + repo_name=repo_name, pull_request_id=pull_request_id, ) - response = self.app.post(url, params={"csrf_token": csrf_token}).follow() + + rcstack_location = os.path.dirname(self.app._pyramid_registry.settings['__file__']) + rc_ext_location = os.path.join(rcstack_location, 'rcextension-output.txt') + + + mods = [ + ('_push_hook', + f""" + import os + action = kwargs['action'] + commit_ids = kwargs['commit_ids'] + with open('{rc_ext_location}', 'w') as f: + f.write('test-execution'+os.linesep) + f.write(f'{{action}}'+os.linesep) + f.write(f'{{commit_ids}}'+os.linesep) + return HookResponse(0, 'HOOK_TEST') + """) + ] + # Add the hook + with rcextensions_modification(rcstack_location, mods, create_if_missing=True, force_create=True): + response = self.app.post(url, params={"csrf_token": csrf_token}).follow() pull_request = PullRequest.get(pull_request_id) @@ -1162,12 +1182,39 @@ class TestPullrequestsView(object): assert actions[-1].action == "user.push" assert actions[-1].action_data["commit_ids"] == pr_commit_ids - # Check post_push rcextension was really executed - push_calls = rhodecode.EXTENSIONS.calls["_push_hook"] - assert len(push_calls) == 1 - unused_last_call_args, last_call_kwargs = push_calls[0] - assert last_call_kwargs["action"] == "push" - assert last_call_kwargs["commit_ids"] == pr_commit_ids + with open(rc_ext_location) as f: + f_data = f.read() + assert 'test-execution' in f_data + for commit_id in pr_commit_ids: + assert f'{commit_id}' in f_data + + def test_merge_pull_request_forbidden_by_pre_push_hook(self, pr_util, csrf_token, rcextensions_modification, caplog): + caplog.set_level(logging.WARNING, logger="rhodecode.model.pull_request") + + pull_request = pr_util.create_pull_request(approved=True, mergeable=True) + pull_request_id = pull_request.pull_request_id + repo_name = pull_request.target_repo.scm_instance().name + + url = route_path( + "pullrequest_merge", + repo_name=repo_name, + pull_request_id=pull_request_id, 
+ ) + + rcstack_location = os.path.dirname(self.app._pyramid_registry.settings['__file__']) + + mods = [ + ('_pre_push_hook', + f""" + return HookResponse(1, 'HOOK_TEST_FORBIDDEN') + """) + ] + # Add the hook + with rcextensions_modification(rcstack_location, mods, create_if_missing=True, force_create=True): + self.app.post(url, params={"csrf_token": csrf_token}) + + assert 'Merge failed, not updating the pull request.' in [r[2] for r in caplog.record_tuples] + def test_merge_pull_request_disabled(self, pr_util, csrf_token): pull_request = pr_util.create_pull_request(mergeable=False) @@ -1523,7 +1570,6 @@ class TestPullrequestsView(object): assert pull_request.revisions == [commit_ids["change-rebased"]] - def test_remove_pull_request_branch(self, backend_git, csrf_token): branch_name = "development" commits = [ diff --git a/rhodecode/apps/repository/tests/test_repo_settings.py b/rhodecode/apps/repository/tests/test_repo_settings.py --- a/rhodecode/apps/repository/tests/test_repo_settings.py +++ b/rhodecode/apps/repository/tests/test_repo_settings.py @@ -26,7 +26,7 @@ from rhodecode.model.db import Repositor from rhodecode.model.meta import Session from rhodecode.tests import ( TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, assert_session_flash) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/repository/tests/test_repo_settings_advanced.py b/rhodecode/apps/repository/tests/test_repo_settings_advanced.py --- a/rhodecode/apps/repository/tests/test_repo_settings_advanced.py +++ b/rhodecode/apps/repository/tests/test_repo_settings_advanced.py @@ -24,7 +24,7 @@ from rhodecode.model.db import Repositor from rhodecode.model.repo import RepoModel from rhodecode.tests import ( HG_REPO, GIT_REPO, assert_session_flash, no_newline_id_generator) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture 
import Fixture from rhodecode.tests.utils import repo_on_filesystem from rhodecode.tests.routes import route_path diff --git a/rhodecode/apps/repository/tests/test_repo_summary.py b/rhodecode/apps/repository/tests/test_repo_summary.py --- a/rhodecode/apps/repository/tests/test_repo_summary.py +++ b/rhodecode/apps/repository/tests/test_repo_summary.py @@ -31,7 +31,7 @@ from rhodecode.model.meta import Session from rhodecode.model.repo import RepoModel from rhodecode.model.scm import ScmModel from rhodecode.tests import assert_session_flash -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.utils import AssertResponse, repo_on_filesystem from rhodecode.tests.routes import route_path diff --git a/rhodecode/apps/repository/tests/test_repo_vcs_settings.py b/rhodecode/apps/repository/tests/test_repo_vcs_settings.py --- a/rhodecode/apps/repository/tests/test_repo_vcs_settings.py +++ b/rhodecode/apps/repository/tests/test_repo_vcs_settings.py @@ -30,7 +30,7 @@ from rhodecode.model.user import UserMod from rhodecode.tests import ( login_user_session, logout_user_session, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.utils import AssertResponse from rhodecode.tests.routes import route_path diff --git a/rhodecode/apps/repository/tests/test_vcs_settings.py b/rhodecode/apps/repository/tests/test_vcs_settings.py --- a/rhodecode/apps/repository/tests/test_vcs_settings.py +++ b/rhodecode/apps/repository/tests/test_vcs_settings.py @@ -32,16 +32,13 @@ class TestAdminRepoVcsSettings(object): @pytest.mark.parametrize('setting_name, setting_backends', [ ('hg_use_rebase_for_merging', ['hg']), ]) - def test_labs_settings_visible_if_enabled( - self, setting_name, setting_backends, backend): + def test_labs_settings_visible_if_enabled(self, setting_name, setting_backends, backend): if 
backend.alias not in setting_backends: pytest.skip('Setting not available for backend {}'.format(backend)) - vcs_settings_url = route_path( - 'edit_repo_vcs', repo_name=backend.repo.repo_name) + vcs_settings_url = route_path('edit_repo_vcs', repo_name=backend.repo.repo_name) - with mock.patch.dict( - rhodecode.CONFIG, {'labs_settings_active': 'true'}): + with mock.patch.dict(rhodecode.CONFIG, {'labs_settings_active': 'true'}): response = self.app.get(vcs_settings_url) assertr = response.assert_response() diff --git a/rhodecode/apps/ssh_support/lib/backends/base.py b/rhodecode/apps/ssh_support/lib/backends/base.py --- a/rhodecode/apps/ssh_support/lib/backends/base.py +++ b/rhodecode/apps/ssh_support/lib/backends/base.py @@ -20,7 +20,7 @@ import os import sys import logging -from rhodecode.lib.hook_daemon.base import prepare_callback_daemon +from rhodecode.lib.hook_daemon.utils import prepare_callback_daemon from rhodecode.lib.ext_json import sjson as json from rhodecode.lib.vcs.conf import settings as vcs_settings from rhodecode.lib.api_utils import call_service_api @@ -162,9 +162,7 @@ class SshVcsServer(object): extras = {} extras.update(tunnel_extras) - callback_daemon, extras = prepare_callback_daemon( - extras, protocol=self.hooks_protocol, - host=vcs_settings.HOOKS_HOST) + callback_daemon, extras = prepare_callback_daemon(extras, protocol=self.hooks_protocol) with callback_daemon: try: diff --git a/rhodecode/apps/ssh_support/tests/test_server_git.py b/rhodecode/apps/ssh_support/tests/test_server_git.py --- a/rhodecode/apps/ssh_support/tests/test_server_git.py +++ b/rhodecode/apps/ssh_support/tests/test_server_git.py @@ -33,19 +33,24 @@ class GitServerCreator(object): 'app:main': { 'ssh.executable.git': git_path, 'vcs.hooks.protocol.v2': 'celery', + 'app.service_api.host': 'http://localhost', + 'app.service_api.token': 'secret4', + 'rhodecode.api.url': '/_admin/api', } } repo_name = 'test_git' repo_mode = 'receive-pack' user = plain_dummy_user() - def 
__init__(self): - pass + def __init__(self, service_api_url, ini_file): + self.service_api_url = service_api_url + self.ini_file = ini_file def create(self, **kwargs): + self.config_data['app:main']['app.service_api.host'] = self.service_api_url parameters = { 'store': self.root, - 'ini_path': '', + 'ini_path': self.ini_file, 'user': self.user, 'repo_name': self.repo_name, 'repo_mode': self.repo_mode, @@ -60,12 +65,30 @@ class GitServerCreator(object): return server -@pytest.fixture() -def git_server(app): - return GitServerCreator() +@pytest.fixture(scope='module') +def git_server(request, module_app, rhodecode_factory, available_port_factory): + ini_file = module_app._pyramid_settings['__file__'] + vcsserver_host = module_app._pyramid_settings['vcs.server'] + + store_dir = os.path.dirname(ini_file) + + # start rhodecode for service API + rc = rhodecode_factory( + request, + store_dir=store_dir, + port=available_port_factory(), + overrides=( + {'handler_console': {'level': 'DEBUG'}}, + {'app:main': {'vcs.server': vcsserver_host}}, + {'app:main': {'repo_store.path': store_dir}} + )) + + service_api_url = f'http://{rc.bind_addr}' + + return GitServerCreator(service_api_url, ini_file) -class TestGitServer(object): +class TestGitServer: def test_command(self, git_server): server = git_server.create() @@ -102,14 +125,14 @@ class TestGitServer(object): assert result is value def test_run_returns_executes_command(self, git_server): + from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper server = git_server.create() - from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper os.environ['SSH_CLIENT'] = '127.0.0.1' with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch: _patch.return_value = 0 with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'): - exit_code = server.run() + exit_code = server.run(tunnel_extras={'config': server.ini_path}) assert exit_code == (0, False) @@ -135,7 +158,7 @@ class 
TestGitServer(object): 'action': action, 'ip': '10.10.10.10', 'locked_by': [None, None], - 'config': '', + 'config': git_server.ini_file, 'repo_store': store, 'server_url': None, 'hooks': ['push', 'pull'], diff --git a/rhodecode/apps/ssh_support/tests/test_server_hg.py b/rhodecode/apps/ssh_support/tests/test_server_hg.py --- a/rhodecode/apps/ssh_support/tests/test_server_hg.py +++ b/rhodecode/apps/ssh_support/tests/test_server_hg.py @@ -17,6 +17,7 @@ # and proprietary license terms, please see https://rhodecode.com/licenses/ import os + import mock import pytest @@ -32,22 +33,27 @@ class MercurialServerCreator(object): 'app:main': { 'ssh.executable.hg': hg_path, 'vcs.hooks.protocol.v2': 'celery', + 'app.service_api.host': 'http://localhost', + 'app.service_api.token': 'secret4', + 'rhodecode.api.url': '/_admin/api', } } repo_name = 'test_hg' user = plain_dummy_user() - def __init__(self): - pass + def __init__(self, service_api_url, ini_file): + self.service_api_url = service_api_url + self.ini_file = ini_file def create(self, **kwargs): + self.config_data['app:main']['app.service_api.host'] = self.service_api_url parameters = { 'store': self.root, - 'ini_path': '', + 'ini_path': self.ini_file, 'user': self.user, 'repo_name': self.repo_name, 'user_permissions': { - 'test_hg': 'repository.admin' + self.repo_name: 'repository.admin' }, 'settings': self.config_data['app:main'], 'env': plain_dummy_env() @@ -57,12 +63,30 @@ class MercurialServerCreator(object): return server -@pytest.fixture() -def hg_server(app): - return MercurialServerCreator() +@pytest.fixture(scope='module') +def hg_server(request, module_app, rhodecode_factory, available_port_factory): + ini_file = module_app._pyramid_settings['__file__'] + vcsserver_host = module_app._pyramid_settings['vcs.server'] + + store_dir = os.path.dirname(ini_file) + + # start rhodecode for service API + rc = rhodecode_factory( + request, + store_dir=store_dir, + port=available_port_factory(), + overrides=( + 
{'handler_console': {'level': 'DEBUG'}}, + {'app:main': {'vcs.server': vcsserver_host}}, + {'app:main': {'repo_store.path': store_dir}} + )) + + service_api_url = f'http://{rc.bind_addr}' + + return MercurialServerCreator(service_api_url, ini_file) -class TestMercurialServer(object): +class TestMercurialServer: def test_command(self, hg_server, tmpdir): server = hg_server.create() @@ -107,7 +131,7 @@ class TestMercurialServer(object): with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch: _patch.return_value = 0 with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'): - exit_code = server.run() + exit_code = server.run(tunnel_extras={'config': server.ini_path}) assert exit_code == (0, False) diff --git a/rhodecode/apps/ssh_support/tests/test_server_svn.py b/rhodecode/apps/ssh_support/tests/test_server_svn.py --- a/rhodecode/apps/ssh_support/tests/test_server_svn.py +++ b/rhodecode/apps/ssh_support/tests/test_server_svn.py @@ -15,7 +15,9 @@ # This program is dual-licensed. 
If you wish to learn more about the # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ + import os + import mock import pytest @@ -26,39 +28,62 @@ from rhodecode.apps.ssh_support.tests.co class SubversionServerCreator(object): root = '/tmp/repo/path/' svn_path = '/usr/local/bin/svnserve' + config_data = { 'app:main': { 'ssh.executable.svn': svn_path, 'vcs.hooks.protocol.v2': 'celery', + 'app.service_api.host': 'http://localhost', + 'app.service_api.token': 'secret4', + 'rhodecode.api.url': '/_admin/api', } } repo_name = 'test-svn' user = plain_dummy_user() - def __init__(self): - pass + def __init__(self, service_api_url, ini_file): + self.service_api_url = service_api_url + self.ini_file = ini_file def create(self, **kwargs): + self.config_data['app:main']['app.service_api.host'] = self.service_api_url parameters = { 'store': self.root, + 'ini_path': self.ini_file, + 'user': self.user, 'repo_name': self.repo_name, - 'ini_path': '', - 'user': self.user, 'user_permissions': { self.repo_name: 'repository.admin' }, 'settings': self.config_data['app:main'], 'env': plain_dummy_env() } - parameters.update(kwargs) server = SubversionServer(**parameters) return server -@pytest.fixture() -def svn_server(app): - return SubversionServerCreator() +@pytest.fixture(scope='module') +def svn_server(request, module_app, rhodecode_factory, available_port_factory): + ini_file = module_app._pyramid_settings['__file__'] + vcsserver_host = module_app._pyramid_settings['vcs.server'] + + store_dir = os.path.dirname(ini_file) + + # start rhodecode for service API + rc = rhodecode_factory( + request, + store_dir=store_dir, + port=available_port_factory(), + overrides=( + {'handler_console': {'level': 'DEBUG'}}, + {'app:main': {'vcs.server': vcsserver_host}}, + {'app:main': {'repo_store.path': store_dir}} + )) + + service_api_url = f'http://{rc.bind_addr}' + + return 
SubversionServerCreator(service_api_url, ini_file) class TestSubversionServer(object): @@ -168,8 +193,9 @@ class TestSubversionServer(object): assert repo_name == expected_match def test_run_returns_executes_command(self, svn_server): + from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper + server = svn_server.create() - from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper os.environ['SSH_CLIENT'] = '127.0.0.1' with mock.patch.object( SubversionTunnelWrapper, 'get_first_client_response', @@ -184,20 +210,18 @@ class TestSubversionServer(object): SubversionTunnelWrapper, 'command', return_value=['date']): - exit_code = server.run() + exit_code = server.run(tunnel_extras={'config': server.ini_path}) # SVN has this differently configured, and we get in our mock env # None as return code assert exit_code == (None, False) def test_run_returns_executes_command_that_cannot_extract_repo_name(self, svn_server): + from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper + server = svn_server.create() - from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper - with mock.patch.object( - SubversionTunnelWrapper, 'command', - return_value=['date']): - with mock.patch.object( - SubversionTunnelWrapper, 'get_first_client_response', + with mock.patch.object(SubversionTunnelWrapper, 'command', return_value=['date']): + with mock.patch.object(SubversionTunnelWrapper, 'get_first_client_response', return_value=None): - exit_code = server.run() + exit_code = server.run(tunnel_extras={'config': server.ini_path}) assert exit_code == (1, False) diff --git a/rhodecode/apps/user_group/tests/test_user_groups.py b/rhodecode/apps/user_group/tests/test_user_groups.py --- a/rhodecode/apps/user_group/tests/test_user_groups.py +++ b/rhodecode/apps/user_group/tests/test_user_groups.py @@ -22,7 +22,7 @@ from rhodecode.tests import ( TestController, assert_session_flash, TEST_USER_ADMIN_LOGIN) from 
rhodecode.model.db import UserGroup from rhodecode.model.meta import Session -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/user_group_profile/tests/test_user_group.py b/rhodecode/apps/user_group_profile/tests/test_user_group.py --- a/rhodecode/apps/user_group_profile/tests/test_user_group.py +++ b/rhodecode/apps/user_group_profile/tests/test_user_group.py @@ -18,7 +18,7 @@ from rhodecode.model.user_group import UserGroupModel from rhodecode.tests import ( TestController, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path fixture = Fixture() diff --git a/rhodecode/apps/user_profile/tests/test_users.py b/rhodecode/apps/user_profile/tests/test_users.py --- a/rhodecode/apps/user_profile/tests/test_users.py +++ b/rhodecode/apps/user_profile/tests/test_users.py @@ -22,7 +22,7 @@ from rhodecode.model.db import User from rhodecode.tests import ( TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.utils import AssertResponse from rhodecode.tests.routes import route_path diff --git a/rhodecode/config/environment.py b/rhodecode/config/environment.py --- a/rhodecode/config/environment.py +++ b/rhodecode/config/environment.py @@ -30,7 +30,7 @@ from rhodecode.lib.vcs import connect_vc log = logging.getLogger(__name__) -def propagate_rhodecode_config(global_config, settings, config): +def propagate_rhodecode_config(global_config, settings, config, full=True): # Store the settings to make them available to other modules. 
settings_merged = global_config.copy() settings_merged.update(settings) @@ -40,7 +40,7 @@ def propagate_rhodecode_config(global_co rhodecode.PYRAMID_SETTINGS = settings_merged rhodecode.CONFIG = settings_merged - if 'default_user_id' not in rhodecode.CONFIG: + if full and 'default_user_id' not in rhodecode.CONFIG: rhodecode.CONFIG['default_user_id'] = utils.get_default_user_id() log.debug('set rhodecode.CONFIG data') @@ -93,6 +93,7 @@ def load_pyramid_environment(global_conf # first run, to store data... propagate_rhodecode_config(global_config, settings, {}) + if vcs_server_enabled: connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings)) else: diff --git a/rhodecode/config/middleware.py b/rhodecode/config/middleware.py --- a/rhodecode/config/middleware.py +++ b/rhodecode/config/middleware.py @@ -101,6 +101,9 @@ def make_pyramid_app(global_config, **se patches.inspect_getargspec() patches.repoze_sendmail_lf_fix() + # first init, so load_pyramid_enviroment, can access some critical data, like __file__ + propagate_rhodecode_config(global_config, {}, {}, full=False) + load_pyramid_environment(global_config, settings) # Static file view comes first diff --git a/rhodecode/config/rcextensions/__init__.py b/rhodecode/config/rcextensions/__init__.py --- a/rhodecode/config/rcextensions/__init__.py +++ b/rhodecode/config/rcextensions/__init__.py @@ -17,7 +17,7 @@ # and proprietary license terms, please see https://rhodecode.com/licenses/ """ -rcextensions module, please edit `hooks.py` to over write hooks logic +rcextensions module, please edit `hooks.py` to over-write hooks logic """ from .hooks import ( diff --git a/rhodecode/config/rcextensions/examples/validate_pushed_files_name_and_size.py b/rhodecode/config/rcextensions/examples/validate_pushed_files_name_and_size.py --- a/rhodecode/config/rcextensions/examples/validate_pushed_files_name_and_size.py +++ b/rhodecode/config/rcextensions/examples/validate_pushed_files_name_and_size.py @@ -85,7 +85,7 @@ def 
_pre_push_hook(*args, **kwargs): # check files names if forbidden_files: - reason = 'File {} is forbidden to be pushed'.format(file_name) + reason = f'File {file_name} is forbidden to be pushed' for forbidden_pattern in forbid_files: # here we can also filter for operation, e.g if check for only ADDED files # if operation == 'A': diff --git a/rhodecode/config/rcextensions/helpers/extra_fields.py b/rhodecode/config/rcextensions/helpers/extra_fields.py --- a/rhodecode/config/rcextensions/helpers/extra_fields.py +++ b/rhodecode/config/rcextensions/helpers/extra_fields.py @@ -1,4 +1,3 @@ - # Copyright (C) 2016-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -55,7 +54,7 @@ def run(*args, **kwargs): return fields -class _Undefined(object): +class _Undefined: pass @@ -67,7 +66,7 @@ def get_field(extra_fields_data, key, de if key not in extra_fields_data: if isinstance(default, _Undefined): - raise ValueError('key {} not present in extra_fields'.format(key)) + raise ValueError(f'key {key} not present in extra_fields') return default # NOTE(dan): from metadata we get field_label, field_value, field_desc, field_type diff --git a/rhodecode/config/rcextensions/helpers/extract_post_commits.py b/rhodecode/config/rcextensions/helpers/extract_post_commits.py --- a/rhodecode/config/rcextensions/helpers/extract_post_commits.py +++ b/rhodecode/config/rcextensions/helpers/extract_post_commits.py @@ -1,4 +1,3 @@ - # Copyright (C) 2016-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify diff --git a/rhodecode/config/rcextensions/helpers/extract_pre_commits.py b/rhodecode/config/rcextensions/helpers/extract_pre_commits.py --- a/rhodecode/config/rcextensions/helpers/extract_pre_commits.py +++ b/rhodecode/config/rcextensions/helpers/extract_pre_commits.py @@ -1,4 +1,3 @@ - # Copyright (C) 2016-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -52,7 +51,7 @@ 
def get_git_commits(repo, refs): cmd = [ 'log', '--pretty=format:{"commit_id": "%H", "author": "%aN <%aE>", "date": "%ad", "message": "%s"}', - '{}...{}'.format(old_rev, new_rev) + f'{old_rev}...{new_rev}' ] stdout, stderr = repo.run_git_command(cmd, extra_env=git_env) @@ -80,12 +79,12 @@ def run(*args, **kwargs): if vcs_type == 'git': for rev_data in kwargs['commit_ids']: - new_environ = dict((k, v) for k, v in rev_data['git_env']) + new_environ = {k: v for k, v in rev_data['git_env']} commits = get_git_commits(vcs_repo, kwargs['commit_ids']) if vcs_type == 'hg': for rev_data in kwargs['commit_ids']: - new_environ = dict((k, v) for k, v in rev_data['hg_env']) + new_environ = {k: v for k, v in rev_data['hg_env']} commits = get_hg_commits(vcs_repo, kwargs['commit_ids']) return commits diff --git a/rhodecode/config/rcextensions/helpers/extract_pre_files.py b/rhodecode/config/rcextensions/helpers/extract_pre_files.py --- a/rhodecode/config/rcextensions/helpers/extract_pre_files.py +++ b/rhodecode/config/rcextensions/helpers/extract_pre_files.py @@ -1,4 +1,3 @@ - # Copyright (C) 2016-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -133,12 +132,12 @@ def run(*args, **kwargs): if vcs_type == 'git': for rev_data in kwargs['commit_ids']: - new_environ = dict((k, v) for k, v in rev_data['git_env']) + new_environ = {k: v for k, v in rev_data['git_env']} files = get_git_files(repo, vcs_repo, kwargs['commit_ids']) if vcs_type == 'hg': for rev_data in kwargs['commit_ids']: - new_environ = dict((k, v) for k, v in rev_data['hg_env']) + new_environ = {k: v for k, v in rev_data['hg_env']} files = get_hg_files(repo, vcs_repo, kwargs['commit_ids']) if vcs_type == 'svn': diff --git a/rhodecode/config/rcextensions/helpers/http_call.py b/rhodecode/config/rcextensions/helpers/http_call.py --- a/rhodecode/config/rcextensions/helpers/http_call.py +++ b/rhodecode/config/rcextensions/helpers/http_call.py @@ -1,4 +1,3 @@ - # Copyright (C) 
2016-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify diff --git a/rhodecode/config/rcextensions/utils.py b/rhodecode/config/rcextensions/utils.py --- a/rhodecode/config/rcextensions/utils.py +++ b/rhodecode/config/rcextensions/utils.py @@ -28,7 +28,7 @@ import urllib.error log = logging.getLogger('rhodecode.' + __name__) -class HookResponse(object): +class HookResponse: def __init__(self, status, output): self.status = status self.output = output @@ -44,6 +44,11 @@ class HookResponse(object): def __bool__(self): return self.status == 0 + def to_json(self): + return {'status': self.status, 'output': self.output} + + def __repr__(self): + return self.to_json().__repr__() class DotDict(dict): @@ -91,8 +96,8 @@ class DotDict(dict): def __repr__(self): keys = list(self.keys()) keys.sort() - args = ', '.join(['%s=%r' % (key, self[key]) for key in keys]) - return '%s(%s)' % (self.__class__.__name__, args) + args = ', '.join(['{}={!r}'.format(key, self[key]) for key in keys]) + return '{}({})'.format(self.__class__.__name__, args) @staticmethod def fromDict(d): @@ -110,7 +115,7 @@ def serialize(x): def unserialize(x): if isinstance(x, dict): - return dict((k, unserialize(v)) for k, v in x.items()) + return {k: unserialize(v) for k, v in x.items()} elif isinstance(x, (list, tuple)): return type(x)(unserialize(v) for v in x) else: @@ -161,7 +166,8 @@ def str2bool(_str) -> bool: string into boolean :param _str: string value to translate into boolean - :returns: bool from given string + :rtype: boolean + :returns: boolean from given string """ if _str is None: return False diff --git a/rhodecode/config/routing_links.py b/rhodecode/config/routing_links.py --- a/rhodecode/config/routing_links.py +++ b/rhodecode/config/routing_links.py @@ -49,22 +49,22 @@ link_config = [ { "name": "enterprise_docs", "target": "https://rhodecode.com/r1/enterprise/docs/", - "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/", + 
"external_target": "https://docs.rhodecode.com/4.x/rce/index.html", }, { "name": "enterprise_log_file_locations", "target": "https://rhodecode.com/r1/enterprise/docs/admin-system-overview/", - "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/admin/system-overview.html#log-files", + "external_target": "https://docs.rhodecode.com/4.x/rce/admin/system-overview.html#log-files", }, { "name": "enterprise_issue_tracker_settings", "target": "https://rhodecode.com/r1/enterprise/docs/issue-trackers-overview/", - "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/issue-trackers/issue-trackers.html", + "external_target": "https://docs.rhodecode.com/4.x/rce/issue-trackers/issue-trackers.html", }, { "name": "enterprise_svn_setup", "target": "https://rhodecode.com/r1/enterprise/docs/svn-setup/", - "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/admin/svn-http.html", + "external_target": "https://docs.rhodecode.com/4.x/rce/admin/svn-http.html", }, { "name": "enterprise_license_convert_from_old", diff --git a/rhodecode/config/utils.py b/rhodecode/config/utils.py --- a/rhodecode/config/utils.py +++ b/rhodecode/config/utils.py @@ -19,6 +19,8 @@ import os import platform +from rhodecode.lib.type_utils import str2bool + DEFAULT_USER = 'default' @@ -48,28 +50,23 @@ def initialize_database(config): engine = engine_from_config(config, 'sqlalchemy.db1.') init_model(engine, encryption_key=get_encryption_key(config)) +def initialize_test_environment(settings): + skip_test_env = str2bool(os.environ.get('RC_NO_TEST_ENV')) + if skip_test_env: + return -def initialize_test_environment(settings, test_env=None): - if test_env is None: - test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0)) + repo_store_path = os.environ.get('RC_TEST_ENV_REPO_STORE') or settings['repo_store.path'] from rhodecode.lib.utils import ( create_test_directory, create_test_database, create_test_repositories, create_test_index) - from rhodecode.tests import 
TESTS_TMP_PATH - from rhodecode.lib.vcs.backends.hg import largefiles_store - from rhodecode.lib.vcs.backends.git import lfs_store + create_test_directory(repo_store_path) + + create_test_database(repo_store_path, settings) # test repos - if test_env: - create_test_directory(TESTS_TMP_PATH) - # large object stores - create_test_directory(largefiles_store(TESTS_TMP_PATH)) - create_test_directory(lfs_store(TESTS_TMP_PATH)) - - create_test_database(TESTS_TMP_PATH, settings) - create_test_repositories(TESTS_TMP_PATH, settings) - create_test_index(TESTS_TMP_PATH, settings) + create_test_repositories(repo_store_path, settings) + create_test_index(repo_store_path, settings) def get_vcs_server_protocol(config): diff --git a/rhodecode/lib/exceptions.py b/rhodecode/lib/exceptions.py --- a/rhodecode/lib/exceptions.py +++ b/rhodecode/lib/exceptions.py @@ -20,8 +20,7 @@ Set of custom exceptions used in RhodeCode """ -from webob.exc import HTTPClientError -from pyramid.httpexceptions import HTTPBadGateway +from pyramid.httpexceptions import HTTPBadGateway, HTTPClientError class LdapUsernameError(Exception): @@ -102,12 +101,7 @@ class HTTPRequirementError(HTTPClientErr self.args = (message, ) -class ClientNotSupportedError(HTTPRequirementError): - title = explanation = 'Client Not Supported' - reason = None - - -class HTTPLockedRC(HTTPClientError): +class HTTPLockedRepo(HTTPClientError): """ Special Exception For locked Repos in RhodeCode, the return code can be overwritten by _code keyword argument passed into constructors @@ -131,14 +125,13 @@ class HTTPBranchProtected(HTTPClientErro Special Exception For Indicating that branch is protected in RhodeCode, the return code can be overwritten by _code keyword argument passed into constructors """ - code = 403 title = explanation = 'Branch Protected' reason = None - def __init__(self, message, *args, **kwargs): - self.title = self.explanation = message - super().__init__(*args, **kwargs) - self.args = (message, ) + +class 
ClientNotSupported(HTTPRequirementError): + title = explanation = 'Client Not Supported' + reason = None class IMCCommitError(Exception): diff --git a/rhodecode/lib/hook_daemon/base.py b/rhodecode/lib/hook_daemon/base.py --- a/rhodecode/lib/hook_daemon/base.py +++ b/rhodecode/lib/hook_daemon/base.py @@ -1,4 +1,4 @@ -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License, version 3 @@ -16,13 +16,14 @@ # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ -import os -import time import logging +import traceback -from rhodecode.lib.config_utils import get_app_config_lightweight +from rhodecode.model import meta +from rhodecode.lib import hooks_base +from rhodecode.lib.utils2 import AttributeDict +from rhodecode.lib.exceptions import HTTPLockedRepo, HTTPBranchProtected -from rhodecode.lib.svn_txn_utils import get_txn_id_from_store log = logging.getLogger(__name__) @@ -42,53 +43,82 @@ class BaseHooksCallbackDaemon: log.debug('Exiting `%s` callback daemon', self.__class__.__name__) -class HooksModuleCallbackDaemon(BaseHooksCallbackDaemon): +class Hooks(object): + """ + Exposes the hooks module for calling them using the local HooksModuleCallbackDaemon + """ + def __init__(self, request=None, log_prefix=''): + self.log_prefix = log_prefix + self.request = request - def __init__(self, module): - super().__init__() - self.hooks_module = module + def repo_size(self, extras): + log.debug("%sCalled repo_size of %s object", self.log_prefix, self) + return self._call_hook(hooks_base.repo_size, extras) - def __repr__(self): - return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})' - + def pre_pull(self, extras): + log.debug("%sCalled pre_pull of %s object", self.log_prefix, self) + return 
self._call_hook(hooks_base.pre_pull, extras) -def prepare_callback_daemon(extras, protocol, host, txn_id=None): + def post_pull(self, extras): + log.debug("%sCalled post_pull of %s object", self.log_prefix, self) + return self._call_hook(hooks_base.post_pull, extras) + + def pre_push(self, extras): + log.debug("%sCalled pre_push of %s object", self.log_prefix, self) + return self._call_hook(hooks_base.pre_push, extras) - match protocol: - case 'http': - from rhodecode.lib.hook_daemon.http_hooks_deamon import HttpHooksCallbackDaemon - port = 0 - if txn_id: - # read txn-id to re-use the PORT for callback daemon - repo_path = os.path.join(extras['repo_store'], extras['repository']) - txn_details = get_txn_id_from_store(repo_path, txn_id) - port = txn_details.get('port', 0) + def post_push(self, extras): + log.debug("%sCalled post_push of %s object", self.log_prefix, self) + return self._call_hook(hooks_base.post_push, extras) + + def _call_hook(self, hook, extras): + extras = AttributeDict(extras) + _server_url = extras['server_url'] - callback_daemon = HttpHooksCallbackDaemon( - txn_id=txn_id, host=host, port=port) - case 'celery': - from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon - - config = get_app_config_lightweight(extras['config']) - task_queue = config.get('celery.broker_url') - task_backend = config.get('celery.result_backend') + extras.request = self.request + try: + result = hook(extras) + if result is None: + raise Exception(f'Failed to obtain hook result from func: {hook}') + except HTTPBranchProtected as error: + # Those special cases don't need error reporting. It's a case of + # locked repo or protected branch + result = AttributeDict({ + 'status': error.code, + 'output': error.explanation + }) + except HTTPLockedRepo as error: + # Those special cases don't need error reporting. 
It's a case of + # locked repo or protected branch + result = AttributeDict({ + 'status': error.code, + 'output': error.explanation + }) + except Exception as error: + # locked needs different handling since we need to also + # handle PULL operations + log.exception('%sException when handling hook %s', self.log_prefix, hook) + exc_tb = traceback.format_exc() + error_args = error.args + return { + 'status': 128, + 'output': '', + 'exception': type(error).__name__, + 'exception_traceback': exc_tb, + 'exception_args': error_args, + } + finally: + meta.Session.remove() - callback_daemon = CeleryHooksCallbackDaemon(task_queue, task_backend) - case 'local': - from rhodecode.lib.hook_daemon.hook_module import Hooks - callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__) - case _: - log.error('Unsupported callback daemon protocol "%s"', protocol) - raise Exception('Unsupported callback daemon protocol.') + log.debug('%sGot hook call response %s', self.log_prefix, result) + return { + 'status': result.status, + 'output': result.output, + } - extras['hooks_uri'] = getattr(callback_daemon, 'hooks_uri', '') - extras['task_queue'] = getattr(callback_daemon, 'task_queue', '') - extras['task_backend'] = getattr(callback_daemon, 'task_backend', '') - extras['hooks_protocol'] = protocol - extras['time'] = time.time() + def __enter__(self): + return self - # register txn_id - extras['txn_id'] = txn_id - log.debug('Prepared a callback daemon: %s', - callback_daemon.__class__.__name__) - return callback_daemon, extras + def __exit__(self, exc_type, exc_val, exc_tb): + pass + diff --git a/rhodecode/lib/hook_daemon/celery_hooks_deamon.py b/rhodecode/lib/hook_daemon/celery_hooks_deamon.py --- a/rhodecode/lib/hook_daemon/celery_hooks_deamon.py +++ b/rhodecode/lib/hook_daemon/celery_hooks_deamon.py @@ -22,14 +22,16 @@ from rhodecode.lib.hook_daemon.base impo class CeleryHooksCallbackDaemon(BaseHooksCallbackDaemon): """ Context manger for achieving a compatibility with celery 
backend + It is calling a call to vcsserver, where it uses HooksCeleryClient to actually call a task from + + f'rhodecode.lib.celerylib.tasks.{method}' + """ - def __init__(self, task_queue, task_backend): - self.task_queue = task_queue - self.task_backend = task_backend + def __init__(self, broker_url, result_backend): + super().__init__() + self.broker_url = broker_url + self.result_backend = result_backend def __repr__(self): - return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})' - - def __repr__(self): - return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})' + return f'CeleryHooksCallbackDaemon(broker_url={self.broker_url}, result_backend={self.result_backend})' diff --git a/rhodecode/lib/hook_daemon/hook_module.py b/rhodecode/lib/hook_daemon/hook_module.py --- a/rhodecode/lib/hook_daemon/hook_module.py +++ b/rhodecode/lib/hook_daemon/hook_module.py @@ -17,88 +17,18 @@ # and proprietary license terms, please see https://rhodecode.com/licenses/ import logging -import traceback -from rhodecode.model import meta - -from rhodecode.lib import hooks_base -from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected -from rhodecode.lib.utils2 import AttributeDict +from rhodecode.lib.hook_daemon.base import BaseHooksCallbackDaemon log = logging.getLogger(__name__) -class Hooks(object): - """ - Exposes the hooks for remote callbacks - """ - def __init__(self, request=None, log_prefix=''): - self.log_prefix = log_prefix - self.request = request - - def repo_size(self, extras): - log.debug("%sCalled repo_size of %s object", self.log_prefix, self) - return self._call_hook(hooks_base.repo_size, extras) - - def pre_pull(self, extras): - log.debug("%sCalled pre_pull of %s object", self.log_prefix, self) - return self._call_hook(hooks_base.pre_pull, extras) - - def post_pull(self, extras): - log.debug("%sCalled post_pull of %s object", self.log_prefix, self) - return 
self._call_hook(hooks_base.post_pull, extras) - - def pre_push(self, extras): - log.debug("%sCalled pre_push of %s object", self.log_prefix, self) - return self._call_hook(hooks_base.pre_push, extras) - - def post_push(self, extras): - log.debug("%sCalled post_push of %s object", self.log_prefix, self) - return self._call_hook(hooks_base.post_push, extras) - - def _call_hook(self, hook, extras): - extras = AttributeDict(extras) - _server_url = extras['server_url'] - - extras.request = self.request +class HooksModuleCallbackDaemon(BaseHooksCallbackDaemon): - try: - result = hook(extras) - if result is None: - raise Exception(f'Failed to obtain hook result from func: {hook}') - except HTTPBranchProtected as error: - # Those special cases don't need error reporting. It's a case of - # locked repo or protected branch - result = AttributeDict({ - 'status': error.code, - 'output': error.explanation - }) - except (HTTPLockedRC, Exception) as error: - # locked needs different handling since we need to also - # handle PULL operations - exc_tb = '' - if not isinstance(error, HTTPLockedRC): - exc_tb = traceback.format_exc() - log.exception('%sException when handling hook %s', self.log_prefix, hook) - error_args = error.args - return { - 'status': 128, - 'output': '', - 'exception': type(error).__name__, - 'exception_traceback': exc_tb, - 'exception_args': error_args, - } - finally: - meta.Session.remove() + def __init__(self, module): + super().__init__() + self.hooks_module = module - log.debug('%sGot hook call response %s', self.log_prefix, result) - return { - 'status': result.status, - 'output': result.output, - } + def __repr__(self): + return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})' - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - pass diff --git a/rhodecode/lib/hook_daemon/http_hooks_deamon.py b/rhodecode/lib/hook_daemon/http_hooks_deamon.py deleted file mode 100644 --- 
a/rhodecode/lib/hook_daemon/http_hooks_deamon.py +++ /dev/null @@ -1,287 +0,0 @@ -# Copyright (C) 2010-2023 RhodeCode GmbH -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License, version 3 -# (only), as published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . -# -# This program is dual-licensed. If you wish to learn more about the -# RhodeCode Enterprise Edition, including its added features, Support services, -# and proprietary license terms, please see https://rhodecode.com/licenses/ - -import os -import logging -import traceback -import threading -import socket -import msgpack -import gevent - -from http.server import BaseHTTPRequestHandler -from socketserver import TCPServer - -from rhodecode.model import meta -from rhodecode.lib.ext_json import json -from rhodecode.lib import rc_cache -from rhodecode.lib.svn_txn_utils import get_txn_id_data_key -from rhodecode.lib.hook_daemon.hook_module import Hooks - -log = logging.getLogger(__name__) - - -class HooksHttpHandler(BaseHTTPRequestHandler): - - JSON_HOOKS_PROTO = 'json.v1' - MSGPACK_HOOKS_PROTO = 'msgpack.v1' - # starting with RhodeCode 5.0.0 MsgPack is the default, prior it used json - DEFAULT_HOOKS_PROTO = MSGPACK_HOOKS_PROTO - - @classmethod - def serialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO): - if proto == cls.MSGPACK_HOOKS_PROTO: - return msgpack.packb(data) - return json.dumps(data) - - @classmethod - def deserialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO): - if proto == cls.MSGPACK_HOOKS_PROTO: - return msgpack.unpackb(data) - return json.loads(data) - - def 
do_POST(self): - hooks_proto, method, extras = self._read_request() - log.debug('Handling HooksHttpHandler %s with %s proto', method, hooks_proto) - - txn_id = getattr(self.server, 'txn_id', None) - if txn_id: - log.debug('Computing TXN_ID based on `%s`:`%s`', - extras['repository'], extras['txn_id']) - computed_txn_id = rc_cache.utils.compute_key_from_params( - extras['repository'], extras['txn_id']) - if txn_id != computed_txn_id: - raise Exception( - 'TXN ID fail: expected {} got {} instead'.format( - txn_id, computed_txn_id)) - - request = getattr(self.server, 'request', None) - try: - hooks = Hooks(request=request, log_prefix='HOOKS: {} '.format(self.server.server_address)) - result = self._call_hook_method(hooks, method, extras) - - except Exception as e: - exc_tb = traceback.format_exc() - result = { - 'exception': e.__class__.__name__, - 'exception_traceback': exc_tb, - 'exception_args': e.args - } - self._write_response(hooks_proto, result) - - def _read_request(self): - length = int(self.headers['Content-Length']) - # respect sent headers, fallback to OLD proto for compatability - hooks_proto = self.headers.get('rc-hooks-protocol') or self.JSON_HOOKS_PROTO - if hooks_proto == self.MSGPACK_HOOKS_PROTO: - # support for new vcsserver msgpack based protocol hooks - body = self.rfile.read(length) - data = self.deserialize_data(body) - else: - body = self.rfile.read(length) - data = self.deserialize_data(body) - - return hooks_proto, data['method'], data['extras'] - - def _write_response(self, hooks_proto, result): - self.send_response(200) - if hooks_proto == self.MSGPACK_HOOKS_PROTO: - self.send_header("Content-type", "application/msgpack") - self.end_headers() - data = self.serialize_data(result) - self.wfile.write(data) - else: - self.send_header("Content-type", "text/json") - self.end_headers() - data = self.serialize_data(result) - self.wfile.write(data) - - def _call_hook_method(self, hooks, method, extras): - try: - result = getattr(hooks, 
method)(extras) - finally: - meta.Session.remove() - return result - - def log_message(self, format, *args): - """ - This is an overridden method of BaseHTTPRequestHandler which logs using - a logging library instead of writing directly to stderr. - """ - - message = format % args - - log.debug( - "HOOKS: client=%s - - [%s] %s", self.client_address, - self.log_date_time_string(), message) - - -class ThreadedHookCallbackDaemon(object): - - _callback_thread = None - _daemon = None - _done = False - use_gevent = False - - def __init__(self, txn_id=None, host=None, port=None): - self._prepare(txn_id=txn_id, host=host, port=port) - if self.use_gevent: - self._run_func = self._run_gevent - self._stop_func = self._stop_gevent - else: - self._run_func = self._run - self._stop_func = self._stop - - def __enter__(self): - log.debug('Running `%s` callback daemon', self.__class__.__name__) - self._run_func() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - log.debug('Exiting `%s` callback daemon', self.__class__.__name__) - self._stop_func() - - def _prepare(self, txn_id=None, host=None, port=None): - raise NotImplementedError() - - def _run(self): - raise NotImplementedError() - - def _stop(self): - raise NotImplementedError() - - def _run_gevent(self): - raise NotImplementedError() - - def _stop_gevent(self): - raise NotImplementedError() - - -class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon): - """ - Context manager which will run a callback daemon in a background thread. - """ - - hooks_uri = None - - # From Python docs: Polling reduces our responsiveness to a shutdown - # request and wastes cpu at all other times. 
- POLL_INTERVAL = 0.01 - - use_gevent = False - - def __repr__(self): - return f'HttpHooksCallbackDaemon(hooks_uri={self.hooks_uri})' - - @property - def _hook_prefix(self): - return f'HOOKS: {self.hooks_uri} ' - - def get_hostname(self): - return socket.gethostname() or '127.0.0.1' - - def get_available_port(self, min_port=20000, max_port=65535): - from rhodecode.lib.utils2 import get_available_port as _get_port - return _get_port(min_port, max_port) - - def _prepare(self, txn_id=None, host=None, port=None): - from pyramid.threadlocal import get_current_request - - if not host or host == "*": - host = self.get_hostname() - if not port: - port = self.get_available_port() - - server_address = (host, port) - self.hooks_uri = f'{host}:{port}' - self.txn_id = txn_id - self._done = False - - log.debug( - "%s Preparing HTTP callback daemon registering hook object: %s", - self._hook_prefix, HooksHttpHandler) - - self._daemon = TCPServer(server_address, HooksHttpHandler) - # inject transaction_id for later verification - self._daemon.txn_id = self.txn_id - - # pass the WEB app request into daemon - self._daemon.request = get_current_request() - - def _run(self): - log.debug("Running thread-based loop of callback daemon in background") - callback_thread = threading.Thread( - target=self._daemon.serve_forever, - kwargs={'poll_interval': self.POLL_INTERVAL}) - callback_thread.daemon = True - callback_thread.start() - self._callback_thread = callback_thread - - def _run_gevent(self): - log.debug("Running gevent-based loop of callback daemon in background") - # create a new greenlet for the daemon's serve_forever method - callback_greenlet = gevent.spawn( - self._daemon.serve_forever, - poll_interval=self.POLL_INTERVAL) - - # store reference to greenlet - self._callback_greenlet = callback_greenlet - - # switch to this greenlet - gevent.sleep(0.01) - - def _stop(self): - log.debug("Waiting for background thread to finish.") - self._daemon.shutdown() - 
self._callback_thread.join() - self._daemon = None - self._callback_thread = None - if self.txn_id: - #TODO: figure out the repo_path... - repo_path = '' - txn_id_file = get_txn_id_data_key(repo_path, self.txn_id) - log.debug('Cleaning up TXN ID %s', txn_id_file) - if os.path.isfile(txn_id_file): - os.remove(txn_id_file) - - log.debug("Background thread done.") - - def _stop_gevent(self): - log.debug("Waiting for background greenlet to finish.") - - # if greenlet exists and is running - if self._callback_greenlet and not self._callback_greenlet.dead: - # shutdown daemon if it exists - if self._daemon: - self._daemon.shutdown() - - # kill the greenlet - self._callback_greenlet.kill() - - self._daemon = None - self._callback_greenlet = None - - if self.txn_id: - #TODO: figure out the repo_path... - repo_path = '' - txn_id_file = get_txn_id_data_key(repo_path, self.txn_id) - log.debug('Cleaning up TXN ID %s', txn_id_file) - if os.path.isfile(txn_id_file): - os.remove(txn_id_file) - - log.debug("Background greenlet done.") diff --git a/rhodecode/lib/hook_daemon/utils.py b/rhodecode/lib/hook_daemon/utils.py new file mode 100644 --- /dev/null +++ b/rhodecode/lib/hook_daemon/utils.py @@ -0,0 +1,61 @@ +# Copyright (C) 2010-2024 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. 
If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +import time +import logging + +from rhodecode.lib.config_utils import get_app_config_lightweight + +from rhodecode.lib.hook_daemon.base import Hooks +from rhodecode.lib.hook_daemon.hook_module import HooksModuleCallbackDaemon +from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon +from rhodecode.lib.type_utils import str2bool + +log = logging.getLogger(__name__) + + + +def prepare_callback_daemon(extras, protocol: str, txn_id=None): + hooks_config = {} + match protocol: + case 'celery': + config = get_app_config_lightweight(extras['config']) + + broker_url = config.get('celery.broker_url') + result_backend = config.get('celery.result_backend') + + hooks_config = { + 'broker_url': broker_url, + 'result_backend': result_backend, + } + + callback_daemon = CeleryHooksCallbackDaemon(broker_url, result_backend) + case 'local': + callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__) + case _: + log.error('Unsupported callback daemon protocol "%s"', protocol) + raise Exception('Unsupported callback daemon protocol.') + + extras['hooks_config'] = hooks_config + extras['hooks_protocol'] = protocol + extras['time'] = time.time() + + # register txn_id + extras['txn_id'] = txn_id + log.debug('Prepared a callback daemon: %s', callback_daemon.__class__.__name__) + return callback_daemon, extras diff --git a/rhodecode/lib/hooks_base.py b/rhodecode/lib/hooks_base.py --- a/rhodecode/lib/hooks_base.py +++ b/rhodecode/lib/hooks_base.py @@ -30,14 +30,14 @@ from rhodecode.lib import helpers as h from rhodecode.lib import audit_logger from rhodecode.lib.utils2 import safe_str, user_agent_normalizer from rhodecode.lib.exceptions import ( - HTTPLockedRC, HTTPBranchProtected, UserCreationError, ClientNotSupportedError) + HTTPLockedRepo, HTTPBranchProtected, 
UserCreationError, ClientNotSupported) from rhodecode.model.db import Repository, User from rhodecode.lib.statsd_client import StatsdClient log = logging.getLogger(__name__) -class HookResponse(object): +class HookResponse: def __init__(self, status, output): self.status = status self.output = output @@ -56,6 +56,8 @@ class HookResponse(object): def to_json(self): return {'status': self.status, 'output': self.output} + def __repr__(self): + return self.to_json().__repr__() def is_shadow_repo(extras): """ @@ -73,8 +75,69 @@ def check_vcs_client(extras): except ModuleNotFoundError: is_vcs_client_whitelisted = lambda *x: True backend = extras.get('scm') - if not is_vcs_client_whitelisted(extras.get('user_agent'), backend): - raise ClientNotSupportedError(f"Your {backend} client is forbidden") + user_agent = extras.get('user_agent') + if not is_vcs_client_whitelisted(user_agent, backend): + raise ClientNotSupported(f"Your {backend} client (version={user_agent}) is forbidden by security rules") + + +def check_locked_repo(extras, check_same_user=True): + user = User.get_by_username(extras.username) + output = '' + if extras.locked_by[0] and (not check_same_user or user.user_id != extras.locked_by[0]): + + locked_by = User.get(extras.locked_by[0]).username + reason = extras.locked_by[2] + # this exception is interpreted in git/hg middlewares and based + # on that proper return code is server to client + _http_ret = HTTPLockedRepo(_locked_by_explanation(extras.repository, locked_by, reason)) + if str(_http_ret.code).startswith('2'): + # 2xx Codes don't raise exceptions + output = _http_ret.title + else: + raise _http_ret + + return output + + +def check_branch_protected(extras): + if extras.commit_ids and extras.check_branch_perms: + user = User.get_by_username(extras.username) + auth_user = user.AuthUser() + repo = Repository.get_by_repo_name(extras.repository) + if not repo: + raise ValueError(f'Repo for {extras.repository} not found') + affected_branches = [] + if 
repo.repo_type == 'hg': + for entry in extras.commit_ids: + if entry['type'] == 'branch': + is_forced = bool(entry['multiple_heads']) + affected_branches.append([entry['name'], is_forced]) + elif repo.repo_type == 'git': + for entry in extras.commit_ids: + if entry['type'] == 'heads': + is_forced = bool(entry['pruned_sha']) + affected_branches.append([entry['name'], is_forced]) + + for branch_name, is_forced in affected_branches: + + rule, branch_perm = auth_user.get_rule_and_branch_permission(extras.repository, branch_name) + if not branch_perm: + # no branch permission found for this branch, just keep checking + continue + + if branch_perm == 'branch.push_force': + continue + elif branch_perm == 'branch.push' and is_forced is False: + continue + elif branch_perm == 'branch.push' and is_forced is True: + halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}. ' \ + f'FORCE PUSH FORBIDDEN.' + else: + halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}.' + + if halt_message: + _http_ret = HTTPBranchProtected(halt_message) + raise _http_ret def _get_scm_size(alias, root_path): @@ -109,116 +172,30 @@ def repo_size(extras): repo = Repository.get_by_repo_name(extras.repository) vcs_part = f'.{repo.repo_type}' size_vcs, size_root, size_total = _get_scm_size(vcs_part, repo.repo_full_path) - msg = (f'RhodeCode: `{repo.repo_name}` size summary {vcs_part}:{size_vcs} repo:{size_root} total:{size_total}\n') + msg = f'RhodeCode: `{repo.repo_name}` size summary {vcs_part}:{size_vcs} repo:{size_root} total:{size_total}\n' return HookResponse(0, msg) -def pre_push(extras): - """ - Hook executed before pushing code. - - It bans pushing when the repository is locked. 
- """ - - check_vcs_client(extras) - user = User.get_by_username(extras.username) - output = '' - if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]): - locked_by = User.get(extras.locked_by[0]).username - reason = extras.locked_by[2] - # this exception is interpreted in git/hg middlewares and based - # on that proper return code is server to client - _http_ret = HTTPLockedRC( - _locked_by_explanation(extras.repository, locked_by, reason)) - if str(_http_ret.code).startswith('2'): - # 2xx Codes don't raise exceptions - output = _http_ret.title - else: - raise _http_ret - - hook_response = '' - if not is_shadow_repo(extras): - - if extras.commit_ids and extras.check_branch_perms: - auth_user = user.AuthUser() - repo = Repository.get_by_repo_name(extras.repository) - if not repo: - raise ValueError(f'Repo for {extras.repository} not found') - affected_branches = [] - if repo.repo_type == 'hg': - for entry in extras.commit_ids: - if entry['type'] == 'branch': - is_forced = bool(entry['multiple_heads']) - affected_branches.append([entry['name'], is_forced]) - elif repo.repo_type == 'git': - for entry in extras.commit_ids: - if entry['type'] == 'heads': - is_forced = bool(entry['pruned_sha']) - affected_branches.append([entry['name'], is_forced]) - - for branch_name, is_forced in affected_branches: - - rule, branch_perm = auth_user.get_rule_and_branch_permission( - extras.repository, branch_name) - if not branch_perm: - # no branch permission found for this branch, just keep checking - continue - - if branch_perm == 'branch.push_force': - continue - elif branch_perm == 'branch.push' and is_forced is False: - continue - elif branch_perm == 'branch.push' and is_forced is True: - halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}. ' \ - f'FORCE PUSH FORBIDDEN.' - else: - halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}.' 
- - if halt_message: - _http_ret = HTTPBranchProtected(halt_message) - raise _http_ret - - # Propagate to external components. This is done after checking the - # lock, for consistent behavior. - hook_response = pre_push_extension( - repo_store_path=Repository.base_path(), **extras) - events.trigger(events.RepoPrePushEvent( - repo_name=extras.repository, extras=extras)) - - return HookResponse(0, output) + hook_response - - def pre_pull(extras): """ Hook executed before pulling the code. It bans pulling when the repository is locked. + It bans pulling when incorrect client is used. """ - - check_vcs_client(extras) output = '' - if extras.locked_by[0]: - locked_by = User.get(extras.locked_by[0]).username - reason = extras.locked_by[2] - # this exception is interpreted in git/hg middlewares and based - # on that proper return code is server to client - _http_ret = HTTPLockedRC( - _locked_by_explanation(extras.repository, locked_by, reason)) - if str(_http_ret.code).startswith('2'): - # 2xx Codes don't raise exceptions - output = _http_ret.title - else: - raise _http_ret + check_vcs_client(extras) + + # locking repo can, but not have to stop the operation it can also just produce output + output += check_locked_repo(extras, check_same_user=False) # Propagate to external components. This is done after checking the # lock, for consistent behavior. 
hook_response = '' if not is_shadow_repo(extras): extras.hook_type = extras.hook_type or 'pre_pull' - hook_response = pre_pull_extension( - repo_store_path=Repository.base_path(), **extras) - events.trigger(events.RepoPrePullEvent( - repo_name=extras.repository, extras=extras)) + hook_response = pre_pull_extension(repo_store_path=Repository.base_path(), **extras) + events.trigger(events.RepoPrePullEvent(repo_name=extras.repository, extras=extras)) return HookResponse(0, output) + hook_response @@ -239,6 +216,7 @@ def post_pull(extras): statsd.incr('rhodecode_pull_total', tags=[ f'user-agent:{user_agent_normalizer(extras.user_agent)}', ]) + output = '' # make lock is a tri state False, True, None. We only make lock on True if extras.make_lock is True and not is_shadow_repo(extras): @@ -246,18 +224,9 @@ def post_pull(extras): Repository.lock(Repository.get_by_repo_name(extras.repository), user.user_id, lock_reason=Repository.LOCK_PULL) - msg = 'Made lock on repo `{}`'.format(extras.repository) + msg = f'Made lock on repo `{extras.repository}`' output += msg - if extras.locked_by[0]: - locked_by = User.get(extras.locked_by[0]).username - reason = extras.locked_by[2] - _http_ret = HTTPLockedRC( - _locked_by_explanation(extras.repository, locked_by, reason)) - if str(_http_ret.code).startswith('2'): - # 2xx Codes don't raise exceptions - output += _http_ret.title - # Propagate to external components. hook_response = '' if not is_shadow_repo(extras): @@ -270,6 +239,33 @@ def post_pull(extras): return HookResponse(0, output) + hook_response +def pre_push(extras): + """ + Hook executed before pushing code. + + It bans pushing when the repository is locked. + It banks pushing when incorrect client is used. 
+ It also checks for Branch protection + """ + output = '' + check_vcs_client(extras) + + # locking repo can, but not have to stop the operation it can also just produce output + output += check_locked_repo(extras) + + hook_response = '' + if not is_shadow_repo(extras): + + check_branch_protected(extras) + + # Propagate to external components. This is done after checking the + # lock, for consistent behavior. + hook_response = pre_push_extension(repo_store_path=Repository.base_path(), **extras) + events.trigger(events.RepoPrePushEvent(repo_name=extras.repository, extras=extras)) + + return HookResponse(0, output) + hook_response + + def post_push(extras): """Hook executed after user pushes to the repository.""" commit_ids = extras.commit_ids @@ -292,22 +288,13 @@ def post_push(extras): # Propagate to external components. output = '' + # make lock is a tri state False, True, None. We only release lock on False if extras.make_lock is False and not is_shadow_repo(extras): Repository.unlock(Repository.get_by_repo_name(extras.repository)) msg = f'Released lock on repo `{extras.repository}`\n' output += msg - if extras.locked_by[0]: - locked_by = User.get(extras.locked_by[0]).username - reason = extras.locked_by[2] - _http_ret = HTTPLockedRC( - _locked_by_explanation(extras.repository, locked_by, reason)) - # TODO: johbo: if not? 
- if str(_http_ret.code).startswith('2'): - # 2xx Codes don't raise exceptions - output += _http_ret.title - if extras.new_refs: tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format( safe_str(extras.server_url), safe_str(extras.repository)) @@ -322,11 +309,8 @@ def post_push(extras): hook_response = '' if not is_shadow_repo(extras): - hook_response = post_push_extension( - repo_store_path=Repository.base_path(), - **extras) - events.trigger(events.RepoPushEvent( - repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras)) + hook_response = post_push_extension(repo_store_path=Repository.base_path(), **extras) + events.trigger(events.RepoPushEvent(repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras)) output += 'RhodeCode: push completed\n' return HookResponse(0, output) + hook_response @@ -380,12 +364,20 @@ class ExtensionCallback(object): # with older rcextensions that require api_key present if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']: kwargs_to_pass['api_key'] = '_DEPRECATED_' - return callback(**kwargs_to_pass) + result = callback(**kwargs_to_pass) + log.debug('got rcextensions result: %s', result) + return result def is_active(self): return hasattr(rhodecode.EXTENSIONS, self._hook_name) def _get_callback(self): + if rhodecode.is_test: + log.debug('In test mode, reloading rcextensions...') + # NOTE: for test re-load rcextensions always so we can dynamically change them for testing purposes + from rhodecode.lib.utils import load_rcextensions + load_rcextensions(root_path=os.path.dirname(rhodecode.CONFIG['__file__'])) + return getattr(rhodecode.EXTENSIONS, self._hook_name, None) return getattr(rhodecode.EXTENSIONS, self._hook_name, None) diff --git a/rhodecode/lib/middleware/simplegit.py b/rhodecode/lib/middleware/simplegit.py --- a/rhodecode/lib/middleware/simplegit.py +++ b/rhodecode/lib/middleware/simplegit.py @@ -40,16 +40,6 @@ GIT_PROTO_PAT = re.compile( GIT_LFS_PROTO_PAT = 
re.compile(r'^/(.+)/(info/lfs/(.+))') -def default_lfs_store(): - """ - Default lfs store location, it's consistent with Mercurials large file - store which is in .cache/largefiles - """ - from rhodecode.lib.vcs.backends.git import lfs_store - user_home = os.path.expanduser("~") - return lfs_store(user_home) - - class SimpleGit(simplevcs.SimpleVCS): SCM = 'git' @@ -151,6 +141,6 @@ class SimpleGit(simplevcs.SimpleVCS): extras['git_lfs_enabled'] = utils2.str2bool( config.get('vcs_git_lfs', 'enabled')) - extras['git_lfs_store_path'] = custom_store or default_lfs_store() + extras['git_lfs_store_path'] = custom_store extras['git_lfs_http_scheme'] = scheme return extras diff --git a/rhodecode/lib/middleware/simplevcs.py b/rhodecode/lib/middleware/simplevcs.py --- a/rhodecode/lib/middleware/simplevcs.py +++ b/rhodecode/lib/middleware/simplevcs.py @@ -1,5 +1,3 @@ - - # Copyright (C) 2014-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -32,8 +30,7 @@ from functools import wraps import time from paste.httpheaders import REMOTE_USER, AUTH_TYPE -from pyramid.httpexceptions import ( - HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError) +from pyramid.httpexceptions import HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError from zope.cachedescriptors.property import Lazy as LazyProperty import rhodecode @@ -41,10 +38,9 @@ from rhodecode.authentication.base impor from rhodecode.lib import rc_cache from rhodecode.lib.svn_txn_utils import store_txn_id_data from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware -from rhodecode.lib.base import ( - BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context) -from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError) -from rhodecode.lib.hook_daemon.base import prepare_callback_daemon +from rhodecode.lib.base import BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context +from rhodecode.lib.exceptions 
import UserCreationError, NotAllowedToCreateUserError +from rhodecode.lib.hook_daemon.utils import prepare_callback_daemon from rhodecode.lib.middleware import appenlight from rhodecode.lib.middleware.utils import scm_app_http from rhodecode.lib.str_utils import safe_bytes, safe_int @@ -78,17 +74,18 @@ def initialize_generator(factory): try: init = next(gen) except StopIteration: - raise ValueError('Generator must yield at least one element.') + raise ValueError("Generator must yield at least one element.") if init != "__init__": raise ValueError('First yielded element must be "__init__".') return gen + return wrapper class SimpleVCS(object): """Common functionality for SCM HTTP handlers.""" - SCM = 'unknown' + SCM = "unknown" acl_repo_name = None url_repo_name = None @@ -100,11 +97,11 @@ class SimpleVCS(object): # we use this regex which will match only on URLs pointing to shadow # repositories. shadow_repo_re = re.compile( - '(?P(?:{slug_pat}/)*)' # repo groups - '(?P{slug_pat})/' # target repo - 'pull-request/(?P\\d+)/' # pull request - 'repository$' # shadow repo - .format(slug_pat=SLUG_RE.pattern)) + "(?P(?:{slug_pat}/)*)" # repo groups + "(?P{slug_pat})/" # target repo + "pull-request/(?P\\d+)/" # pull request + "repository$".format(slug_pat=SLUG_RE.pattern) # shadow repo + ) def __init__(self, config, registry): self.registry = registry @@ -113,15 +110,14 @@ class SimpleVCS(object): self.repo_vcs_config = base.Config() rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False) - realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH' + realm = rc_settings.get("rhodecode_realm") or "RhodeCode AUTH" # authenticate this VCS request using authfunc - auth_ret_code_detection = \ - str2bool(self.config.get('auth_ret_code_detection', False)) + auth_ret_code_detection = str2bool(self.config.get("auth_ret_code_detection", False)) self.authenticate = BasicAuth( - '', authenticate, registry, config.get('auth_ret_code'), - 
auth_ret_code_detection, rc_realm=realm) - self.ip_addr = '0.0.0.0' + "", authenticate, registry, config.get("auth_ret_code"), auth_ret_code_detection, rc_realm=realm + ) + self.ip_addr = "0.0.0.0" @LazyProperty def global_vcs_config(self): @@ -132,10 +128,10 @@ class SimpleVCS(object): @property def base_path(self): - settings_path = self.config.get('repo_store.path') + settings_path = self.config.get("repo_store.path") if not settings_path: - raise ValueError('FATAL: repo_store.path is empty') + raise ValueError("FATAL: repo_store.path is empty") return settings_path def set_repo_names(self, environ): @@ -164,17 +160,16 @@ class SimpleVCS(object): match_dict = match.groupdict() # Build acl repo name from regex match. - acl_repo_name = safe_str('{groups}{target}'.format( - groups=match_dict['groups'] or '', - target=match_dict['target'])) + acl_repo_name = safe_str( + "{groups}{target}".format(groups=match_dict["groups"] or "", target=match_dict["target"]) + ) # Retrieve pull request instance by ID from regex match. - pull_request = PullRequest.get(match_dict['pr_id']) + pull_request = PullRequest.get(match_dict["pr_id"]) # Only proceed if we got a pull request and if acl repo name from # URL equals the target repo name of the pull request. if pull_request and (acl_repo_name == pull_request.target_repo.repo_name): - # Get file system path to shadow repository. 
workspace_id = PullRequestModel()._workspace_id(pull_request) vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id) @@ -184,21 +179,23 @@ class SimpleVCS(object): self.acl_repo_name = acl_repo_name self.is_shadow_repo = True - log.debug('Setting all VCS repository names: %s', { - 'acl_repo_name': self.acl_repo_name, - 'url_repo_name': self.url_repo_name, - 'vcs_repo_name': self.vcs_repo_name, - }) + log.debug( + "Setting all VCS repository names: %s", + { + "acl_repo_name": self.acl_repo_name, + "url_repo_name": self.url_repo_name, + "vcs_repo_name": self.vcs_repo_name, + }, + ) @property def scm_app(self): - custom_implementation = self.config['vcs.scm_app_implementation'] - if custom_implementation == 'http': - log.debug('Using HTTP implementation of scm app.') + custom_implementation = self.config["vcs.scm_app_implementation"] + if custom_implementation == "http": + log.debug("Using HTTP implementation of scm app.") scm_app_impl = scm_app_http else: - log.debug('Using custom implementation of scm_app: "{}"'.format( - custom_implementation)) + log.debug('Using custom implementation of scm_app: "{}"'.format(custom_implementation)) scm_app_impl = importlib.import_module(custom_implementation) return scm_app_impl @@ -208,17 +205,18 @@ class SimpleVCS(object): with a repository_name for support of _ non changeable urls """ - data = repo_name.split('/') + data = repo_name.split("/") if len(data) >= 2: from rhodecode.model.repo import RepoModel + by_id_match = RepoModel().get_repo_by_id(repo_name) if by_id_match: data[1] = by_id_match.repo_name # Because PEP-3333-WSGI uses bytes-tunneled-in-latin-1 as PATH_INFO # and we use this data - maybe_new_path = '/'.join(data) - return safe_bytes(maybe_new_path).decode('latin1') + maybe_new_path = "/".join(data) + return safe_bytes(maybe_new_path).decode("latin1") def _invalidate_cache(self, repo_name): """ @@ -231,21 +229,18 @@ class SimpleVCS(object): def is_valid_and_existing_repo(self, repo_name, 
base_path, scm_type): db_repo = Repository.get_by_repo_name(repo_name) if not db_repo: - log.debug('Repository `%s` not found inside the database.', - repo_name) + log.debug("Repository `%s` not found inside the database.", repo_name) return False if db_repo.repo_type != scm_type: log.warning( - 'Repository `%s` have incorrect scm_type, expected %s got %s', - repo_name, db_repo.repo_type, scm_type) + "Repository `%s` have incorrect scm_type, expected %s got %s", repo_name, db_repo.repo_type, scm_type + ) return False config = db_repo._config - config.set('extensions', 'largefiles', '') - return is_valid_repo( - repo_name, base_path, - explicit_scm=scm_type, expect_scm=scm_type, config=config) + config.set("extensions", "largefiles", "") + return is_valid_repo(repo_name, base_path, explicit_scm=scm_type, expect_scm=scm_type, config=config) def valid_and_active_user(self, user): """ @@ -267,8 +262,9 @@ class SimpleVCS(object): def is_shadow_repo_dir(self): return os.path.isdir(self.vcs_repo_name) - def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None, - plugin_id='', plugin_cache_active=False, cache_ttl=0): + def _check_permission( + self, action, user, auth_user, repo_name, ip_addr=None, plugin_id="", plugin_cache_active=False, cache_ttl=0 + ): """ Checks permissions using action (push/pull) user and repository name. 
If plugin_cache and ttl is set it will use the plugin which @@ -280,71 +276,67 @@ class SimpleVCS(object): :param repo_name: repository name """ - log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)', - plugin_id, plugin_cache_active, cache_ttl) + log.debug("AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)", plugin_id, plugin_cache_active, cache_ttl) user_id = user.user_id - cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}' - region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid) + cache_namespace_uid = f"cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}" + region = rc_cache.get_or_create_region("cache_perms", cache_namespace_uid) - @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, - expiration_time=cache_ttl, - condition=plugin_cache_active) - def compute_perm_vcs( - cache_name, plugin_id, action, user_id, repo_name, ip_addr): - - log.debug('auth: calculating permission access now for vcs operation: %s', action) + @region.conditional_cache_on_arguments( + namespace=cache_namespace_uid, expiration_time=cache_ttl, condition=plugin_cache_active + ) + def compute_perm_vcs(cache_name, plugin_id, action, user_id, repo_name, ip_addr): + log.debug("auth: calculating permission access now for vcs operation: %s", action) # check IP inherit = user.inherit_default_permissions - ip_allowed = AuthUser.check_ip_allowed( - user_id, ip_addr, inherit_from_default=inherit) + ip_allowed = AuthUser.check_ip_allowed(user_id, ip_addr, inherit_from_default=inherit) if ip_allowed: - log.info('Access for IP:%s allowed', ip_addr) + log.info("Access for IP:%s allowed", ip_addr) else: return False - if action == 'push': - perms = ('repository.write', 'repository.admin') + if action == "push": + perms = ("repository.write", "repository.admin") if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name): return False else: # any other action need at least read permission - perms = ( - 
'repository.read', 'repository.write', 'repository.admin') + perms = ("repository.read", "repository.write", "repository.admin") if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name): return False return True start = time.time() - log.debug('Running plugin `%s` permissions check', plugin_id) + log.debug("Running plugin `%s` permissions check", plugin_id) # for environ based auth, password can be empty, but then the validation is # on the server that fills in the env data needed for authentication - perm_result = compute_perm_vcs( - 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr) + perm_result = compute_perm_vcs("vcs_permissions", plugin_id, action, user.user_id, repo_name, ip_addr) auth_time = time.time() - start - log.debug('Permissions for plugin `%s` completed in %.4fs, ' - 'expiration time of fetched cache %.1fs.', - plugin_id, auth_time, cache_ttl) + log.debug( + "Permissions for plugin `%s` completed in %.4fs, " "expiration time of fetched cache %.1fs.", + plugin_id, + auth_time, + cache_ttl, + ) return perm_result def _get_http_scheme(self, environ): try: - return environ['wsgi.url_scheme'] + return environ["wsgi.url_scheme"] except Exception: - log.exception('Failed to read http scheme') - return 'http' + log.exception("Failed to read http scheme") + return "http" def _get_default_cache_ttl(self): # take AUTH_CACHE_TTL from the `rhodecode` auth plugin - plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode') + plugin = loadplugin("egg:rhodecode-enterprise-ce#rhodecode") plugin_settings = plugin.get_settings() - plugin_cache_active, cache_ttl = plugin.get_ttl_cache( - plugin_settings) or (False, 0) + plugin_cache_active, cache_ttl = plugin.get_ttl_cache(plugin_settings) or (False, 0) return plugin_cache_active, cache_ttl def __call__(self, environ, start_response): @@ -359,17 +351,17 @@ class SimpleVCS(object): def _handle_request(self, environ, start_response): if not self.url_repo_name: - log.warning('Repository name 
is empty: %s', self.url_repo_name) + log.warning("Repository name is empty: %s", self.url_repo_name) # failed to get repo name, we fail now return HTTPNotFound()(environ, start_response) - log.debug('Extracted repo name is %s', self.url_repo_name) + log.debug("Extracted repo name is %s", self.url_repo_name) ip_addr = get_ip_addr(environ) user_agent = get_user_agent(environ) username = None # skip passing error to error controller - environ['pylons.status_code_redirect'] = True + environ["pylons.status_code_redirect"] = True # ====================================================================== # GET ACTION PULL or PUSH @@ -380,17 +372,15 @@ class SimpleVCS(object): # Check if this is a request to a shadow repository of a pull request. # In this case only pull action is allowed. # ====================================================================== - if self.is_shadow_repo and action != 'pull': - reason = 'Only pull action is allowed for shadow repositories.' - log.debug('User not allowed to proceed, %s', reason) + if self.is_shadow_repo and action != "pull": + reason = "Only pull action is allowed for shadow repositories." + log.debug("User not allowed to proceed, %s", reason) return HTTPNotAcceptable(reason)(environ, start_response) # Check if the shadow repo actually exists, in case someone refers # to it, and it has been deleted because of successful merge. 
if self.is_shadow_repo and not self.is_shadow_repo_dir: - log.debug( - 'Shadow repo detected, and shadow repo dir `%s` is missing', - self.is_shadow_repo_dir) + log.debug("Shadow repo detected, and shadow repo dir `%s` is missing", self.is_shadow_repo_dir) return HTTPNotFound()(environ, start_response) # ====================================================================== @@ -398,7 +388,7 @@ class SimpleVCS(object): # ====================================================================== detect_force_push = False check_branch_perms = False - if action in ['pull', 'push']: + if action in ["pull", "push"]: user_obj = anonymous_user = User.get_default_user() auth_user = user_obj.AuthUser() username = anonymous_user.username @@ -406,8 +396,12 @@ class SimpleVCS(object): plugin_cache_active, cache_ttl = self._get_default_cache_ttl() # ONLY check permissions if the user is activated anonymous_perm = self._check_permission( - action, anonymous_user, auth_user, self.acl_repo_name, ip_addr, - plugin_id='anonymous_access', + action, + anonymous_user, + auth_user, + self.acl_repo_name, + ip_addr, + plugin_id="anonymous_access", plugin_cache_active=plugin_cache_active, cache_ttl=cache_ttl, ) @@ -416,12 +410,13 @@ class SimpleVCS(object): if not anonymous_user.active or not anonymous_perm: if not anonymous_user.active: - log.debug('Anonymous access is disabled, running ' - 'authentication') + log.debug("Anonymous access is disabled, running " "authentication") if not anonymous_perm: - log.debug('Not enough credentials to access repo: `%s` ' - 'repository as anonymous user', self.acl_repo_name) + log.debug( + "Not enough credentials to access repo: `%s` " "repository as anonymous user", + self.acl_repo_name, + ) username = None # ============================================================== @@ -430,19 +425,18 @@ class SimpleVCS(object): # ============================================================== # try to auth based on environ, container auth methods - log.debug('Running 
PRE-AUTH for container|headers based authentication') + log.debug("Running PRE-AUTH for container|headers based authentication") # headers auth, by just reading special headers and bypass the auth with user/passwd pre_auth = authenticate( - '', '', environ, VCS_TYPE, registry=self.registry, - acl_repo_name=self.acl_repo_name) + "", "", environ, VCS_TYPE, registry=self.registry, acl_repo_name=self.acl_repo_name + ) - if pre_auth and pre_auth.get('username'): - username = pre_auth['username'] - log.debug('PRE-AUTH got `%s` as username', username) + if pre_auth and pre_auth.get("username"): + username = pre_auth["username"] + log.debug("PRE-AUTH got `%s` as username", username) if pre_auth: - log.debug('PRE-AUTH successful from %s', - pre_auth.get('auth_data', {}).get('_plugin')) + log.debug("PRE-AUTH successful from %s", pre_auth.get("auth_data", {}).get("_plugin")) # If not authenticated by the container, running basic auth # before inject the calling repo_name for special scope checks @@ -463,16 +457,16 @@ class SimpleVCS(object): return HTTPNotAcceptable(reason)(environ, start_response) if isinstance(auth_result, dict): - AUTH_TYPE.update(environ, 'basic') - REMOTE_USER.update(environ, auth_result['username']) - username = auth_result['username'] - plugin = auth_result.get('auth_data', {}).get('_plugin') - log.info( - 'MAIN-AUTH successful for user `%s` from %s plugin', - username, plugin) + AUTH_TYPE.update(environ, "basic") + REMOTE_USER.update(environ, auth_result["username"]) + username = auth_result["username"] + plugin = auth_result.get("auth_data", {}).get("_plugin") + log.info("MAIN-AUTH successful for user `%s` from %s plugin", username, plugin) - plugin_cache_active, cache_ttl = auth_result.get( - 'auth_data', {}).get('_ttl_cache') or (False, 0) + plugin_cache_active, cache_ttl = auth_result.get("auth_data", {}).get("_ttl_cache") or ( + False, + 0, + ) else: return auth_result.wsgi_application(environ, start_response) @@ -488,21 +482,24 @@ class 
SimpleVCS(object): # check user attributes for password change flag user_obj = user auth_user = user_obj.AuthUser() - if user_obj and user_obj.username != User.DEFAULT_USER and \ - user_obj.user_data.get('force_password_change'): - reason = 'password change required' - log.debug('User not allowed to authenticate, %s', reason) + if ( + user_obj + and user_obj.username != User.DEFAULT_USER + and user_obj.user_data.get("force_password_change") + ): + reason = "password change required" + log.debug("User not allowed to authenticate, %s", reason) return HTTPNotAcceptable(reason)(environ, start_response) # check permissions for this repository perm = self._check_permission( - action, user, auth_user, self.acl_repo_name, ip_addr, - plugin, plugin_cache_active, cache_ttl) + action, user, auth_user, self.acl_repo_name, ip_addr, plugin, plugin_cache_active, cache_ttl + ) if not perm: return HTTPForbidden()(environ, start_response) - environ['rc_auth_user_id'] = str(user_id) + environ["rc_auth_user_id"] = str(user_id) - if action == 'push': + if action == "push": perms = auth_user.get_branch_permissions(self.acl_repo_name) if perms: check_branch_perms = True @@ -510,41 +507,48 @@ class SimpleVCS(object): # extras are injected into UI object and later available # in hooks executed by RhodeCode - check_locking = _should_check_locking(environ.get('QUERY_STRING')) + check_locking = _should_check_locking(environ.get("QUERY_STRING")) extras = vcs_operation_context( - environ, repo_name=self.acl_repo_name, username=username, - action=action, scm=self.SCM, check_locking=check_locking, - is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms, - detect_force_push=detect_force_push + environ, + repo_name=self.acl_repo_name, + username=username, + action=action, + scm=self.SCM, + check_locking=check_locking, + is_shadow_repo=self.is_shadow_repo, + check_branch_perms=check_branch_perms, + detect_force_push=detect_force_push, ) # 
====================================================================== # REQUEST HANDLING # ====================================================================== - repo_path = os.path.join( - safe_str(self.base_path), safe_str(self.vcs_repo_name)) - log.debug('Repository path is %s', repo_path) + repo_path = os.path.join(safe_str(self.base_path), safe_str(self.vcs_repo_name)) + log.debug("Repository path is %s", repo_path) fix_PATH() log.info( '%s action on %s repo "%s" by "%s" from %s %s', - action, self.SCM, safe_str(self.url_repo_name), - safe_str(username), ip_addr, user_agent) + action, + self.SCM, + safe_str(self.url_repo_name), + safe_str(username), + ip_addr, + user_agent, + ) - return self._generate_vcs_response( - environ, start_response, repo_path, extras, action) + return self._generate_vcs_response(environ, start_response, repo_path, extras, action) def _get_txn_id(self, environ): - - for k in ['RAW_URI', 'HTTP_DESTINATION']: + for k in ["RAW_URI", "HTTP_DESTINATION"]: url = environ.get(k) if not url: continue # regex to search for svn-txn-id - pattern = r'/!svn/txr/([^/]+)/' + pattern = r"/!svn/txr/([^/]+)/" # Search for the pattern in the URL match = re.search(pattern, url) @@ -555,8 +559,7 @@ class SimpleVCS(object): return txn_id @initialize_generator - def _generate_vcs_response( - self, environ, start_response, repo_path, extras, action): + def _generate_vcs_response(self, environ, start_response, repo_path, extras, action): """ Returns a generator for the response content. @@ -565,24 +568,20 @@ class SimpleVCS(object): also handles the locking exceptions which will be triggered when the first chunk is produced by the underlying WSGI application. 
""" - svn_txn_id = '' - if action == 'push': + svn_txn_id = "" + if action == "push": svn_txn_id = self._get_txn_id(environ) - callback_daemon, extras = self._prepare_callback_daemon( - extras, environ, action, txn_id=svn_txn_id) + callback_daemon, extras = self._prepare_callback_daemon(extras, environ, action, txn_id=svn_txn_id) if svn_txn_id: - - port = safe_int(extras['hooks_uri'].split(':')[-1]) txn_id_data = extras.copy() - txn_id_data.update({'port': port}) - txn_id_data.update({'req_method': environ['REQUEST_METHOD']}) + txn_id_data.update({"req_method": environ["REQUEST_METHOD"]}) full_repo_path = repo_path store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data) - log.debug('HOOKS extras is %s', extras) + log.debug("HOOKS extras is %s", extras) http_scheme = self._get_http_scheme(environ) @@ -609,7 +608,7 @@ class SimpleVCS(object): try: # invalidate cache on push - if action == 'push': + if action == "push": self._invalidate_cache(self.url_repo_name) finally: meta.Session.remove() @@ -632,12 +631,12 @@ class SimpleVCS(object): """Return the WSGI app that will finally handle the request.""" raise NotImplementedError() - def _create_config(self, extras, repo_name, scheme='http'): + def _create_config(self, extras, repo_name, scheme="http"): """Create a safe config representation.""" raise NotImplementedError() def _should_use_callback_daemon(self, extras, environ, action): - if extras.get('is_shadow_repo'): + if extras.get("is_shadow_repo"): # we don't want to execute hooks, and callback daemon for shadow repos return False return True @@ -647,11 +646,9 @@ class SimpleVCS(object): if not self._should_use_callback_daemon(extras, environ, action): # disable callback daemon for actions that don't require it - protocol = 'local' + protocol = "local" - return prepare_callback_daemon( - extras, protocol=protocol, - host=vcs_settings.HOOKS_HOST, txn_id=txn_id) + return prepare_callback_daemon(extras, protocol=protocol, txn_id=txn_id) def 
_should_check_locking(query_string): @@ -659,4 +656,4 @@ def _should_check_locking(query_string): # server see all operation on commit; bookmarks, phases and # obsolescence marker in different transaction, we don't want to check # locking on those - return query_string not in ['cmd=listkeys'] + return query_string not in ["cmd=listkeys"] diff --git a/rhodecode/lib/utils.py b/rhodecode/lib/utils.py --- a/rhodecode/lib/utils.py +++ b/rhodecode/lib/utils.py @@ -21,6 +21,7 @@ Utilities library for RhodeCode """ import datetime +import importlib import decorator import logging @@ -42,8 +43,9 @@ from webhelpers2.text import collapse, s from mako import exceptions +import rhodecode from rhodecode import ConfigGet -from rhodecode.lib.exceptions import HTTPBranchProtected, HTTPLockedRC +from rhodecode.lib.exceptions import HTTPBranchProtected, HTTPLockedRepo, ClientNotSupported from rhodecode.lib.hash_utils import sha256_safe, md5, sha1 from rhodecode.lib.type_utils import AttributeDict from rhodecode.lib.str_utils import safe_bytes, safe_str @@ -86,6 +88,7 @@ def adopt_for_celery(func): @wraps(func) def wrapper(extras): extras = AttributeDict(extras) + try: # HooksResponse implements to_json method which must be used there. return func(extras).to_json() @@ -100,7 +103,18 @@ def adopt_for_celery(func): 'exception_args': error_args, 'exception_traceback': '', } - except HTTPLockedRC as error: + except ClientNotSupported as error: + # Those special cases don't need error reporting. It's a case of + # locked repo or protected branch + error_args = error.args + return { + 'status': error.code, + 'output': error.explanation, + 'exception': type(error).__name__, + 'exception_args': error_args, + 'exception_traceback': '', + } + except HTTPLockedRepo as error: # Those special cases don't need error reporting. 
It's a case of # locked repo or protected branch error_args = error.args @@ -117,7 +131,7 @@ def adopt_for_celery(func): 'output': '', 'exception': type(e).__name__, 'exception_args': e.args, - 'exception_traceback': '', + 'exception_traceback': traceback.format_exc(), } return wrapper @@ -411,6 +425,10 @@ def prepare_config_data(clear_session=Tr ('web', 'push_ssl', 'false'), ] for setting in ui_settings: + # skip certain deprecated keys that might be still in DB + if f"{setting.section}_{setting.key}" in ['extensions_hgsubversion']: + continue + # Todo: remove this section once transition to *.ini files will be completed if setting.section in ('largefiles', 'vcs_git_lfs'): if setting.key != 'enabled': @@ -686,22 +704,41 @@ def repo2db_mapper(initial_repo_list, re return added, removed +def deep_reload_package(package_name): + """ + Deeply reload a package by removing it and its submodules from sys.modules, + then re-importing it. + """ + # Remove the package and its submodules from sys.modules + to_reload = [name for name in sys.modules if name == package_name or name.startswith(package_name + ".")] + for module_name in to_reload: + del sys.modules[module_name] + log.debug(f"Removed module from cache: {module_name}") + + # Re-import the package + package = importlib.import_module(package_name) + log.debug(f"Re-imported package: {package_name}") + + return package def load_rcextensions(root_path): import rhodecode from rhodecode.config import conf path = os.path.join(root_path) - sys.path.append(path) + deep_reload = path in sys.path + sys.path.insert(0, path) try: - rcextensions = __import__('rcextensions') + rcextensions = __import__('rcextensions', fromlist=['']) except ImportError: if os.path.isdir(os.path.join(path, 'rcextensions')): log.warning('Unable to load rcextensions from %s', path) rcextensions = None if rcextensions: + if deep_reload: + rcextensions = deep_reload_package('rcextensions') log.info('Loaded rcextensions from %s...', rcextensions) 
rhodecode.EXTENSIONS = rcextensions @@ -741,6 +778,7 @@ def create_test_index(repo_location, con except ImportError: raise ImportError('Failed to import rc_testdata, ' 'please make sure this package is installed from requirements_test.txt') + rc_testdata.extract_search_index( 'vcs_search_index', os.path.dirname(config['search.location'])) @@ -785,22 +823,15 @@ def create_test_repositories(test_path, Creates test repositories in the temporary directory. Repositories are extracted from archives within the rc_testdata package. """ - import rc_testdata + try: + import rc_testdata + except ImportError: + raise ImportError('Failed to import rc_testdata, ' + 'please make sure this package is installed from requirements_test.txt') + from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO - log.debug('making test vcs repositories') - - idx_path = config['search.location'] - data_path = config['cache_dir'] - - # clean index and data - if idx_path and os.path.exists(idx_path): - log.debug('remove %s', idx_path) - shutil.rmtree(idx_path) - - if data_path and os.path.exists(data_path): - log.debug('remove %s', data_path) - shutil.rmtree(data_path) + log.debug('making test vcs repositories at %s', test_path) rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO)) rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO)) diff --git a/rhodecode/lib/vcs/__init__.py b/rhodecode/lib/vcs/__init__.py --- a/rhodecode/lib/vcs/__init__.py +++ b/rhodecode/lib/vcs/__init__.py @@ -140,7 +140,7 @@ class CurlSession(object): try: curl.perform() except pycurl.error as exc: - log.error('Failed to call endpoint url: {} using pycurl'.format(url)) + log.error('Failed to call endpoint url: %s using pycurl', url) raise status_code = curl.getinfo(pycurl.HTTP_CODE) diff --git a/rhodecode/lib/vcs/backends/git/__init__.py b/rhodecode/lib/vcs/backends/git/__init__.py --- a/rhodecode/lib/vcs/backends/git/__init__.py +++ b/rhodecode/lib/vcs/backends/git/__init__.py @@ -45,10 +45,3 @@ 
def discover_git_version(raise_on_exc=Fa if raise_on_exc: raise return '' - - -def lfs_store(base_location): - """ - Return a lfs store relative to base_location - """ - return os.path.join(base_location, '.cache', 'lfs_store') diff --git a/rhodecode/lib/vcs/backends/hg/__init__.py b/rhodecode/lib/vcs/backends/hg/__init__.py --- a/rhodecode/lib/vcs/backends/hg/__init__.py +++ b/rhodecode/lib/vcs/backends/hg/__init__.py @@ -45,10 +45,3 @@ def discover_hg_version(raise_on_exc=Fal if raise_on_exc: raise return '' - - -def largefiles_store(base_location): - """ - Return a largefile store relative to base_location - """ - return os.path.join(base_location, '.cache', 'largefiles') diff --git a/rhodecode/lib/vcs/client_http.py b/rhodecode/lib/vcs/client_http.py --- a/rhodecode/lib/vcs/client_http.py +++ b/rhodecode/lib/vcs/client_http.py @@ -216,7 +216,7 @@ class RemoteRepo(object): self._cache_region, self._cache_namespace = \ remote_maker.init_cache_region(cache_repo_id) - with_wire = with_wire or {} + with_wire = with_wire or {"cache": False} repo_state_uid = with_wire.get('repo_state_uid') or 'state' diff --git a/rhodecode/model/comment.py b/rhodecode/model/comment.py --- a/rhodecode/model/comment.py +++ b/rhodecode/model/comment.py @@ -373,6 +373,7 @@ class CommentsModel(BaseModel): Session().add(comment) Session().flush() + kwargs = { 'user': user, 'renderer_type': renderer, @@ -387,8 +388,7 @@ class CommentsModel(BaseModel): } if commit_obj: - recipients = ChangesetComment.get_users( - revision=commit_obj.raw_id) + recipients = ChangesetComment.get_users(revision=commit_obj.raw_id) # add commit author if it's in RhodeCode system cs_author = User.get_from_cs_author(commit_obj.author) if not cs_author: @@ -397,16 +397,13 @@ class CommentsModel(BaseModel): recipients += [cs_author] commit_comment_url = self.get_url(comment, request=request) - commit_comment_reply_url = self.get_url( - comment, request=request, - anchor=f'comment-{comment.comment_id}/?/ReplyToComment') 
+ commit_comment_reply_url = self.get_url(comment, request=request, anchor=f'comment-{comment.comment_id}/?/ReplyToComment') target_repo_url = h.link_to( repo.repo_name, h.route_url('repo_summary', repo_name=repo.repo_name)) - commit_url = h.route_url('repo_commit', repo_name=repo.repo_name, - commit_id=commit_id) + commit_url = h.route_url('repo_commit', repo_name=repo.repo_name, commit_id=commit_id) # commit specifics kwargs.update({ @@ -489,7 +486,6 @@ class CommentsModel(BaseModel): if not is_draft: comment_data = comment.get_api_data() - self._log_audit_action( action, {'data': comment_data}, auth_user, comment) diff --git a/rhodecode/model/pull_request.py b/rhodecode/model/pull_request.py --- a/rhodecode/model/pull_request.py +++ b/rhodecode/model/pull_request.py @@ -38,7 +38,7 @@ from rhodecode.translation import lazy_u from rhodecode.lib import helpers as h, hooks_utils, diffs from rhodecode.lib import audit_logger from collections import OrderedDict -from rhodecode.lib.hook_daemon.base import prepare_callback_daemon +from rhodecode.lib.hook_daemon.utils import prepare_callback_daemon from rhodecode.lib.ext_json import sjson as json from rhodecode.lib.markup_renderer import ( DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) @@ -980,9 +980,7 @@ class PullRequestModel(BaseModel): target_ref = self._refresh_reference( pull_request.target_ref_parts, target_vcs) - callback_daemon, extras = prepare_callback_daemon( - extras, protocol=vcs_settings.HOOKS_PROTOCOL, - host=vcs_settings.HOOKS_HOST) + callback_daemon, extras = prepare_callback_daemon(extras, protocol=vcs_settings.HOOKS_PROTOCOL) with callback_daemon: # TODO: johbo: Implement a clean way to run a config_override diff --git a/rhodecode/model/settings.py b/rhodecode/model/settings.py --- a/rhodecode/model/settings.py +++ b/rhodecode/model/settings.py @@ -862,27 +862,3 @@ class VcsSettingsModel(object): raise ValueError( f'The given data does not contain {data_key} key') return data_keys - - def 
create_largeobjects_dirs_if_needed(self, repo_store_path): - """ - This is subscribed to the `pyramid.events.ApplicationCreated` event. It - does a repository scan if enabled in the settings. - """ - - from rhodecode.lib.vcs.backends.hg import largefiles_store - from rhodecode.lib.vcs.backends.git import lfs_store - - paths = [ - largefiles_store(repo_store_path), - lfs_store(repo_store_path)] - - for path in paths: - if os.path.isdir(path): - continue - if os.path.isfile(path): - continue - # not a file nor dir, we try to create it - try: - os.makedirs(path) - except Exception: - log.warning('Failed to create largefiles dir:%s', path) diff --git a/rhodecode/tests/__init__.py b/rhodecode/tests/__init__.py --- a/rhodecode/tests/__init__.py +++ b/rhodecode/tests/__init__.py @@ -1,5 +1,4 @@ - -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License, version 3 @@ -38,7 +37,7 @@ from rhodecode.lib.hash_utils import sha log = logging.getLogger(__name__) __all__ = [ - 'get_new_dir', 'TestController', + 'get_new_dir', 'TestController', 'console_printer', 'clear_cache_regions', 'assert_session_flash', 'login_user', 'no_newline_id_generator', 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO', @@ -244,3 +243,11 @@ def no_newline_id_generator(test_name): return test_name or 'test-with-empty-name' +def console_printer(*msg): + print_func = print + try: + from rich import print as print_func + except ImportError: + pass + + print_func(*msg) diff --git a/rhodecode/tests/auth_external_test.py b/rhodecode/tests/auth_external_test.py --- a/rhodecode/tests/auth_external_test.py +++ b/rhodecode/tests/auth_external_test.py @@ -1,5 +1,4 @@ - -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the 
GNU Affero General Public License, version 3 @@ -90,7 +89,7 @@ class RhodeCodeAuthPlugin(RhodeCodeExter 'firstname': firstname, 'lastname': lastname, 'groups': [], - 'email': '%s@rhodecode.com' % username, + 'email': f'{username}@rhodecode.com', 'admin': admin, 'active': active, "active_from_extern": None, diff --git a/rhodecode/tests/config/test_routing_links.py b/rhodecode/tests/config/test_routing_links.py --- a/rhodecode/tests/config/test_routing_links.py +++ b/rhodecode/tests/config/test_routing_links.py @@ -20,14 +20,14 @@ import pytest import requests from rhodecode.config import routing_links - +from rhodecode.tests import console_printer def check_connection(): try: response = requests.get('https://rhodecode.com') return response.status_code == 200 except Exception as e: - print(e) + console_printer(e) return False diff --git a/rhodecode/tests/conftest.py b/rhodecode/tests/conftest.py --- a/rhodecode/tests/conftest.py +++ b/rhodecode/tests/conftest.py @@ -1,4 +1,4 @@ -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License, version 3 @@ -16,23 +16,10 @@ # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ -""" -py.test config for test suite for making push/pull operations. - -.. important:: - - You must have git >= 1.8.5 for tests to work fine. With 68b939b git started - to redirect things to stderr instead of stdout. 
-""" - -import pytest +import pytest # noqa import logging - -from rhodecode.authentication import AuthenticationPluginRegistry -from rhodecode.model.db import Permission, User -from rhodecode.model.meta import Session -from rhodecode.model.settings import SettingsModel -from rhodecode.model.user import UserModel +import collections +import rhodecode log = logging.getLogger(__name__) @@ -40,99 +27,3 @@ log = logging.getLogger(__name__) # Docker image running httpbin... HTTPBIN_DOMAIN = 'http://httpbin' HTTPBIN_POST = HTTPBIN_DOMAIN + '/post' - - -@pytest.fixture() -def enable_auth_plugins(request, baseapp, csrf_token): - """ - Return a factory object that when called, allows to control which - authentication plugins are enabled. - """ - - class AuthPluginManager(object): - - def cleanup(self): - self._enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode']) - - def enable(self, plugins_list, override=None): - return self._enable_plugins(plugins_list, override) - - def _enable_plugins(self, plugins_list, override=None): - override = override or {} - params = { - 'auth_plugins': ','.join(plugins_list), - } - - # helper translate some names to others, to fix settings code - name_map = { - 'token': 'authtoken' - } - log.debug('enable_auth_plugins: enabling following auth-plugins: %s', plugins_list) - - for module in plugins_list: - plugin_name = module.partition('#')[-1] - if plugin_name in name_map: - plugin_name = name_map[plugin_name] - enabled_plugin = f'auth_{plugin_name}_enabled' - cache_ttl = f'auth_{plugin_name}_cache_ttl' - - # default params that are needed for each plugin, - # `enabled` and `cache_ttl` - params.update({ - enabled_plugin: True, - cache_ttl: 0 - }) - if override.get: - params.update(override.get(module, {})) - - validated_params = params - - for k, v in validated_params.items(): - setting = SettingsModel().create_or_update_setting(k, v) - Session().add(setting) - Session().commit() - - 
AuthenticationPluginRegistry.invalidate_auth_plugins_cache(hard=True) - - enabled_plugins = SettingsModel().get_auth_plugins() - assert plugins_list == enabled_plugins - - enabler = AuthPluginManager() - request.addfinalizer(enabler.cleanup) - - return enabler - - -@pytest.fixture() -def test_user_factory(request, baseapp): - - def user_factory(username='test_user', password='qweqwe', first_name='John', last_name='Testing', **kwargs): - usr = UserModel().create_or_update( - username=username, - password=password, - email=f'{username}@rhodecode.org', - firstname=first_name, lastname=last_name) - Session().commit() - - for k, v in kwargs.items(): - setattr(usr, k, v) - Session().add(usr) - - new_usr = User.get_by_username(username) - new_usr_id = new_usr.user_id - assert new_usr == usr - - @request.addfinalizer - def cleanup(): - if User.get(new_usr_id) is None: - return - - perm = Permission.query().all() - for p in perm: - UserModel().revoke_perm(usr, p) - - UserModel().delete(new_usr_id) - Session().commit() - return usr - - return user_factory diff --git a/rhodecode/tests/conftest_common.py b/rhodecode/tests/conftest_common.py --- a/rhodecode/tests/conftest_common.py +++ b/rhodecode/tests/conftest_common.py @@ -1,4 +1,4 @@ -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License, version 3 @@ -98,16 +98,16 @@ def pytest_addoption(parser): 'pyramid_config', "Set up a Pyramid environment with the specified config file.") + parser.addini('rhodecode_config', 'rhodecode config ini for tests') + parser.addini('celery_config', 'celery config ini for tests') + parser.addini('vcsserver_config', 'vcsserver config ini for tests') + vcsgroup = parser.getgroup('vcs') + vcsgroup.addoption( '--without-vcsserver', dest='with_vcsserver', action='store_false', help="Do not start the VCSServer in a background process.") - 
vcsgroup.addoption( - '--with-vcsserver-http', dest='vcsserver_config_http', - help="Start the HTTP VCSServer with the specified config file.") - vcsgroup.addoption( - '--vcsserver-protocol', dest='vcsserver_protocol', - help="Start the VCSServer with HTTP protocol support.") + vcsgroup.addoption( '--vcsserver-config-override', action='store', type=_parse_json, default=None, dest='vcsserver_config_override', help=( @@ -122,12 +122,6 @@ def pytest_addoption(parser): "Allows to set the port of the vcsserver. Useful when testing " "against an already running server and random ports cause " "trouble.")) - parser.addini( - 'vcsserver_config_http', - "Start the HTTP VCSServer with the specified config file.") - parser.addini( - 'vcsserver_protocol', - "Start the VCSServer with HTTP protocol support.") @pytest.hookimpl(tryfirst=True, hookwrapper=True) diff --git a/rhodecode/tests/database/__init__.py b/rhodecode/tests/database/__init__.py new file mode 100644 --- /dev/null +++ b/rhodecode/tests/database/__init__.py @@ -0,0 +1,17 @@ +# Copyright (C) 2010-2023 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. 
If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ diff --git a/rhodecode/tests/database/conftest.py b/rhodecode/tests/database/conftest.py --- a/rhodecode/tests/database/conftest.py +++ b/rhodecode/tests/database/conftest.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -17,7 +16,7 @@ # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ -from subprocess import Popen, PIPE +import subprocess import os import sys import tempfile @@ -26,87 +25,71 @@ import pytest from sqlalchemy.engine import url from rhodecode.lib.str_utils import safe_str, safe_bytes -from rhodecode.tests.fixture import TestINI +from rhodecode.tests.fixtures.rc_fixture import TestINI def _get_dbs_from_metafunc(metafunc): - dbs_mark = metafunc.definition.get_closest_marker('dbs') + dbs_mark = metafunc.definition.get_closest_marker("dbs") if dbs_mark: # Supported backends by this test function, created from pytest.mark.dbs backends = dbs_mark.args else: - backends = metafunc.config.getoption('--dbs') + backends = metafunc.config.getoption("--dbs") return backends def pytest_generate_tests(metafunc): # Support test generation based on --dbs parameter - if 'db_backend' in metafunc.fixturenames: - requested_backends = set(metafunc.config.getoption('--dbs')) + if "db_backend" in metafunc.fixturenames: + requested_backends = set(metafunc.config.getoption("--dbs")) backends = _get_dbs_from_metafunc(metafunc) backends = requested_backends.intersection(backends) # TODO: johbo: Disabling a backend did not work out with # parametrization, find better way to achieve this. 
if not backends: metafunc.function._skip = True - metafunc.parametrize('db_backend_name', backends) + metafunc.parametrize("db_backend_name", backends) def pytest_collection_modifyitems(session, config, items): - remaining = [ - i for i in items if not getattr(i.obj, '_skip', False)] + remaining = [i for i in items if not getattr(i.obj, "_skip", False)] items[:] = remaining @pytest.fixture() -def db_backend( - request, db_backend_name, ini_config, tmpdir_factory): +def db_backend(request, db_backend_name, ini_config, tmpdir_factory): basetemp = tmpdir_factory.getbasetemp().strpath klass = _get_backend(db_backend_name) - option_name = '--{}-connection-string'.format(db_backend_name) + option_name = "--{}-connection-string".format(db_backend_name) connection_string = request.config.getoption(option_name) or None - return klass( - config_file=ini_config, basetemp=basetemp, - connection_string=connection_string) + return klass(config_file=ini_config, basetemp=basetemp, connection_string=connection_string) def _get_backend(backend_type): - return { - 'sqlite': SQLiteDBBackend, - 'postgres': PostgresDBBackend, - 'mysql': MySQLDBBackend, - '': EmptyDBBackend - }[backend_type] + return {"sqlite": SQLiteDBBackend, "postgres": PostgresDBBackend, "mysql": MySQLDBBackend, "": EmptyDBBackend}[ + backend_type + ] class DBBackend(object): _store = os.path.dirname(os.path.abspath(__file__)) _type = None - _base_ini_config = [{'app:main': {'vcs.start_server': 'false', - 'startup.import_repos': 'false'}}] - _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}] - _base_db_name = 'rhodecode_test_db_backend' - std_env = {'RC_TEST': '0'} + _base_ini_config = [{"app:main": {"vcs.start_server": "false", "startup.import_repos": "false"}}] + _db_url = [{"app:main": {"sqlalchemy.db1.url": ""}}] + _base_db_name = "rhodecode_test_db_backend" + std_env = {"RC_TEST": "0"} - def __init__( - self, config_file, db_name=None, basetemp=None, - connection_string=None): - - from 
rhodecode.lib.vcs.backends.hg import largefiles_store - from rhodecode.lib.vcs.backends.git import lfs_store - + def __init__(self, config_file, db_name=None, basetemp=None, connection_string=None): self.fixture_store = os.path.join(self._store, self._type) self.db_name = db_name or self._base_db_name self._base_ini_file = config_file - self.stderr = '' - self.stdout = '' + self.stderr = "" + self.stdout = "" self._basetemp = basetemp or tempfile.gettempdir() - self._repos_location = os.path.join(self._basetemp, 'rc_test_repos') - self._repos_hg_largefiles_store = largefiles_store(self._basetemp) - self._repos_git_lfs_store = lfs_store(self._basetemp) + self._repos_location = os.path.join(self._basetemp, "rc_test_repos") self.connection_string = connection_string @property @@ -118,8 +101,7 @@ class DBBackend(object): if not new_connection_string: new_connection_string = self.get_default_connection_string() else: - new_connection_string = new_connection_string.format( - db_name=self.db_name) + new_connection_string = new_connection_string.format(db_name=self.db_name) url_parts = url.make_url(new_connection_string) self._connection_string = new_connection_string self.user = url_parts.username @@ -127,73 +109,67 @@ class DBBackend(object): self.host = url_parts.host def get_default_connection_string(self): - raise NotImplementedError('default connection_string is required.') + raise NotImplementedError("default connection_string is required.") def execute(self, cmd, env=None, *args): """ Runs command on the system with given ``args``. 
""" - command = cmd + ' ' + ' '.join(args) - sys.stdout.write(f'CMD: {command}') + command = cmd + " " + " ".join(args) + sys.stdout.write(f"CMD: {command}") # Tell Python to use UTF-8 encoding out stdout _env = os.environ.copy() - _env['PYTHONIOENCODING'] = 'UTF-8' + _env["PYTHONIOENCODING"] = "UTF-8" _env.update(self.std_env) if env: _env.update(env) - self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env) + self.p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=_env) self.stdout, self.stderr = self.p.communicate() stdout_str = safe_str(self.stdout) - sys.stdout.write(f'COMMAND:{command}\n') + sys.stdout.write(f"COMMAND:{command}\n") sys.stdout.write(stdout_str) return self.stdout, self.stderr def assert_returncode_success(self): from rich import print as pprint + if not self.p.returncode == 0: pprint(safe_str(self.stderr)) - raise AssertionError(f'non 0 retcode:{self.p.returncode}') + raise AssertionError(f"non 0 retcode:{self.p.returncode}") def assert_correct_output(self, stdout, version): - assert b'UPGRADE FOR STEP %b COMPLETED' % safe_bytes(version) in stdout + assert b"UPGRADE FOR STEP %b COMPLETED" % safe_bytes(version) in stdout def setup_rhodecode_db(self, ini_params=None, env=None): if not ini_params: ini_params = self._base_ini_config ini_params.extend(self._db_url) - with TestINI(self._base_ini_file, ini_params, - self._type, destroy=True) as _ini_file: - + with TestINI(self._base_ini_file, ini_params, self._type, destroy=True) as _ini_file: if not os.path.isdir(self._repos_location): os.makedirs(self._repos_location) - if not os.path.isdir(self._repos_hg_largefiles_store): - os.makedirs(self._repos_hg_largefiles_store) - if not os.path.isdir(self._repos_git_lfs_store): - os.makedirs(self._repos_git_lfs_store) return self.execute( "rc-setup-app {0} --user=marcink " "--email=marcin@rhodeocode.com --password={1} " - "--repos={2} --force-yes".format( - _ini_file, 'qweqwe', 
self._repos_location), env=env) + "--repos={2} --force-yes".format(_ini_file, "qweqwe", self._repos_location), + env=env, + ) def upgrade_database(self, ini_params=None): if not ini_params: ini_params = self._base_ini_config ini_params.extend(self._db_url) - test_ini = TestINI( - self._base_ini_file, ini_params, self._type, destroy=True) + test_ini = TestINI(self._base_ini_file, ini_params, self._type, destroy=True) with test_ini as ini_file: if not os.path.isdir(self._repos_location): os.makedirs(self._repos_location) - return self.execute( - "rc-upgrade-db {0} --force-yes".format(ini_file)) + return self.execute("rc-upgrade-db {0} --force-yes".format(ini_file)) def setup_db(self): raise NotImplementedError @@ -206,7 +182,7 @@ class DBBackend(object): class EmptyDBBackend(DBBackend): - _type = '' + _type = "" def setup_db(self): pass @@ -222,21 +198,20 @@ class EmptyDBBackend(DBBackend): class SQLiteDBBackend(DBBackend): - _type = 'sqlite' + _type = "sqlite" def get_default_connection_string(self): - return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name) + return "sqlite:///{}/{}.sqlite".format(self._basetemp, self.db_name) def setup_db(self): # dump schema for tests # cp -v $TEST_DB_NAME - self._db_url = [{'app:main': { - 'sqlalchemy.db1.url': self.connection_string}}] + self._db_url = [{"app:main": {"sqlalchemy.db1.url": self.connection_string}}] def import_dump(self, dumpname): dump = os.path.join(self.fixture_store, dumpname) - target = os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self)) - return self.execute(f'cp -v {dump} {target}') + target = os.path.join(self._basetemp, "{0.db_name}.sqlite".format(self)) + return self.execute(f"cp -v {dump} {target}") def teardown_db(self): target_db = os.path.join(self._basetemp, self.db_name) @@ -244,39 +219,39 @@ class SQLiteDBBackend(DBBackend): class MySQLDBBackend(DBBackend): - _type = 'mysql' + _type = "mysql" def get_default_connection_string(self): - return 
'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name) + return "mysql://root:qweqwe@127.0.0.1/{}".format(self.db_name) def setup_db(self): # dump schema for tests # mysqldump -uroot -pqweqwe $TEST_DB_NAME - self._db_url = [{'app:main': { - 'sqlalchemy.db1.url': self.connection_string}}] - return self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format( - self.user, self.password, self.db_name)) + self._db_url = [{"app:main": {"sqlalchemy.db1.url": self.connection_string}}] + return self.execute( + "mysql -v -u{} -p{} -e 'create database '{}';'".format(self.user, self.password, self.db_name) + ) def import_dump(self, dumpname): dump = os.path.join(self.fixture_store, dumpname) - return self.execute("mysql -u{} -p{} {} < {}".format( - self.user, self.password, self.db_name, dump)) + return self.execute("mysql -u{} -p{} {} < {}".format(self.user, self.password, self.db_name, dump)) def teardown_db(self): - return self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format( - self.user, self.password, self.db_name)) + return self.execute( + "mysql -v -u{} -p{} -e 'drop database '{}';'".format(self.user, self.password, self.db_name) + ) class PostgresDBBackend(DBBackend): - _type = 'postgres' + _type = "postgres" def get_default_connection_string(self): - return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name) + return "postgresql://postgres:qweqwe@localhost/{}".format(self.db_name) def setup_db(self): # dump schema for tests # pg_dump -U postgres -h localhost $TEST_DB_NAME - self._db_url = [{'app:main': {'sqlalchemy.db1.url': self.connection_string}}] + self._db_url = [{"app:main": {"sqlalchemy.db1.url": self.connection_string}}] cmd = f"PGPASSWORD={self.password} psql -U {self.user} -h localhost -c 'create database '{self.db_name}';'" return self.execute(cmd) diff --git a/rhodecode/tests/database/test_creation.py b/rhodecode/tests/database/test_creation.py --- a/rhodecode/tests/database/test_creation.py +++ 
b/rhodecode/tests/database/test_creation.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify diff --git a/rhodecode/tests/database/test_migration.py b/rhodecode/tests/database/test_migration.py --- a/rhodecode/tests/database/test_migration.py +++ b/rhodecode/tests/database/test_migration.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -21,33 +20,42 @@ import pytest @pytest.mark.dbs("postgres") -@pytest.mark.parametrize("dumpname", [ - '1.4.4.sql', - '1.5.0.sql', - '1.6.0.sql', - '1.6.0_no_repo_name_index.sql', -]) +@pytest.mark.parametrize( + "dumpname", + [ + "1.4.4.sql", + "1.5.0.sql", + "1.6.0.sql", + "1.6.0_no_repo_name_index.sql", + ], +) def test_migrate_postgres_db(db_backend, dumpname): _run_migration_test(db_backend, dumpname) @pytest.mark.dbs("sqlite") -@pytest.mark.parametrize("dumpname", [ - 'rhodecode.1.4.4.sqlite', - 'rhodecode.1.4.4_with_groups.sqlite', - 'rhodecode.1.4.4_with_ldap_active.sqlite', -]) +@pytest.mark.parametrize( + "dumpname", + [ + "rhodecode.1.4.4.sqlite", + "rhodecode.1.4.4_with_groups.sqlite", + "rhodecode.1.4.4_with_ldap_active.sqlite", + ], +) def test_migrate_sqlite_db(db_backend, dumpname): _run_migration_test(db_backend, dumpname) @pytest.mark.dbs("mysql") -@pytest.mark.parametrize("dumpname", [ - '1.4.4.sql', - '1.5.0.sql', - '1.6.0.sql', - '1.6.0_no_repo_name_index.sql', -]) +@pytest.mark.parametrize( + "dumpname", + [ + "1.4.4.sql", + "1.5.0.sql", + "1.6.0.sql", + "1.6.0_no_repo_name_index.sql", + ], +) def test_migrate_mysql_db(db_backend, dumpname): _run_migration_test(db_backend, dumpname) @@ -60,5 +68,5 @@ def _run_migration_test(db_backend, dump db_backend.import_dump(dumpname) stdout, stderr = db_backend.upgrade_database() - db_backend.assert_correct_output(stdout+stderr, version='16') + db_backend.assert_correct_output(stdout + stderr, 
version="16") db_backend.assert_returncode_success() diff --git a/rhodecode/tests/fixture_mods/__init__.py b/rhodecode/tests/fixtures/__init__.py rename from rhodecode/tests/fixture_mods/__init__.py rename to rhodecode/tests/fixtures/__init__.py diff --git a/rhodecode/tests/fixtures/diff_with_diff_data.diff b/rhodecode/tests/fixtures/diff_fixtures/diff_with_diff_data.diff rename from rhodecode/tests/fixtures/diff_with_diff_data.diff rename to rhodecode/tests/fixtures/diff_fixtures/diff_with_diff_data.diff diff --git a/rhodecode/tests/fixtures/git_diff_binary_and_normal.diff b/rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_and_normal.diff rename from rhodecode/tests/fixtures/git_diff_binary_and_normal.diff rename to rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_and_normal.diff diff --git a/rhodecode/tests/fixtures/git_diff_binary_special_files.diff b/rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_special_files.diff rename from rhodecode/tests/fixtures/git_diff_binary_special_files.diff rename to rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_special_files.diff diff --git a/rhodecode/tests/fixtures/git_diff_binary_special_files_2.diff b/rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_special_files_2.diff rename from rhodecode/tests/fixtures/git_diff_binary_special_files_2.diff rename to rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_special_files_2.diff diff --git a/rhodecode/tests/fixtures/git_diff_chmod.diff b/rhodecode/tests/fixtures/diff_fixtures/git_diff_chmod.diff rename from rhodecode/tests/fixtures/git_diff_chmod.diff rename to rhodecode/tests/fixtures/diff_fixtures/git_diff_chmod.diff diff --git a/rhodecode/tests/fixtures/git_diff_js_chars.diff b/rhodecode/tests/fixtures/diff_fixtures/git_diff_js_chars.diff rename from rhodecode/tests/fixtures/git_diff_js_chars.diff rename to rhodecode/tests/fixtures/diff_fixtures/git_diff_js_chars.diff diff --git a/rhodecode/tests/fixtures/git_diff_mod_single_binary_file.diff 
b/rhodecode/tests/fixtures/diff_fixtures/git_diff_mod_single_binary_file.diff rename from rhodecode/tests/fixtures/git_diff_mod_single_binary_file.diff rename to rhodecode/tests/fixtures/diff_fixtures/git_diff_mod_single_binary_file.diff diff --git a/rhodecode/tests/fixtures/git_diff_rename_file.diff b/rhodecode/tests/fixtures/diff_fixtures/git_diff_rename_file.diff rename from rhodecode/tests/fixtures/git_diff_rename_file.diff rename to rhodecode/tests/fixtures/diff_fixtures/git_diff_rename_file.diff diff --git a/rhodecode/tests/fixtures/git_diff_rename_file_with_spaces.diff b/rhodecode/tests/fixtures/diff_fixtures/git_diff_rename_file_with_spaces.diff rename from rhodecode/tests/fixtures/git_diff_rename_file_with_spaces.diff rename to rhodecode/tests/fixtures/diff_fixtures/git_diff_rename_file_with_spaces.diff diff --git a/rhodecode/tests/fixtures/git_node_history_response.json b/rhodecode/tests/fixtures/diff_fixtures/git_node_history_response.json rename from rhodecode/tests/fixtures/git_node_history_response.json rename to rhodecode/tests/fixtures/diff_fixtures/git_node_history_response.json diff --git a/rhodecode/tests/fixtures/hg_diff_add_single_binary_file.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_add_single_binary_file.diff rename from rhodecode/tests/fixtures/hg_diff_add_single_binary_file.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_add_single_binary_file.diff diff --git a/rhodecode/tests/fixtures/hg_diff_binary_and_normal.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_binary_and_normal.diff rename from rhodecode/tests/fixtures/hg_diff_binary_and_normal.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_binary_and_normal.diff diff --git a/rhodecode/tests/fixtures/hg_diff_chmod.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_chmod.diff rename from rhodecode/tests/fixtures/hg_diff_chmod.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_chmod.diff diff --git 
a/rhodecode/tests/fixtures/hg_diff_chmod_and_mod_single_binary_file.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_chmod_and_mod_single_binary_file.diff rename from rhodecode/tests/fixtures/hg_diff_chmod_and_mod_single_binary_file.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_chmod_and_mod_single_binary_file.diff diff --git a/rhodecode/tests/fixtures/hg_diff_copy_and_chmod_file.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_and_chmod_file.diff rename from rhodecode/tests/fixtures/hg_diff_copy_and_chmod_file.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_and_chmod_file.diff diff --git a/rhodecode/tests/fixtures/hg_diff_copy_and_modify_file.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_and_modify_file.diff rename from rhodecode/tests/fixtures/hg_diff_copy_and_modify_file.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_and_modify_file.diff diff --git a/rhodecode/tests/fixtures/hg_diff_copy_chmod_and_edit_file.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_chmod_and_edit_file.diff rename from rhodecode/tests/fixtures/hg_diff_copy_chmod_and_edit_file.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_chmod_and_edit_file.diff diff --git a/rhodecode/tests/fixtures/hg_diff_copy_file.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_file.diff rename from rhodecode/tests/fixtures/hg_diff_copy_file.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_file.diff diff --git a/rhodecode/tests/fixtures/hg_diff_copy_file_with_spaces.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_file_with_spaces.diff rename from rhodecode/tests/fixtures/hg_diff_copy_file_with_spaces.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_file_with_spaces.diff diff --git a/rhodecode/tests/fixtures/hg_diff_del_single_binary_file.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_del_single_binary_file.diff rename from 
rhodecode/tests/fixtures/hg_diff_del_single_binary_file.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_del_single_binary_file.diff diff --git a/rhodecode/tests/fixtures/hg_diff_double_file_change_double_newline.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_double_file_change_double_newline.diff rename from rhodecode/tests/fixtures/hg_diff_double_file_change_double_newline.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_double_file_change_double_newline.diff diff --git a/rhodecode/tests/fixtures/hg_diff_double_file_change_newline.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_double_file_change_newline.diff rename from rhodecode/tests/fixtures/hg_diff_double_file_change_newline.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_double_file_change_newline.diff diff --git a/rhodecode/tests/fixtures/hg_diff_four_file_change_newline.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_four_file_change_newline.diff rename from rhodecode/tests/fixtures/hg_diff_four_file_change_newline.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_four_file_change_newline.diff diff --git a/rhodecode/tests/fixtures/hg_diff_mixed_filename_encodings.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_mixed_filename_encodings.diff rename from rhodecode/tests/fixtures/hg_diff_mixed_filename_encodings.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mixed_filename_encodings.diff diff --git a/rhodecode/tests/fixtures/hg_diff_mod_file_and_rename.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_file_and_rename.diff rename from rhodecode/tests/fixtures/hg_diff_mod_file_and_rename.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_file_and_rename.diff diff --git a/rhodecode/tests/fixtures/git_diff_mod_single_binary_file.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_single_binary_file.diff copy from rhodecode/tests/fixtures/git_diff_mod_single_binary_file.diff copy to 
rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_single_binary_file.diff diff --git a/rhodecode/tests/fixtures/hg_diff_mod_single_file_and_rename_and_chmod.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_single_file_and_rename_and_chmod.diff rename from rhodecode/tests/fixtures/hg_diff_mod_single_file_and_rename_and_chmod.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_single_file_and_rename_and_chmod.diff diff --git a/rhodecode/tests/fixtures/hg_diff_no_newline.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_no_newline.diff rename from rhodecode/tests/fixtures/hg_diff_no_newline.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_no_newline.diff diff --git a/rhodecode/tests/fixtures/hg_diff_rename_and_chmod_file.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_and_chmod_file.diff rename from rhodecode/tests/fixtures/hg_diff_rename_and_chmod_file.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_and_chmod_file.diff diff --git a/rhodecode/tests/fixtures/hg_diff_rename_file.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_file.diff rename from rhodecode/tests/fixtures/hg_diff_rename_file.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_file.diff diff --git a/rhodecode/tests/fixtures/hg_diff_rename_file_with_spaces.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_file_with_spaces.diff rename from rhodecode/tests/fixtures/hg_diff_rename_file_with_spaces.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_file_with_spaces.diff diff --git a/rhodecode/tests/fixtures/hg_diff_single_file_change_newline.diff b/rhodecode/tests/fixtures/diff_fixtures/hg_diff_single_file_change_newline.diff rename from rhodecode/tests/fixtures/hg_diff_single_file_change_newline.diff rename to rhodecode/tests/fixtures/diff_fixtures/hg_diff_single_file_change_newline.diff diff --git a/rhodecode/tests/fixtures/hg_node_history_response.json 
b/rhodecode/tests/fixtures/diff_fixtures/hg_node_history_response.json rename from rhodecode/tests/fixtures/hg_node_history_response.json rename to rhodecode/tests/fixtures/diff_fixtures/hg_node_history_response.json diff --git a/rhodecode/tests/fixtures/journal_dump.csv b/rhodecode/tests/fixtures/diff_fixtures/journal_dump.csv rename from rhodecode/tests/fixtures/journal_dump.csv rename to rhodecode/tests/fixtures/diff_fixtures/journal_dump.csv diff --git a/rhodecode/tests/fixtures/large_diff.diff b/rhodecode/tests/fixtures/diff_fixtures/large_diff.diff rename from rhodecode/tests/fixtures/large_diff.diff rename to rhodecode/tests/fixtures/diff_fixtures/large_diff.diff diff --git a/rhodecode/tests/fixtures/svn_diff_binary_add_file.diff b/rhodecode/tests/fixtures/diff_fixtures/svn_diff_binary_add_file.diff rename from rhodecode/tests/fixtures/svn_diff_binary_add_file.diff rename to rhodecode/tests/fixtures/diff_fixtures/svn_diff_binary_add_file.diff diff --git a/rhodecode/tests/fixtures/svn_diff_multiple_changes.diff b/rhodecode/tests/fixtures/diff_fixtures/svn_diff_multiple_changes.diff rename from rhodecode/tests/fixtures/svn_diff_multiple_changes.diff rename to rhodecode/tests/fixtures/diff_fixtures/svn_diff_multiple_changes.diff diff --git a/rhodecode/tests/fixtures/svn_node_history_branches.json b/rhodecode/tests/fixtures/diff_fixtures/svn_node_history_branches.json rename from rhodecode/tests/fixtures/svn_node_history_branches.json rename to rhodecode/tests/fixtures/diff_fixtures/svn_node_history_branches.json diff --git a/rhodecode/tests/fixtures/svn_node_history_response.json b/rhodecode/tests/fixtures/diff_fixtures/svn_node_history_response.json rename from rhodecode/tests/fixtures/svn_node_history_response.json rename to rhodecode/tests/fixtures/diff_fixtures/svn_node_history_response.json diff --git a/rhodecode/tests/fixture_mods/fixture_pyramid.py b/rhodecode/tests/fixtures/fixture_pyramid.py rename from rhodecode/tests/fixture_mods/fixture_pyramid.py 
rename to rhodecode/tests/fixtures/fixture_pyramid.py --- a/rhodecode/tests/fixture_mods/fixture_pyramid.py +++ b/rhodecode/tests/fixtures/fixture_pyramid.py @@ -1,5 +1,4 @@ - -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License, version 3 @@ -20,61 +19,128 @@ import pytest from rhodecode.lib.config_utils import get_app_config -from rhodecode.tests.fixture import TestINI +from rhodecode.tests.fixtures.rc_fixture import TestINI from rhodecode.tests import TESTS_TMP_PATH from rhodecode.tests.server_utils import RcVCSServer +from rhodecode.tests.server_utils import RcWebServer +from rhodecode.tests.server_utils import CeleryServer -@pytest.fixture(scope='session') -def vcsserver(request, vcsserver_port, vcsserver_factory): - """ - Session scope VCSServer. - - Tests which need the VCSServer have to rely on this fixture in order - to ensure it will be running. - - For specific needs, the fixture vcsserver_factory can be used. It allows to - adjust the configuration file for the test run. - - Command line args: - - --without-vcsserver: Allows to switch this fixture off. You have to - manually start the server. - - --vcsserver-port: Will expect the VCSServer to listen on this port. - """ - - if not request.config.getoption('with_vcsserver'): - return None - - return vcsserver_factory( - request, vcsserver_port=vcsserver_port) - - -@pytest.fixture(scope='session') -def vcsserver_factory(tmpdir_factory): +@pytest.fixture(scope="session") +def vcsserver_factory(): """ Use this if you need a running vcsserver with a special configuration. 
""" - def factory(request, overrides=(), vcsserver_port=None, - log_file=None, workers='3'): - - if vcsserver_port is None: + def factory(request, store_dir, overrides=(), config_file=None, port=None, log_file=None, workers="3", env=None, info_prefix=""): + env = env or {"RC_NO_TEST_ENV": "1"} + vcsserver_port = port + if port is None: vcsserver_port = get_available_port() overrides = list(overrides) - overrides.append({'server:main': {'port': vcsserver_port}}) + overrides.append({"server:main": {"port": vcsserver_port}}) + + if getattr(request, 'param', None): + config_overrides = [request.param] + overrides.extend(config_overrides) + + option_name = "vcsserver_config" + override_option_name = None + if not config_file: + config_file = get_config( + request.config, + option_name=option_name, + override_option_name=override_option_name, + overrides=overrides, + basetemp=store_dir, + prefix=f"{info_prefix}test_vcsserver_ini_", + ) + server = RcVCSServer(config_file, log_file, workers, env=env, info_prefix=info_prefix) + server.start() + + @request.addfinalizer + def cleanup(): + server.shutdown() + + server.wait_until_ready() + return server + + return factory + + +@pytest.fixture(scope="session") +def rhodecode_factory(): + def factory(request, store_dir, overrides=(), config_file=None, port=None, log_file=None, workers="3", env=None, info_prefix=""): + env = env or {"RC_NO_TEST_ENV": "1"} + rhodecode_port = port + if port is None: + rhodecode_port = get_available_port() + + overrides = list(overrides) + overrides.append({"server:main": {"port": rhodecode_port}}) + overrides.append({"app:main": {"use_celery": "true"}}) + overrides.append({"app:main": {"celery.task_always_eager": "false"}}) + + if getattr(request, 'param', None): + config_overrides = [request.param] + overrides.extend(config_overrides) + - option_name = 'vcsserver_config_http' - override_option_name = 'vcsserver_config_override' - config_file = get_config( - request.config, option_name=option_name, 
- override_option_name=override_option_name, overrides=overrides, - basetemp=tmpdir_factory.getbasetemp().strpath, - prefix='test_vcs_') + option_name = "rhodecode_config" + override_option_name = None + if not config_file: + config_file = get_config( + request.config, + option_name=option_name, + override_option_name=override_option_name, + overrides=overrides, + basetemp=store_dir, + prefix=f"{info_prefix}test_rhodecode_ini", + ) + + server = RcWebServer(config_file, log_file, workers, env, info_prefix=info_prefix) + server.start() + + @request.addfinalizer + def cleanup(): + server.shutdown() + + server.wait_until_ready() + return server + + return factory + - server = RcVCSServer(config_file, log_file, workers) +@pytest.fixture(scope="session") +def celery_factory(): + def factory(request, store_dir, overrides=(), config_file=None, port=None, log_file=None, workers="3", env=None, info_prefix=""): + env = env or {"RC_NO_TEST_ENV": "1"} + rhodecode_port = port + + overrides = list(overrides) + overrides.append({"app:main": {"use_celery": "true"}}) + overrides.append({"app:main": {"celery.task_always_eager": "false"}}) + config_overrides = None + + if getattr(request, 'param', None): + config_overrides = [request.param] + overrides.extend(config_overrides) + + option_name = "celery_config" + override_option_name = None + + if not config_file: + config_file = get_config( + request.config, + option_name=option_name, + override_option_name=override_option_name, + overrides=overrides, + basetemp=store_dir, + prefix=f"{info_prefix}test_celery_ini_", + ) + + server = CeleryServer(config_file, log_file, workers, env, info_prefix=info_prefix) server.start() @request.addfinalizer @@ -88,52 +154,68 @@ def vcsserver_factory(tmpdir_factory): def _use_log_level(config): - level = config.getoption('test_loglevel') or 'critical' + level = config.getoption("test_loglevel") or "critical" return level.upper() -@pytest.fixture(scope='session') -def ini_config(request, 
tmpdir_factory, rcserver_port, vcsserver_port): - option_name = 'pyramid_config' +def _ini_config_factory(request, base_dir, rcserver_port, vcsserver_port): + option_name = "pyramid_config" log_level = _use_log_level(request.config) overrides = [ - {'server:main': {'port': rcserver_port}}, - {'app:main': { - 'cache_dir': '%(here)s/rc-tests/rc_data', - 'vcs.server': f'localhost:{vcsserver_port}', - # johbo: We will always start the VCSServer on our own based on the - # fixtures of the test cases. For the test run it must always be - # off in the INI file. - 'vcs.start_server': 'false', - - 'vcs.server.protocol': 'http', - 'vcs.scm_app_implementation': 'http', - 'vcs.svn.proxy.enabled': 'true', - 'vcs.hooks.protocol.v2': 'celery', - 'vcs.hooks.host': '*', - 'repo_store.path': TESTS_TMP_PATH, - 'app.service_api.token': 'service_secret_token', - }}, - - {'handler_console': { - 'class': 'StreamHandler', - 'args': '(sys.stderr,)', - 'level': log_level, - }}, - + {"server:main": {"port": rcserver_port}}, + { + "app:main": { + #'cache_dir': '%(here)s/rc-tests/rc_data', + "vcs.server": f"localhost:{vcsserver_port}", + # johbo: We will always start the VCSServer on our own based on the + # fixtures of the test cases. For the test run it must always be + # off in the INI file. 
+ "vcs.start_server": "false", + "vcs.server.protocol": "http", + "vcs.scm_app_implementation": "http", + "vcs.svn.proxy.enabled": "true", + "vcs.hooks.protocol.v2": "celery", + "vcs.hooks.host": "*", + "repo_store.path": TESTS_TMP_PATH, + "app.service_api.token": "service_secret_token", + } + }, + { + "handler_console": { + "class": "StreamHandler", + "args": "(sys.stderr,)", + "level": log_level, + } + }, ] filename = get_config( - request.config, option_name=option_name, - override_option_name='{}_override'.format(option_name), + request.config, + option_name=option_name, + override_option_name=f"{option_name}_override", overrides=overrides, - basetemp=tmpdir_factory.getbasetemp().strpath, - prefix='test_rce_') + basetemp=base_dir, + prefix="test_rce_", + ) return filename -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") +def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port): + base_dir = tmpdir_factory.getbasetemp().strpath + return _ini_config_factory(request, base_dir, rcserver_port, vcsserver_port) + + +@pytest.fixture(scope="session") +def ini_config_factory(request, tmpdir_factory, rcserver_port, vcsserver_port): + def _factory(ini_config_basedir, overrides=()): + return _ini_config_factory(request, ini_config_basedir, rcserver_port, vcsserver_port) + + return _factory + + +@pytest.fixture(scope="session") def ini_settings(ini_config): ini_path = ini_config return get_app_config(ini_path) @@ -141,26 +223,25 @@ def ini_settings(ini_config): def get_available_port(min_port=40000, max_port=55555): from rhodecode.lib.utils2 import get_available_port as _get_port + return _get_port(min_port, max_port) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def rcserver_port(request): port = get_available_port() - print(f'Using rhodecode port {port}') return port -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def vcsserver_port(request): - port = request.config.getoption('--vcsserver-port') + 
port = request.config.getoption("--vcsserver-port") if port is None: port = get_available_port() - print(f'Using vcsserver port {port}') return port -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def available_port_factory() -> get_available_port: """ Returns a callable which returns free port numbers. @@ -178,7 +259,7 @@ def available_port(available_port_factor return available_port_factory() -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def testini_factory(tmpdir_factory, ini_config): """ Factory to create an INI file based on TestINI. @@ -190,37 +271,38 @@ def testini_factory(tmpdir_factory, ini_ class TestIniFactory(object): - - def __init__(self, basetemp, template_ini): - self._basetemp = basetemp + def __init__(self, ini_store_dir, template_ini): + self._ini_store_dir = ini_store_dir self._template_ini = template_ini - def __call__(self, ini_params, new_file_prefix='test'): + def __call__(self, ini_params, new_file_prefix="test"): ini_file = TestINI( - self._template_ini, ini_params=ini_params, - new_file_prefix=new_file_prefix, dir=self._basetemp) + self._template_ini, ini_params=ini_params, new_file_prefix=new_file_prefix, dir=self._ini_store_dir + ) result = ini_file.create() return result -def get_config( - config, option_name, override_option_name, overrides=None, - basetemp=None, prefix='test'): +def get_config(config, option_name, override_option_name, overrides=None, basetemp=None, prefix="test"): """ Find a configuration file and apply overrides for the given `prefix`. 
""" - config_file = ( - config.getoption(option_name) or config.getini(option_name)) + try: + config_file = config.getoption(option_name) + except ValueError: + config_file = None + if not config_file: - pytest.exit( - "Configuration error, could not extract {}.".format(option_name)) + config_file = config.getini(option_name) + + if not config_file: + pytest.exit(f"Configuration error, could not extract {option_name}.") overrides = overrides or [] - config_override = config.getoption(override_option_name) - if config_override: - overrides.append(config_override) - temp_ini_file = TestINI( - config_file, ini_params=overrides, new_file_prefix=prefix, - dir=basetemp) + if override_option_name: + config_override = config.getoption(override_option_name) + if config_override: + overrides.append(config_override) + temp_ini_file = TestINI(config_file, ini_params=overrides, new_file_prefix=prefix, dir=basetemp) return temp_ini_file.create() diff --git a/rhodecode/tests/fixture_mods/fixture_utils.py b/rhodecode/tests/fixtures/fixture_utils.py rename from rhodecode/tests/fixture_mods/fixture_utils.py rename to rhodecode/tests/fixtures/fixture_utils.py --- a/rhodecode/tests/fixture_mods/fixture_utils.py +++ b/rhodecode/tests/fixtures/fixture_utils.py @@ -1,5 +1,4 @@ - -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License, version 3 @@ -30,6 +29,7 @@ import uuid import dateutil.tz import logging import functools +import textwrap import mock import pyramid.testing @@ -43,8 +43,17 @@ import rhodecode.lib from rhodecode.model.changeset_status import ChangesetStatusModel from rhodecode.model.comment import CommentsModel from rhodecode.model.db import ( - PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus, - RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi) + 
PullRequest, + PullRequestReviewers, + Repository, + RhodeCodeSetting, + ChangesetStatus, + RepoGroup, + UserGroup, + RepoRhodeCodeUi, + RepoRhodeCodeSetting, + RhodeCodeUi, +) from rhodecode.model.meta import Session from rhodecode.model.pull_request import PullRequestModel from rhodecode.model.repo import RepoModel @@ -60,12 +69,20 @@ from rhodecode.lib.str_utils import safe from rhodecode.lib.hash_utils import sha1_safe from rhodecode.lib.vcs.backends import get_backend from rhodecode.lib.vcs.nodes import FileNode +from rhodecode.lib.base import bootstrap_config from rhodecode.tests import ( - login_user_session, get_new_dir, utils, TESTS_TMP_PATH, - TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN, - TEST_USER_REGULAR_PASS) -from rhodecode.tests.utils import CustomTestApp, set_anonymous_access -from rhodecode.tests.fixture import Fixture + login_user_session, + get_new_dir, + utils, + TESTS_TMP_PATH, + TEST_USER_ADMIN_LOGIN, + TEST_USER_REGULAR_LOGIN, + TEST_USER_REGULAR2_LOGIN, + TEST_USER_REGULAR_PASS, + console_printer, +) +from rhodecode.tests.utils import set_anonymous_access +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.config import utils as config_utils log = logging.getLogger(__name__) @@ -76,36 +93,7 @@ def cmp(a, b): return (a > b) - (a < b) -@pytest.fixture(scope='session', autouse=True) -def activate_example_rcextensions(request): - """ - Patch in an example rcextensions module which verifies passed in kwargs. - """ - from rhodecode.config import rcextensions - - old_extensions = rhodecode.EXTENSIONS - rhodecode.EXTENSIONS = rcextensions - rhodecode.EXTENSIONS.calls = collections.defaultdict(list) - - @request.addfinalizer - def cleanup(): - rhodecode.EXTENSIONS = old_extensions - - -@pytest.fixture() -def capture_rcextensions(): - """ - Returns the recorded calls to entry points in rcextensions. 
- """ - calls = rhodecode.EXTENSIONS.calls - calls.clear() - # Note: At this moment, it is still the empty dict, but that will - # be filled during the test run and since it is a reference this - # is enough to make it work. - return calls - - -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def http_environ_session(): """ Allow to use "http_environ" in session scope. @@ -117,7 +105,31 @@ def plain_http_host_stub(): """ Value of HTTP_HOST in the test run. """ - return 'example.com:80' + return "example.com:80" + + +def plain_config_stub(request, request_stub): + """ + Set up pyramid.testing and return the Configurator. + """ + + config = bootstrap_config(request=request_stub) + + @request.addfinalizer + def cleanup(): + pyramid.testing.tearDown() + + return config + + +def plain_request_stub(): + """ + Stub request object. + """ + from rhodecode.lib.base import bootstrap_request + + _request = bootstrap_request(scheme="https") + return _request @pytest.fixture() @@ -132,7 +144,7 @@ def plain_http_host_only_stub(): """ Value of HTTP_HOST in the test run. """ - return plain_http_host_stub().split(':')[0] + return plain_http_host_stub().split(":")[0] @pytest.fixture() @@ -147,33 +159,21 @@ def plain_http_environ(): """ HTTP extra environ keys. - User by the test application and as well for setting up the pylons + Used by the test application and as well for setting up the pylons environment. In the case of the fixture "app" it should be possible to override this for a specific test case. 
""" return { - 'SERVER_NAME': plain_http_host_only_stub(), - 'SERVER_PORT': plain_http_host_stub().split(':')[1], - 'HTTP_HOST': plain_http_host_stub(), - 'HTTP_USER_AGENT': 'rc-test-agent', - 'REQUEST_METHOD': 'GET' + "SERVER_NAME": plain_http_host_only_stub(), + "SERVER_PORT": plain_http_host_stub().split(":")[1], + "HTTP_HOST": plain_http_host_stub(), + "HTTP_USER_AGENT": "rc-test-agent", + "REQUEST_METHOD": "GET", } -@pytest.fixture() -def http_environ(): - """ - HTTP extra environ keys. - - User by the test application and as well for setting up the pylons - environment. In the case of the fixture "app" it should be possible - to override this for a specific test case. - """ - return plain_http_environ() - - -@pytest.fixture(scope='session') -def baseapp(ini_config, vcsserver, http_environ_session): +@pytest.fixture(scope="session") +def baseapp(request, ini_config, http_environ_session, available_port_factory, vcsserver_factory, celery_factory): from rhodecode.lib.config_utils import get_app_config from rhodecode.config.middleware import make_pyramid_app @@ -181,22 +181,41 @@ def baseapp(ini_config, vcsserver, http_ pyramid.paster.setup_logging(ini_config) settings = get_app_config(ini_config) - app = make_pyramid_app({'__file__': ini_config}, **settings) + store_dir = os.path.dirname(ini_config) + + # start vcsserver + _vcsserver_port = available_port_factory() + vcsserver_instance = vcsserver_factory( + request, + store_dir=store_dir, + port=_vcsserver_port, + info_prefix="base-app-" + ) + + settings["vcs.server"] = vcsserver_instance.bind_addr - return app + # we skip setting store_dir for baseapp, it's internally set via testing rhodecode.ini + # settings['repo_store.path'] = str(store_dir) + console_printer(f' :warning: [green]pytest-setup[/green] Starting base pyramid-app: {ini_config}') + pyramid_baseapp = make_pyramid_app({"__file__": ini_config}, **settings) + + # start celery + celery_factory( + request, + store_dir=store_dir, + port=None, + 
info_prefix="base-app-", + overrides=( + {'handler_console': {'level': 'DEBUG'}}, + {'app:main': {'vcs.server': vcsserver_instance.bind_addr}}, + {'app:main': {'repo_store.path': store_dir}} + ) + ) + + return pyramid_baseapp -@pytest.fixture(scope='function') -def app(request, config_stub, baseapp, http_environ): - app = CustomTestApp( - baseapp, - extra_environ=http_environ) - if request.cls: - request.cls.app = app - return app - - -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def app_settings(baseapp, ini_config): """ Settings dictionary used to create the app. @@ -207,19 +226,19 @@ def app_settings(baseapp, ini_config): return baseapp.config.get_settings() -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def db_connection(ini_settings): # Initialize the database connection. config_utils.initialize_database(ini_settings) -LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user')) +LoginData = collections.namedtuple("LoginData", ("csrf_token", "user")) def _autologin_user(app, *args): session = login_user_session(app, *args) csrf_token = rhodecode.lib.auth.get_csrf_token(session) - return LoginData(csrf_token, session['rhodecode_user']) + return LoginData(csrf_token, session["rhodecode_user"]) @pytest.fixture() @@ -235,18 +254,17 @@ def autologin_regular_user(app): """ Utility fixture which makes sure that the regular user is logged in """ - return _autologin_user( - app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) + return _autologin_user(app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def csrf_token(request, autologin_user): return autologin_user.csrf_token -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def xhr_header(request): - return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'} + return {"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"} @pytest.fixture() @@ -257,18 +275,18 @@ def 
real_crypto_backend(monkeypatch): During the test run the crypto backend is replaced with a faster implementation based on the MD5 algorithm. """ - monkeypatch.setattr(rhodecode, 'is_test', False) + monkeypatch.setattr(rhodecode, "is_test", False) -@pytest.fixture(scope='class') +@pytest.fixture(scope="class") def index_location(request, baseapp): - index_location = baseapp.config.get_settings()['search.location'] + index_location = baseapp.config.get_settings()["search.location"] if request.cls: request.cls.index_location = index_location return index_location -@pytest.fixture(scope='session', autouse=True) +@pytest.fixture(scope="session", autouse=True) def tests_tmp_path(request): """ Create temporary directory to be used during the test session. @@ -276,7 +294,8 @@ def tests_tmp_path(request): if not os.path.exists(TESTS_TMP_PATH): os.makedirs(TESTS_TMP_PATH) - if not request.config.getoption('--keep-tmp-path'): + if not request.config.getoption("--keep-tmp-path"): + @request.addfinalizer def remove_tmp_path(): shutil.rmtree(TESTS_TMP_PATH) @@ -291,7 +310,7 @@ def test_repo_group(request): usage automatically """ fixture = Fixture() - repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '') + repogroupid = "test_repo_group_%s" % str(time.time()).replace(".", "") repo_group = fixture.create_repo_group(repogroupid) def _cleanup(): @@ -308,7 +327,7 @@ def test_user_group(request): usage automatically """ fixture = Fixture() - usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '') + usergroupid = "test_user_group_%s" % str(time.time()).replace(".", "") user_group = fixture.create_user_group(usergroupid) def _cleanup(): @@ -318,7 +337,7 @@ def test_user_group(request): return user_group -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def test_repo(request): container = TestRepoContainer() request.addfinalizer(container._cleanup) @@ -340,9 +359,9 @@ class TestRepoContainer(object): """ dump_extractors = { - 'git': 
utils.extract_git_repo_from_dump, - 'hg': utils.extract_hg_repo_from_dump, - 'svn': utils.extract_svn_repo_from_dump, + "git": utils.extract_git_repo_from_dump, + "hg": utils.extract_hg_repo_from_dump, + "svn": utils.extract_svn_repo_from_dump, } def __init__(self): @@ -358,7 +377,7 @@ class TestRepoContainer(object): return Repository.get(self._repos[key]) def _create_repo(self, dump_name, backend_alias, config): - repo_name = f'{backend_alias}-{dump_name}' + repo_name = f"{backend_alias}-{dump_name}" backend = get_backend(backend_alias) dump_extractor = self.dump_extractors[backend_alias] repo_path = dump_extractor(dump_name, repo_name) @@ -375,19 +394,17 @@ class TestRepoContainer(object): self._fixture.destroy_repo(repo_name) -def backend_base(request, backend_alias, baseapp, test_repo): - if backend_alias not in request.config.getoption('--backends'): - pytest.skip("Backend %s not selected." % (backend_alias, )) +def backend_base(request, backend_alias, test_repo): + if backend_alias not in request.config.getoption("--backends"): + pytest.skip(f"Backend {backend_alias} not selected.") utils.check_xfail_backends(request.node, backend_alias) utils.check_skip_backends(request.node, backend_alias) - repo_name = 'vcs_test_%s' % (backend_alias, ) + repo_name = "vcs_test_%s" % (backend_alias,) backend = Backend( - alias=backend_alias, - repo_name=repo_name, - test_name=request.node.name, - test_repo_container=test_repo) + alias=backend_alias, repo_name=repo_name, test_name=request.node.name, test_repo_container=test_repo + ) request.addfinalizer(backend.cleanup) return backend @@ -404,22 +421,22 @@ def backend(request, backend_alias, base for specific backends. This is intended as a utility for incremental development of a new backend implementation. 
""" - return backend_base(request, backend_alias, baseapp, test_repo) + return backend_base(request, backend_alias, test_repo) @pytest.fixture() def backend_git(request, baseapp, test_repo): - return backend_base(request, 'git', baseapp, test_repo) + return backend_base(request, "git", test_repo) @pytest.fixture() def backend_hg(request, baseapp, test_repo): - return backend_base(request, 'hg', baseapp, test_repo) + return backend_base(request, "hg", test_repo) @pytest.fixture() def backend_svn(request, baseapp, test_repo): - return backend_base(request, 'svn', baseapp, test_repo) + return backend_base(request, "svn", test_repo) @pytest.fixture() @@ -467,9 +484,9 @@ class Backend(object): session. """ - invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') + invalid_repo_name = re.compile(r"[^0-9a-zA-Z]+") _master_repo = None - _master_repo_path = '' + _master_repo_path = "" _commit_ids = {} def __init__(self, alias, repo_name, test_name, test_repo_container): @@ -500,6 +517,7 @@ class Backend(object): last repo which has been created with `create_repo`. """ from rhodecode.model.db import Repository + return Repository.get_by_repo_name(self.repo_name) @property @@ -517,9 +535,7 @@ class Backend(object): which can serve as the base to create a new commit on top of it. """ vcsrepo = self.repo.scm_instance() - head_id = ( - vcsrepo.DEFAULT_BRANCH_NAME or - vcsrepo.commit_ids[-1]) + head_id = vcsrepo.DEFAULT_BRANCH_NAME or vcsrepo.commit_ids[-1] return head_id @property @@ -543,9 +559,7 @@ class Backend(object): return self._commit_ids - def create_repo( - self, commits=None, number_of_commits=0, heads=None, - name_suffix='', bare=False, **kwargs): + def create_repo(self, commits=None, number_of_commits=0, heads=None, name_suffix="", bare=False, **kwargs): """ Create a repository and record it for later cleanup. 
@@ -559,13 +573,10 @@ class Backend(object): :param bare: set a repo as bare (no checkout) """ self.repo_name = self._next_repo_name() + name_suffix - repo = self._fixture.create_repo( - self.repo_name, repo_type=self.alias, bare=bare, **kwargs) + repo = self._fixture.create_repo(self.repo_name, repo_type=self.alias, bare=bare, **kwargs) self._cleanup_repos.append(repo.repo_name) - commits = commits or [ - {'message': f'Commit {x} of {self.repo_name}'} - for x in range(number_of_commits)] + commits = commits or [{"message": f"Commit {x} of {self.repo_name}"} for x in range(number_of_commits)] vcs_repo = repo.scm_instance() vcs_repo.count() self._add_commits_to_repo(vcs_repo, commits) @@ -579,7 +590,7 @@ class Backend(object): Make sure that repo contains all commits mentioned in `heads` """ vcsrepo = repo.scm_instance() - vcsrepo.config.clear_section('hooks') + vcsrepo.config.clear_section("hooks") commit_ids = [self._commit_ids[h] for h in heads] if do_fetch: vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids) @@ -592,21 +603,22 @@ class Backend(object): self._cleanup_repos.append(self.repo_name) return repo - def new_repo_name(self, suffix=''): + def new_repo_name(self, suffix=""): self.repo_name = self._next_repo_name() + suffix self._cleanup_repos.append(self.repo_name) return self.repo_name def _next_repo_name(self): - return "%s_%s" % ( - self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos)) + return "%s_%s" % (self.invalid_repo_name.sub("_", self._test_name), len(self._cleanup_repos)) - def ensure_file(self, filename, content=b'Test content\n'): + def ensure_file(self, filename, content=b"Test content\n"): assert self._cleanup_repos, "Avoid writing into vcs_test repos" commits = [ - {'added': [ - FileNode(filename, content=content), - ]}, + { + "added": [ + FileNode(filename, content=content), + ] + }, ] self._add_commits_to_repo(self.repo.scm_instance(), commits) @@ -627,11 +639,11 @@ class Backend(object): self._commit_ids = 
commit_ids # Creating refs for Git to allow fetching them from remote repository - if self.alias == 'git': + if self.alias == "git": refs = {} for message in self._commit_ids: - cleanup_message = message.replace(' ', '') - ref_name = f'refs/test-refs/{cleanup_message}' + cleanup_message = message.replace(" ", "") + ref_name = f"refs/test-refs/{cleanup_message}" refs[ref_name] = self._commit_ids[message] self._create_refs(repo, refs) @@ -645,7 +657,7 @@ class VcsBackend(object): Represents the test configuration for one supported vcs backend. """ - invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') + invalid_repo_name = re.compile(r"[^0-9a-zA-Z]+") def __init__(self, alias, repo_path, test_name, test_repo_container): self.alias = alias @@ -658,7 +670,7 @@ class VcsBackend(object): return self._test_repo_container(key, self.alias).scm_instance() def __repr__(self): - return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})' + return f"{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})" @property def repo(self): @@ -676,8 +688,7 @@ class VcsBackend(object): """ return get_backend(self.alias) - def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None, - bare=False): + def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None, bare=False): repo_name = self._next_repo_name() self._repo_path = get_new_dir(repo_name) repo_class = get_backend(self.alias) @@ -687,9 +698,7 @@ class VcsBackend(object): repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare) self._cleanup_repos.append(repo) - commits = commits or [ - {'message': 'Commit %s of %s' % (x, repo_name)} - for x in range(number_of_commits)] + commits = commits or [{"message": "Commit %s of %s" % (x, repo_name)} for x in range(number_of_commits)] _add_commits_to_repo(repo, commits) return repo @@ -706,38 +715,30 @@ class VcsBackend(object): return self._repo_path def _next_repo_name(self): + return 
"{}_{}".format(self.invalid_repo_name.sub("_", self._test_name), len(self._cleanup_repos)) - return "{}_{}".format( - self.invalid_repo_name.sub('_', self._test_name), - len(self._cleanup_repos) - ) - - def add_file(self, repo, filename, content='Test content\n'): + def add_file(self, repo, filename, content="Test content\n"): imc = repo.in_memory_commit imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content))) - imc.commit( - message='Automatic commit from vcsbackend fixture', - author='Automatic ') + imc.commit(message="Automatic commit from vcsbackend fixture", author="Automatic ") - def ensure_file(self, filename, content='Test content\n'): + def ensure_file(self, filename, content="Test content\n"): assert self._cleanup_repos, "Avoid writing into vcs_test repos" self.add_file(self.repo, filename, content) def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend: - if backend_alias not in request.config.getoption('--backends'): - pytest.skip("Backend %s not selected." % (backend_alias, )) + if backend_alias not in request.config.getoption("--backends"): + pytest.skip("Backend %s not selected." 
% (backend_alias,)) utils.check_xfail_backends(request.node, backend_alias) utils.check_skip_backends(request.node, backend_alias) - repo_name = f'vcs_test_{backend_alias}' + repo_name = f"vcs_test_{backend_alias}" repo_path = os.path.join(tests_tmp_path, repo_name) backend = VcsBackend( - alias=backend_alias, - repo_path=repo_path, - test_name=request.node.name, - test_repo_container=test_repo) + alias=backend_alias, repo_path=repo_path, test_name=request.node.name, test_repo_container=test_repo + ) request.addfinalizer(backend.cleanup) return backend @@ -758,17 +759,17 @@ def vcsbackend(request, backend_alias, t @pytest.fixture() def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo): - return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo) + return vcsbackend_base(request, "git", tests_tmp_path, baseapp, test_repo) @pytest.fixture() def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo): - return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo) + return vcsbackend_base(request, "hg", tests_tmp_path, baseapp, test_repo) @pytest.fixture() def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo): - return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo) + return vcsbackend_base(request, "svn", tests_tmp_path, baseapp, test_repo) @pytest.fixture() @@ -789,29 +790,28 @@ def _add_commits_to_repo(vcs_repo, commi imc = vcs_repo.in_memory_commit for idx, commit in enumerate(commits): - message = str(commit.get('message', f'Commit {idx}')) + message = str(commit.get("message", f"Commit {idx}")) - for node in commit.get('added', []): + for node in commit.get("added", []): imc.add(FileNode(safe_bytes(node.path), content=node.content)) - for node in commit.get('changed', []): + for node in commit.get("changed", []): imc.change(FileNode(safe_bytes(node.path), content=node.content)) - for node in commit.get('removed', []): + for node in commit.get("removed", []): 
imc.remove(FileNode(safe_bytes(node.path))) - parents = [ - vcs_repo.get_commit(commit_id=commit_ids[p]) - for p in commit.get('parents', [])] + parents = [vcs_repo.get_commit(commit_id=commit_ids[p]) for p in commit.get("parents", [])] - operations = ('added', 'changed', 'removed') + operations = ("added", "changed", "removed") if not any((commit.get(o) for o in operations)): - imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message))) + imc.add(FileNode(b"file_%b" % safe_bytes(str(idx)), content=safe_bytes(message))) commit = imc.commit( message=message, - author=str(commit.get('author', 'Automatic ')), - date=commit.get('date'), - branch=commit.get('branch'), - parents=parents) + author=str(commit.get("author", "Automatic ")), + date=commit.get("date"), + branch=commit.get("branch"), + parents=parents, + ) commit_ids[commit.message] = commit.raw_id @@ -842,14 +842,14 @@ class RepoServer(object): self._cleanup_servers = [] def serve(self, vcsrepo): - if vcsrepo.alias != 'svn': + if vcsrepo.alias != "svn": raise TypeError("Backend %s not supported" % vcsrepo.alias) proc = subprocess.Popen( - ['svnserve', '-d', '--foreground', '--listen-host', 'localhost', - '--root', vcsrepo.path]) + ["svnserve", "-d", "--foreground", "--listen-host", "localhost", "--root", vcsrepo.path] + ) self._cleanup_servers.append(proc) - self.url = 'svn://localhost' + self.url = "svn://localhost" def cleanup(self): for proc in self._cleanup_servers: @@ -874,7 +874,6 @@ def pr_util(backend, request, config_stu class PRTestUtility(object): - pull_request = None pull_request_id = None mergeable_patcher = None @@ -886,48 +885,55 @@ class PRTestUtility(object): self.backend = backend def create_pull_request( - self, commits=None, target_head=None, source_head=None, - revisions=None, approved=False, author=None, mergeable=False, - enable_notifications=True, name_suffix='', reviewers=None, observers=None, - title="Test", description="Description"): + self, + commits=None, + 
target_head=None, + source_head=None, + revisions=None, + approved=False, + author=None, + mergeable=False, + enable_notifications=True, + name_suffix="", + reviewers=None, + observers=None, + title="Test", + description="Description", + ): self.set_mergeable(mergeable) if not enable_notifications: # mock notification side effect - self.notification_patcher = mock.patch( - 'rhodecode.model.notification.NotificationModel.create') + self.notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create") self.notification_patcher.start() if not self.pull_request: if not commits: commits = [ - {'message': 'c1'}, - {'message': 'c2'}, - {'message': 'c3'}, + {"message": "c1"}, + {"message": "c2"}, + {"message": "c3"}, ] - target_head = 'c1' - source_head = 'c2' - revisions = ['c2'] + target_head = "c1" + source_head = "c2" + revisions = ["c2"] self.commit_ids = self.backend.create_master_repo(commits) - self.target_repository = self.backend.create_repo( - heads=[target_head], name_suffix=name_suffix) - self.source_repository = self.backend.create_repo( - heads=[source_head], name_suffix=name_suffix) - self.author = author or UserModel().get_by_username( - TEST_USER_ADMIN_LOGIN) + self.target_repository = self.backend.create_repo(heads=[target_head], name_suffix=name_suffix) + self.source_repository = self.backend.create_repo(heads=[source_head], name_suffix=name_suffix) + self.author = author or UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) model = PullRequestModel() self.create_parameters = { - 'created_by': self.author, - 'source_repo': self.source_repository.repo_name, - 'source_ref': self._default_branch_reference(source_head), - 'target_repo': self.target_repository.repo_name, - 'target_ref': self._default_branch_reference(target_head), - 'revisions': [self.commit_ids[r] for r in revisions], - 'reviewers': reviewers or self._get_reviewers(), - 'observers': observers or self._get_observers(), - 'title': title, - 'description': description, 
+ "created_by": self.author, + "source_repo": self.source_repository.repo_name, + "source_ref": self._default_branch_reference(source_head), + "target_repo": self.target_repository.repo_name, + "target_ref": self._default_branch_reference(target_head), + "revisions": [self.commit_ids[r] for r in revisions], + "reviewers": reviewers or self._get_reviewers(), + "observers": observers or self._get_observers(), + "title": title, + "description": description, } self.pull_request = model.create(**self.create_parameters) assert model.get_versions(self.pull_request) == [] @@ -943,9 +949,7 @@ class PRTestUtility(object): return self.pull_request def approve(self): - self.create_status_votes( - ChangesetStatus.STATUS_APPROVED, - *self.pull_request.reviewers) + self.create_status_votes(ChangesetStatus.STATUS_APPROVED, *self.pull_request.reviewers) def close(self): PullRequestModel().close_pull_request(self.pull_request, self.author) @@ -953,28 +957,26 @@ class PRTestUtility(object): def _default_branch_reference(self, commit_message, branch: str = None) -> str: default_branch = branch or self.backend.default_branch_name message = self.commit_ids[commit_message] - reference = f'branch:{default_branch}:{message}' + reference = f"branch:{default_branch}:{message}" return reference def _get_reviewers(self): role = PullRequestReviewers.ROLE_REVIEWER return [ - (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []), - (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []), + (TEST_USER_REGULAR_LOGIN, ["default1"], False, role, []), + (TEST_USER_REGULAR2_LOGIN, ["default2"], False, role, []), ] def _get_observers(self): - return [ - - ] + return [] def update_source_repository(self, head=None, do_fetch=False): - heads = [head or 'c3'] + heads = [head or "c3"] self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch) def update_target_repository(self, head=None, do_fetch=False): - heads = [head or 'c3'] + heads = [head or "c3"] 
self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch) def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str: @@ -1004,7 +1006,7 @@ class PRTestUtility(object): # TODO: johbo: Git and Mercurial have an inconsistent vcs api here, # remove the if once that's sorted out. if self.backend.alias == "git": - kwargs = {'branch_name': self.backend.default_branch_name} + kwargs = {"branch_name": self.backend.default_branch_name} else: kwargs = {} source_vcs.strip(removed_commit_id, **kwargs) @@ -1015,10 +1017,8 @@ class PRTestUtility(object): def create_comment(self, linked_to=None): comment = CommentsModel().create( - text="Test comment", - repo=self.target_repository.repo_name, - user=self.author, - pull_request=self.pull_request) + text="Test comment", repo=self.target_repository.repo_name, user=self.author, pull_request=self.pull_request + ) assert comment.pull_request_version_id is None if linked_to: @@ -1026,15 +1026,15 @@ class PRTestUtility(object): return comment - def create_inline_comment( - self, linked_to=None, line_no='n1', file_path='file_1'): + def create_inline_comment(self, linked_to=None, line_no="n1", file_path="file_1"): comment = CommentsModel().create( text="Test comment", repo=self.target_repository.repo_name, user=self.author, line_no=line_no, f_path=file_path, - pull_request=self.pull_request) + pull_request=self.pull_request, + ) assert comment.pull_request_version_id is None if linked_to: @@ -1044,25 +1044,20 @@ class PRTestUtility(object): def create_version_of_pull_request(self): pull_request = self.create_pull_request() - version = PullRequestModel()._create_version_from_snapshot( - pull_request) + version = PullRequestModel()._create_version_from_snapshot(pull_request) return version def create_status_votes(self, status, *reviewers): for reviewer in reviewers: ChangesetStatusModel().set_status( - repo=self.pull_request.target_repo, - status=status, - 
user=reviewer.user_id, - pull_request=self.pull_request) + repo=self.pull_request.target_repo, status=status, user=reviewer.user_id, pull_request=self.pull_request + ) def set_mergeable(self, value): if not self.mergeable_patcher: - self.mergeable_patcher = mock.patch.object( - VcsSettingsModel, 'get_general_settings') + self.mergeable_patcher = mock.patch.object(VcsSettingsModel, "get_general_settings") self.mergeable_mock = self.mergeable_patcher.start() - self.mergeable_mock.return_value = { - 'rhodecode_pr_merge_enabled': value} + self.mergeable_mock.return_value = {"rhodecode_pr_merge_enabled": value} def cleanup(self): # In case the source repository is already cleaned up, the pull @@ -1109,7 +1104,6 @@ def user_util(request, db_connection): # TODO: johbo: Split this up into utilities per domain or something similar class UserUtility(object): - def __init__(self, test_name="test"): self._test_name = self._sanitize_name(test_name) self.fixture = Fixture() @@ -1126,37 +1120,29 @@ class UserUtility(object): self.user_permissions = [] def _sanitize_name(self, name): - for char in ['[', ']']: - name = name.replace(char, '_') + for char in ["[", "]"]: + name = name.replace(char, "_") return name - def create_repo_group( - self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True): - group_name = "{prefix}_repogroup_{count}".format( - prefix=self._test_name, - count=len(self.repo_group_ids)) - repo_group = self.fixture.create_repo_group( - group_name, cur_user=owner) + def create_repo_group(self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True): + group_name = "{prefix}_repogroup_{count}".format(prefix=self._test_name, count=len(self.repo_group_ids)) + repo_group = self.fixture.create_repo_group(group_name, cur_user=owner) if auto_cleanup: self.repo_group_ids.append(repo_group.group_id) return repo_group - def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, - auto_cleanup=True, repo_type='hg', bare=False): - repo_name = "{prefix}_repository_{count}".format( - 
prefix=self._test_name, - count=len(self.repos_ids)) + def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, auto_cleanup=True, repo_type="hg", bare=False): + repo_name = "{prefix}_repository_{count}".format(prefix=self._test_name, count=len(self.repos_ids)) repository = self.fixture.create_repo( - repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare) + repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare + ) if auto_cleanup: self.repos_ids.append(repository.repo_id) return repository def create_user(self, auto_cleanup=True, **kwargs): - user_name = "{prefix}_user_{count}".format( - prefix=self._test_name, - count=len(self.user_ids)) + user_name = "{prefix}_user_{count}".format(prefix=self._test_name, count=len(self.user_ids)) user = self.fixture.create_user(user_name, **kwargs) if auto_cleanup: self.user_ids.append(user.user_id) @@ -1171,13 +1157,9 @@ class UserUtility(object): user_group = self.create_user_group(members=[user]) return user, user_group - def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None, - auto_cleanup=True, **kwargs): - group_name = "{prefix}_usergroup_{count}".format( - prefix=self._test_name, - count=len(self.user_group_ids)) - user_group = self.fixture.create_user_group( - group_name, cur_user=owner, **kwargs) + def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None, auto_cleanup=True, **kwargs): + group_name = "{prefix}_usergroup_{count}".format(prefix=self._test_name, count=len(self.user_group_ids)) + user_group = self.fixture.create_user_group(group_name, cur_user=owner, **kwargs) if auto_cleanup: self.user_group_ids.append(user_group.users_group_id) @@ -1190,52 +1172,34 @@ class UserUtility(object): self.inherit_default_user_permissions(user_name, False) self.user_permissions.append((user_name, permission_name)) - def grant_user_permission_to_repo_group( - self, repo_group, user, permission_name): - permission = 
RepoGroupModel().grant_user_permission( - repo_group, user, permission_name) - self.user_repo_group_permission_ids.append( - (repo_group.group_id, user.user_id)) + def grant_user_permission_to_repo_group(self, repo_group, user, permission_name): + permission = RepoGroupModel().grant_user_permission(repo_group, user, permission_name) + self.user_repo_group_permission_ids.append((repo_group.group_id, user.user_id)) return permission - def grant_user_group_permission_to_repo_group( - self, repo_group, user_group, permission_name): - permission = RepoGroupModel().grant_user_group_permission( - repo_group, user_group, permission_name) - self.user_group_repo_group_permission_ids.append( - (repo_group.group_id, user_group.users_group_id)) + def grant_user_group_permission_to_repo_group(self, repo_group, user_group, permission_name): + permission = RepoGroupModel().grant_user_group_permission(repo_group, user_group, permission_name) + self.user_group_repo_group_permission_ids.append((repo_group.group_id, user_group.users_group_id)) return permission - def grant_user_permission_to_repo( - self, repo, user, permission_name): - permission = RepoModel().grant_user_permission( - repo, user, permission_name) - self.user_repo_permission_ids.append( - (repo.repo_id, user.user_id)) + def grant_user_permission_to_repo(self, repo, user, permission_name): + permission = RepoModel().grant_user_permission(repo, user, permission_name) + self.user_repo_permission_ids.append((repo.repo_id, user.user_id)) return permission - def grant_user_group_permission_to_repo( - self, repo, user_group, permission_name): - permission = RepoModel().grant_user_group_permission( - repo, user_group, permission_name) - self.user_group_repo_permission_ids.append( - (repo.repo_id, user_group.users_group_id)) + def grant_user_group_permission_to_repo(self, repo, user_group, permission_name): + permission = RepoModel().grant_user_group_permission(repo, user_group, permission_name) + 
self.user_group_repo_permission_ids.append((repo.repo_id, user_group.users_group_id)) return permission - def grant_user_permission_to_user_group( - self, target_user_group, user, permission_name): - permission = UserGroupModel().grant_user_permission( - target_user_group, user, permission_name) - self.user_user_group_permission_ids.append( - (target_user_group.users_group_id, user.user_id)) + def grant_user_permission_to_user_group(self, target_user_group, user, permission_name): + permission = UserGroupModel().grant_user_permission(target_user_group, user, permission_name) + self.user_user_group_permission_ids.append((target_user_group.users_group_id, user.user_id)) return permission - def grant_user_group_permission_to_user_group( - self, target_user_group, user_group, permission_name): - permission = UserGroupModel().grant_user_group_permission( - target_user_group, user_group, permission_name) - self.user_group_user_group_permission_ids.append( - (target_user_group.users_group_id, user_group.users_group_id)) + def grant_user_group_permission_to_user_group(self, target_user_group, user_group, permission_name): + permission = UserGroupModel().grant_user_group_permission(target_user_group, user_group, permission_name) + self.user_group_user_group_permission_ids.append((target_user_group.users_group_id, user_group.users_group_id)) return permission def revoke_user_permission(self, user_name, permission_name): @@ -1285,14 +1249,11 @@ class UserUtility(object): """ first_group = RepoGroup.get(first_group_id) second_group = RepoGroup.get(second_group_id) - first_group_parts = ( - len(first_group.group_name.split('/')) if first_group else 0) - second_group_parts = ( - len(second_group.group_name.split('/')) if second_group else 0) + first_group_parts = len(first_group.group_name.split("/")) if first_group else 0 + second_group_parts = len(second_group.group_name.split("/")) if second_group else 0 return cmp(second_group_parts, first_group_parts) - 
sorted_repo_group_ids = sorted( - self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare)) + sorted_repo_group_ids = sorted(self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare)) for repo_group_id in sorted_repo_group_ids: self.fixture.destroy_repo_group(repo_group_id) @@ -1308,16 +1269,11 @@ class UserUtility(object): """ first_group = UserGroup.get(first_group_id) second_group = UserGroup.get(second_group_id) - first_group_parts = ( - len(first_group.users_group_name.split('/')) - if first_group else 0) - second_group_parts = ( - len(second_group.users_group_name.split('/')) - if second_group else 0) + first_group_parts = len(first_group.users_group_name.split("/")) if first_group else 0 + second_group_parts = len(second_group.users_group_name.split("/")) if second_group else 0 return cmp(second_group_parts, first_group_parts) - sorted_user_group_ids = sorted( - self.user_group_ids, key=functools.cmp_to_key(_user_group_compare)) + sorted_user_group_ids = sorted(self.user_group_ids, key=functools.cmp_to_key(_user_group_compare)) for user_group_id in sorted_user_group_ids: self.fixture.destroy_user_group(user_group_id) @@ -1326,22 +1282,19 @@ class UserUtility(object): self.fixture.destroy_user(user_id) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def testrun(): return { - 'uuid': uuid.uuid4(), - 'start': datetime.datetime.utcnow().isoformat(), - 'timestamp': int(time.time()), + "uuid": uuid.uuid4(), + "start": datetime.datetime.utcnow().isoformat(), + "timestamp": int(time.time()), } class AppenlightClient(object): - - url_template = '{url}?protocol_version=0.5' + url_template = "{url}?protocol_version=0.5" - def __init__( - self, url, api_key, add_server=True, add_timestamp=True, - namespace=None, request=None, testrun=None): + def __init__(self, url, api_key, add_server=True, add_timestamp=True, namespace=None, request=None, testrun=None): self.url = self.url_template.format(url=url) self.api_key = api_key 
self.add_server = add_server @@ -1362,40 +1315,41 @@ class AppenlightClient(object): def collect(self, data): if self.add_server: - data.setdefault('server', self.server) + data.setdefault("server", self.server) if self.add_timestamp: - data.setdefault('date', datetime.datetime.utcnow().isoformat()) + data.setdefault("date", datetime.datetime.utcnow().isoformat()) if self.namespace: - data.setdefault('namespace', self.namespace) + data.setdefault("namespace", self.namespace) if self.request: - data.setdefault('request', self.request) + data.setdefault("request", self.request) self.stats.append(data) def send_stats(self): tags = [ - ('testrun', self.request), - ('testrun.start', self.testrun['start']), - ('testrun.timestamp', self.testrun['timestamp']), - ('test', self.namespace), + ("testrun", self.request), + ("testrun.start", self.testrun["start"]), + ("testrun.timestamp", self.testrun["timestamp"]), + ("test", self.namespace), ] for key, value in self.tags_before.items(): - tags.append((key + '.before', value)) + tags.append((key + ".before", value)) try: delta = self.tags_after[key] - value - tags.append((key + '.delta', delta)) + tags.append((key + ".delta", delta)) except Exception: pass for key, value in self.tags_after.items(): - tags.append((key + '.after', value)) - self.collect({ - 'message': "Collected tags", - 'tags': tags, - }) + tags.append((key + ".after", value)) + self.collect( + { + "message": "Collected tags", + "tags": tags, + } + ) response = requests.post( self.url, - headers={ - 'X-appenlight-api-key': self.api_key}, + headers={"X-appenlight-api-key": self.api_key}, json=self.stats, ) @@ -1403,7 +1357,7 @@ class AppenlightClient(object): pprint.pprint(self.stats) print(response.headers) print(response.text) - raise Exception('Sending to appenlight failed') + raise Exception("Sending to appenlight failed") @pytest.fixture() @@ -1454,9 +1408,8 @@ class SettingsUtility(object): self.repo_rhodecode_ui_ids = [] self.repo_rhodecode_setting_ids = 
[] - def create_repo_rhodecode_ui( - self, repo, section, value, key=None, active=True, cleanup=True): - key = key or sha1_safe(f'{section}{value}{repo.repo_id}') + def create_repo_rhodecode_ui(self, repo, section, value, key=None, active=True, cleanup=True): + key = key or sha1_safe(f"{section}{value}{repo.repo_id}") setting = RepoRhodeCodeUi() setting.repository_id = repo.repo_id @@ -1471,9 +1424,8 @@ class SettingsUtility(object): self.repo_rhodecode_ui_ids.append(setting.ui_id) return setting - def create_rhodecode_ui( - self, section, value, key=None, active=True, cleanup=True): - key = key or sha1_safe(f'{section}{value}') + def create_rhodecode_ui(self, section, value, key=None, active=True, cleanup=True): + key = key or sha1_safe(f"{section}{value}") setting = RhodeCodeUi() setting.ui_section = section @@ -1487,10 +1439,8 @@ class SettingsUtility(object): self.rhodecode_ui_ids.append(setting.ui_id) return setting - def create_repo_rhodecode_setting( - self, repo, name, value, type_, cleanup=True): - setting = RepoRhodeCodeSetting( - repo.repo_id, key=name, val=value, type=type_) + def create_repo_rhodecode_setting(self, repo, name, value, type_, cleanup=True): + setting = RepoRhodeCodeSetting(repo.repo_id, key=name, val=value, type=type_) Session().add(setting) Session().commit() @@ -1530,13 +1480,12 @@ class SettingsUtility(object): @pytest.fixture() def no_notifications(request): - notification_patcher = mock.patch( - 'rhodecode.model.notification.NotificationModel.create') + notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create") notification_patcher.start() request.addfinalizer(notification_patcher.stop) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def repeat(request): """ The number of repetitions is based on this fixture. @@ -1544,7 +1493,7 @@ def repeat(request): Slower calls may divide it by 10 or 100. It is chosen in a way so that the tests are not too slow in our default test suite. 
""" - return request.config.getoption('--repeat') + return request.config.getoption("--repeat") @pytest.fixture() @@ -1562,42 +1511,17 @@ def context_stub(): @pytest.fixture() -def request_stub(): - """ - Stub request object. - """ - from rhodecode.lib.base import bootstrap_request - request = bootstrap_request(scheme='https') - return request - - -@pytest.fixture() -def config_stub(request, request_stub): - """ - Set up pyramid.testing and return the Configurator. - """ - from rhodecode.lib.base import bootstrap_config - config = bootstrap_config(request=request_stub) - - @request.addfinalizer - def cleanup(): - pyramid.testing.tearDown() - - return config - - -@pytest.fixture() def StubIntegrationType(): class _StubIntegrationType(IntegrationTypeBase): - """ Test integration type class """ + """Test integration type class""" - key = 'test' - display_name = 'Test integration type' - description = 'A test integration type for testing' + key = "test" + display_name = "Test integration type" + description = "A test integration type for testing" @classmethod def icon(cls): - return 'test_icon_html_image' + return "test_icon_html_image" def __init__(self, settings): super(_StubIntegrationType, self).__init__(settings) @@ -1611,15 +1535,15 @@ def StubIntegrationType(): test_string_field = colander.SchemaNode( colander.String(), missing=colander.required, - title='test string field', + title="test string field", ) test_int_field = colander.SchemaNode( colander.Int(), - title='some integer setting', + title="some integer setting", ) + return SettingsSchema() - integration_type_registry.register_integration_type(_StubIntegrationType) return _StubIntegrationType @@ -1627,18 +1551,22 @@ def StubIntegrationType(): @pytest.fixture() def stub_integration_settings(): return { - 'test_string_field': 'some data', - 'test_int_field': 100, + "test_string_field": "some data", + "test_int_field": 100, } @pytest.fixture() -def repo_integration_stub(request, repo_stub, 
StubIntegrationType, - stub_integration_settings): +def repo_integration_stub(request, repo_stub, StubIntegrationType, stub_integration_settings): integration = IntegrationModel().create( - StubIntegrationType, settings=stub_integration_settings, enabled=True, - name='test repo integration', - repo=repo_stub, repo_group=None, child_repos_only=None) + StubIntegrationType, + settings=stub_integration_settings, + enabled=True, + name="test repo integration", + repo=repo_stub, + repo_group=None, + child_repos_only=None, + ) @request.addfinalizer def cleanup(): @@ -1648,12 +1576,16 @@ def repo_integration_stub(request, repo_ @pytest.fixture() -def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, - stub_integration_settings): +def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, stub_integration_settings): integration = IntegrationModel().create( - StubIntegrationType, settings=stub_integration_settings, enabled=True, - name='test repogroup integration', - repo=None, repo_group=test_repo_group, child_repos_only=True) + StubIntegrationType, + settings=stub_integration_settings, + enabled=True, + name="test repogroup integration", + repo=None, + repo_group=test_repo_group, + child_repos_only=True, + ) @request.addfinalizer def cleanup(): @@ -1663,12 +1595,16 @@ def repogroup_integration_stub(request, @pytest.fixture() -def repogroup_recursive_integration_stub(request, test_repo_group, - StubIntegrationType, stub_integration_settings): +def repogroup_recursive_integration_stub(request, test_repo_group, StubIntegrationType, stub_integration_settings): integration = IntegrationModel().create( - StubIntegrationType, settings=stub_integration_settings, enabled=True, - name='test recursive repogroup integration', - repo=None, repo_group=test_repo_group, child_repos_only=False) + StubIntegrationType, + settings=stub_integration_settings, + enabled=True, + name="test recursive repogroup integration", + repo=None, + 
repo_group=test_repo_group, + child_repos_only=False, + ) @request.addfinalizer def cleanup(): @@ -1678,12 +1614,16 @@ def repogroup_recursive_integration_stub @pytest.fixture() -def global_integration_stub(request, StubIntegrationType, - stub_integration_settings): +def global_integration_stub(request, StubIntegrationType, stub_integration_settings): integration = IntegrationModel().create( - StubIntegrationType, settings=stub_integration_settings, enabled=True, - name='test global integration', - repo=None, repo_group=None, child_repos_only=None) + StubIntegrationType, + settings=stub_integration_settings, + enabled=True, + name="test global integration", + repo=None, + repo_group=None, + child_repos_only=None, + ) @request.addfinalizer def cleanup(): @@ -1693,12 +1633,16 @@ def global_integration_stub(request, Stu @pytest.fixture() -def root_repos_integration_stub(request, StubIntegrationType, - stub_integration_settings): +def root_repos_integration_stub(request, StubIntegrationType, stub_integration_settings): integration = IntegrationModel().create( - StubIntegrationType, settings=stub_integration_settings, enabled=True, - name='test global integration', - repo=None, repo_group=None, child_repos_only=True) + StubIntegrationType, + settings=stub_integration_settings, + enabled=True, + name="test global integration", + repo=None, + repo_group=None, + child_repos_only=True, + ) @request.addfinalizer def cleanup(): @@ -1710,8 +1654,8 @@ def root_repos_integration_stub(request, @pytest.fixture() def local_dt_to_utc(): def _factory(dt): - return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone( - dateutil.tz.tzutc()).replace(tzinfo=None) + return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(dateutil.tz.tzutc()).replace(tzinfo=None) + return _factory @@ -1724,7 +1668,7 @@ def disable_anonymous_user(request, base set_anonymous_access(True) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def rc_fixture(request): return Fixture() @@ -1734,9 
+1678,9 @@ def repo_groups(request): fixture = Fixture() session = Session() - zombie_group = fixture.create_repo_group('zombie') - parent_group = fixture.create_repo_group('parent') - child_group = fixture.create_repo_group('parent/child') + zombie_group = fixture.create_repo_group("zombie") + parent_group = fixture.create_repo_group("parent") + child_group = fixture.create_repo_group("parent/child") groups_in_db = session.query(RepoGroup).all() assert len(groups_in_db) == 3 assert child_group.group_parent_id == parent_group.group_id @@ -1748,3 +1692,4 @@ def repo_groups(request): fixture.destroy_repo_group(parent_group) return zombie_group, parent_group, child_group + diff --git a/rhodecode/tests/fixtures/function_scoped_baseapp.py b/rhodecode/tests/fixtures/function_scoped_baseapp.py new file mode 100644 --- /dev/null +++ b/rhodecode/tests/fixtures/function_scoped_baseapp.py @@ -0,0 +1,52 @@ +# Copyright (C) 2010-2024 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. 
If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +import pytest +from rhodecode.tests.utils import CustomTestApp +from rhodecode.tests.fixtures.fixture_utils import plain_http_environ, plain_config_stub, plain_request_stub + + +@pytest.fixture(scope='function') +def request_stub(): + return plain_request_stub() + + +@pytest.fixture(scope='function') +def config_stub(request, request_stub): + return plain_config_stub(request, request_stub) + + +@pytest.fixture(scope='function') +def http_environ(): + """ + HTTP extra environ keys. + + Used by the test application and as well for setting up the pylons + environment. In the case of the fixture "app" it should be possible + to override this for a specific test case. + """ + return plain_http_environ() + + +@pytest.fixture(scope='function') +def app(request, config_stub, http_environ, baseapp): + app = CustomTestApp(baseapp, extra_environ=http_environ) + if request.cls: + # inject app into a class that uses this fixtures + request.cls.app = app + return app diff --git a/rhodecode/tests/fixtures/hg_diff_mod_single_binary_file.diff b/rhodecode/tests/fixtures/hg_diff_mod_single_binary_file.diff deleted file mode 100644 --- a/rhodecode/tests/fixtures/hg_diff_mod_single_binary_file.diff +++ /dev/null @@ -1,1094 +0,0 @@ -diff --git a/US Warszawa.jpg b/US Warszawa.jpg -index 5a4b530a9322aad2b13d39a423a6a93146a09a29..b3b889566812a77f52376379e96d32d3db117150 -GIT binary patch -literal 85172 -zc$~C)2S5|s+ThHjCnN+2EtG^_0tAwf03w7A0)m1FNTf(87CIILK}Aupi-NuPMpaNy -zR4!NS9eeKuyCO>1Gn3%OckkPM|J&UpXXZO~&iVTHO)SEF{q3C`L{y8D^G?n@S$eXH -zPIVG+9l4oF`ouhW4wvsgF)>f>D<7AcEay@^Jh-`e -zIr79w)TwJ5V}ml2ota0UkM1FteYo-TOVF0Uy9a(yOdPg!(p|XtI8FaRvz{MQcYXZy -z)veR6d~f&Hk^8^oHXC**5=X{#uOb&GG|3;Ft+ZnSif~Me8^> -zZohqef5Sbf{!UGq=c*8%MTCFO)0xv=HY0x+ub(l)s&vUFuQXec(Q3-MM>}Ze?$=8m 
-z7#<3a*3z}xw9b3T;}gye!D(F=#pd`v(^ZmXIfC#^{*M&`wZNi&-t?Xi8K&hAh@x`S -z{Ad2F*Uvp?cbdESbN2F!D}N5{EbKJxw7y=`Q`pc9b+=u7mi@TtPiF_pq>7yU25um& -z$#p00aK@!VxFXLhb;k157f$hZ#XU^bE1UT)_*nBP!&ye=zSsG<`GTyr-BaRbnT@Wy -z+P&&hcI&E7>idNSYjr2pKJs|eo2A=ld?=W4z+H0LzxdeCC&gN=N6My!Pd`#QmzP>C -zoJ*5$`1JM3%NKWajP4|lxYvdhF1>D1e9gL}yma`HA6p4kGmdNT(t1kw+;V60hBr2o -zUkyLS+Pc?$)Y*gZR_T5(_g5`)Q)uU-z0RG5`$Y6RymMH)-8BA)QdX!ZXYumDgX0{07THaWT6ptt{y5@8n?=o?Ggc<-G5Dl^czbw| -zrt6t1*=8Xe74UKa6xB9?Q*p!MfGi$wJSQ}OdHVJ7dz&j>8_pnhCz?Qe(s-qNv@W}+ -z_NMPY-*CC7fLnX*PMVD1!F6sjrDon;bZW%akVhkp!^QAMn*4?!llstk#`e46&)SRe -zmm04GRTicHsPuJvmXK;B-S=kgPeGdZ2&wOVmxWQmD^2#v0&fQ?h#3TR^Bct>v}XzP -zFP>PhXZO&ZeD~_7Qyxz!i}*9Q6nJ;f*SZ&wleqHQtTCgOZ(9Fn%^J$|qBLpC>@D1g -zQ`uLm9$wnzUB2&x@s*8pPOkE}Fz?lZJokrZdevgJqdax@Gj%dgYQ4SuaJli@vG2XZ -zviwt9!fg-Y3S%wD5o^zdzMtQl+H^hXUR<`3bdtWP)DRAM`7=m7!YG_FbgtArzw&0- -z!_Rj`&ws*W%*yr3Q+?ISYZ$?14qIKFyeFOh)^Un8uVRAEp$Cti&ReHb;9=JEW9p0~ -z%jCd~g<21b!n^7ZJH9iUzM*$_v{flBggp9@)l}!EDQOejA02MXzPTy*-ouNzhWiDh -z8t%D&+V*HHi}YEm|N1KU>Ez<5yDhVRuItrMdkW318y&u$r{nN?)yr>tr1e*lU5d7D -zU#mF$Jmq0Q!~~(?{9v!pwdvtoq^pf5W_4J*eOo!7G4?&{(<|2EskzOQR+$bP$8LN> -zn&8&Eaa8_+yVp&udNWqUg%&qozEl>nF?m?t$egVkx<*}|bD%DA#oHx&8xx6RHiS;y -zJ&HxX%iKyx-|#X1RMV22tU0^xcb&PF>9_Jj+i|b?c4wnrwma$t*%n{eFzz$?!kQaF -z$NoI!^iVN=;=1ba3wt*2XiRpwsKd=|I*{nHDx7|_m0m90r*Zh|=r3KB^j1IPcAeO9 -z>MQ0u7vSDpiI;1BG@UTb<#wm})I@&F3a#(^8%e9*fOQz`zwtvkF3i5>AQJ|YhCT;WR!u5M* -z7aZ@(t$m)KeI<6j=;sOY{I<$F-Jfu{P5bqB=H8K6H1dZ>UY@WiHfv+mz20HDlU`m# -zPQNR|`>&ncwtvZ{ksVLBDJ*rzA}#y9T->c|Q=Dv*t$jomU-&|I=)p-;YWWuAP+ -zjKyk>_d6apsGqpeJK0yVzBD{~G*QyI)X8ng200{Rfx7$9BKGI_8>V1C4kw -z|Gac)^vR1KSmW2OD=?0Kad_d^E9)9ud89y3u`4Lce}HEk1h8K1t-z366Kwi`^BsTdBT#Zr8MR -zT7Q~y3_rT>=)SVsGYVYVx7i$UH(AoQF=$xO{E*RWa_5HF#GD-Vto+Lw=^b@ -zG@yu~%$meWjtk)#p3g0ZFPt5o6iB|f;y6v+Amz5H?x@P~g+zZ@$8Pj#HO*b8J>^vK}B{ucMfqgO8mVOi-*&KP~N$;DZGdyh;-BlZ!UO9_s+j!z$neMp}BXY}X -zu0<_@KWtwEA8mh1dOm-;e5Peam}`dZ+ij^M?&M6>CY$JokD3;=O=D=pgIJsL{hKRZ -ztiL_PY5wBU5i`1eDxSF0hOBPbG$UNwb-0H00b#^VgF>y!&R5r5#v~=ZpEcd5E=(#ALfQH 
-z7n=3$Y5(YKtutYjEVOglh1m^70o&@6>OQwC>_jaO`b{rS^YV(=fpd!a&RKoHQlBc% -zNLIN407BgdC&xj4gtvF*RPGqGMc@cN%Bu`@DKc{=MNZ3-bDjR3f5FGP`O#rX -zxW9Z7Cl>%m*RaS*u7iAUr -zFkF6kPUg4>c^-ERA9zJvex!VA9+w|9DKS;lbN4|E$6m?O^^H%&DyG%+tH9pJ->i(&-3%6@mGu3R|lTx7@f$@zoXY>5O)6=Z==(bQP<7M`QPJE=r|aM*x5jXcY0oKxID)vb5d4jhCCyW -zi(=X*b7E#rL{{QBFwj>%IenZQ*u`m}BT$~6nwG~E34TAp+v6|{LC~y?{d*?NYlBoD -z?z^I~=BNIC8rT2PX(B=sb0-YY?(c^S$^i4TGSTVWyqtXbU*{DF#e)Cl#Q#9re}C|} -zBa7nvm9;a6vcO5f(bOgq)h6&cR}O)J52SRghwO!Iuk}>Jr-v#XZ^tnc9&z -zBRjv;KbM}0e^>mu=iH$~ytMxN0bz4xlXpGoy;lWwZt1#rWB(iSoo!}qL}r~00)I@b -zlHHoMaB2h_f1>uT`jI!6R}#3DYYx3Tab&IU_L3#qdT|Mv@|_jchu(!e(%4Nu@9ma>J-PCTu2*`djqx$=YBbnXtW2a90_*~oFZDkZq9ygm`CY!v< -z^8L+{)(gixNl)HODnIc(9>=pHahAC>={qpF&R?2+;&$c7_ltMb&S@}vXYj$H{>iI% -zyzo1|_&U6YS_qkaXYupNt1M{JdA;4Hhr`Ij>>gLiCch-7O@hg=tdVP3=xmm>H{eFi -z=auoHGi@H!pL5$3-By|y(D97+#%Z~*_R8?4eXetodKMh}>3CMR*r-@nZ-+(NE5+STCeeJWdzU#h^*BRPs -zq}*jfwIF>Cf7`ywZzfGe@7?CUG`RAX_SR5MaOV7(>a3ePDQdfKA==zm;}Va1J|9Wn -z?ko{>xopc?HT8^j+sQkVHxes1MfLDrmB?LoY^sRz3jI8zta7Jo{=WB5KkHooQEES8 -zCscR4*y6Yr>gM*R~po33lm%-Lf*WX{u~M}~p7 -zZnezXx%i2`Wl`UwSi9Qw<6Qx=+ppvoo8v+n*=fYKHQA; -zzMRo`AV1)J<)RHIn+#<;kT;LRHc<{6bVnxOH|%{`@u)P!T4J|6*LtV$^ftJ3CeJc< -zTm-Xs(T3Y3^9JU|X|7{8HRKgI-S8N8Y5SVA3a9hSpc60o`xh^=c(7sq`m-#{tLD4U -z&99J&^ekUIIig*(NeB9wPAfeb%Dr=WlFq23YTU{rjm}z|?;LA675e?_RenTu_>!*+ -z^UM1$e3>`pZtH!dH0gvOgUXG&KK=d~_O{bwR*p$Ik(K6sT;e)0?rbSyqLp1xyP+g0 -zt#jIxKaZZ&R5NtzZ7{7+Oukdh&gMFiCC%YXg -zJe0Y!t+aX8vbLE&tDF)m)QY|zNcNueXPTCLbLy$p7vX0g=e!IF+#@()abkpF%COdg -zITJ5tW)h#`LB9eyyzj2bjz`|Q{;`(7_^ -zp0%`c+1jy*`dZYom{m&*VlI?kygs{ZBx`WoC<@K%LC=gti5+Z#IlL)zpid1FQphuDo5ou_&@tk>+_ -z_=GdeqHC4(4>Lg)w&A}{=EI?!@ce%Wsk?X?m3bCLH#wUYL}s6 -z=ji)zVg==CM=UbN?c5qpHd>%bp$Tu-jVaWt4j<(PxZbR%R>?n;?!&@je&p -zIo5-swer%iw!|LyDoWYoN#`XOk1x}9yGwgnA?JRcm^;I5^9qw@cZ2J*!+#dcBz8$j(umopjhU-tFG{IW&*6VDPJ*%Xe0- -zT}g>)xpFx;zN6H~)Mr0`hgYJFFtPi@+n85ZHZ1)z?Zl-w-8--7<ae`-WOcIt#UL!-s^H_9_7qQ`J!2WW>t8(82&Jx -z<)!z)T8qe;?m#ZPE%&~)iDPq$Jmd7L=dD`HNS1LmMiX30r(|u2O%$vi>WW;jx_`7g 
-z@a4kRbr50QX+mQ;CWS#SwHLK -z+v@j`+=)gJxCY#%CBHOToYHV-{odu0?Ip>^K`*Mv -zGh$>%on@8j6FSnmO=eoU%RhrXNnD+DNgC*U0D-QW4CuTm$%KbL`Nb;(m -zpQlmQ>{=e?>K{sf8+8w7^X2UAJnl)yo$u#KsHa&Q)MM(kc7OPCJ?jp4i|5a2*^57X -zM8e$@V>(JhY>nTv$M>H-vob9Bn@a=Nn0(%ZA}m>WTrj%m;>?mMQ55aMEuHgizB({d -zzOcL}zj+XJ?0R;!Lr3z|GZ`6LBPZ-yHYTL&WE`h;%=p)A+oop|SMDh9UDY3Sx+XC1 -zapjytL!v+OyT2Euc~vF%93FP_RKl_7t9Qq_jxOZA-uTYvv9+s4dk00&!|z9dg=q7h -zGkceBc(vk^*^bw1Hf+y{W4rK&Pw$G$-lt*ISH+hcOQ6Soz`b*OwLz3}aryPhef=b| -zqWtFHokH;-r!X9Kp!2wr0iW-8M}Ls3hE{#$DT(O -z|CPZ2R(Ik$i3Zn!V=h+J1LYGZbDf<3+gRa0k3;yrAufsF!A@9}pO*?cMO-YBQa$+} -z;*Nn`2PvIhez5BP&#{NnSq@kFy`l2t^u%AGnX{|2gFxuQbrHD8F#i|$(HVKUTr4Au -zVjZlYB8}wlq!}2*4^B_c<&ME{C;RPzx!9ulV64;NwFc*#m2)Nk@m!}t8yVE<^!F6D -zlat{8G28q*Mi>YVl`*fqGT^lz6!#8_e*ZDl`In$}aBwkd*b(Y%Df7)c-ZN5n-Jya* -z)Q8mBIxkD#wny<+l>YRL3DurEO)zy+#GSe0`ltHiWK!LGyhqxmuf%h=&#hWw?sMqn -za2C(>)x4g>^$Sv;BNt4)G~b+fdF)wItbEJbu`_O~6}fF3U3RGmF3L5U^mmaI3=Zmj -zRa6FOh>nTniX6Eju@g5Vf8s>hZ!{Vl+zn5h1du<*dO~JKx_q1sSKv4hPYs$dIJWhb -z=b|j=`ZpFFOyj6@`4o9hc#b?pj`BKqg3~We5tg4fF+Bq`34f0leG>B$CuXK{2lnLj -zE1pF8EfggMe^jn~I0y(;ZModP3xY_si+_p`RYRQr;eCqO5#`3=U{C+STNRi?eeh-K -z+I1^aBO|+X{=CpSrC`%J$2`4)caxx(tw9Gi3(l%JR>Eca%s=MNqx!Nm(#yGCFwWm+ -zX|%>4-r_N?f6ksHl{L>d`LD&aNx#)?4Ht{oA|DAkAZ$>>v0mFCV-M -zTD_uE7lH^CHBes#OLdcN)lO(<*YR=&$anod>}tv#qYHTl`c=AcgGuyOE)jEvMjH~z@Vx$J_> -z%ywM9;`EdHm)-g=SA~pqfHveR%=NP;z1(biyZZLc8+v1^tBXhM>~HSTwdj7ETA=@N -zS1NPTgDJN+e3y%$wrZ_wDbL>>EVYFWAB4EY@20G|4nyXCMXy`F`gw>xw3yTItG;gE -z?&|7l8OJ!|8+Y;k=VBwptfM{a=TALy|FqAy2j+q~>B!-xjJ*pQ)J!(4U(uPI{i37T -z@KLd0&-3Sw)Tib-J%;P8O-G94abuV5yz4F=-FEK5rRJ9Y>+QvQUr9ylCoHvI$bs{> -zQI;I`C!D%~gZxYo^NeF@gx-Cba8Drj=(G{zmv4BkSiZ6Uj<$ogXI)d!hDUdNPQJ7= -z@%-^=bx$lJ;zB`E0#&M=h73(mvn#RBf*jhkdS-H&jHEVd8`8-&dhf+Jg%E~8h$#vTv{8xC;asYi=E@I -zCPrHAR?pXRp1azw$andr(`h~pLvQICEgbIsd?W3W_6y5AQZG`VH+1HVg~t=p^kfU{ -zJ4jlygO2b0qcY)Z;w`6CC!gxMzvnimQf}4?NNz~6D}lVCRo*EX8EpV -zpGTh0srchV*7qiA9Iac_P0Y|dR8nr6JPqq(lMZDz+>E#X0c}wyR=grTn_$+^EqrZ2-q6ItD -z#%z~9+jw@`s%)zFrL9d7(Us>a{ZA84(dU?a 
-zc|Iq9Q$cI*#@<qs4hNbQqUUj~5f6M*@-R!$jcS!}wLsMHv70?R~eRk?q>x=9w>g}Q@;3mvY -zp3R;;VQ$T)g(h8D8y+svxOM9Myws}+dAF;R-;7^7>CSlf6mKELu~YI=FfC)ZoIT<2 -znTGqHx2$T6SskKzwuIH!1PsX3(?bYm}b)0n$*>$cQyVAh!h<$vter)#MuVe1k -zZjUC#W;S*=DiX#`IC&_&e$~Nibqf#eYivEYQ7}v2VnOiav>}os^vPCl@7=n6gZ5_e -z&HhITZ5&icc`E6vBh*H&z&-QLzI -zzdY)`^B-U4z0Q>gB}d1U-EKM3`ory8z45%VLjgdaUj4O!bxH -z`A_ZNQW3ACUPopd6JHY#P!%3{EltCbS|81?x{LS?o(nV&3ievO$!{7nHNB^g+)7UL -z&D@%t9G9H?q_8p|GUjSpL;B~#FZMWP2<*N|zN+svCx+yWMncHk~&2_4$3zjs|`R+7-Mij6Q5=<>6|b71!4<3SJ!M -zd3^c9Z6A9Dk;14{H90#)CuPz8y7&BgpdmCG)y_&Y>W5@O8{d?lo_k~5@KjM|^ -zbA0anh4&29GfgKByZhPaXv~Zl#h!B`)=Z~HQpGw8GC$nA9BVz|6u+hQTH=Je;;Z8B -zgU8>lyuIgOv0aj`%tp-HZ#^Uagl+T372ij%h_~+!yyFx-KH_k(Rf<*S^HUDbWo2i! -zp1EGR@EzmM-I(EgzHM=1>eKaSw4=21r&FfSnI6Ey*&f-`uzPY;>w)3>jrV8oNk1@Z -z|J|`ejxBC|*D<52Z&JQ({vY}87+LCfc75CbZsNuCoi6iStBc+TjY}Ta-?iYtnl-F7 -zy&M|>vY%0>gVv!UtK=u6=r_y+qC?Z -z)r8@l>uraIy2!E$KAc@G%uN+uIKMX7O>*x@-8aSe7oQIGdiN6fJ@HL>mtH^mKBG$^ -ztTN=UYnfR5y&acy=dM&+cl2T5<2#{? -z*XU+`zjxTJC2wk)dD4^fZ<}s!nQ~*w_U`R>i?aDY?w#mcsD4qs+<4f~y;HaT-1#t| -zk8?XHucI@47FO21G;<>9ja?Ql`b=7XJZ=H+!Ue@i&tzc+xq-|-=mLb+9rN}(@s-V{1G2K*0#m{^XXTk-o?GYv!(0h0o`X6_pDo7 -zf3Ey+uwQodNavij-)?+c!TId@x$tv;Pl8iX_J{ayrJsJBNuqfD-CY;{Q*gT$WO3_m0lL@FC>3 -z-8_#s(+_wK>59p}VO%KL{btOIHCqg>HGi6vbx2{%-Z8xU^0dZ_n{Vyc*XkIZyN}dZ -z{dn=>+b{N$io35RHIBUbExkuPx2)h+Ltkgn$eUdk4`2LVQP3M>WaV+M?BSs!Zl1fh -z#U2>-`h4#03t!%EJL;=VzYx3c -z>y`Q_+Vk7}snHdut~7riRkoPi0)nBad78BseQ~08!yfMjjo@HA$HQhT+eE!cq1jQ> -z$v$@y^1=#n+eeM6j?+A(S7gzha`jAP+TszzufKP;+xn0fdGWoy&CZ8op858yjGH_6 -zK159S#>nV4 -z3At;;UwGq4Zr9{{t?_m~p6xD*pYWpfr04$-<_(U?N2A#R@Dqbr%~>GAf;?wu$zMev -z=89ZhxFV5=>nsw0HUU=x?gD`eHXf}DorPSXivWxfbA>_?S15Mmx;i>xZD>C>M*`M& -zktpYZb*jce@%&?dLx^22hHLsWIdoSyye1;R6J*wP^ -zv#Tq*V+q$uD0Bg92t~go3Iw8m2(29jE{?z6_&N>L|I%U*`dXR-IYKxnVN&AMEJ6|b -zVj8}RzMW2!CnnpsYClAZXlf(bA3Ftt&4AD=8CJwV -z5RLmo)LTGpRJH&0k&rk9_%m;fxN+1Yt -z4(Xxo_-T+kek>FL$|U3k6btT;K=A>k5I+_^j`m}xl|Tz1CX@`30aloh6dDCk8Afym 
-zbVh0;Aumn12n=I_s}z(apx_`+I3J3~%4ATJ!kP8f_F`!hoLQ%9AZ~)Q>v3)-OT5@{ -zHkj!MuHm3$fwBRVW1!pzg$SvGt0yS&pcI3G?{9-6>I{v<%>Wozr>TVj!|Q}16<}@& -z79U>kuESApg~veeKtaIMW9#+V=+k5CwY9N(9g~U8h^ePg(S|J5_Oj6Jp&hb1Z6%OY -z$I?~-s^^Xz08;DtVimx!#DJ7K3LOIy2LO4UP^<#1ZB>8~7i>~q$JXa4hlY(%&GGP5 -z0s6`ZB-U$T+fA&e(b0Q+owWe#2&;>VRRZ;{L&T0=I&c^`Wf;I>7{GQGxR!u|0Pal& -zbV&x#MR0w9LU3|jRxF(&E;gs+8hj1hI9+g7 -z7ks+YmUFt()#WryJLP){Y3gq~c!#1P4!aBkP-v|xOLLH@VczJlDds`HFX#3 -z9@J6W>fYC(qoux$;6NN-SUrjpg@lcYABDIav*lzSgEt&Ia14<&YjgNV;FhDhoYtc{ -zoTIJUoWso{IR~3fTMR4(g4GWNxjWCoHp466vlO|>*BFr(|F-ykan&3R?A(nuX -z#tP)rgJRFp;MlP&I6Rgs$CjnVxo`c>8o6MN-f-3|CTBg@hr?kRb4~Zgo>d9&@t!&^bYEWs8ouCqmHQKkPFBKTtCu}!>iF<33yi=;tE5q;RLt| -z%&yT4hJqu#YqVf+kMORg2zbFY$s@hzEL><*vod&ObWMp-OcNQ5iLQZb;6*Vt@J?obSF5n>8 -zX!HS)bZF!dH&+*l*jXeLI63ki?Cp5AHr7^_7UpKACR~oOk)Z)wUr$#@o28|x!Bl6^ -z)o4@-nM5SuaR?0A!8Lk5(Y5*{o`H#pNsPU!&i=h#gR@|MG^v3!e;Z`*FOv-arEc^u -zb>m<4;WZGe#^21x7hSj(@_$wXY1P228VG#~tQ88Lq?+WHJl@PNy++?BIUxbe@HJy{ -zYy98yRa?QN&_GbKwxZEE$r_BV*ujs=T?hAWV*dwjV7-KvG7C7 -z_!?!AhV<>#rUlYzqc=L15Ydq@{%58lq1P341O#y+)c?166Qq?b^y0G%=y1 -z1fE*5S#n|=z+58OV=aWU@JowC@#u$^gfwmq9xRSs3_uIlFO6GnhN8nSEdgB3e8H@L -zYzNI0pXf4^HiH^XaQCax;MD|x5dj5H3~=Rsdg)xWT3%kpt=TvtdT_4^dX0&R(E}SU -z_cH?vg2nvCdxN9s@$K!DXHp>`Ibl5d%<+lnKK;gX%PZvAUKe0{z=ryz0m3BypGK9J -z`z4$CB_}3(D_8fakw#)SC^9x0J0L)vZ;Yx*H30PBv7Q9q7!xJ3fx!Eq8#7DvHBgfM -zS4)Db1vK~#baK%h1c3!=xZ}7r(8y>rFw_OT$X%fFaV{A1n8d(fR`_4DY6uogGj918 -zs0L0jd;8}1mPAzx(SrE}LP38s|Ag{#e>1Lsc|v(&Q-4X48JB5RUK<=-p5>PSo*E9+ -z5okYLVNm10Af|?ykOsQ}+@l=eKQh|Wz(gZvpc+00kpNcSQAPrc>jG%3LK})Bxyvu~_^3b=6O=nEnxs$?sMM56~^;2i+X!)hN)xbLcn{K(X -zzedX^8fSpSC?UiEheA{ykaYJNT^@j~dF6lz7tLyzyc$Aun}K@_m#G0X07|_Gv)~c2 -z(OkdsUtFMUQJoa!1HkHLiD~63Q9_yipIcG$p~N)Gu|T9N0LDv_#sj7Szr+Qo`Y|bI -z*7$!lF)%6DFw^9^^3km#K9SLbpE{tJfWq;vHG}7ksFlL=B4VSPnILeQ7a3iLz=%(R -zcTBB0=!vyt6kd_I(qTZ?l7eJ#*XC<&ep -zlfi6P=peivdIZISZx(PiAHgR9_Q6bi3zP=9tp$IJq(^!K#X}=N*$p4UA0%9ZWC#H7=~RZUHe2(IV?bUKp&0hM1* -z(U5@qDJhLcBMyNe6MYpAP6GAzgXlOyGMD)obu9Y?8bUsg 
-zr|Ig-#~e-BTgcPh^Q|VRf{T+QT*fTu(`$oyGDv1(0-I@=X_yFfbajAFNhE2BEUhdp -z&CE1}0^p71obB;F6Mk!-tRAzBt9f(KaihegCL=jyxMAysw6E$xtX*pTG5v -zGrZkP&NbBSJ8#Ul%|kKi*8dP!kDm&uL1K7$EfLkM&5&CEs|Ix6;DE2B*s&p)PSxTJ -z#iA)A6dOz!20S~_Z;ee1lEGZL~SkI_)!T-p~)IUHQ9zDzC&4X -z-WErm3EA9xgpRJdmBlf2b927Kvh+SYn$;W9{~WgnKLxUa)s%cA74w;%OoXYf>#2q} -zHR75$njj`do)AUd)YQhBP1Hn5Pt-w4Pc%eHPsB)11RR2qo=CK@{+;wSv|yASP5WQD -z#Iyl4os((0dcJi%Cv$Q%kTQnoI~j!POATU>QBTwx-8?nr#tS3xL=`LP#7TA2lfrZ -zeI>FW9hj@40iDJqdu!ntFifVCfduq4qL_w&;67+T@ehC=Ott~=xdH71z!NDIB>)WT -z=`z&@=BQz6kcN_rPQ+ASQ~zmIujrK&hjJ)7H_Rtc_#R -zRokJH5mc7wC^``~q{C!l5~^rGag8Y&8YZ+vbO2V@K=BV{>M?0#w2j6F8~7^KM<>z| -zOwzHjbOh7c8M>H2p%SGe5)dO)r+Q-gR2E}mrtwQufHoCsVC2!peQip^%Hn($8z-eWfw^c0B9ht~l=jZpRn?1Gf|Z -zJ-7G%&$*raf6MIz!0qraZioNBa=YL^;&$!-fZOr1LX6pgS1_di86EYr`6_kGoXbJU8X!c|#ptp4K_K6-}Lq{1K92X=`WCvzrj0lq$R> -zupuT|+zlZ2#941~k -zHrUyFsTN8`0w!JrJW4u3h8iYMfN&Z@8D9qI=IV+G7I4)ux4f;b;ENu<4M2V5Hb@41 -zG@dLQ$Q!J*DzFg7TYgi-4{K?G55H14BL;yAdm`&+Hhe>24>sWhbXW?oBNS-7R6m^r -z<49z31A=3LGazISm`uWX5C{~M@8~GnD0m7EPsSl6(9(!DkVvTWN?}PzG+jeCg=dT -zEafV>LXU{aHj#z-dzfq!^)RpRFMm&8@0UDlsP4>jsH+bgeUu|a`cll5mZGE?!Vob< -ztRXtV3}2=%D_95H8Q39J%>W`}`qfmWTptV=vREmItOU%+!$?jX(0HE3rEaL%xoC%qkDn%B;iT-ly@EpbaM=up$V6BIbVCISS%a(80 -zuzcwT#6j^)aSk2|GhsbgtT>~1e)Gzeo0o51LvcP2;M@sFo*HBX4^`n@lcW3s8m*2WwQZA>n;GmKF>pn;Og`R4t$ -zL!d+fRYXclr*tb&5?NZ22An)iTxLO8=sIaYhWC=KClp^#jhX4GR?`x&J-zi7A~zKnoEjS-jKn4qh-3;4LI5&|1Ok~t1!zGNCQ>FvTLM80bq0sw -z$w*+}iF_=I0y_nJ*3eaI*FNxrWJ*bc&Cs7v3#e2r!ezzN{fc*r5Bn64+W&wlYhdPK -zyzIvoob2ZoyzJ)&Tq0V4OcevNjUwDe^GYyJB~uiFFA;|TM;PGyr_>TfCmtdoSbm0z -z*(zn0hKNcGY6rj-LMfoqa({W1pQXB(TS>+odTbV~rgAHlbb?_D`7c@G@j8@Rtpjrx -zF{L(mH@ywO91lSOsU^z#_zeUEZ6RR(H6a1Q1Oj4*!8LFjd=bXOSn5e#0`5OC7X^(f -zP-hB2n7F{0mk49V4Bbl@1qRlnt&XNH;mLk&j3;}``;3axz_w6_Q7AEU7Q%roE%fWx -z@v{S9={gL4h@4YHbV}7r#gv#c8U+ba+&O24{8KuGfu)SFNvdR#rYh{ZgC*7Uvw{aS^`OLx -zySso7>JYzAz{m8(pahFD!}4FIR)~WGc`yMT{Xa{$fEB%?O}w-y<|9m}Z>LfIr`Z>_ -z*kDAs7HvJ1R#|Q0&O!ZkMGWoQOyMg2jgxo*ID!zcenpa&!HMwiDk5) 
-zT<+nF83_-fi0rQBZV*EBBL}G!!9`9KsSxJtG1MhNM({uFP+b6nP5ug=*lasC%I9|I -zUpri|Y#zqpc9^xc%XAh9NCsTYq;oL~!^KQG7c=QxY!Ej~G!P>R`M*L`ciHcomhn`< -zDy%UmTLbG1#HwPkGZw@m8oyFI5}^hYL0QHbXYD^_$l|R<3xd}4b!ZLai=suNrX)rQ=Ui#S_I>woW^s?kAwr9sw(a=E}{bbT|qj5TgiW5@b&V;{OQq0bxGRz0ZBX -zQ$bB1DDb0q;DeLGMnAT|MnFAJR3sxEz~j({f~3wQDV-^S(F=(Qq#8yq);McIATE$F -zg+4`t2wZ42vLlp*{trr>!^Cs(2%axPr3gI8Q|S=#c(9ozqQTTuMJ$LmOA}yB7wLGq -z|Ia^ao{m`V=^h;9;l4Ulk$eF@4-bJw@U;unpku|;6*so-Q9LP`{-+<$i&3L!w8SZ(>x#wSA&6arG^t4W1_t3ge6%}es#inoKS}X<7g)* -z7YWXqjmNkh%Qv$z<-x}EHG9Acv)P05&Fz1M_w57h!Pij5Ck4txvkS)9R6Qb}t*6bm -zB-xmWNv>wY5I;@~5~CN%3FoIHa*o`79Df=znN!LsHPg{&>Fe6CY;=dPhUgMmy1ETK -z2NsX#kk6~&RXE^y4!SHnWVFNt9(+_B7j4IJiEIr6Crs<>NSu^;4Fo;rXDpTAgo*|B -zlEMjlbpcLoZNSE|+6-+8TN53_#ZcyM%7UV -zvrbp<;bH!MIy7wtXGvk;Ob3Ao_0hEZUm?2*>mdWENorvv#f%~s*bXw8j`yIF2zq)f -z5ZZeh!FmvlN#oLRw1Hd@?N|PdMl+()iKsTjUTzTSnBAqbN$NxnkqbN+oFx|U4`gs~ -z18;f&@KMQsILvWFJ20b!t2R_A9iBku{uV@MOm__3GF0)PHVKHUkdMB?fO!D3&Kkh@ -z0;x68Hu)<*hKdw?YxUmZ;ubhov7N}$2=Q`=(xN#}(mBwIOjrwhDUPk}dpj!5+RV&= -zvP)e9QpWSh4WKPFq4iP&h|Z*Q>5g=~8buv~it&qq2>Vji2^2E+5{s%S!F)4yWz5D> -zBVh*zF=p>U6cHpg1r3umu(TrPUr;d5f}(Ukh$x^?o}Yrr6{w%a&QHnEQW<4b+wzoW -z2U^3uPbL5og(j(BvwzJ`=^{6%+I*9cl|C&ksjC8wj5Kk}i8ybspU?5K?PG#*IP`pM -z2qGB~E&-pMb`Zx@dE@T~yz%$a^(aKZY2r+A65JTv1Y9AAvGF9DKB9x?A?A2XstY2f -z1|oqJf2tZCrXhGdLco*NXl%SS&I)fsv81{qBD{neh!3C!(V~$wB$YfBUyjVh-@tbf -zx~R_y&&l85Z!}{X8BI!1X^5H@*cX`sp27lNuMK1cTN5ma!=PZ;m*7VvlW{a00iqCb -zFablmltR&vY=mlnUTx~XI)Hr4FOFy6(D)cr=|nyc^N?j&X3yOnQ$w%}c*t#JEdPMs -zc(QB=Rl@_`GK~I6MvC(Lic5+cw-mF_z`(WdgzbT`z0}LcU;SZ6xb(vofBlCs{t?*q -zFklh3ViqB;8PWnQ0n7mo-6IbGL>y5|8>g*h30vYUwJfy*-~jEZl&N$zO&G7H>4`4} -z+=Qobz$wlvfbZRq@xq4VQtUJK;xYhU(QC903r0>BX8?E -zQ7}Jzh0m~b-({PT5XDPHqouXNj_^I-bClwu;`OmB0WN)I=IraryzxWa6TnUSkR@!b -zq7aL1j*xYAS$dGMHwzB~f0m3U2EIx$*AmiY!Ig=1om9g)Znq!2@v^FtdV8ml!0~l;&iWu!=WLc?5@Y~B? 
-z<@;U@RrW&pcI5Zq15<_?Lfl|i6!)aXPRg^6avifqEO8I<8M-e%vd|CEDAQ}{))^JU -zR_0hZvb3~Af`;8(xV$SoD8j-De$x9BDK%x^Jom>XA5=Ec|4l5%bf=+IH;M|oAT9(k -zH517sBv6ZyVnP;`f?708{oqm4=88h2C{ff{8g4QLCQuO^1;K0LZSc-`UwkBf0-l7& -z@;7*(VJSG61Ok4b><|{3C6sDlxWf1nLR4)1Vn8@w4G3qi^8W&mno35WNewX3lrz*B -z;S7X;VH~{zwXQ`Pc;7%8Fu|zsN;S~6NCZrVap*dr^%Q1^NXhg8LjXj#;`tO3!BqiVONSXEeH;x_|f8E -zIjs<$PEkh?Trv#9^;im-Atffp45nbH{)@?8h8f+-6u;oZkmX1ojfWO~5n -z$g*=xOa|q+W!>qO?U%h?hvUzh^>K7?~1t6#fHvSARM3$Q}Z(nGWWHLMWU!N>alvG49A+w -ziQ}~?PO+{=Zn*gKR -zl9~XwmLOHWP9cjRDmKeO*VtI~!dTk@`xgX-C2_!_0Z_e)v4X>ZEp$NN8K4LSz%z`8 -z0B}+1f;c!ht2>7|1CwrnQo<4Kwb0(ETceBUVi|N@H5664YUql(40JhNj5xa3({+I= -z#uSGx_PRzxqRSr;HGn@MUp5eJpn|B3s<4OybYnpW4~seg&ke*GKG4@?4W1_dXvOH=L%jS^DiamSxUayUCPAm9HabLC -zVjOWI5wDN4gb^Gb&2l0nsyc8vT}d&dbP|q)ra_4W)WEUPW-%d(kV3!_*mxXX57Nck -zK$duvF<(h34B%=pzy*)ibpID~Zvq}wkv)o6)!lmU+nwIK(@Q$tSvr`Ig%F6OSpo=z -zAP^v!NRWLe1Q8Gzj=PS43nHTuLckCiiHbNnGT<^Uj0!4lGlMcCKgNZ0zf*N@ -zCqbS0&wKCt{@+WdZk>B?-L2}>+3M6O0FZ+o5)nsuffzV~IbP}W_EIJ(x|Oc3HEXe1 -z8_?!!r)igHg>QKbYI(gEY*7-GRx+md5gKl -zESSv&`N4v91%z2R>(8l*SD~!*H%nPr_uxr+c?DBGdr2TY&H~KD`D?xd&K?t}u*Q&P -z8&xr2!wAeBWZ*L`TW< -z^A%yLo=FH&NhwGPrNsBd!N(pu_yjlk`mPwM+PL+4F0ZR@BT6O}%b$2F>4&U<3Qkm# -zT!RqO@5iWre^1{Cj+LmAE8ht+mKK-B$Lg8Wjq?Fh#vNdgf*a~^BNJ;0rv}S -zsx9OkRnkC`OhdtLeNq)-&#N-7qNF)g88?yAG81h-mudB8czHtD_zvB|Q`7j=4f&Ml -z#&^rKJfms6L2ZB|Om5?6>V93O?V=Q}m;6Rqz_Lc5_|VHkszhrk;yXX?T6_Q`(#Zz| -z{>KZ#n?KFsAA&lr2aMXvg~+h3M~l!EWt33$??qGU}VGvZns3cPD{Fb#p$lo_4KB!OIWCvPsoD5OBncP69ysF -z((>a;=nF%Vp9ph=v&e~Z&@1tfPAk|wTDyI!w#F{{i|r(Y4ZVHnqQhsB{HxPRtw*LzV!SzqGil2g(;}JnYs}cph@zRbx0zW;%xpl{ -z%oGT-jmdZpRmMBT+S)ujw}+%bTi2&39Mi*~nSp^~ht+N)DXR%7DYhzBleFR;sXXgId|dqK?5t!A3OFxLiDP{#~u3; -zAI_LNWA>~WIODm4*f)E|ocUc>-Fwx8rj-9W>gKcqA9c -zNC_$=MrDt)q>Nz8p$2087+MP -z-%KD2JsuA!2E_i>e%_JR;odn!a1?7eXR)L)+vlQdnBn+TiZq?;neCwBVE62e_f_Na -zoPaqH4-luVx7DQEj-na|B;(HvC;rs1X;1y5?p}tK+7BrN%K9?`CG>gXmd3C5C7!|) -z-uiapmPh`yY-x4d#{2HO@yPf&bIF%)B--mn6bNrtluk>$^4`4x00;a{_{ -z>}kyR6i7XKGj1Jjo`R?Jm2=pH&WG!CvXc{o=u-&DpKIA6*) 
-zRT<)08c$hI%2hL85mPvrHVZZ7RNXz#Q1V32Q{^`LAN@V0U-2%$7x{ozoKO7o*Ihte -zrFLHI@=dg;-)+m#u14F -z$(;jwkN(%U@C7iTByMwZ)Pd-Cb|J%jht~485hH(Acj#aX68WdtS>?E -z4_B1K*NuRPa58M{2ec$~fYX^nGFqpM38i}U*&gvjShfi2nFlTvMXTdQo{WeSxJsy8=;tg8 -zj}eADFAGm}P7Pg=G0WNDUGBWbdu?!CMw-oL>TnYh>A)H~6FVNz%caq%CRE|8_7NYm -z#rqr)hmXrAxRYSQMm>$CBtKT)=%n7V?6au=L5)!Gy&zWh>lxfud!6b4q}ZD1e~9NV -z%$w0Nao3uxjBs6P$)dvCu`d0fk7gYIdroF#?tm-5tRx@4dF9jf&wsLV(3Jtd-)plJ -zSl$T?{OI?S@2(i!JZScZblUJ#(wE#$z5q^=9@pX_WE~wpBdYxj-jQ+*>fBDfcggKt -zZ;)crm&Q-}6d|r%co!Maweg6sUlg^Ro@>XNe1vzyX<$5BRc#I8AlJ-M;r*KwBTp!z -z1!qt>{J9ikKNn{{6!ZiY<9j(A#_EgC#L5cu;e8sRuF5Nl=vzTmQHg^RD@%xL@5;rd -z#6KUnQ~MCs+|PY+)8th@z69tzH2E2?h5lee1JIFpobRja@96944@8?>swvS|k^X_rOhW(L|9#t);2q9WLI#Z{E~?*WOjoFWOKVzq=H3UEJuw -zgZ9?VdGgY#nhBRr`p2H^saZPr{L;gx%;WB?nQ&9(xbfVg`wtZtWVSqcN=OrSQIIV7d*{JM7uGeS|B -zHf4U|zN;Km@%-`y$HthPlZppkb!qvsoJdwIC$_fg^2hsfeY>_~4o_e9$YrCd@ri}) -zLx+vckG^e*rP-aag1i;us%M9y0k@tccPEzfe6+OxPK5D`f`gkQ-U_`i7`KkVlhACm -z0`VNzfayA6B%gW_))#e)Zrq&2xA*K7Z%u5*E1;Sfcc7LDv?880fGg3A;wm*$xyhRO -z+#HRD`uC@${QFC&e}9R|~^BHd- -z6#gqhnTHXI?m{T*QG{|$2*rjVl&6DKfKZ>i5xV3mgz{G-RIm-9zKanmJcv*c+`HI= -zP|34O>IdihJq_suLj7$Bm5Gqx8_MAR17{*sek-KcX)L>6S>dmD_`S--GnUMxkq^5g -z9QGOcAA>7i6xC1(PdIb>;^p)}Q}Bwy!tQm}x+5OaD++!YH*w;Un#J>2tkS~1zg#7t -z1rRdmd~5!|G$d#Uw`+D -zi$5)pH-1~b;f{}n+RKNI+#*{yWHi3NsAj)ew!S=k>Ro@hky~=;=H$sm&y~q*Z@u(S -zrRV#9@b~9_+VM@*KmYodf1X<1wCkbQpI>n8cTW_JxaL`J(VnYzb^JZNaq_C10Y&aF -z1K+&&asBzDPnG=U^S3gW4E_GZPtUKKGWpD#T?Mb*visrP5A9vG@}y53^kK(0yPt30 -zQ95Do)CXSw?U9KUw|q0Xu`#Os&cA8T9dGCFIr*;v9M?s=en$B%tIE!|#3J>5Ut9A9?X?jc)4q^XoJ( -zv_@W<{AGOo?k|Sjvis5NJFj_lL+1AaXTmJkm}%SdE5^4UditK@ -zr8_=Nj30OEp;sne;j=H^+&Q^6wtmU$J71do)$iV)HtVY3RqqWU4p@o7!1b>OKG^&LY8`;OI=TMnrJze -zr^=P|`$Z}$D*EMdDBB;7)9ZU5FYWcr-q+FNseO74h0BV2_G$XJVOIw)n>T%7)x`f_ -z -z{fdKqiVBO_sx&lCYf1gD2XF25h5dge?rLe@g8seZt}6TgKJKcxi?U063aN2dpMRh6 -z&+fRZbGAcCl(rOjXaSA8+AyEpcyMyk|rRD!l6r@eS_-}%InD8nR -z0j_{F4&X=N*l1FQGV!aRE58b5;5ejgIJX_He-zH&0C}A-DcJ?(IskqI%5m%-O=viz 
-zA&@3Ps-cPOLW3c%f;0gVqR*uFgL>Zt*B^lWlaMyjcY{jwOW=D``96MYLQ -z35i1i*ycm3hBOV*9gwy{5|LRwvINpPNQWSuWoP1?v-Pf`cspdbux#)Ag@tT)y0TkW -z&vy4tu2u4?%awfCWlFIDO0mAh%Hd0fD0x<%l3OE%%{12;3p)opI0rfoiJ+yh#pEzD -zgRmMv+c-OFfkZe_Jrd`vdo!bjTMt2VV9*3Hnu!9*P7b#kEro-1BuTzSRumxLkTc2| -za%Qi|Qn+>SDDo-V4e1ahj(iHgPsk@|9XUl~)xx#{(pE@^ARUABEhLegg5OE_eM~+^ -zX7Ulrhg1P+8lEs(ZBYJmiDCv0U%LGn7JHz2)>3LwQHjf12i$6Mhs?c`W% -zbVy*ZgZzoSikwiXN68;p{suY1@;{PSSpFL1e#l=ZM_T;>G+0mQEZkS&Ax9v+3bpnM -z63B~tGp&K-U<)|})f0eiKBNjr)sUt^x&x9(4v~!3nE@-@9?QuMG* -zWk92s;U+J`4a~5ufHVzKC!}v7B_WAWIs;z(4z@MJHXl+2q-l`WLHZVw$X@4LNCYiW -zUvW2kv3&KVtLYwj8Gae?3z1MfUG~W_c_ep-51akC+Mo23Qsi&|`LkLymUe91|1abB -z|1u)&U@f_w+=0@eA-1Ub9j!m42io!dtA=^%b^_p6zUKq)1>1St8u&9D`Q84&JHDF( -zujRLE;K;#fJBF+y$ci5D^$$GrIxD&!PCVEexQ=G~18aRF0tp8Eq8zAFReN~sch&Hg|Eyh2O?bKzxkeC(lNKf7T~X*-@9&y((z -zrbyLNKdDg4lR{EJN|(HnO=H!_8k5GL(P=aqQ3GnZ25D?mRgY1AY7-e-6an%f2@Vv)Q-`_DeX8(4{cy^NDm;4EgNq0(mZ>^ZMC(?`Jd~DWpI$y>@iVZzcrmrX0Zk#Ahpq9l-yfS?!bq -z+#vkV%JK9#H)vRG?dWzqiIqb^{0}Gx81oNSPUA;GS`Gy@ex=-lO1TJJ59Kmx2IaI` -z6k+8eS}iNbV_I%cQ)cC`J(-!Tj5COuSeYhguy>i)BTyz1VPzc6=yg`+bw@KT(=vz^ -z@%f;L-^Yqz5Av}hKI~ydCiN7_SBu=-UF2r=Bo6nK@hN4Dr&48%r=U#izx|svBo@Pa -z2iDH0tDF_CYzSA*g4ED*%Zj4{(q7j3v|#$1|DpVd+uo;9pxMtE3o -z&%nB0IbBCj4-606gX$_LPT5lzpEay?V0<7A-CDbM#JJ+pi=K6J_p^$}{mRY8(VG?1 -zXN@TRmD8p4^a%Q_Qu?e?`m7Q05$sur;rO^Ido*ZB?a(?Ur@qX9uMOT%?GT5&bPz+{ -zfg#s*-VP8*b|QVOw#5)0(qe=}PxTqxXD~eleTtqk!C|v{%5~kqkhcTxR8Prp*b*Ls -zV#}{?x*EAE=M7W-Y1n8uq{MetqdYhLyFWO`0^&9;M`(0Q?u5}T6_-z)vPY7XV7PE> -zK`FWWyAb9yN41N -zkpL-}I2FFNZt9c{5SJ)>XsU$|Y{D_z9le^pwi2K^hUoXE((mIarR$X;|NIu;u8maIhUuy?hG5w+#!|23yWB -zxQfAB7;FTXhUyIlD_wJk1zT`Ku(D<7EgFs -zWs94JiDrR?E2qzF8FzVigyR-GbVm597DTZ)#)_b!>@oivv8-9dX=iv?i*xm9SI;hu -zteuKSt)5NAqjp{#k2-ZWM7Jy)H4UO08Q!pYI6V5+ku(x75V#hyp>|3O-jIsoOTS9_ -zK7|ZK8s-OEwBaG)xtr%VK+||Ox1jN>LaiQ8{6O*)@>B*lPn;4CwN!Y+wbO_B_Sn$o -z@vHW_<3abuXZqyrku6Ha?lGBFz+mhR%<4YJAXa>q{{J}QbjwU=k$ElJp$(hm0o@VO -z|F>dKQ#|QLH8`JwTVk=6+}z$_qzAG6l0JDW+DSOPR0i3GR-Y1_x^_Um>;JWI(7zJW 
-zdE&nh@C@fv=-0wXkDjPduT&A(nPO0aJc|seXSPe4%hj;YI5@mTZWyU-YjvT}e|b@R -z@+`fIWj&Xvuh}vncJcnei}o*iJ;P=WUYL&(pt_qk>n=L?pQC{T?y0fAS^V!_vB3fV -zltb$7MFoEuF#O-dG(SLIA}{^NsOJCg2fO~afx3kbxUqbqd=81X9 -zujqN2lfNPwjaf+u?L^PudFUB*2)%^Q!d1J`0n~=5f;|j9jMkvL(I!xuroxe%(Rla? -zaQtrUPPU9APh92bVd7PY+{1M(Dgsy>i;87JiiaSXR?mi2BiCuUNHx%yjSh6K~7)tq2 -zJ(`OiL>;&Uk02qTE?Jd4iX8Bam2lSwP%GLGzjpL{bOIZMv&qMkXOSD_q08WV+R&eH -zCzt42pQwNu6QGuIP#K(Af?hzcqT@J>UnENegHR~Mh1JP-kPY=kHSo4i!j)g(f066p -zw~jl)4^Iw3Ca9M?X>Frd&?ne~^KmtvL~_Uy@(8yKY2b-{;Wrb_gZjQ7?*1_dDf@|m -z9OEA2f5-nMrYBA%P0%7y^bq@WMOh6x*YIfJ-q!r=n?p`1Lzp~0DdRYr&z#xY=U19hj0zP7Jk>^ -z+wh}!Cw>aI!BdXoPx05F@ca+{i2z~~k$6G1rV4hLEF;&DyUD|J85MGz{GI&3Ik^lj -z#+7j8TrIZ*-f9!K1%CUuPk0Z1j88&67Yg?ZTZNs%?}V3xv!X$|9yG`|E8GMwF@gw+Y{MGy;{FnTfLY?qO;d43R*t>-Gaedu;l#JkC#$e#e8 -zr=gi#6#+~oJMm3qEp8*3!b))<8HmTCvp|;bCP&Cta-Iz2s_Hp4#Zk -z`Ipfd{vdqYpWx0bMFYN$d@CAID`xgz{0djV$GAVD6WmEG@!Qc~cpY})GvrBb9ALrk -z`9Z=I6yhF6&vK1;E!sya5z_snxfSquEZzllVInTX-*ZXCk+Fb9rQB!e2DE^@kIq0p -zz6srjXYzB+|%&($H=o>6@OM3kLN-USc^8I#^idmN|?gGh36m+PeKv?6i~M{Tp=HV -zd>v4gI-qI$p?7xx9T?12!4Vf=&KSUp8lVsl!tZ{d6FgwhJm}+-fr|YJwTTl+JDMYy -zFi;(Y_&+Abqp8Uq=z-)Mv^aSe>I3v(Q*sU5aVPp5-GO%E4T)>fQsjr8^D!PH3@68g -z;mJN^Gx>l_AopI}zEIl{>_UHq-?P-$KzJT)=HEjTP(||A8Q_V=-`0Pi%M| -zT?lfW7veNNe30*?=kJ9t`x-ci1*M~0oOS^5$#d}$dc9b~^TWfF!n4Co+8edvJkQm_ -zQf-s|2H^(1nB~xNuB=?YBVDUav-)#$b8?U`-4C2J;P+dQ#uXI}vP^13I+LTwYzBx1 -zRjCo_l!Y2mL+{S~>uN;m&|D)Ww` -zB9Q{nvUEl?b!c+K9=oNok=;z(?IfFP72TZ)@*2FVHN9}=GSMDgvI@mmCPeWgByu~rQ>ary{ -zKW&o?{TB=$zp5`C%o>&aiudt@01rw@pLm|usLgd7J-Io?+}tu_KYOWnK<;I^^~U<# -z`Nny<4F#Kx8*?6XJmh)GXwRmzS83&C(RqY)5@gK|_pa>y?&q@)yN_kRW&bE!Gt7bg -zY+eejFe?oZRwJ+mVd2GUC=_r8T(P{|;xfK0?=pU5-Xu+JY_?`zY=vQy;Wfh##vfvq -z(qa?l<^0TIXJN?ZnwGO9hvfM3O%igl?PS+}*F83$PhwLAJghk@vvh?%PM!62gy{Zh>UKo|HnoicAF|03HJR)|nd%f10LT?_Q&t>BNd!>faWHK>@q|AzWuwM_UjwZoI4Yf>l9-_p~uiZflOduCWS -zo@t~J{jFtXSc$fTl+aR*kfFv%$`iU@Df^WaXEDF9tU;uxka~>SsUKcA%88PRsP5DK -zfF;j7Fn{;K5ltgY7Mz%aiz;tgw<^8GwfOkWH|-iHYn>Sfea@>6FR3eBG;i*s(exW? 
-zhW~EE*!5#=CZi`aqFdbO(%ME><1M4((??yh^6XC=F71y$%J#|GRrwXgOy-&PH6S(ZM4Z?{|7di -zwnk55cuo@QY?2p -zu5UzuJVkVls)`A0@+x!9v}}6x>W><>kCSz6xeG=%J;_J!+g-V|s&H*r6WO?U(crt@ -z=sKv*bLPGWexWDR<6h<}T_cH;#Hm`&Z2V3*FLK%x>1UI{YIG?;n*umWmZyzYl_HulvA?#$SZ%E3YsD#2t#&QHN_aqfM0|^XS3E6#E&WUUL1VY-bOOimL=+{h -zRs%a)jV2R1X`J)p?jgAR3Q$G|4Wm-*odO0-M$F1pQkiF9yWZ9lj8GD7=dem5t0d*8B -zteEU<4VBTLN39N;ecURSiAu)y3@qEDS1yj#V#RzLw|>N9jm-g1v)Rho2Ch2a>Y^9_ -zeUDcuj_Yd`S_V*KtVoHNfoZhpo7(WMuM_j}p^p>W*9jdL4&s)?imsU?usU%C<(I>N -zUuFSyF(Nl!u;1>Y6`DplAqC77<(equW+$wY&TSYWj?_#NYc+Gkc^XZzJit1@QR1qU -zM_WfbDqVF#op!ujZ>@KXcP$bYX=lodtcx5oUDsf{Ruqg^a1(`zx+@F|xmm(2-9m%T -z>Ek7fPp`LSdgHXEy^K6bEL1(^36hH*m1OnkS;m?qH8P0;Q2HP#ZrTj!jHhKriVGx6 -zCte06PU?Hoi@o&tWt1BMY|2Ci6BUM-bjAFRYmkp2mx&=4V>pN|B2$AL4AJ6n%K)l? -z3hV1BE~Tw6CL#By!~9H5P;oghrmUL{e!(07)74_|32U1v{?&+bt&(XAZP(+Ss~x!=_dc^RsSWk@%$R=-;l#er$Z>k8iyB -z${$~+!-62QKF3Fql~3P8lr#CxJqipg}nS+*@WRI^+2i -zvc%cG&cb125G4_dW*yQQ4NMRLo7boy0X8weH4zI27U#h(jg7W1M(n>OwAnYDsi>6TysF|Fz>hikNA@|TW`rm*~TnWwmHk1Z|jdrt=DKaYVPOmGdziR -zlAZb;hJ9$i*kSquP{j|lU-4g=zqFncf7JTuVih2Av#l&Dq8aNwY-?4CiC1Sb^HyZh -zNI*8t5tB87kV)c<*brgysY@y58G#DsQUDvVElt$vEm2*}GLavztFtV$tg&pi=qx&( -zLzp&)(l9+$U5_nnrKfZ7Pxs~1^ruKX@b8V=I6)wyBneubPNO#%bh5<)U3~Oj0a=6X -z$;;xib!JoWWs9T`*~l5&@sn{~uv&+g=+^0ouAS7xwbd59 -z#InvpEObvV3%Eg8Dl`k60EPGA^J(XSypNU7xvS3A*Sosv8{yyM?gIFWZn`e3KlpT3 -z6Yy_g+f7x>i+vMN`d?&#m77dJ{Y`Q?O%$eSbW32uls03~5F`harx1o@N*-@R1?Hd? 
-za1{4f|7u6K6iMu+Yo -z0UxTb4dJ3RXTQ=k*un;fxhxKkPCVc7R0Uu3)Pb!fm+s%4XnX#tocH)>*F&c*uam`H -z_y6%Inf=oVvS#0fW631w*k5sbgqH}jrD6&7M^A&o9Em&dvEq}(B)@oCF{v)b#db6@ -zhH>!$l7Vul3{_vR%y1hA*Pmh6o7C%HhwBUWVvPN%!b)%1hx~rMelQRWBl2(alXZUV -z*TY>Y3oZd7{0PvzsUS_6LHqtZo*xY0p&CV#w#a@n(m10*VBL(PXqf`6)lqJuW!yx| -z6e&v9wVpt_{9mid|4OOJ->1}M|IgLrUNcx#^_a<*{W!0r5jBE_*YKj-<#7>FuLGSO -zIv3^iHS -z4}>3W=RJI1>i~CO-(YK%XRz;bYn^+%Z@P7nXS#2txYB-}oOj8fyqJwn=Qu}$W2u94 -z_{>}6Z8DK%-s{szsDtbRyzgZC09*TwhT*G`0WI!H^YMCT+(?5y(dIC!(`ajq>O5o$ -zbjGz=xy3CYMtW$7@V${}F$)zwn6mo-4mgVBOevn3TO0tflSX0)GHHV8nUasONyhdi -zACn*)OtNtMDVragiUv?0tLi!p!elIVo~aX6l*M-e%RjBeQz-9hELVw+YW|>l|I&N!D5p_Y}yeHMVV^JUD=WM&4I5I-(uT`f5s+!;VWJ1h8eeZogkMR`cJxf%~N=i -z^RYG@z#KNQ0JOWv1o75t8O2;KpqV-Ih -zLsCNkQ{bOW#g|eHUs9tnP=G~%V^SEK&ixLd~@?aZ|$tO*M`S1(GRBbAbi7s6k`-5#xSVb<1K(C~g+|lu>&4qmus(Z9>P7 -z!`aFo@a^U7+a+`~ex;UuezkUscAK_E+o?UNJ*$7a7W?^ -z2kP8)G|rgF>qIFcAbu;qjc?&Q`BS{u$)Dvx(F^j&VUOoiU&?nY^gR1go_#6LzLZx3 -zYS-|okLA0+ksqtM_#5G)ln*SgIK#S@MF}=jzpOEqR#If=;9GBMYir~GcI?n6zYPv`t*_u=n6L3B@IJqW_XgA9-yikN_g&f -zjxa%9K3rc&6Ah>_LTkVQ-_W$7&IxG~oDFFW&WFfD+=Ir)<;Oh+jnS=}Pv&v+g{uuz -z)B%xEyI;4@U~m{V8a^YODPx*>iFuuwGvju$E551#(U)p~*P;PAP66s@k=d+AJ@4p) -zSI;ytHP@8krF~u>3n+_X#+jTLr`3r^(i+4bS`1(2vu7TYaDeI93SrZ+-pN#Esjs*A -zuv*RajY`DOSp70}`1Am!!~JW|ECYRc8T)oCHSFfg^{3&V8q692*=kc{t=P(@OG>@t -zhnDY2|MuAviGMBo`sQao4D5EVn|jl($8Vf}JKo@Y?ifzTx~DN&zk9oP!NQl{e)py8 -zX`5vu#JvL=j0x|K8?Ei+H4U-iLaUQnH~tjY0)PkknXTic_$UB!NVYa#F2iNI%kXeA -zTyvSWTCT$r$wbXm?KpWMoiQ*hhOk1XTLh~M$ -zWazce$@XFbqLY@!y$f5*Fd3lL5sgk4!30EGf6my|RU*aiS&QqL45>+e+-jzkpk`k^=PlD}9NtT*;??S;hd}0W -zlo1;wkE1NrMQe*nuoz~uLZyC1A$vA?ylG0Jnw#16;*wSK@!#&^G~!*?bX|F^_904- -zwu9964A3JNu%HX^kX3KO)_%UJf!Ufx0ba{YTN<{N*fx{urL<2PQzkG&3ecwj5S>5W -zYxNXcso5(dtJp&KL1UEF+^pts{{6k_QRO_8m(`q}k6#8r#5Bq`DmX!3=Ue1krd?@T -zW!|8>$$X#jDRaB|E7O-|*<>&TEoK{NOct|2YxR2dp=Yz0u- -zo=ztUWl(t(xm=(DY5Y;s!=e%%lNtFG=~86jhnc;b>55`~Fmq{Ub0(LW;rjm+OW8c= -z8XEUj{B`LxJaA_(2?pfTD&VyBti!ucyL#+lYz&7+7b!2xXS$QKj7H@b*2;%dt1oo_ 
-zQN*cd7BEfRTxOOBSgZq>wW?87Mm~;v++~&wcbOHEDefziGi;CoknCmMXwx$^1Tt+} -zn7f2z(WowPn8~!^&~~!<@EfaNf4eHXW=!(jOErrp_X&;u1aIGP@7VhuOB4tl)qhy^ -z@Vn`e%&}J|8gbtnZ|$#_x~}GmN>`1TyOGkAM}de^4-Y-MCvF!7Kh;MO$I~X!X+f9J -zFiTOoY%P{1a-)K}ppoc2Mqc}WF=%EOctvlCkyiIu`FwrVxzn+qkCycuDkAo~hV~{9f -z0V5$6`VLz5WGxNrMUS43)7UpPT4v#=(f?sP)_svEP~R9k^Gj2HdDm$c=DM5>=O~7J -z4=jm3+oA_|%mqLImCp%NxmW+@AWD!~ByQJAY(^XDrctCsa` -zdduR=FTZ_Y+rw=m7gd)uk-NI~-qv@-iIjn;fl*!ZOa5=d+o*up<5@Gf8GIAB -zoaZB1C0v)*H>y*L@ -z5J>^i6p%&hZ#adrjnPb!$z?_QnTx~2B9-}5gOkEFk%jvC#s#L?wpp%K`qjqO=C$(G -znN5+6+-CjF#?9v2d{7lhai;WvGRp|qi7b8Z)2ZT -z-kZI|8*v!>__HE7A~@I*qLhuUMUER?mjp^3NzgZ@f<=(`3{O -zA<)>p8cF0iBH~D91{{Nq>h0s9vEpj(@LHx5U%> -z&_~io!yAv%Mo|vVp&}+lvm7;|B0!!(_$C~+(!o!9%9`rWEPR}q@sWwN+q?VDP`Yq^ -z)oEtfJ;TPm>G*+S(xvfjPty%EumPcDgNXpvGTEpHIYCNFEA^A2LRCl4RAR0x<3}lZ -z$?4#g(87R>nbErEjMM(GcFC>@`K33LWNWo@&`%)*pW`rFr$n -zpMLvl;=2d%d-CGjCJ$*ERyil^oE|HEY}S$&XU=sQ_E%JAU?~LD>pj-1s-o)v3S^*?~DmY1!0bq@2kU(SVu4)vN -z3^q7LIJ%%GHN=n_F31~faFy0C2pYf0iS1-*T#Gb@uj4?lNe9-0#H$B12%=dMH+~#{ -zj6cb9TX@X3Q$cu~p;J0(;I6k>;fnH0b-G0uAf|3J69NnTmvoVgFc;#|LR>UUlsHoKCYkcl@46n^{sI2S1H&_XMN~_}2NT1{ -zRDAD&Yi_%RQv43sY(hstDvZQkq#V@D@@XI@u0y*KzYWfB1|L}PB;X>T2X!1h -zdK4)(F}@30*$DDgYdmk3Wr2;1mPgyJkgu@udV`~%boaK7sa_`g{ -zZIPV2sutc=`%U7t#7+3xgOAjY>3d`1W}(Amowa|_^NFsmr#XDl3 -z@4p-$8DIwIPUCT7(kLRV;q@G^Cz41cL8rwUK_`K#@fz0Ybr|y+jgCi>rc%%mtc&v} -zLB$$t?5B=}D@;t^t;MTD9NQHRMD#TI~{6gLs159*KWIsI4zV`^PJ0@WAw -z#yItY13XZXz?S9$;VwCO}}mG%7HIt -zR$0(^3pi)|T0X;;F+e*?J1lcj#;lAr+S|1^X6{J)UEWKaQS0=$oCTxv-gOFI==MY| -z#5z}WuS2X_~K_ub*$2Z)}UUWtl12%FOAPIaODypBbH*y*#`egwx;X -z9yZ*SeP7-^1&`~VGCY>`c=q1tD^W+b5+YEn$w&d=6p)z$veht9N*M}-Q$S`4NT;i& -zSp8*FHCYjZj`swkc3yu;x`!&v8SXq<;Q@DryV^a?z1w}vEt=f{_Y(I>Hy?1{;U@0i -z14`Q2oNUl!Y2--CH{zJc_&6XwmN8+A8`>PjilxzHDaQDcy7YzVB;9AH!HQYGTJNLE@ewQhMPuxs&NPDm2Mixx5mwex!IC} -zHSTtDMXQvVOBdJml^xH;x%4UYirkcLkgKk8OM$=crx)dV*rP&Oxy22IorR>Lu(^;F -zQtN0Yaw((ZtiJ`-7>zYSHRvJ(;%@p9!Avu=Yp?*BHDrpQ&8MPMGr+d+Y;zsc*ruS;H<3yCQ{` -ztsgsLRdygY^4dAB@pa=%!@l%1U1rhXHFZ@Tk%Vy;D=_L!& 
-z*+D~a6=4*ym?N@$+=69G+|poaw(u+%XpJ&xVzE+kXHi2i10+(SXJP1OVX=oP$+OT9 -zt8||&_P~aM<3rd$!E$TnA>hF$rKH5CtnU6^R(HQ*aAUR-#TSP8tWm4`xZXNcW7V0i -zSg+=!#IdWK$^GRtF+&Iyz1T>*D%F>$W-00g(Q90t>p$!8x<6k%|D78e?#Y -z6;JHEcIEbskKFpxV_Pw|`SQWU^y6@1edF~PkDPepFm1CTiOada0&Vr9T)ZUSpw|Ps -z>La!>`bwLqO?Ri~>7%y1aGAcJZIphvZIU!aKUe>w?tkp2OTu|sgTjNd#$;{D+m$k<*aZoOdGYtI#*$Z?h~;hiGpnd)l&nX%e#z%0X1X -zY=h0H6CDTPt(~lm7YaU~Sy!3iGw2-lqDYZ0;&L5#V%ZsYHaMG|d>(LRQj^CT(a9Ro -z+1-dv)`(6AJI6vHGsVxUDt<~MI;lZ+6w?5gn{fnX1TqhqkC{)JlV(0(t}s`dITl~H -z#_VCFB!iI>AH9=GKv+wf-LbsoA=;STJ~^~8&rze+MeW&j`aCuLpQfWJr)kc_fkqIf -zowQdnC%Y`VAg0o)&JxhqSTF7EGBJC%zHsRBwKus;cty)!&MtoQwu7s8%=*i=7ykOd -zj-|#>g6E-bz6! -z%1AjBkLJgY(tr9x)-q)s2pZ<^hV7Lmrj~_9@D89MqC1oY3E%Xrp>T0Q6jN_b5~VTs=$Yu2KCX7$zt#OV?!f&) -zyzPwZptvP3;@!&%4#u7W(V6D_>tQd*=095?{Uh -zViMordSFw_^=r1<9w95&&ba=@8-x2^ozpsV+QXOle}8*t;(xw?m#zVA-3={Ed-Ai2 -zs{5Qi;G%@krHHp;jVmRmxTqAHP4U=eVMuLZOx)tKX*SHJ<3PlIVykj3N*HYV7*@AuS@`-Ks=YBoP~`@ -z{F|p<{H&inD!=!G;>80zV;u4D6^_Z_+1x_MBF~)gYR_8#t)5%_4?3Rm9Q6Fv@kQ`_ -zFzr&uBaUYr+<=^!BFUo82w}i7S12e3v;EbkX(nPa`RF6@+v8~9Ml!dJzC@q{mmxjw -zAGf#|F2U!xWEXL5$pam|e;e8tX|m8NGEoEqWBt>tu3i0)N1r}H -z{`kc5m90;$S-TxikXJ7rv+n(+2G^toSo8i#EI*j|Eb-68mx;a49^#50+JE@rTX*k9 -zD$ZB{XXr|RH7Y&OdHlR!1ODaIrbIR^^A~5H1s247wvj_ojhJ*sTm5LhvgAEmn_D%s -zTy&>%3<0(3a!C~UMlmGAEfw*OVIX4qTK6q`?%5^=-Lj8b>q$`m0yLcik`0bFY*9r2j_^$phW>5F)HfdQqQ!AgWCE6_kOB3pDV1+{XyBk<(-jqKaCG3cKwqWMO -zk|r}&?XcQeV`fFuWZI4vnbHQaX2ILrRz*B41JoVIdYevSAVar-d$u;5hd#NYyMrZiJC-#Rn6TywQ=gJz@eUClcVi^QDfvQoZsVa5z76bharhQz<#+I;3ZJQ}aTPrbPFr3)X`@2YKoOiLp*mt0~E`9&eyZ#DY0hLj`re%8xlX2tK6=}=!Wb&&$> -zq}jzO>#Q(`ZKV{~jS^g@olR`8vpd=uO4xgP^TnisDo|(Gf}bkC@+*cp^+iPGMNTuy -zK>>D5eLg+LQBf{~9;A<@SK1jfv$HSl`M%V{;8{3deSuV>SXo-)z&Vb~9G6AEFnnDg -zXbbRKv=*=7munjJ%M4c=S37S(oAIsuM$LNtjfRcJ+njG$j-**Ls8^dWNY}t_4F>aR -z)+ZRHDCy4$8j#YW{&m5kO}8b^fAB%#{GC^ATrhXT&9mp+G~lu=6V~s1=K6I{a^9T# -z=Wjc4a@*|ta`Fz}bTEl9?!4ngJaO)gH%yzc>BbAmsx8$!ny=rrQ;nI&$C&kQZ`@8z -zRYs0CUSz6z9-EK`5o~J3dR;p<#WM{?jcT_7qL1|x&64ILX1$ul<9Z{T7-m!!KvV(M 
-zh3dyPoieI=KIKBCNPR(|X6>YPuZbD%QiiH8QbsD7E&e=8s8qe^Ugb8 -z!ebI`SYH)9Eoqwpg%*XZkWT1XT8C@lgE)&P5YFHGvU3DbSWU!@0Z3c@MTa9*g -zD$aVqdriin5CC*q&i29`^HD*aL?mi_mS3>E5|)}XV(Xm=N|y<+Q>(l*Hr&_kQb0BX^B|QbLCj% -zb)-C>&I6#v@$DLBjR#so?b+}NDYs+j5Y#~l(t)E#k8-s~k6w84=ux=hBs}0J;5<6C -zJ09fXMoaMmejT}kJfPv9=CKxu0^zg*HW2)}vZ#fQ@?^wJ@sz8p)_8St1oE*4GpQE& -zvx==RB`LBN@)(4;(X9BiQDRvj;Ghr}h~UnaK&KL?? -zfKk&$3yR23ZG+#Qc;Bb_%lT^utqD9k;`M2?-Znz74gdwC(sFbBAuJfonL>$BDF_vT -zmH-I^GJHk8AwFug5(lJF6V@2V7*D;X-Z;fv@3_)4U$f9S*Sy%V*wY#Kz;MEO!u@I5 -z-<^MVf5z-y?x2uw&bJi^73R1w#yn1#Eu2XIAO1(#Alps6NRZbD-BV}xne?vA<9aOX -zzM&duMj>4C=*KbDFIxsE`6a)f6Hfv{N>nEq&n>>^UsR72qB#q -zZ^7Ge3qFha0RCU?y?J~TMfxya)yEt?_mxa0$MobTfy~u&K_Cq|AY2JI0Rx%L!KEZX -zKtWva0tI$;b-i!b)vqTpD1vz4vAVjt%6hH3qPy$Y6O>(D*RL?0-&5T)NjUa+_x-)^ -z=lx?RsqX6T>guYep0lo}0k?rdsCbN&=dhko2n)*+9?OJMif*^Qh5Lz -zJLNfC^1@zRP%?Ss?9(BdGH2k<$HRRnGUh3+o`-tEAR$^qT*z90I!Y)9al&0i+w(`r -zuQB&Lvi9Nb-D^bYFHb(PiW27ibi)I`Sij)`_VLVLuV}mC<#p26(%ZM-YoDIC^VMI! -z{=%!!y4$4{j0d==6*R$a@h5uo81s1ZG&9q{_w$sSAEhrV4mbkElZt!!D|yY>{IP{o -z^QRVeXcp_w%0H`crDm0Wh54NPRfPxnH|!rcKXAQK^0EEnl8^X7KCg_aHP<>q%vf^^ -zGu6Dv{NK7SizTzpVx;qmNTaIqpiz;rp!{_Xn>mr|;TACuH6!#GKjd -zA99Q6zLFc5%QY}Zm~k0_G}ni1K`Ka=VVt=wb4_R-(S+ucn((_*n^0D=fMLl^=!Pc5 -zC$(X&2~TZ6xdkn;k?m*8u{cmiWHYzQLXS{Kw%;}Os^yoxzHSe-`8%QIy~1AOdX8&hQd?wlrWT*t^UMKT4}+_^>uF_~AB^GS%w -z$Sot!k5hE#{fHj?=rO%T(C~V)l&`kK?<3Re8_!z!h(>IY*X8YPiD|@u5)H)Er3Eo+ -z-{9fB1rQA=QF6CTj*7annA&KEU?bPx?zKU*Scw)x)Ip-(KRh($2`eXgXrcq;7w{BI -z8MfT~B27L1;t7d;{P=~;mLFP}t;e^L5&Ju(S$M2GbJvRA5}O&ef*r>M*=;PFuVGm= -z!%z%sL)f6BXuF=VusSuFrwa;7k;QbS9os=K(d!Kr9CxJ-yLAn^HXSXGT}I@wOBthe -zGDhoUX|F4hxuC3=5RWb-F7v(uoBhF4rMo;ik&NZaG8{F`kg=Bt?x9(_mSAwZS);6d -zFlx-EN{yK-#9E_TnTA6IIT4Up>HQ;9aMXa~+a8gYmxkSu@FT&A*R?XAA3F3u=igv# -zy^1;O`0dZnND>V8NVDj#KsLMZ-?G+tF=sc?I=ZO9WL4=@HqmP0b)ud($-0xNz+3D3 -zz~y`uls0lAYo(6L-I0ZROhwp48uFZ?Shan=X*UPrSOi0Zul5OMa!~8F);xpLTBWPf -zR~f?kupwl;!J@0SR@&6}ApvPjH$_ViHSB93>9B4Uv|PwEC!aB$)q=1 -ztX2-u1$LXQ!phm&HAAD*<=giBF#Ax9p`&=PsQPv54e+x3HbN)IXacufEss^O7BeskJc 
-z3&6{enG-IjJaNKl{`TVnqB9(IIn77q7;=)q7-A)mX%w1~%W4ssscJOW*K3}i)^D6M -zt=~Co(ZT~rKlmBa4St44u4J+Ug+*dqX(VO@X7s2dX30(&>>&CHTGL6Ex!S6fF#-6) -zL0g`@pb0CUUAEZAjjo@PZ>eN;(mBt5P+RJ*{nI1T*%Qlso97GCvU|V2-axH7{;S=SI_40(x)g}+1(0qQ^pZHvYr|#+moWm<$jo8VOdq4tS~OaX)?l+} -z4T#oYoh+6Rr>(wHgEghR4cn+vi?Sdf!su7Eu -zXIdvefAX|=GV{&c`R&?|6?BoOsmKU}%r9*>5^pW2b1hjhq==Awwc=mOVY&3q2`d^F -zFPSiD(zqq|5~lKwHB-jkQ#HAvYi;IDVqr~{dYEsS31DGSvV|3+wxa@M6+s=1R-q|X -zaN=Zx3~Cn#do_=}2kg~e(fGUpGw@Go7%go;`=|q=kHU;rZ&*ju6zNuKqn1)V6ivBI -znswSQQ5#;0mr`^CoOxZ^q5_DYxwCVPj12M;BxSg)7Ssk{q7+kBT29IQRPL>?FkR0krQ>h@M*5-m*~t%H@b+Hz -z@e>b!Af5R6Pq5*0y6wck(~ovPi|w)(CutVd#l8XB`*@M7GGS!4sx@ZwJ{;VGZZm4& -zELzmJ8J8g1O!G8NKVZ3Km)zfy`R=IsyQ8w;m0cHAQWgP-GYA+5IA_NA!)tysW6={A -zZmb$#2B;&=dIEosjbFW&IsSUbj%yD*DY+#c@XZDIWQI24kO5bUVe3Nua{Z0^d-X5r -z+3ECj!_OGniYX1!t7tXL>1Z_q`DS>TX6!W00M1c*1EZ!7PzU5LlG{ZNF$^R@FLTU3 -zD*YJCa$>Pt;Bqz;r*yPuHAH0zGLDD_wOCpvsJD7T>MKpeK^io4w|E@kqp4*ij2b8HjGRHFv9v-m93}o?v -zE*PUr$HZu+xVT>7c}SjTZP$ysn0{+}OfOc(^rb~`9+TZ)fOtj2L3vFSZNb#FnMLdJz -zC-z^vW6^JqXWsit`irC?g8>MsH;|PH_DyVe`te|gVoAlEhbJ@B2#f~JK)UR^%v3>gU9VXVHrxCVl -zkj6x5$o#ugmTVV_B{U;1&DLlDx%GOj!3dJy4#M4$m+y2s_6^qWVUd$3XT8-z&f+2m -zDAr^ZHm|J0cCs2xiNk4kIGk3!R$Jn*Ley#jyO%dx>}F6Sw0e!x!I~^);BZhRmUfy= -zCaqSZ0Sus=R;$H=G%nB_%@ehF7Dz8W932ou#Nt_dc^+e2P_Pg0d{|jl(CI3ek#RXQ -z8COBZDR=0caV2(6CCpBz{O3!t$@=U1=X0@<2RyCc1R8_|G>8E!M}}?5l*?)T6>zgg -zUcH3K>LsMi&@{rGJ$jK9vpUAwPES^2fRj~G9Lg}6oQd(R()llbSni5)IRA^^wv`o) -z`Q$li^#SSERqA}Z^b&B*hU>2RvYbAgaY;wM+4%_l5UA>%yZF@P<3G>t&_mW5*l~P6 -z${Rem$B{3{BlL1FSp^fKo9V|5jC^Zseu0p$vFI&!n#IUe#H#H&PG6ywH__4JgIb&? -zTef-f+-)1w{Qrzbmu-bK>MGw9`s99nq>zjkN_zn04mDSo$C9yORL3%q=i4+5f)g@^?1r(VVhLPu_@4dMHJ0ah$rEFKesSj_A`+ruHLw -zE{`QPydRHIWmH!fRr*3~(3o9$CMt{m1p}W7pA`qHm*j(-ge{$JBL=tILLM@_g*P2EMSlPAj{Lk%zacF -z`z+*>B8%#NwFlZ=cDV^>VquDv5ZQvaQXgWvhu(s=&~y*#!BiV=r$GAxDNE7YK{MJ% -zbq!G4=zY|jM^ORu?maT;&B%P4K@FKsnd5cJJCE3GLHh8eM}E)Thn>=Aqy&#i^Qm)? 
-z6FBcYQTr~1jl97o$o^Ohir`G$CK~l7#A$e~PfKa%S>;k0;Tipa2|s|MrrRuc?8n;> -zX+=_P!f=U9J6cQj^uV1U;^k5INX~`uN1Ll=PnfY-*fb__p0i_G)7-jq3ro*S&ChP! -zR|AE579jaEgj9?h%AcB@mWC_%DRa#}KkUgplpG2-$ywkmFg!A7BAIFMJuH -zqMsmCG8*vnZG=j}P%eD}p)yEQ4*67{1)ne@1dq{}_Ym?K5%R-30k{|ZMDg<%NkagY -zF!ClsV;Klt2#u>pXgp{i6AqItY!RAxJ3@`$Bh(}z)C^^AhHtg}fY4-ke+uN=IvWB! -zpB_eN#(PSbITgYy#P8A>d;|vn4IhESPOJWjlfdx5{rsUu$19$jZ+!ce2d;no-WM+U^SoPr -zefX8HzWLi@zjprSz<0m9;oY?>25y`4eZUy`=8=aEG=ASb@O5MHw!6pQaLoxkPfKO|I3Eax=c)?WI_;yYW;irz`r -zT~>1O(#vD7+wz^GpU-c4WA^UL9=d#0(}$P$Tz2<0x~2~=E4;npsYLJW-<2*xV>ahM -z@ba^fdoH?kcjYC!KWX~D#@6wDjbrrYYc`JAe9cpNPyI0IlHE0yr*6FOqTSQh{oza7 -zv+w-knPcnkt@(cJ>RS%Jvu5`tnbeXCcPF;6PyM8YTKd}En$5)r_(Sj9{RCV*c=czi -z-0;%uCEx$>v&)`3^3#Lw{9^yHzmD==u>Wd!{6YKo++Q6}t!}(zcX@Kl{()x~>??fv -z>l;F~n?HS^;}5kDbR7CHbYSWylu;qKREwv=HE5Sn%_p9V`E2pN|Du3piAzN5<4jxq%!+W2jOR25s3Uvj#kzR*Z -z!#A3kThRt4iUg8BwRP|~wGbgTJ$Q`16Rn2O#w@|}WGIYByNDr6`WWCWqIr;iIlx+W -z?zePeJ|%la#2MnC363I!D(mBV-FbqLJhL0mhaH-z^?Wb{DAsBFaUQ6hV^ -z=CHUAR}Z*c@{Q^}1_QZKv8Sa)J`cEg`9Z}%VNo{X$je4dmTZL6%aPK7s;X?Hq(q7A -z;W&~_=^nkFoIRkGk~2M!pD!n%2MWlCpT&-W5_dMj+2x3B0N&j{_zcb)m@_XMnc0?& -zG&g4>jg8sJs8LE}PkANzLf$|@f&7KMfxNtI#A3-tv0Qo+mn4# -zT#}7s3u@g1*+3HOfK$oWIxur)Hqtyf8>y(wMzUYB<{9nT?pt;U}>Xv&e$n -z02g+TL2hYg4}6!L2eh1=lF0#z8K@thjZB@YMD{G`AZfh=TAd7B&4AV=N3;WCV>Tkc -zL?U&*Y^16t8!0W%MqIhK><&3%A1E)+Mk=e6$R52dY?^4qK|tXkpl^_Pt;80Du^F3? 
-z0CCd*)9xn4LZVye3=e+pc7O4u%kBFTe{OfVzdmAie+2=3Cw>R!&sNd-olYPA4!hh( -zkLcXyV^F83!Pms@-f3f%BJeCVA9_uKEd -z+*{wmZ^5bSo!)n-mtU%Jzx+}x`ZCu2s_9o$|L@6U?7hDS>goNhJTLpw-*UEa`L4OU -z`gUEi>#kk>yPn^r7N5hB{TBBM2+u%x8p2Z$o`moOgah*}?#K5Rx*vu3-u*83V-WU1 -z*bil?Z?L$>Lzn=e2|^=;NevG7LMFdc~G2DE`L>e@yYK#8nhpj^+RMhM-&)+UPXlQd_Tzry3r|D*(#g@GrDW^+;D23TKi%A~ -zYF-}iSCuubBNs+;VI&vE=6(~oFqJjoetYxsemmUl%XtPH_J_4@IJQo4velP$5k~$Y -z;d*jFBhsDzTel9w^L0qhp;kV~1I+S`+FXEd|D#O)jUOp%EodC#+$m!f7hd6i(}x%C -zcQ!73xIkSyNr?(id8z1>>*7S$&#i -zlFPbFE`yx3c{wyp{GX-Tv7o%_KS1n6Pmkdp|7#w?{*EW7ho?>Zhkl%7uW>E27d8}n -zEFHPa_T1<d{xak7}d`s+{Yo)16Hb7^Q~&TQz2*|8Kb?JCj?ney$cMy}m!c7UI>l -zJD?F>FY7ng_OlBQ7S?z0W(#ltd`4GhCpj;Z{o0iQ)-2zVB_)FNXTD2hjRI+cu|U#0 -z0peS`R|3+&e|#5VKb{@t{+91Ng`ORjGAkeRb|_eFe!c7^5K}f_Yvz7jFINl8JPp3o -z+E3?4;o-mig_EoFZ}|c1_&?zYjQIQl3%5jNXOFRmm~BjgehjQJH2BBC52W*?q|`y* -zByUhVA9vxEcmuv@$aTyTyiAVVh2wY?KKCT6b~<_#6pi=LpNP$!{S9IU^xOi7{WrFu -z3*@)oK!?$X=v#Cgv)F=NxD5TX-;J`X8uEJ^@1x{C0>q(R)NSZREXhuE8c`#Z<_l^w -zeJM?zZ$lS=ZY})NoX_y14b&C*Eb3fzJHCTzq!v;iQ1^{|jx}gH)O{_!>hyPU9(IE| -zHWs(w*|;0;z+Y2AJQ014zCjs)gblmV)(z&yk_=GdQkBdgMaxq3_nBXP~Y8SWkCT1N3xy6a5#KqX)na -zKZIN=GxaT%21RfSx*GmE5O7e`g)T-HqE`X_$M6X>N`~VtkYW}5eaIx3jr5Cn0Hx7< -zl!o&gXc4{=CD7$izZtlI`ZcnnJ=A}rJJ7rMEP5ikn%;=Pwgq*x2g>s^cKX?^+;o$cTdvj8uAGQ)tPH;vLK@?yPV(oxwHdKLTihPI-l{K@?W$W1urK+M3 -z$LR}023oIKfR-W3?vTeyEm}(ip(<0ASVa|bc73wa{9Pu{X$g93k9Lw#GP0n`@9V^N -zGC7URPb;$!uSa?Ld3h|^*@o3m6fX<^aaV9f%To2UGc>KU--9N3A+OV3E3 -zB`>Y6@f44$x@_69EtOT}<>j>-fulPHzo4hGccDVGP;}^U5vDX61Jl5%S*O$9fV56| -zax$+qM|8U4hT^tjO2rvmYLm*X;%Qn%Mn`wkH0|vSb_O#6uMG7dYJg}1TnD`XceXUU -zX|S@%-cV(N4wZ@$-hOV|HjoChq}`OsIJv24v9%<&HLw3yl;K-EQF{7YY3;a$Wo4tB -z+}}(VD6a{y%)}&sXFV>ukQQsSIz-VbR>iWKsi;{+X*p6XQfNIC8X+TZ6pfThDH)mP -z0T^L{Q+9WAn%CMRWUav-IBUqtI5-nqlPtHJro&pAVL1(}E5vqoG%jPSG!YzS$7({l -zCOnOus%g?KqUJI4xCOdZRFYZFE>mxzdYSc_joLnLlaANBpv-C)lng`MolgJcMpOQdmV>9r>uT*xK -z!p4bqm1r!?i*jjKj)j#Sj^ -zbf|QKin-m1iJPH4Msq}VH&me2MVwBHh_nR-ZWp%(h!R)mTy(EOl7Q+#FCcr6Y)6x$ -z%i2yhhx8VQCvOgn6Gg7l?t17>v|vkW;< 
-z;!(ZU3j8gHHcXS1C2n;_@Rxj3zB`}Nt6kdFc|-h#OdrbB2D|{`+~jus;K2c{4NwRW -zi0r}%2&IcaEjw_TQV*F7!r6^)^6B%6?bGWb8)Kv?ob$>mQ)4VO7TT#+^LZIFleiakaFj-I%s9ii;^~Iz#l1GRjI;_(4h~c}(ZXPjXOf1q=VKS@ -zsVFJb$(+WV$piw~H7aDveW%A0QutvhWy$TtlMiTN8L^2lbFp-1=>kv9?AXDNrZ1r`M_W>B>Xxi863uoyo`AWlf7jkRuc8VcK3+;y87%#`-u<(3fU*zZ -zssf7Dq6K0Nt=4EBp=dir(GAoJs!P3*>Q%FpW+K+8>4_|>Wq{x`G^LjJ1l0oM)w(S@ -zOcC;}C0n8fy##?`4Vf5lmh8T}P!CLiiam_#L=EMX5WOe!DOLU{wH~Ygl-ZSSYvxYo -zB7j{?ET`-?yTCPby$;rF&@|Z?Ba00h0xV~j6PtA|!|HqmVU_7@1Xba4KsQ5NIopb4 -z(HnE8bi2-EZHm<{43Jiw|MSEHd#N!^+xbclUv}bSCb7A7_Pf7ry`o1$0ad9nW;NwHLD!_4txM}R=u|qYi|gemPFZJX -zlM7=o8QccZKx=3hyIMa)a6xZpjIeNM6KiaU<}`%LvNR{w0B+%G$OPI`hcqrN)>doF -zBicu3iqml_on5DVe4b84l!EQ%gUmtpueEELgM>lXY7TQdQHT#p}JE(VD-25lDYRE@1>5xb~hCA+eqPqVgkeT|m5 -z;(HWm6W14!3P$vXNKuik+FkA~-%@@>Ia6NFH&ioYG+cv?D@J2nqkFt2FU~Uq<1kgY -zE2xSJwJWc;4v-P5U)xE5k)>m}R=7K1qgbJ4g>Lh+lwB$Bu~i<)ZtPNK0i5L-hs8tv -zxnp&xE;w&aT~}3k%;)WBzvh?8B}=j9`pYk$aCcjY@Yx1jinY>RxZ(?~!DgNqZW~u~ -zS&7e?@BG#E7u-0euAEbKP99T+O{P)LypUnYvO~b`H-N^>s2I%`tsxvLZo*B)^GpfT -z1?&X{nmi(A0S}uCi)~`mD;HBWjB*G?+J}uYWana|>y$8Mm)^?rrJD)GBZ1n4-qIt(&Qd(l^rH -z%4%seJGg$*Z0Tj`tGj+yQj+<#p1UWDsrX}Ax>oI_Fc@ber76q!SdndKbJA4a?B6pa -zLsjxbvkQPf-vHE|2hzDhrz%!mpr&m+)RS_sT9DC}CYH13xC{q^BwyD5pgHB%RO+a3 -zWzuk#5xSDa!`vX2Muw#h -z=3;0E6N;f;aaO%*no6`amiJN{i#L_@hBc#zpm6H*oe^gZs1PP{8Dp7=0%Mp|t2VoM -zm(SJfdfmmkT%L(}h6t*5nR~{G1-PIjM0fW}aI -z$#17lo-w*2K-y_N@T4ce#!#W8SV|*|Z&f4`D3beG!hSJIB1|m=9zs!sLmu0s1t=)W -zfex6H#-wp;D55RwA6ibJ3&sp<;6}Ra{J) -zY_|OU9VTp|qK}2?7Mh~D`3PIBl^(f-CanodSZ}I#*Hf;5hinDCM`w*_y|p=;2pSAE -zbRKQ!%*cw1w^NaB*p58VN|j`LFW?xmE|67Olora%!`Ur#<(u~5HfG+G-j+^W^vdLU -zi{>s}j4NMiy}Gc_b>58KPvx~ZB -z);b@XpL`zs*7sasQ%Ua|Jo_H-Mr%inJ1AXVK5zb%B}GLJyNRnS+j?^~UsOWc?>T6{ -zo51okp%$^0-K$dRX(PQ4YrZCvA+Q_s$ct&DH}aUL+iA*Vaa$;wwwUG0b_S2e0-atN -z2$=xUh!sM%ctVQSNt)`W6V-UE^g{DxqkJI-x8NXVXxledyK`3kaWAyln86dwSAanr -zGN27&SZ^>y2tZV7y-KY$?8gmE8?%(z!oZJKgLR~CP%l=mQlGD8ky>v6@y>(kBJZZG 
-z)dP1inM`gIWl)Khyod^foFeLiWF0;f)7wDG4b~Kyw4L>p2qL*GE21nxI5Jh*&?!ZfN9^G}~T6|n$q0BP@&t7J~mbY4&A{G?xA8Vaz-Dst)lxKfP -z)vVf}qE%d(C#w(uzM~3R`jmL|1cwaPh@KeKG72g9LrhjXm8I@1-YA%2Wy!WdDvP{@ -z5cBecrl$99z2&{;rU~OGxBUM4JKkw-8ZTYG`0Q0@owaJ!S=8sxNQdL`L?Y3R-3MR5 -z&Qv0vN_I;h9l-VvKLjQI*q{CcEqfib`UmV+h(|vW^U4fk3tNp-3)|Imjq{z0i&rq4 -z!1UiY_&Fg+)4stw#Y(VAkkwddwiXw93zrq1Td1~Lb&uy!UgG3=Kh|`)d)<^g9GkB* -zN36YoExg9#wo(NJrKVDMDMe-Jn^Nc-2%C@&To47cQ)YZQH9DO20HxULM9P*EDXVli -zc$g24Kfd#Wx#x6m?un&@;1p|d1J-ZGT5Q>T<)Zs4sdK+s{rtjp51qBVrzkJqr^n4D -z4M*O-B=ggboyBCr%HUzVw&QfLQUXhEHID6g6vM>1Rj74#I_#lWsxyL);&VX -zDaS$xOdP;Tgkd3(9}e4Ou2Pj#UPwR3yKU2_zyITF|2SoG(~q}ZynM=(rb8F)cy&_K -zl=HXzY?suVoIg7?UM!j+R+YOaq>Im~sTzOD*~Qa}`6_%{*Dvbpo0`XsyS-!UeKD1| -zw|DlHvGBxkKL5^HEAJnxs(+5Ep1yc?eSNFjSmF#W&0Nqr({HS?R;_DZzS(BapFnub -zGN6`y>@n1cc8i6h4du0!;{)}L^$W+%o|uXzCasHdqXna^%Dhdi-vFSV*alRi$p@fK -z7+<)rs|Xhr392?9_W4FVE>I$eInMOBit~z)Ue^MLGS9rRcx<89XZQFD$B$*S2vjGq -z^qNi0?q*6Svu@5zShvg(h$?;*7#A??+S;7p&1!(18AU-N0{A3>8yt2{Hr&k>8qv9= -zAnVAsff%!I*37oIZg}W_W|q(XU~za$ZRx17KHs*WIId}Jb@iAUcX?N7WNoFpDy0=bdgb0SzC2JK>a*``T^jpb!ekFgVT&Q1(<-niD|NT8t0AC80G@d -zGz9=)kEeM5s8Onl{iU=FR|~+ktioL?>Wq=nd{0@a3`(ACLG3g7+&)U1rDd%`%S1{a -z1q}A)zz84&X{e}}3j8WhRak~#RyJ(=5l|gyYn%DbEq8xCv$Uus7CNVK?6y%QrL|?j -zD}~uN#d!L?%=VHw`K$J}%v*xL?|Wg&Vd5*&3%95!QglYMuHkXNRl0N;O -zqHD)Q_LDwT(hoWjAmKpeD2SGeW9B3tk+q{pw?)Vl;zMGHE*WiCCxzjTQGzqpm_obC#%HnEF!koTM~A6(B>LD+ -zzGNrPojmuYn=%hj#=GvF+TM0f*N(d-PeoNlD(Uie%@{SNbV)Q?dTz4WepT<-pcntD -z=f5J8qU@o9>e?%poO55brg#s2rF^=@OiRCHY>ug!H=AeL4YahADaf0b>k~c>urMPx -zXh@Bq0ht7IlbPYL3wv=r9&c&|>+Br7(zXS26t+S_v)UyzP;C_aSf`+(5v#E{h)GEq -z3}!Z~$`SZ@3TQi<BrnMoYK`K#2!5`9@b(BtL;K4$sSgjo;hb-jIpsd*( -z9ooK8?sm=o-AI3(xs+MuFpIx-G_gT-^cSA9OT?NEjE6crETqF@ -zwH9lAV6L7&8?a1JKL5XbH1gow$E2yM3Ar!kdETDT!bgvEO|>PZ9dMg_8S@G -zDk&)yTvRWMSym$;uB621@XKrNv_%oWKg2NAjw|!~^QnAU?KMa8L1uuI$l6ZBf|sls -zUDMf_ZM|Byw46u`W~8`~bs*#kFhizPZZt>cAc#ueFJ*d4d$Dl~zk;s{FC0Jp!D$a( -z3s$Z6QmkI?ne+9J{4+t8%<7xAv+btqq@Nc~^5gEMX`{(9J`im&+sj?!e{<6zOo!{F 
-z2PStL?dFE6SYx4uFN!{S(_!f^gnKp){(=1jG#N4uwNmyfN32;kr{?QKIH<|Nvrl;> -zJLnz-5fi!VAcqa~MlEFxX{nMB&FLLz8JGrQ1?6<8(#jatcgh%6r=o+9-@h`L#eO3F -zNa~k%OIz_)JPWtui=-b#>UZQ{c_9s*A5v>H -zU^_d`B|4s4bho>ye2|PK%1B&iRv;>hESNToED9N7(VW8TU)j5}Px|^ssSf|<7HmE5 -z>ieXvsTI@j=~JtJ^x(|1x~NZImmXUb;ZS}OEnNBxpm>@k)(@0O#Fu)xi5IqEg=fZ*XnZKy?P*9o>=_n7ev{p;YUE~HZ -zAk}KZ$rwgMc?tJdjK^FzdA1WmcK+Q-h?SU(@-U)q7{09vH@UC1|P7FUIU}_jeQ$1XTA-{ -z#&sqj+txt~5t{%xdh3b9Z@*2KzWp}X(=TC0I&mTbUn3(gx5C#9s9W?_Fr&Bzs)1{x -z+PFDP2X_v24!4P3quIh4C>=|-wSYgYqV)g?Ev+n;m01`V(O9d|*;$%^ArnwIm^@rS -zy4Jj2xhI5JGfUFg;dG> -zgRBpgxhYwXog0A^wKg&W3AAS;BaqqQBSo+*1EH_mP17GzuQTs!-{crAWzr(9O)GL& -zP+OTRv|Bmy=tJ%6oI$Hm>0Fe9acLb~4OPihs%tgnx@!GcN?^umg8xzD!0V3wjtFY{h7-#`o)yral0Y^* -zk>!F0Q(@!*1k6YE=svt26^Lfs9>$mtw}&Yd4(0nR9(gz6&?XgDQC(HIs)`R0eBcb#O0SJ6QeEvec}4G1FYR*B={obbicwro -zx<-hAkdK{13^6yzDeI21G&mzi%#C@{Mbg942D}rub^K&v@Vtu3!r6jwLDQ`5I^LMgCra=J -z>TLFql9Ouib9xixdrW(jSQD-oFN$&Kf -zNsZH9xis<6q|!0nGZL%v@>%JA>eu+u`2F=wqREKstZo3(g>BS4Yy|Om6l6#*d|iX~ -zh+R+`#*EDjSn-UAMP>o+!f~b-uVD;G%>Wl;X -z^HB$T4(etvKx#)b69}4vGWjWr7-oAjlC&f9S9~8X#|1!aw`8uO7SkItM<{z% -zhrC$k@bzK^9r9^JO_!!u!;oz+WOY#s$x@&ef`u_cDO3h4WOY#1(>=q+g@?UZ+9A=h -z>X(7${8CNQyHJuVpbh?vFDK4k_fpRRj`qt=h_t#0R`Smd>-<@|6)5U*1yZV=Ln4 -z#1il- -zl~?fm>_AONAx+mmB|X1p-T78Et}yadHKQ*{1VW=~>z}-~@0YDpX17k8Cw;qZ<1M54 -zqC!t`Z{w8CqQbmv|w1VbB -zO%)kOG!nbZ1Q9bqTeCS|)HCJs&P{MxUS2FBtx37o@^C%cRjBH%1-Y4>GDc -zVow41u*4kdBF@Q5>cO3#&YQa=yj&4~@yFJd##*oRrrl?E6d$p~M#a|5frwksK -zenQpBoU@K>+j2x!qNrce&yf+*4@Es(5hCByK#}P>M>}f~v@Pk$!MMyu)k$~0^-Fwt -zhNX@}UdKTCY>-D_ZxP2Mma3sD0G?ppRO2eD5l^QW3ey~7X%}+PpjFf3kU)bxrO!fh -z=(ACR-i+4K=B$RDo#>}AX_jgzTCslsbn-lrhRzWkH|$7>>HuNQiYyHxR7Dz5>C!q) -zntEhexaS~;lBu`PUx%I7{Q~QIHomjGxOM;LbEU&~-7kHydBZc4 -z3H`P~IfHUJTOM&>wO*c!q1Z4ViP?glXT-(_RgOHRw6n9Ny;6NV9a>0H20<<|lknIY_j)M>C~yCJ*)`YD;k4sn$e!mXf{mIK=4DeL|@ -z5EtA*gmiG$?LbI>A3}yGSZ*sJ90Bw4c91Du2w8v~T7QF(y$N756QR6C2<3|qo^sH`*_oevirm+E^Kj8QK0=`%%77C1lyxQ0H -zB-bZWYx(Nd?$mjFds7Q9ztppq7wUp_k-CsK6bwfB#dR~A=kkF%U#E9Ad2dEf^6zFk 
-zT$uCLrqY~`4+@-b=*NdcpYr -zYh~)0vtm;UaF^@`L$x-}LlTkeqiG{*kMMZO43uoX) -zZZ9)`Me5wt+IC2kTANy(NUh`jkz96d>-)}Lu^Mg#Pu@7d%nvb){DMbkvS+B+q6_6B%3Hw}kn{3vpJHm|%(xS1T2(*z^gJQl^9y7^!<_vK-R -zDW?T4)6|m~x)%)R(vhE1NCPLAM~*Fqizk(iMU}T=QfPs{lmo``c(8^`4oZ8NuOF2?@c#Bj@|LTb*Fr7MBcHJ -z8*MI68j2+6t0A&F-nV{j%0Fu8{fVnr_w>Kgl>7CCDzEf*BQV6!k*OHBkhR}F+YF*FzwFI?g(bIEw&)PY?@kEMOwpU8W*Pq=t -zd-5b+P+lNgKjrX;%WdZ7&ys+0?dSc%$ZH?(4`;7sbn}NsWCfBeF((D}Am|0xb5rN_ -z!Kw8epo_*_WKu5D3X;~lu&sL~sX8gZigSdhKfq@Ns&X0RvlQoTnjVeA*A<%g_AP^l -z0`KaLB0`G(Z-M$ovwtzqG3lSOT>po*nemSR5FdkISVfifh -zQ*rWAKo&j$zb_>CqbcCv@klx@_(87v)7|MrEbJ#=i$zKP$ur~+FDhS;`V)Z>r3;2a -zJ^`c^d?5rn3do*F(B}^gJ&D7U6zJ!n277M4qs!qWhX?Pi2MtrBH1rTa$km0!uv@`5di7v2OvhF+%foK6uz8*bA0H_ -za3dHzz3f6ToPe(eNm+scxK2XiZXuS1f&gC&Ls}sc3j4ze(s&RHKna2*pKyTWM{*I! -z*OL%~i~wdSKeP>e65_#-Tpr)>Unrh^8kC>5xAW -zi-*#X1z?XqJKV -za3cvUJqn);BvR=}G8Kg{#^Q-^6j)+11WZZ@b_;1=JQ4{(GJvZu4V(`cXAH^&aw(mN -z0|ElFr9RHatPgi_-4o#O9S<$Lx}*CI3@Vg5uux8>yL-Sz=1;k7!(Vb -zBf$fZWIygQjr_>-Y@Bn}z_;gG;=5|AntCLj%Tr()?yB9TY|oBff1nZ2%Yx}>*q;uhLa~TH9tHXX3=E~f -zHjPITX((nO5rfiocSmBNhy@c+mRL8C3kV}2C8U7=#}i2)6<`U#heD||Xk5THBB|~O -zkVYWjPsF>S0P%DJbm&Af0J1(Bi*%=wP>pb$P!!-%3~~=dL%{YxuL}bcAWC5@5l#T} -z2e|n|QK%=86+r>04mdGb1kfSNMMJ=&K)nnQ-A5oF -z0ShG@iUmNz5ZyI;#+-r#OU`WF{1d%GkU5br;g6?4EXO0sDDeFR@Ej;oBo>S&fsFHqNw0L(4co^`T6bSbSGyurRXb^N5A&~?H0W^DmpxaOC -zD#U@=LbZ~D5Qzc<6o8|FFefAnN<|cyTLM@!acP_e;R`hbHV^6)Fwz*vC18!9Ndb}r -ztARH`lS+e-f=ttqbaynF^aIz5kT1rfiFh28cz|azL0C#63{Rt=i~xNAH;P3A!4z;F -zP{$&mdH|ONwJiYpSFjtHbrLvx$_ENqifBOHK_N+07(mQ4ksgqBDggRg3ivK4R7oKk -z0)-)%i~|uRp)q68RCh8eBmhppqtX#TEKs&X2_X$)1(>i;0EH|NOecK*`TghjpWpxH -zZvyFR-Po6!9bdgHMf%esK$~Gc3c^p|!B*|$|Nbx6Wm`RC{n>phy5p-?k^a>gCk=W5 -zb%%nBMvd$wB|`xQ&c_D~XBhU%-it&d!$TINU)7&`5EF*SX$+*VJ~H$Qi0t7f+0?$_ -zk*MK&5&voT{Qkgc_X5MCFhlo7jJOQli-t#(C_6%87#<=K{Qr1}#M{0$wSkw%MhKV; -T!viJ;E(+{1YEXtu4BY<*Qm7~* - diff --git a/rhodecode/tests/fixtures/module_scoped_baseapp.py b/rhodecode/tests/fixtures/module_scoped_baseapp.py new file mode 100644 --- /dev/null +++ 
b/rhodecode/tests/fixtures/module_scoped_baseapp.py @@ -0,0 +1,49 @@ +# Copyright (C) 2010-2024 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +import pytest +from rhodecode.tests.utils import CustomTestApp +from rhodecode.tests.fixtures.fixture_utils import plain_http_environ, plain_config_stub, plain_request_stub + + +@pytest.fixture(scope='module') +def module_request_stub(): + return plain_request_stub() + + +@pytest.fixture(scope='module') +def module_config_stub(request, module_request_stub): + return plain_config_stub(request, module_request_stub) + + +@pytest.fixture(scope='module') +def module_http_environ(): + """ + HTTP extra environ keys. + + Used by the test application and as well for setting up the pylons + environment. In the case of the fixture "app" it should be possible + to override this for a specific test case. 
+ """ + return plain_http_environ() + + +@pytest.fixture(scope='module') +def module_app(request, module_config_stub, module_http_environ, baseapp): + app = CustomTestApp(baseapp, extra_environ=module_http_environ) + return app diff --git a/rhodecode/tests/fixture.py b/rhodecode/tests/fixtures/rc_fixture.py rename from rhodecode/tests/fixture.py rename to rhodecode/tests/fixtures/rc_fixture.py --- a/rhodecode/tests/fixture.py +++ b/rhodecode/tests/fixtures/rc_fixture.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -37,17 +36,16 @@ from rhodecode.model.user_group import U from rhodecode.model.gist import GistModel from rhodecode.model.auth_token import AuthTokenModel from rhodecode.model.scm import ScmModel -from rhodecode.authentication.plugins.auth_rhodecode import \ - RhodeCodeAuthPlugin +from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin from rhodecode.tests import TEST_USER_ADMIN_LOGIN dn = os.path.dirname -FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures') +FIXTURES = os.path.join(dn(os.path.abspath(__file__)), "diff_fixtures") def error_function(*args, **kwargs): - raise Exception('Total Crash !') + raise Exception("Total Crash !") class TestINI(object): @@ -59,8 +57,7 @@ class TestINI(object): print('paster server %s' % new_test_ini) """ - def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT', - destroy=True, dir=None): + def __init__(self, ini_file_path, ini_params, new_file_prefix="DEFAULT", destroy=True, dir=None): self.ini_file_path = ini_file_path self.ini_params = ini_params self.new_path = None @@ -85,9 +82,8 @@ class TestINI(object): parser[section][key] = str(val) with tempfile.NamedTemporaryFile( - mode='w', - prefix=self.new_path_prefix, suffix='.ini', dir=self._dir, - delete=False) as new_ini_file: + mode="w", prefix=self.new_path_prefix, suffix=".ini", dir=self._dir, delete=False + 
) as new_ini_file: parser.write(new_ini_file) self.new_path = new_ini_file.name @@ -99,7 +95,6 @@ class TestINI(object): class Fixture(object): - def anon_access(self, status): """ Context process for disabling anonymous access. use like: @@ -139,22 +134,19 @@ class Fixture(object): class context(object): def _get_plugin(self): - plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid) + plugin_id = "egg:rhodecode-enterprise-ce#{}".format(RhodeCodeAuthPlugin.uid) plugin = RhodeCodeAuthPlugin(plugin_id) return plugin def __enter__(self): - plugin = self._get_plugin() - plugin.create_or_update_setting('auth_restriction', auth_restriction) + plugin.create_or_update_setting("auth_restriction", auth_restriction) Session().commit() SettingsModel().invalidate_settings_cache(hard=True) def __exit__(self, exc_type, exc_val, exc_tb): - plugin = self._get_plugin() - plugin.create_or_update_setting( - 'auth_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE) + plugin.create_or_update_setting("auth_restriction", RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE) Session().commit() SettingsModel().invalidate_settings_cache(hard=True) @@ -173,62 +165,61 @@ class Fixture(object): class context(object): def _get_plugin(self): - plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid) + plugin_id = "egg:rhodecode-enterprise-ce#{}".format(RhodeCodeAuthPlugin.uid) plugin = RhodeCodeAuthPlugin(plugin_id) return plugin def __enter__(self): plugin = self._get_plugin() - plugin.create_or_update_setting('scope_restriction', scope_restriction) + plugin.create_or_update_setting("scope_restriction", scope_restriction) Session().commit() SettingsModel().invalidate_settings_cache(hard=True) def __exit__(self, exc_type, exc_val, exc_tb): plugin = self._get_plugin() - plugin.create_or_update_setting( - 'scope_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL) + plugin.create_or_update_setting("scope_restriction", 
RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL) Session().commit() SettingsModel().invalidate_settings_cache(hard=True) return context() def _get_repo_create_params(self, **custom): - repo_type = custom.get('repo_type') or 'hg' + repo_type = custom.get("repo_type") or "hg" default_landing_ref, landing_ref_lbl = ScmModel.backend_landing_ref(repo_type) defs = { - 'repo_name': None, - 'repo_type': repo_type, - 'clone_uri': '', - 'push_uri': '', - 'repo_group': '-1', - 'repo_description': 'DESC', - 'repo_private': False, - 'repo_landing_commit_ref': default_landing_ref, - 'repo_copy_permissions': False, - 'repo_state': Repository.STATE_CREATED, + "repo_name": None, + "repo_type": repo_type, + "clone_uri": "", + "push_uri": "", + "repo_group": "-1", + "repo_description": "DESC", + "repo_private": False, + "repo_landing_commit_ref": default_landing_ref, + "repo_copy_permissions": False, + "repo_state": Repository.STATE_CREATED, } defs.update(custom) - if 'repo_name_full' not in custom: - defs.update({'repo_name_full': defs['repo_name']}) + if "repo_name_full" not in custom: + defs.update({"repo_name_full": defs["repo_name"]}) # fix the repo name if passed as repo_name_full - if defs['repo_name']: - defs['repo_name'] = defs['repo_name'].split('/')[-1] + if defs["repo_name"]: + defs["repo_name"] = defs["repo_name"].split("/")[-1] return defs def _get_group_create_params(self, **custom): defs = { - 'group_name': None, - 'group_description': 'DESC', - 'perm_updates': [], - 'perm_additions': [], - 'perm_deletions': [], - 'group_parent_id': -1, - 'enable_locking': False, - 'recursive': False, + "group_name": None, + "group_description": "DESC", + "perm_updates": [], + "perm_additions": [], + "perm_deletions": [], + "group_parent_id": -1, + "enable_locking": False, + "recursive": False, } defs.update(custom) @@ -236,16 +227,16 @@ class Fixture(object): def _get_user_create_params(self, name, **custom): defs = { - 'username': name, - 'password': 'qweqwe', - 'email': 
'%s+test@rhodecode.org' % name, - 'firstname': 'TestUser', - 'lastname': 'Test', - 'description': 'test description', - 'active': True, - 'admin': False, - 'extern_type': 'rhodecode', - 'extern_name': None, + "username": name, + "password": "qweqwe", + "email": "%s+test@rhodecode.org" % name, + "firstname": "TestUser", + "lastname": "Test", + "description": "test description", + "active": True, + "admin": False, + "extern_type": "rhodecode", + "extern_name": None, } defs.update(custom) @@ -253,30 +244,30 @@ class Fixture(object): def _get_user_group_create_params(self, name, **custom): defs = { - 'users_group_name': name, - 'user_group_description': 'DESC', - 'users_group_active': True, - 'user_group_data': {}, + "users_group_name": name, + "user_group_description": "DESC", + "users_group_active": True, + "user_group_data": {}, } defs.update(custom) return defs def create_repo(self, name, **kwargs): - repo_group = kwargs.get('repo_group') + repo_group = kwargs.get("repo_group") if isinstance(repo_group, RepoGroup): - kwargs['repo_group'] = repo_group.group_id + kwargs["repo_group"] = repo_group.group_id name = name.split(Repository.NAME_SEP)[-1] name = Repository.NAME_SEP.join((repo_group.group_name, name)) - if 'skip_if_exists' in kwargs: - del kwargs['skip_if_exists'] + if "skip_if_exists" in kwargs: + del kwargs["skip_if_exists"] r = Repository.get_by_repo_name(name) if r: return r form_data = self._get_repo_create_params(repo_name=name, **kwargs) - cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) + cur_user = kwargs.get("cur_user", TEST_USER_ADMIN_LOGIN) RepoModel().create(form_data, cur_user) Session().commit() repo = Repository.get_by_repo_name(name) @@ -287,17 +278,15 @@ class Fixture(object): repo_to_fork = Repository.get_by_repo_name(repo_to_fork) form_data = self._get_repo_create_params( - repo_name=fork_name, - fork_parent_id=repo_to_fork.repo_id, - repo_type=repo_to_fork.repo_type, - **kwargs) + repo_name=fork_name, 
fork_parent_id=repo_to_fork.repo_id, repo_type=repo_to_fork.repo_type, **kwargs + ) # TODO: fix it !! - form_data['description'] = form_data['repo_description'] - form_data['private'] = form_data['repo_private'] - form_data['landing_rev'] = form_data['repo_landing_commit_ref'] + form_data["description"] = form_data["repo_description"] + form_data["private"] = form_data["repo_private"] + form_data["landing_rev"] = form_data["repo_landing_commit_ref"] - owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) + owner = kwargs.get("cur_user", TEST_USER_ADMIN_LOGIN) RepoModel().create_fork(form_data, cur_user=owner) Session().commit() r = Repository.get_by_repo_name(fork_name) @@ -305,7 +294,7 @@ class Fixture(object): return r def destroy_repo(self, repo_name, **kwargs): - RepoModel().delete(repo_name, pull_requests='delete', artifacts='delete', **kwargs) + RepoModel().delete(repo_name, pull_requests="delete", artifacts="delete", **kwargs) Session().commit() def destroy_repo_on_filesystem(self, repo_name): @@ -314,17 +303,16 @@ class Fixture(object): shutil.rmtree(rm_path) def create_repo_group(self, name, **kwargs): - if 'skip_if_exists' in kwargs: - del kwargs['skip_if_exists'] + if "skip_if_exists" in kwargs: + del kwargs["skip_if_exists"] gr = RepoGroup.get_by_group_name(group_name=name) if gr: return gr form_data = self._get_group_create_params(group_name=name, **kwargs) - owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) + owner = kwargs.get("cur_user", TEST_USER_ADMIN_LOGIN) gr = RepoGroupModel().create( - group_name=form_data['group_name'], - group_description=form_data['group_name'], - owner=owner) + group_name=form_data["group_name"], group_description=form_data["group_name"], owner=owner + ) Session().commit() gr = RepoGroup.get_by_group_name(gr.group_name) return gr @@ -334,8 +322,8 @@ class Fixture(object): Session().commit() def create_user(self, name, **kwargs): - if 'skip_if_exists' in kwargs: - del kwargs['skip_if_exists'] + if "skip_if_exists" in 
kwargs: + del kwargs["skip_if_exists"] user = User.get_by_username(name) if user: return user @@ -343,8 +331,7 @@ class Fixture(object): user = UserModel().create(form_data) # create token for user - AuthTokenModel().create( - user=user, description=u'TEST_USER_TOKEN') + AuthTokenModel().create(user=user, description="TEST_USER_TOKEN") Session().commit() user = User.get_by_username(user.username) @@ -368,22 +355,24 @@ class Fixture(object): Session().commit() def create_user_group(self, name, **kwargs): - if 'skip_if_exists' in kwargs: - del kwargs['skip_if_exists'] + if "skip_if_exists" in kwargs: + del kwargs["skip_if_exists"] gr = UserGroup.get_by_group_name(group_name=name) if gr: return gr # map active flag to the real attribute. For API consistency of fixtures - if 'active' in kwargs: - kwargs['users_group_active'] = kwargs['active'] - del kwargs['active'] + if "active" in kwargs: + kwargs["users_group_active"] = kwargs["active"] + del kwargs["active"] form_data = self._get_user_group_create_params(name, **kwargs) - owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) + owner = kwargs.get("cur_user", TEST_USER_ADMIN_LOGIN) user_group = UserGroupModel().create( - name=form_data['users_group_name'], - description=form_data['user_group_description'], - owner=owner, active=form_data['users_group_active'], - group_data=form_data['user_group_data']) + name=form_data["users_group_name"], + description=form_data["user_group_description"], + owner=owner, + active=form_data["users_group_active"], + group_data=form_data["user_group_data"], + ) Session().commit() user_group = UserGroup.get_by_group_name(user_group.users_group_name) return user_group @@ -394,18 +383,23 @@ class Fixture(object): def create_gist(self, **kwargs): form_data = { - 'description': 'new-gist', - 'owner': TEST_USER_ADMIN_LOGIN, - 'gist_type': GistModel.cls.GIST_PUBLIC, - 'lifetime': -1, - 'acl_level': Gist.ACL_LEVEL_PUBLIC, - 'gist_mapping': {b'filename1.txt': {'content': b'hello world'},} + 
"description": "new-gist", + "owner": TEST_USER_ADMIN_LOGIN, + "gist_type": GistModel.cls.GIST_PUBLIC, + "lifetime": -1, + "acl_level": Gist.ACL_LEVEL_PUBLIC, + "gist_mapping": { + b"filename1.txt": {"content": b"hello world"}, + }, } form_data.update(kwargs) gist = GistModel().create( - description=form_data['description'], owner=form_data['owner'], - gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'], - lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level'] + description=form_data["description"], + owner=form_data["owner"], + gist_mapping=form_data["gist_mapping"], + gist_type=form_data["gist_type"], + lifetime=form_data["lifetime"], + gist_acl_level=form_data["acl_level"], ) Session().commit() return gist @@ -420,7 +414,7 @@ class Fixture(object): Session().commit() def load_resource(self, resource_name, strip=False): - with open(os.path.join(FIXTURES, resource_name), 'rb') as f: + with open(os.path.join(FIXTURES, resource_name), "rb") as f: source = f.read() if strip: source = source.strip() diff --git a/rhodecode/tests/fixtures/rcextensions_fixtures.py b/rhodecode/tests/fixtures/rcextensions_fixtures.py new file mode 100644 --- /dev/null +++ b/rhodecode/tests/fixtures/rcextensions_fixtures.py @@ -0,0 +1,157 @@ +# Copyright (C) 2010-2024 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. 
If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +import os +import shutil +import logging +import textwrap + +import pytest + +import rhodecode +import rhodecode.lib + +from rhodecode.tests import console_printer + +log = logging.getLogger(__name__) + + +def store_rcextensions(destination, force=False): + from rhodecode.config import rcextensions + package_path = rcextensions.__path__[0] + + # Note: rcextensions are looked up based on the path of the ini file + rcextensions_path = os.path.join(destination, 'rcextensions') + + if force: + shutil.rmtree(rcextensions_path, ignore_errors=True) + shutil.copytree(package_path, rcextensions_path) + + +@pytest.fixture(scope="module") +def rcextensions(request, tmp_storage_location): + """ + Installs a testing rcextensions pack to ensure they work as expected. + """ + + # Note: rcextensions are looked up based on the path of the ini file + rcextensions_path = os.path.join(tmp_storage_location, 'rcextensions') + + if os.path.exists(rcextensions_path): + pytest.fail( + f"Path for rcextensions already exists, please clean up before " + f"test run this path: {rcextensions_path}") + else: + store_rcextensions(tmp_storage_location) + + +@pytest.fixture(scope='function') +def rcextensions_present(request): + + class RcExtensionsPresent: + def __init__(self, rcextensions_location): + self.rcextensions_location = rcextensions_location + + def __enter__(self): + self.store() + + def __exit__(self, exc_type, exc_val, exc_tb): + self.cleanup() + + def store(self): + store_rcextensions(self.rcextensions_location) + + def cleanup(self): + shutil.rmtree(os.path.join(self.rcextensions_location, 'rcextensions')) + + return RcExtensionsPresent + + +@pytest.fixture(scope='function') +def rcextensions_modification(request): + """ + example usage:: + + hook_name = '_pre_push_hook' + code = ''' + 
raise OSError('failed') + return HookResponse(1, 'FAILED') + ''' + mods = [ + (hook_name, code), + ] + # rhodecode.ini file location, where rcextensions needs to live + rcstack_location = os.path.dirname(rcstack.config_file) + with rcextensions_modification(rcstack_location, mods): + # do some stuff + """ + + class RcextensionsModification: + def __init__(self, rcextensions_location, mods, create_if_missing=False, force_create=False): + self.force_create = force_create + self.create_if_missing = create_if_missing + self.rcextensions_location = rcextensions_location + self.mods = mods + if not isinstance(mods, list): + raise ValueError('mods must be a list of modifications') + + def __enter__(self): + if self.create_if_missing: + store_rcextensions(self.rcextensions_location, force=self.force_create) + + for hook_name, method_body in self.mods: + self.modification(hook_name, method_body) + + def __exit__(self, exc_type, exc_val, exc_tb): + self.cleanup() + + def cleanup(self): + # reset rcextensions to "bare" state from the package + store_rcextensions(self.rcextensions_location, force=True) + + def modification(self, hook_name, method_body): + import ast + + rcextensions_path = os.path.join(self.rcextensions_location, 'rcextensions') + + # Load the code from hooks.py + hooks_filename = os.path.join(rcextensions_path, 'hooks.py') + with open(hooks_filename, "r") as file: + tree = ast.parse(file.read()) + + # Define new content for the function as a string + new_code = textwrap.dedent(method_body) + + # Parse the new code to add it to the function + new_body = ast.parse(new_code).body + + # Walk through the AST to find and modify the function + for node in tree.body: + if isinstance(node, ast.FunctionDef) and node.name == hook_name: + node.body = new_body # Replace the function body with the new body + + # Compile the modified AST back to code + compile(tree, hooks_filename, "exec") + + # Write the updated code back to hooks.py + with open(hooks_filename, "w") as 
file: + file.write(ast.unparse(tree)) # Requires Python 3.9+ + + console_printer(f" [green]rcextensions[/green] Updated the body of '{hooks_filename}' function '{hook_name}'") + + return RcextensionsModification diff --git a/rhodecode/tests/functional/test_delegated_admin.py b/rhodecode/tests/functional/test_delegated_admin.py --- a/rhodecode/tests/functional/test_delegated_admin.py +++ b/rhodecode/tests/functional/test_delegated_admin.py @@ -21,7 +21,7 @@ import pytest from rhodecode.tests import TestController -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests.routes import route_path diff --git a/rhodecode/tests/integrations/__init__.py b/rhodecode/tests/integrations/__init__.py new file mode 100644 --- /dev/null +++ b/rhodecode/tests/integrations/__init__.py @@ -0,0 +1,17 @@ +# Copyright (C) 2010-2024 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. 
If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ diff --git a/rhodecode/tests/integrations/test_integration.py b/rhodecode/tests/integrations/test_integration.py --- a/rhodecode/tests/integrations/test_integration.py +++ b/rhodecode/tests/integrations/test_integration.py @@ -20,7 +20,7 @@ import time import pytest from rhodecode import events -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.model.db import Session, Integration from rhodecode.model.integration import IntegrationModel diff --git a/rhodecode/tests/lib/auth_modules/__init__.py b/rhodecode/tests/lib/auth_modules/__init__.py new file mode 100644 --- /dev/null +++ b/rhodecode/tests/lib/auth_modules/__init__.py @@ -0,0 +1,17 @@ +# Copyright (C) 2010-2024 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. 
If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ diff --git a/rhodecode/tests/lib/middleware/test_simplehg.py b/rhodecode/tests/lib/middleware/test_simplehg.py --- a/rhodecode/tests/lib/middleware/test_simplehg.py +++ b/rhodecode/tests/lib/middleware/test_simplehg.py @@ -123,10 +123,14 @@ def test_get_config(user_util, baseapp, ('web', 'allow_push', '*'), ('web', 'allow_archive', 'gz zip bz2'), ('web', 'baseurl', '/'), + + # largefiles data... ('vcs_git_lfs', 'store_location', hg_config_org.get('vcs_git_lfs', 'store_location')), + ('largefiles', 'usercache', hg_config_org.get('largefiles', 'usercache')), + ('vcs_svn_branch', '9aac1a38c3b8a0cdc4ae0f960a5f83332bc4fa5e', '/branches/*'), ('vcs_svn_branch', 'c7e6a611c87da06529fd0dd733308481d67c71a8', '/trunk'), - ('largefiles', 'usercache', hg_config_org.get('largefiles', 'usercache')), + ('hooks', 'preoutgoing.pre_pull', 'python:vcsserver.hooks.pre_pull'), ('hooks', 'prechangegroup.pre_push', 'python:vcsserver.hooks.pre_push'), ('hooks', 'outgoing.pull_logger', 'python:vcsserver.hooks.log_pull_action'), diff --git a/rhodecode/tests/lib/middleware/test_simplevcs.py b/rhodecode/tests/lib/middleware/test_simplevcs.py --- a/rhodecode/tests/lib/middleware/test_simplevcs.py +++ b/rhodecode/tests/lib/middleware/test_simplevcs.py @@ -22,7 +22,8 @@ import pytest from rhodecode.lib.str_utils import base64_to_str from rhodecode.lib.utils2 import AttributeDict -from rhodecode.tests.utils import CustomTestApp +from rhodecode.tests.fixtures.fixture_pyramid import ini_config +from rhodecode.tests.utils import CustomTestApp, AuthPluginManager from rhodecode.lib.caching_query import FromCache from rhodecode.lib.middleware import simplevcs @@ -34,6 +35,57 @@ from rhodecode.tests import ( HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS) from rhodecode.tests.lib.middleware import 
mock_scm_app +from rhodecode.model.db import Permission, User +from rhodecode.model.meta import Session +from rhodecode.model.user import UserModel + + +@pytest.fixture() +def enable_auth_plugins(request, app): + """ + Return a factory object that when called, allows to control which + authentication plugins are enabled. + """ + + enabler = AuthPluginManager() + request.addfinalizer(enabler.cleanup) + + return enabler + + +@pytest.fixture() +def test_user_factory(request, baseapp): + + def user_factory(username='test_user', password='qweqwe', first_name='John', last_name='Testing', **kwargs): + usr = UserModel().create_or_update( + username=username, + password=password, + email=f'{username}@rhodecode.org', + firstname=first_name, lastname=last_name) + Session().commit() + + for k, v in kwargs.items(): + setattr(usr, k, v) + Session().add(usr) + + new_usr = User.get_by_username(username) + new_usr_id = new_usr.user_id + assert new_usr == usr + + @request.addfinalizer + def cleanup(): + if User.get(new_usr_id) is None: + return + + perm = Permission.query().all() + for p in perm: + UserModel().revoke_perm(usr, p) + + UserModel().delete(new_usr_id) + Session().commit() + return usr + + return user_factory class StubVCSController(simplevcs.SimpleVCS): @@ -107,8 +159,7 @@ def _remove_default_user_from_query_cach Session().expire(user) -def test_handles_exceptions_during_permissions_checks( - vcscontroller, disable_anonymous_user, enable_auth_plugins, test_user_factory): +def test_handles_exceptions_during_permissions_checks(vcscontroller, disable_anonymous_user, enable_auth_plugins, test_user_factory): test_password = 'qweqwe' test_user = test_user_factory(password=test_password, extern_type='headers', extern_name='headers') @@ -373,29 +424,30 @@ class TestShadowRepoExposure(object): controller.vcs_repo_name) -@pytest.mark.usefixtures('baseapp') class TestGenerateVcsResponse(object): - def test_ensures_that_start_response_is_called_early_enough(self): - 
self.call_controller_with_response_body(iter(['a', 'b'])) + def test_ensures_that_start_response_is_called_early_enough(self, baseapp): + app_ini_config = baseapp.config.registry.settings['__file__'] + self.call_controller_with_response_body(app_ini_config, iter(['a', 'b'])) assert self.start_response.called - def test_invalidates_cache_after_body_is_consumed(self): - result = self.call_controller_with_response_body(iter(['a', 'b'])) + def test_invalidates_cache_after_body_is_consumed(self, baseapp): + app_ini_config = baseapp.config.registry.settings['__file__'] + result = self.call_controller_with_response_body(app_ini_config, iter(['a', 'b'])) assert not self.was_cache_invalidated() # Consume the result list(result) assert self.was_cache_invalidated() - def test_raises_unknown_exceptions(self): - result = self.call_controller_with_response_body( - self.raise_result_iter(vcs_kind='unknown')) + def test_raises_unknown_exceptions(self, baseapp): + app_ini_config = baseapp.config.registry.settings['__file__'] + result = self.call_controller_with_response_body(app_ini_config, self.raise_result_iter(vcs_kind='unknown')) with pytest.raises(Exception): list(result) - def call_controller_with_response_body(self, response_body): + def call_controller_with_response_body(self, ini_config, response_body): + settings = { - 'base_path': 'fake_base_path', 'vcs.hooks.protocol.v2': 'celery', 'vcs.hooks.direct_calls': False, } @@ -407,7 +459,7 @@ class TestGenerateVcsResponse(object): result = controller._generate_vcs_response( environ={}, start_response=self.start_response, repo_path='fake_repo_path', - extras={}, action='push') + extras={'config': ini_config}, action='push') self.controller = controller return result diff --git a/rhodecode/tests/lib/middleware/utils/test_scm_app_http.py b/rhodecode/tests/lib/middleware/utils/test_scm_app_http.py --- a/rhodecode/tests/lib/middleware/utils/test_scm_app_http.py +++ b/rhodecode/tests/lib/middleware/utils/test_scm_app_http.py @@ 
-19,6 +19,7 @@ # and proprietary license terms, please see https://rhodecode.com/licenses/ import pytest +import tempfile from rhodecode.tests.utils import CustomTestApp from rhodecode.lib.middleware.utils import scm_app_http, scm_app @@ -41,10 +42,13 @@ def vcsserver_http_echo_app(request, vcs """ A running VCSServer with the EchoApp activated via HTTP. """ - vcsserver = vcsserver_factory( + store_dir = tempfile.gettempdir() + + vcsserver_instance = vcsserver_factory( request=request, + store_dir=store_dir, overrides=[{'app:main': {'dev.use_echo_app': 'true'}}]) - return vcsserver + return vcsserver_instance @pytest.fixture(scope='session') diff --git a/rhodecode/tests/lib/test_auth.py b/rhodecode/tests/lib/test_auth.py --- a/rhodecode/tests/lib/test_auth.py +++ b/rhodecode/tests/lib/test_auth.py @@ -1,5 +1,4 @@ - -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License, version 3 diff --git a/rhodecode/tests/lib/test_diffs.py b/rhodecode/tests/lib/test_diffs.py --- a/rhodecode/tests/lib/test_diffs.py +++ b/rhodecode/tests/lib/test_diffs.py @@ -30,7 +30,7 @@ from rhodecode.lib.diffs import ( from rhodecode.lib.utils2 import AttributeDict from rhodecode.lib.vcs.backends.git import GitCommit -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.tests import no_newline_id_generator from rhodecode.lib.vcs.backends.git.repository import GitDiff from rhodecode.lib.vcs.backends.hg.repository import MercurialDiff diff --git a/rhodecode/tests/lib/test_hooks_daemon.py b/rhodecode/tests/lib/test_hooks_daemon.py --- a/rhodecode/tests/lib/test_hooks_daemon.py +++ b/rhodecode/tests/lib/test_hooks_daemon.py @@ -1,5 +1,4 @@ - -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can 
redistribute it and/or modify # it under the terms of the GNU Affero General Public License, version 3 @@ -18,305 +17,71 @@ # and proprietary license terms, please see https://rhodecode.com/licenses/ import logging -import io import mock -import msgpack import pytest import tempfile -from rhodecode.lib.hook_daemon import http_hooks_deamon from rhodecode.lib.hook_daemon import celery_hooks_deamon -from rhodecode.lib.hook_daemon import hook_module +from rhodecode.lib.hook_daemon import utils as hooks_utils from rhodecode.lib.hook_daemon import base as hook_base -from rhodecode.lib.str_utils import safe_bytes + from rhodecode.tests.utils import assert_message_in_log -from rhodecode.lib.ext_json import json - -test_proto = http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO class TestHooks(object): def test_hooks_can_be_used_as_a_context_processor(self): - hooks = hook_module.Hooks() + hooks = hook_base.Hooks() with hooks as return_value: pass assert hooks == return_value - -class TestHooksHttpHandler(object): - def test_read_request_parses_method_name_and_arguments(self): - data = { - 'method': 'test', - 'extras': { - 'param1': 1, - 'param2': 'a' - } - } - request = self._generate_post_request(data) - hooks_patcher = mock.patch.object( - hook_module.Hooks, data['method'], create=True, return_value=1) - - with hooks_patcher as hooks_mock: - handler = http_hooks_deamon.HooksHttpHandler - handler.DEFAULT_HOOKS_PROTO = test_proto - handler.wbufsize = 10240 - MockServer(handler, request) - - hooks_mock.assert_called_once_with(data['extras']) - - def test_hooks_serialized_result_is_returned(self): - request = self._generate_post_request({}) - rpc_method = 'test' - hook_result = { - 'first': 'one', - 'second': 2 - } - extras = {} - - # patching our _read to return test method and proto used - read_patcher = mock.patch.object( - http_hooks_deamon.HooksHttpHandler, '_read_request', - return_value=(test_proto, rpc_method, extras)) - - # patch Hooks instance to return 
hook_result data on 'test' call - hooks_patcher = mock.patch.object( - hook_module.Hooks, rpc_method, create=True, - return_value=hook_result) - - with read_patcher, hooks_patcher: - handler = http_hooks_deamon.HooksHttpHandler - handler.DEFAULT_HOOKS_PROTO = test_proto - handler.wbufsize = 10240 - server = MockServer(handler, request) - - expected_result = http_hooks_deamon.HooksHttpHandler.serialize_data(hook_result) - - server.request.output_stream.seek(0) - assert server.request.output_stream.readlines()[-1] == expected_result - - def test_exception_is_returned_in_response(self): - request = self._generate_post_request({}) - rpc_method = 'test' - - read_patcher = mock.patch.object( - http_hooks_deamon.HooksHttpHandler, '_read_request', - return_value=(test_proto, rpc_method, {})) - - hooks_patcher = mock.patch.object( - hook_module.Hooks, rpc_method, create=True, - side_effect=Exception('Test exception')) - - with read_patcher, hooks_patcher: - handler = http_hooks_deamon.HooksHttpHandler - handler.DEFAULT_HOOKS_PROTO = test_proto - handler.wbufsize = 10240 - server = MockServer(handler, request) - - server.request.output_stream.seek(0) - data = server.request.output_stream.readlines() - msgpack_data = b''.join(data[5:]) - org_exc = http_hooks_deamon.HooksHttpHandler.deserialize_data(msgpack_data) - expected_result = { - 'exception': 'Exception', - 'exception_traceback': org_exc['exception_traceback'], - 'exception_args': ['Test exception'] - } - assert org_exc == expected_result - - def test_log_message_writes_to_debug_log(self, caplog): - ip_port = ('0.0.0.0', 8888) - handler = http_hooks_deamon.HooksHttpHandler(MockRequest('POST /'), ip_port, mock.Mock()) - fake_date = '1/Nov/2015 00:00:00' - date_patcher = mock.patch.object( - handler, 'log_date_time_string', return_value=fake_date) - - with date_patcher, caplog.at_level(logging.DEBUG): - handler.log_message('Some message %d, %s', 123, 'string') - - expected_message = f"HOOKS: client={ip_port} - - 
[{fake_date}] Some message 123, string" - - assert_message_in_log( - caplog.records, expected_message, - levelno=logging.DEBUG, module='http_hooks_deamon') - - def _generate_post_request(self, data, proto=test_proto): - if proto == http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO: - payload = msgpack.packb(data) - else: - payload = json.dumps(data) - - return b'POST / HTTP/1.0\nContent-Length: %d\n\n%b' % ( - len(payload), payload) - - -class ThreadedHookCallbackDaemon(object): - def test_constructor_calls_prepare(self): - prepare_daemon_patcher = mock.patch.object( - http_hooks_deamon.ThreadedHookCallbackDaemon, '_prepare') - with prepare_daemon_patcher as prepare_daemon_mock: - http_hooks_deamon.ThreadedHookCallbackDaemon() - prepare_daemon_mock.assert_called_once_with() - - def test_run_is_called_on_context_start(self): - patchers = mock.patch.multiple( - http_hooks_deamon.ThreadedHookCallbackDaemon, - _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT) - - with patchers as mocks: - daemon = http_hooks_deamon.ThreadedHookCallbackDaemon() - with daemon as daemon_context: - pass - mocks['_run'].assert_called_once_with() - assert daemon_context == daemon - - def test_stop_is_called_on_context_exit(self): - patchers = mock.patch.multiple( - http_hooks_deamon.ThreadedHookCallbackDaemon, - _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT) - - with patchers as mocks: - daemon = http_hooks_deamon.ThreadedHookCallbackDaemon() - with daemon as daemon_context: - assert mocks['_stop'].call_count == 0 - - mocks['_stop'].assert_called_once_with() - assert daemon_context == daemon - - -class TestHttpHooksCallbackDaemon(object): - def test_hooks_callback_generates_new_port(self, caplog): - with caplog.at_level(logging.DEBUG): - daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881) - assert daemon._daemon.server_address == ('127.0.0.1', 8881) - - with caplog.at_level(logging.DEBUG): - daemon = 
http_hooks_deamon.HttpHooksCallbackDaemon(host=None, port=None) - assert daemon._daemon.server_address[1] in range(0, 66000) - assert daemon._daemon.server_address[0] != '127.0.0.1' - - def test_prepare_inits_daemon_variable(self, tcp_server, caplog): - with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG): - daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881) - assert daemon._daemon == tcp_server - - _, port = tcp_server.server_address - - msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \ - f"hook object: " - assert_message_in_log( - caplog.records, msg, levelno=logging.DEBUG, module='http_hooks_deamon') - - def test_prepare_inits_hooks_uri_and_logs_it( - self, tcp_server, caplog): - with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG): - daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881) - - _, port = tcp_server.server_address - expected_uri = '{}:{}'.format('127.0.0.1', port) - assert daemon.hooks_uri == expected_uri - - msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \ - f"hook object: " - - assert_message_in_log( - caplog.records, msg, - levelno=logging.DEBUG, module='http_hooks_deamon') - - def test_run_creates_a_thread(self, tcp_server): - thread = mock.Mock() - - with self._tcp_patcher(tcp_server): - daemon = http_hooks_deamon.HttpHooksCallbackDaemon() - - with self._thread_patcher(thread) as thread_mock: - daemon._run() - - thread_mock.assert_called_once_with( - target=tcp_server.serve_forever, - kwargs={'poll_interval': daemon.POLL_INTERVAL}) - assert thread.daemon is True - thread.start.assert_called_once_with() - - def test_run_logs(self, tcp_server, caplog): - - with self._tcp_patcher(tcp_server): - daemon = http_hooks_deamon.HttpHooksCallbackDaemon() - - with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG): - daemon._run() - - assert_message_in_log( - caplog.records, - 'Running thread-based 
loop of callback daemon in background', - levelno=logging.DEBUG, module='http_hooks_deamon') - - def test_stop_cleans_up_the_connection(self, tcp_server, caplog): - thread = mock.Mock() - - with self._tcp_patcher(tcp_server): - daemon = http_hooks_deamon.HttpHooksCallbackDaemon() - - with self._thread_patcher(thread), caplog.at_level(logging.DEBUG): - with daemon: - assert daemon._daemon == tcp_server - assert daemon._callback_thread == thread - - assert daemon._daemon is None - assert daemon._callback_thread is None - tcp_server.shutdown.assert_called_with() - thread.join.assert_called_once_with() - - assert_message_in_log( - caplog.records, 'Waiting for background thread to finish.', - levelno=logging.DEBUG, module='http_hooks_deamon') - - def _tcp_patcher(self, tcp_server): - return mock.patch.object( - http_hooks_deamon, 'TCPServer', return_value=tcp_server) - - def _thread_patcher(self, thread): - return mock.patch.object( - http_hooks_deamon.threading, 'Thread', return_value=thread) - - class TestPrepareHooksDaemon(object): @pytest.mark.parametrize('protocol', ('celery',)) - def test_returns_celery_hooks_callback_daemon_when_celery_protocol_specified( - self, protocol): + def test_returns_celery_hooks_callback_daemon_when_celery_protocol_specified(self, protocol): with tempfile.NamedTemporaryFile(mode='w') as temp_file: - temp_file.write("[app:main]\ncelery.broker_url = redis://redis/0\n" - "celery.result_backend = redis://redis/0") + temp_file.write( + "[app:main]\n" + "celery.broker_url = redis://redis/0\n" + "celery.result_backend = redis://redis/0\n" + ) temp_file.flush() expected_extras = {'config': temp_file.name} - callback, extras = hook_base.prepare_callback_daemon( - expected_extras, protocol=protocol, host='') + callback, extras = hooks_utils.prepare_callback_daemon(expected_extras, protocol=protocol) assert isinstance(callback, celery_hooks_deamon.CeleryHooksCallbackDaemon) @pytest.mark.parametrize('protocol, expected_class', ( - ('http', 
http_hooks_deamon.HttpHooksCallbackDaemon), + ('celery', celery_hooks_deamon.CeleryHooksCallbackDaemon), )) - def test_returns_real_hooks_callback_daemon_when_protocol_is_specified( - self, protocol, expected_class): - expected_extras = { - 'extra1': 'value1', - 'txn_id': 'txnid2', - 'hooks_protocol': protocol.lower(), - 'task_backend': '', - 'task_queue': '', - 'repo_store': '/var/opt/rhodecode_repo_store', - 'repository': 'rhodecode', - } - from rhodecode import CONFIG - CONFIG['vcs.svn.redis_conn'] = 'redis://redis:6379/0' - callback, extras = hook_base.prepare_callback_daemon( - expected_extras.copy(), protocol=protocol, host='127.0.0.1', - txn_id='txnid2') - assert isinstance(callback, expected_class) - extras.pop('hooks_uri') - expected_extras['time'] = extras['time'] - assert extras == expected_extras + def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(self, protocol, expected_class): + + with tempfile.NamedTemporaryFile(mode='w') as temp_file: + temp_file.write( + "[app:main]\n" + "celery.broker_url = redis://redis:6379/0\n" + "celery.result_backend = redis://redis:6379/0\n" + ) + temp_file.flush() + + expected_extras = { + 'extra1': 'value1', + 'txn_id': 'txnid2', + 'hooks_protocol': protocol.lower(), + 'hooks_config': { + 'broker_url': 'redis://redis:6379/0', + 'result_backend': 'redis://redis:6379/0', + }, + 'repo_store': '/var/opt/rhodecode_repo_store', + 'repository': 'rhodecode', + 'config': temp_file.name + } + from rhodecode import CONFIG + CONFIG['vcs.svn.redis_conn'] = 'redis://redis:6379/0' + callback, extras = hooks_utils.prepare_callback_daemon(expected_extras.copy(), protocol=protocol,txn_id='txnid2') + assert isinstance(callback, expected_class) + expected_extras['time'] = extras['time'] + assert extras == expected_extras @pytest.mark.parametrize('protocol', ( 'invalid', @@ -330,35 +95,4 @@ class TestPrepareHooksDaemon(object): 'hooks_protocol': protocol.lower() } with pytest.raises(Exception): - callback, extras = 
hook_base.prepare_callback_daemon( - expected_extras.copy(), - protocol=protocol, host='127.0.0.1') - - -class MockRequest(object): - - def __init__(self, request): - self.request = request - self.input_stream = io.BytesIO(safe_bytes(self.request)) - self.output_stream = io.BytesIO() # make it un-closable for testing invesitagion - self.output_stream.close = lambda: None - - def makefile(self, mode, *args, **kwargs): - return self.output_stream if mode == 'wb' else self.input_stream - - -class MockServer(object): - - def __init__(self, handler_cls, request): - ip_port = ('0.0.0.0', 8888) - self.request = MockRequest(request) - self.server_address = ip_port - self.handler = handler_cls(self.request, ip_port, self) - - -@pytest.fixture() -def tcp_server(): - server = mock.Mock() - server.server_address = ('127.0.0.1', 8881) - server.wbufsize = 1024 - return server + callback, extras = hooks_utils.prepare_callback_daemon(expected_extras.copy(), protocol=protocol) diff --git a/rhodecode/tests/lib/test_utils.py b/rhodecode/tests/lib/test_utils.py --- a/rhodecode/tests/lib/test_utils.py +++ b/rhodecode/tests/lib/test_utils.py @@ -33,7 +33,7 @@ from rhodecode.model import meta from rhodecode.model.repo import RepoModel from rhodecode.model.repo_group import RepoGroupModel from rhodecode.model.settings import UiSetting, SettingsModel -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode_tools.lib.hash_utils import md5_safe from rhodecode.lib.ext_json import json @@ -403,12 +403,9 @@ class TestPrepareConfigData(object): self._assert_repo_name_passed(model_mock, repo_name) - expected_result = [ - ('section1', 'option1', 'value1'), - ('section2', 'option2', 'value2'), - ] - # We have extra config items returned, so we're ignoring two last items - assert result[:2] == expected_result + assert ('section1', 'option1', 'value1') in result + assert ('section2', 'option2', 'value2') in result + assert ('section3', 
'option3', 'value3') not in result def _assert_repo_name_passed(self, model_mock, repo_name): assert model_mock.call_count == 1 diff --git a/rhodecode/tests/load/http_performance.py b/rhodecode/tests/load/http_performance.py --- a/rhodecode/tests/load/http_performance.py +++ b/rhodecode/tests/load/http_performance.py @@ -25,7 +25,7 @@ It works by replaying a group of commits import argparse import collections -import ConfigParser +import configparser import functools import itertools import os @@ -294,7 +294,7 @@ class HgMixin(object): def add_remote(self, repo, remote_url, remote_name='upstream'): self.remove_remote(repo, remote_name) os.chdir(repo) - hgrc = ConfigParser.RawConfigParser() + hgrc = configparser.RawConfigParser() hgrc.read('.hg/hgrc') hgrc.set('paths', remote_name, remote_url) with open('.hg/hgrc', 'w') as f: @@ -303,7 +303,7 @@ class HgMixin(object): @keep_cwd def remove_remote(self, repo, remote_name='upstream'): os.chdir(repo) - hgrc = ConfigParser.RawConfigParser() + hgrc = configparser.RawConfigParser() hgrc.read('.hg/hgrc') hgrc.remove_option('paths', remote_name) with open('.hg/hgrc', 'w') as f: diff --git a/rhodecode/tests/load/profile-mem.py b/rhodecode/tests/load/profile-mem.py --- a/rhodecode/tests/load/profile-mem.py +++ b/rhodecode/tests/load/profile-mem.py @@ -59,16 +59,6 @@ def parse_options(): parser.add_argument( '--interval', '-i', type=float, default=5, help="Interval in secods.") - parser.add_argument( - '--appenlight', '--ae', action='store_true') - parser.add_argument( - '--appenlight-url', '--ae-url', - default='https://ae.rhodecode.com/api/logs', - help='URL of the Appenlight API endpoint, defaults to "%(default)s".') - parser.add_argument( - '--appenlight-api-key', '--ae-key', - help='API key to use when sending data to appenlight. 
This has to be ' - 'set if Appenlight is enabled.') return parser.parse_args() diff --git a/rhodecode/tests/load/profile.py b/rhodecode/tests/load/profile.py --- a/rhodecode/tests/load/profile.py +++ b/rhodecode/tests/load/profile.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify diff --git a/rhodecode/tests/load/vcs_performance.py b/rhodecode/tests/load/vcs_performance.py --- a/rhodecode/tests/load/vcs_performance.py +++ b/rhodecode/tests/load/vcs_performance.py @@ -1,5 +1,3 @@ - - # Copyright (C) 2016-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify diff --git a/rhodecode/tests/models/common.py b/rhodecode/tests/models/common.py --- a/rhodecode/tests/models/common.py +++ b/rhodecode/tests/models/common.py @@ -22,7 +22,7 @@ from rhodecode.model.meta import Session from rhodecode.model.repo_group import RepoGroupModel from rhodecode.model.repo import RepoModel from rhodecode.model.user import UserModel -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture fixture = Fixture() diff --git a/rhodecode/tests/models/test_notifications.py b/rhodecode/tests/models/test_notifications.py --- a/rhodecode/tests/models/test_notifications.py +++ b/rhodecode/tests/models/test_notifications.py @@ -19,7 +19,7 @@ import pytest -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.model.db import User, Notification, UserNotification from rhodecode.model.meta import Session diff --git a/rhodecode/tests/models/test_permissions.py b/rhodecode/tests/models/test_permissions.py --- a/rhodecode/tests/models/test_permissions.py +++ b/rhodecode/tests/models/test_permissions.py @@ -29,7 +29,7 @@ from rhodecode.model.repo import RepoMod from rhodecode.model.repo_group import RepoGroupModel from rhodecode.model.user import UserModel from 
rhodecode.model.user_group import UserGroupModel -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture fixture = Fixture() diff --git a/rhodecode/tests/models/test_pullrequest.py b/rhodecode/tests/models/test_pullrequest.py --- a/rhodecode/tests/models/test_pullrequest.py +++ b/rhodecode/tests/models/test_pullrequest.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -16,6 +15,7 @@ # This program is dual-licensed. If you wish to learn more about the # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ +import os import mock import pytest @@ -23,8 +23,7 @@ import textwrap import rhodecode from rhodecode.lib.vcs.backends import get_backend -from rhodecode.lib.vcs.backends.base import ( - MergeResponse, MergeFailureReason, Reference) +from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason, Reference from rhodecode.lib.vcs.exceptions import RepositoryError from rhodecode.lib.vcs.nodes import FileNode from rhodecode.model.comment import CommentsModel @@ -39,54 +38,42 @@ pytestmark = [ ] -@pytest.mark.usefixtures('config_stub') +@pytest.mark.usefixtures("config_stub") class TestPullRequestModel(object): - @pytest.fixture() def pull_request(self, request, backend, pr_util): """ A pull request combined with multiples patches. 
""" BackendClass = get_backend(backend.alias) - merge_resp = MergeResponse( - False, False, None, MergeFailureReason.UNKNOWN, - metadata={'exception': 'MockError'}) - self.merge_patcher = mock.patch.object( - BackendClass, 'merge', return_value=merge_resp) - self.workspace_remove_patcher = mock.patch.object( - BackendClass, 'cleanup_merge_workspace') + merge_resp = MergeResponse(False, False, None, MergeFailureReason.UNKNOWN, metadata={"exception": "MockError"}) + self.merge_patcher = mock.patch.object(BackendClass, "merge", return_value=merge_resp) + self.workspace_remove_patcher = mock.patch.object(BackendClass, "cleanup_merge_workspace") self.workspace_remove_mock = self.workspace_remove_patcher.start() self.merge_mock = self.merge_patcher.start() - self.comment_patcher = mock.patch( - 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status') + self.comment_patcher = mock.patch("rhodecode.model.changeset_status.ChangesetStatusModel.set_status") self.comment_patcher.start() - self.notification_patcher = mock.patch( - 'rhodecode.model.notification.NotificationModel.create') + self.notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create") self.notification_patcher.start() - self.helper_patcher = mock.patch( - 'rhodecode.lib.helpers.route_path') + self.helper_patcher = mock.patch("rhodecode.lib.helpers.route_path") self.helper_patcher.start() - self.hook_patcher = mock.patch.object(PullRequestModel, - 'trigger_pull_request_hook') + self.hook_patcher = mock.patch.object(PullRequestModel, "trigger_pull_request_hook") self.hook_mock = self.hook_patcher.start() - self.invalidation_patcher = mock.patch( - 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation') + self.invalidation_patcher = mock.patch("rhodecode.model.pull_request.ScmModel.mark_for_invalidation") self.invalidation_mock = self.invalidation_patcher.start() - self.pull_request = pr_util.create_pull_request( - mergeable=True, name_suffix=u'ąć') + 
self.pull_request = pr_util.create_pull_request(mergeable=True, name_suffix="ąć") self.source_commit = self.pull_request.source_ref_parts.commit_id self.target_commit = self.pull_request.target_ref_parts.commit_id - self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id + self.workspace_id = f"pr-{self.pull_request.pull_request_id}" self.repo_id = self.pull_request.target_repo.repo_id @request.addfinalizer def cleanup_pull_request(): - calls = [mock.call( - self.pull_request, self.pull_request.author, 'create')] + calls = [mock.call(self.pull_request, self.pull_request.author, "create")] self.hook_mock.assert_has_calls(calls) self.workspace_remove_patcher.stop() @@ -114,29 +101,30 @@ class TestPullRequestModel(object): assert len(prs) == 1 def test_count_awaiting_review(self, pull_request): - pr_count = PullRequestModel().count_awaiting_review( - pull_request.target_repo) + pr_count = PullRequestModel().count_awaiting_review(pull_request.target_repo) assert pr_count == 1 def test_get_awaiting_my_review(self, pull_request): PullRequestModel().update_reviewers( - pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])], - pull_request.author) + pull_request, [(pull_request.author, ["author"], False, "reviewer", [])], pull_request.author + ) Session().commit() prs = PullRequestModel().get_awaiting_my_review( - pull_request.target_repo.repo_name, user_id=pull_request.author.user_id) + pull_request.target_repo.repo_name, user_id=pull_request.author.user_id + ) assert isinstance(prs, list) assert len(prs) == 1 def test_count_awaiting_my_review(self, pull_request): PullRequestModel().update_reviewers( - pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])], - pull_request.author) + pull_request, [(pull_request.author, ["author"], False, "reviewer", [])], pull_request.author + ) Session().commit() pr_count = PullRequestModel().count_awaiting_my_review( - pull_request.target_repo.repo_name, user_id=pull_request.author.user_id) + 
pull_request.target_repo.repo_name, user_id=pull_request.author.user_id + ) assert pr_count == 1 def test_delete_calls_cleanup_merge(self, pull_request): @@ -144,24 +132,19 @@ class TestPullRequestModel(object): PullRequestModel().delete(pull_request, pull_request.author) Session().commit() - self.workspace_remove_mock.assert_called_once_with( - repo_id, self.workspace_id) + self.workspace_remove_mock.assert_called_once_with(repo_id, self.workspace_id) def test_close_calls_cleanup_and_hook(self, pull_request): - PullRequestModel().close_pull_request( - pull_request, pull_request.author) + PullRequestModel().close_pull_request(pull_request, pull_request.author) Session().commit() repo_id = pull_request.target_repo.repo_id - self.workspace_remove_mock.assert_called_once_with( - repo_id, self.workspace_id) - self.hook_mock.assert_called_with( - self.pull_request, self.pull_request.author, 'close') + self.workspace_remove_mock.assert_called_once_with(repo_id, self.workspace_id) + self.hook_mock.assert_called_with(self.pull_request, self.pull_request.author, "close") def test_merge_status(self, pull_request): - self.merge_mock.return_value = MergeResponse( - True, False, None, MergeFailureReason.NONE) + self.merge_mock.return_value = MergeResponse(True, False, None, MergeFailureReason.NONE) assert pull_request._last_merge_source_rev is None assert pull_request._last_merge_target_rev is None @@ -169,13 +152,17 @@ class TestPullRequestModel(object): merge_response, status, msg = PullRequestModel().merge_status(pull_request) assert status is True - assert msg == 'This pull request can be automatically merged.' + assert msg == "This pull request can be automatically merged." 
self.merge_mock.assert_called_with( - self.repo_id, self.workspace_id, + self.repo_id, + self.workspace_id, pull_request.target_ref_parts, pull_request.source_repo.scm_instance(), - pull_request.source_ref_parts, dry_run=True, - use_rebase=False, close_branch=False) + pull_request.source_ref_parts, + dry_run=True, + use_rebase=False, + close_branch=False, + ) assert pull_request._last_merge_source_rev == self.source_commit assert pull_request._last_merge_target_rev == self.target_commit @@ -184,13 +171,13 @@ class TestPullRequestModel(object): self.merge_mock.reset_mock() merge_response, status, msg = PullRequestModel().merge_status(pull_request) assert status is True - assert msg == 'This pull request can be automatically merged.' + assert msg == "This pull request can be automatically merged." assert self.merge_mock.called is False def test_merge_status_known_failure(self, pull_request): self.merge_mock.return_value = MergeResponse( - False, False, None, MergeFailureReason.MERGE_FAILED, - metadata={'unresolved_files': 'file1'}) + False, False, None, MergeFailureReason.MERGE_FAILED, metadata={"unresolved_files": "file1"} + ) assert pull_request._last_merge_source_rev is None assert pull_request._last_merge_target_rev is None @@ -198,13 +185,17 @@ class TestPullRequestModel(object): merge_response, status, msg = PullRequestModel().merge_status(pull_request) assert status is False - assert msg == 'This pull request cannot be merged because of merge conflicts. file1' + assert msg == "This pull request cannot be merged because of merge conflicts. 
file1" self.merge_mock.assert_called_with( - self.repo_id, self.workspace_id, + self.repo_id, + self.workspace_id, pull_request.target_ref_parts, pull_request.source_repo.scm_instance(), - pull_request.source_ref_parts, dry_run=True, - use_rebase=False, close_branch=False) + pull_request.source_ref_parts, + dry_run=True, + use_rebase=False, + close_branch=False, + ) assert pull_request._last_merge_source_rev == self.source_commit assert pull_request._last_merge_target_rev == self.target_commit @@ -213,13 +204,13 @@ class TestPullRequestModel(object): self.merge_mock.reset_mock() merge_response, status, msg = PullRequestModel().merge_status(pull_request) assert status is False - assert msg == 'This pull request cannot be merged because of merge conflicts. file1' + assert msg == "This pull request cannot be merged because of merge conflicts. file1" assert self.merge_mock.called is False def test_merge_status_unknown_failure(self, pull_request): self.merge_mock.return_value = MergeResponse( - False, False, None, MergeFailureReason.UNKNOWN, - metadata={'exception': 'MockError'}) + False, False, None, MergeFailureReason.UNKNOWN, metadata={"exception": "MockError"} + ) assert pull_request._last_merge_source_rev is None assert pull_request._last_merge_target_rev is None @@ -227,15 +218,17 @@ class TestPullRequestModel(object): merge_response, status, msg = PullRequestModel().merge_status(pull_request) assert status is False - assert msg == ( - 'This pull request cannot be merged because of an unhandled exception. ' - 'MockError') + assert msg == "This pull request cannot be merged because of an unhandled exception. 
MockError" self.merge_mock.assert_called_with( - self.repo_id, self.workspace_id, + self.repo_id, + self.workspace_id, pull_request.target_ref_parts, pull_request.source_repo.scm_instance(), - pull_request.source_ref_parts, dry_run=True, - use_rebase=False, close_branch=False) + pull_request.source_ref_parts, + dry_run=True, + use_rebase=False, + close_branch=False, + ) assert pull_request._last_merge_source_rev is None assert pull_request._last_merge_target_rev is None @@ -244,155 +237,136 @@ class TestPullRequestModel(object): self.merge_mock.reset_mock() merge_response, status, msg = PullRequestModel().merge_status(pull_request) assert status is False - assert msg == ( - 'This pull request cannot be merged because of an unhandled exception. ' - 'MockError') + assert msg == "This pull request cannot be merged because of an unhandled exception. MockError" assert self.merge_mock.called is True def test_merge_status_when_target_is_locked(self, pull_request): - pull_request.target_repo.locked = [1, u'12345.50', 'lock_web'] + pull_request.target_repo.locked = [1, "12345.50", "lock_web"] merge_response, status, msg = PullRequestModel().merge_status(pull_request) assert status is False - assert msg == ( - 'This pull request cannot be merged because the target repository ' - 'is locked by user:1.') + assert msg == "This pull request cannot be merged because the target repository is locked by user:1." def test_merge_status_requirements_check_target(self, pull_request): - def has_largefiles(self, repo): return repo == pull_request.source_repo - patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles) + patcher = mock.patch.object(PullRequestModel, "_has_largefiles", has_largefiles) with patcher: merge_response, status, msg = PullRequestModel().merge_status(pull_request) assert status is False - assert msg == 'Target repository large files support is disabled.' + assert msg == "Target repository large files support is disabled." 
def test_merge_status_requirements_check_source(self, pull_request): - def has_largefiles(self, repo): return repo == pull_request.target_repo - patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles) + patcher = mock.patch.object(PullRequestModel, "_has_largefiles", has_largefiles) with patcher: merge_response, status, msg = PullRequestModel().merge_status(pull_request) assert status is False - assert msg == 'Source repository large files support is disabled.' + assert msg == "Source repository large files support is disabled." def test_merge(self, pull_request, merge_extras): user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) - merge_ref = Reference( - 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6') - self.merge_mock.return_value = MergeResponse( - True, True, merge_ref, MergeFailureReason.NONE) + merge_ref = Reference("type", "name", "6126b7bfcc82ad2d3deaee22af926b082ce54cc6") + self.merge_mock.return_value = MergeResponse(True, True, merge_ref, MergeFailureReason.NONE) - merge_extras['repository'] = pull_request.target_repo.repo_name - PullRequestModel().merge_repo( - pull_request, pull_request.author, extras=merge_extras) + merge_extras["repository"] = pull_request.target_repo.repo_name + PullRequestModel().merge_repo(pull_request, pull_request.author, extras=merge_extras) Session().commit() - message = ( - u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}' - u'\n\n {pr_title}'.format( - pr_id=pull_request.pull_request_id, - source_repo=safe_str( - pull_request.source_repo.scm_instance().name), - source_ref_name=pull_request.source_ref_parts.name, - pr_title=safe_str(pull_request.title) - ) + message = "Merge pull request !{pr_id} from {source_repo} {source_ref_name}" "\n\n {pr_title}".format( + pr_id=pull_request.pull_request_id, + source_repo=safe_str(pull_request.source_repo.scm_instance().name), + source_ref_name=pull_request.source_ref_parts.name, + pr_title=safe_str(pull_request.title), ) 
self.merge_mock.assert_called_with( - self.repo_id, self.workspace_id, + self.repo_id, + self.workspace_id, pull_request.target_ref_parts, pull_request.source_repo.scm_instance(), pull_request.source_ref_parts, - user_name=user.short_contact, user_email=user.email, message=message, - use_rebase=False, close_branch=False + user_name=user.short_contact, + user_email=user.email, + message=message, + use_rebase=False, + close_branch=False, ) - self.invalidation_mock.assert_called_once_with( - pull_request.target_repo.repo_name) + self.invalidation_mock.assert_called_once_with(pull_request.target_repo.repo_name) - self.hook_mock.assert_called_with( - self.pull_request, self.pull_request.author, 'merge') + self.hook_mock.assert_called_with(self.pull_request, self.pull_request.author, "merge") pull_request = PullRequest.get(pull_request.pull_request_id) - assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6' + assert pull_request.merge_rev == "6126b7bfcc82ad2d3deaee22af926b082ce54cc6" def test_merge_with_status_lock(self, pull_request, merge_extras): user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) - merge_ref = Reference( - 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6') - self.merge_mock.return_value = MergeResponse( - True, True, merge_ref, MergeFailureReason.NONE) + merge_ref = Reference("type", "name", "6126b7bfcc82ad2d3deaee22af926b082ce54cc6") + self.merge_mock.return_value = MergeResponse(True, True, merge_ref, MergeFailureReason.NONE) - merge_extras['repository'] = pull_request.target_repo.repo_name + merge_extras["repository"] = pull_request.target_repo.repo_name with pull_request.set_state(PullRequest.STATE_UPDATING): assert pull_request.pull_request_state == PullRequest.STATE_UPDATING - PullRequestModel().merge_repo( - pull_request, pull_request.author, extras=merge_extras) + PullRequestModel().merge_repo(pull_request, pull_request.author, extras=merge_extras) Session().commit() assert pull_request.pull_request_state 
== PullRequest.STATE_CREATED - message = ( - u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}' - u'\n\n {pr_title}'.format( - pr_id=pull_request.pull_request_id, - source_repo=safe_str( - pull_request.source_repo.scm_instance().name), - source_ref_name=pull_request.source_ref_parts.name, - pr_title=safe_str(pull_request.title) - ) + message = "Merge pull request !{pr_id} from {source_repo} {source_ref_name}" "\n\n {pr_title}".format( + pr_id=pull_request.pull_request_id, + source_repo=safe_str(pull_request.source_repo.scm_instance().name), + source_ref_name=pull_request.source_ref_parts.name, + pr_title=safe_str(pull_request.title), ) self.merge_mock.assert_called_with( - self.repo_id, self.workspace_id, + self.repo_id, + self.workspace_id, pull_request.target_ref_parts, pull_request.source_repo.scm_instance(), pull_request.source_ref_parts, - user_name=user.short_contact, user_email=user.email, message=message, - use_rebase=False, close_branch=False + user_name=user.short_contact, + user_email=user.email, + message=message, + use_rebase=False, + close_branch=False, ) - self.invalidation_mock.assert_called_once_with( - pull_request.target_repo.repo_name) + self.invalidation_mock.assert_called_once_with(pull_request.target_repo.repo_name) - self.hook_mock.assert_called_with( - self.pull_request, self.pull_request.author, 'merge') + self.hook_mock.assert_called_with(self.pull_request, self.pull_request.author, "merge") pull_request = PullRequest.get(pull_request.pull_request_id) - assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6' + assert pull_request.merge_rev == "6126b7bfcc82ad2d3deaee22af926b082ce54cc6" def test_merge_failed(self, pull_request, merge_extras): user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) - merge_ref = Reference( - 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6') - self.merge_mock.return_value = MergeResponse( - False, False, merge_ref, MergeFailureReason.MERGE_FAILED) + merge_ref = 
Reference("type", "name", "6126b7bfcc82ad2d3deaee22af926b082ce54cc6") + self.merge_mock.return_value = MergeResponse(False, False, merge_ref, MergeFailureReason.MERGE_FAILED) - merge_extras['repository'] = pull_request.target_repo.repo_name - PullRequestModel().merge_repo( - pull_request, pull_request.author, extras=merge_extras) + merge_extras["repository"] = pull_request.target_repo.repo_name + PullRequestModel().merge_repo(pull_request, pull_request.author, extras=merge_extras) Session().commit() - message = ( - u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}' - u'\n\n {pr_title}'.format( - pr_id=pull_request.pull_request_id, - source_repo=safe_str( - pull_request.source_repo.scm_instance().name), - source_ref_name=pull_request.source_ref_parts.name, - pr_title=safe_str(pull_request.title) - ) + message = "Merge pull request !{pr_id} from {source_repo} {source_ref_name}" "\n\n {pr_title}".format( + pr_id=pull_request.pull_request_id, + source_repo=safe_str(pull_request.source_repo.scm_instance().name), + source_ref_name=pull_request.source_ref_parts.name, + pr_title=safe_str(pull_request.title), ) self.merge_mock.assert_called_with( - self.repo_id, self.workspace_id, + self.repo_id, + self.workspace_id, pull_request.target_ref_parts, pull_request.source_repo.scm_instance(), pull_request.source_ref_parts, - user_name=user.short_contact, user_email=user.email, message=message, - use_rebase=False, close_branch=False + user_name=user.short_contact, + user_email=user.email, + message=message, + use_rebase=False, + close_branch=False, ) pull_request = PullRequest.get(pull_request.pull_request_id) @@ -410,7 +384,7 @@ class TestPullRequestModel(object): assert commit_ids == pull_request.revisions # Merge revision is not in the revisions list - pull_request.merge_rev = 'f000' * 10 + pull_request.merge_rev = "f000" * 10 commit_ids = PullRequestModel()._get_commit_ids(pull_request) assert commit_ids == pull_request.revisions + [pull_request.merge_rev] @@ 
-419,147 +393,126 @@ class TestPullRequestModel(object): source_ref_id = pull_request.source_ref_parts.commit_id target_ref_id = pull_request.target_ref_parts.commit_id diff = PullRequestModel()._get_diff_from_pr_or_version( - source_repo, source_ref_id, target_ref_id, - hide_whitespace_changes=False, diff_context=6) - assert b'file_1' in diff.raw.tobytes() + source_repo, source_ref_id, target_ref_id, hide_whitespace_changes=False, diff_context=6 + ) + assert b"file_1" in diff.raw.tobytes() def test_generate_title_returns_unicode(self): title = PullRequestModel().generate_pullrequest_title( - source='source-dummy', - source_ref='source-ref-dummy', - target='target-dummy', + source="source-dummy", + source_ref="source-ref-dummy", + target="target-dummy", ) assert type(title) == str - @pytest.mark.parametrize('title, has_wip', [ - ('hello', False), - ('hello wip', False), - ('hello wip: xxx', False), - ('[wip] hello', True), - ('[wip] hello', True), - ('wip: hello', True), - ('wip hello', True), - - ]) + @pytest.mark.parametrize( + "title, has_wip", + [ + ("hello", False), + ("hello wip", False), + ("hello wip: xxx", False), + ("[wip] hello", True), + ("[wip] hello", True), + ("wip: hello", True), + ("wip hello", True), + ], + ) def test_wip_title_marker(self, pull_request, title, has_wip): pull_request.title = title assert pull_request.work_in_progress == has_wip -@pytest.mark.usefixtures('config_stub') +@pytest.mark.usefixtures("config_stub") class TestIntegrationMerge(object): - @pytest.mark.parametrize('extra_config', ( - {'vcs.hooks.protocol.v2': 'celery', 'vcs.hooks.direct_calls': False}, - )) - def test_merge_triggers_push_hooks( - self, pr_util, user_admin, capture_rcextensions, merge_extras, - extra_config): - - pull_request = pr_util.create_pull_request( - approved=True, mergeable=True) - # TODO: johbo: Needed for sqlite, try to find an automatic way for it - merge_extras['repository'] = pull_request.target_repo.repo_name - Session().commit() - - with 
mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False): - merge_state = PullRequestModel().merge_repo( - pull_request, user_admin, extras=merge_extras) - Session().commit() - - assert merge_state.executed - assert '_pre_push_hook' in capture_rcextensions - assert '_push_hook' in capture_rcextensions - def test_merge_can_be_rejected_by_pre_push_hook( - self, pr_util, user_admin, capture_rcextensions, merge_extras): - pull_request = pr_util.create_pull_request( - approved=True, mergeable=True) - # TODO: johbo: Needed for sqlite, try to find an automatic way for it - merge_extras['repository'] = pull_request.target_repo.repo_name - Session().commit() - - with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull: - pre_pull.side_effect = RepositoryError("Disallow push!") - merge_status = PullRequestModel().merge_repo( - pull_request, user_admin, extras=merge_extras) - Session().commit() - - assert not merge_status.executed - assert 'pre_push' not in capture_rcextensions - assert 'post_push' not in capture_rcextensions - - def test_merge_fails_if_target_is_locked( - self, pr_util, user_regular, merge_extras): - pull_request = pr_util.create_pull_request( - approved=True, mergeable=True) - locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web'] + def test_merge_fails_if_target_is_locked(self, pr_util, user_regular, merge_extras): + pull_request = pr_util.create_pull_request(approved=True, mergeable=True) + locked_by = [user_regular.user_id + 1, 12345.50, "lock_web"] pull_request.target_repo.locked = locked_by # TODO: johbo: Check if this can work based on the database, currently # all data is pre-computed, that's why just updating the DB is not # enough. 
- merge_extras['locked_by'] = locked_by - merge_extras['repository'] = pull_request.target_repo.repo_name + merge_extras["locked_by"] = locked_by + merge_extras["repository"] = pull_request.target_repo.repo_name # TODO: johbo: Needed for sqlite, try to find an automatic way for it Session().commit() - merge_status = PullRequestModel().merge_repo( - pull_request, user_regular, extras=merge_extras) + merge_status = PullRequestModel().merge_repo(pull_request, user_regular, extras=merge_extras) Session().commit() assert not merge_status.executed -@pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [ - (False, 1, 0), - (True, 0, 1), -]) -def test_outdated_comments( - pr_util, use_outdated, inlines_count, outdated_count, config_stub): +@pytest.mark.parametrize( + "use_outdated, inlines_count, outdated_count", + [ + (False, 1, 0), + (True, 0, 1), + ], +) +def test_outdated_comments(pr_util, use_outdated, inlines_count, outdated_count, config_stub): pull_request = pr_util.create_pull_request() - pr_util.create_inline_comment(file_path='not_in_updated_diff') + pr_util.create_inline_comment(file_path="not_in_updated_diff") with outdated_comments_patcher(use_outdated) as outdated_comment_mock: pr_util.add_one_commit() - assert_inline_comments( - pull_request, visible=inlines_count, outdated=outdated_count) + assert_inline_comments(pull_request, visible=inlines_count, outdated=outdated_count) outdated_comment_mock.assert_called_with(pull_request) -@pytest.mark.parametrize('mr_type, expected_msg', [ - (MergeFailureReason.NONE, - 'This pull request can be automatically merged.'), - (MergeFailureReason.UNKNOWN, - 'This pull request cannot be merged because of an unhandled exception. CRASH'), - (MergeFailureReason.MERGE_FAILED, - 'This pull request cannot be merged because of merge conflicts. 
CONFLICT_FILE'), - (MergeFailureReason.PUSH_FAILED, - 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'), - (MergeFailureReason.TARGET_IS_NOT_HEAD, - 'This pull request cannot be merged because the target `ref_name` is not a head.'), - (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES, - 'This pull request cannot be merged because the source contains more branches than the target.'), - (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, - 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'), - (MergeFailureReason.TARGET_IS_LOCKED, - 'This pull request cannot be merged because the target repository is locked by user:123.'), - (MergeFailureReason.MISSING_TARGET_REF, - 'This pull request cannot be merged because the target reference `ref_name` is missing.'), - (MergeFailureReason.MISSING_SOURCE_REF, - 'This pull request cannot be merged because the source reference `ref_name` is missing.'), - (MergeFailureReason.SUBREPO_MERGE_FAILED, - 'This pull request cannot be merged because of conflicts related to sub repositories.'), - -]) +@pytest.mark.parametrize( + "mr_type, expected_msg", + [ + (MergeFailureReason.NONE, "This pull request can be automatically merged."), + (MergeFailureReason.UNKNOWN, "This pull request cannot be merged because of an unhandled exception. CRASH"), + ( + MergeFailureReason.MERGE_FAILED, + "This pull request cannot be merged because of merge conflicts. 
CONFLICT_FILE", + ), + ( + MergeFailureReason.PUSH_FAILED, + "This pull request could not be merged because push to target:`some-repo@merge_commit` failed.", + ), + ( + MergeFailureReason.TARGET_IS_NOT_HEAD, + "This pull request cannot be merged because the target `ref_name` is not a head.", + ), + ( + MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES, + "This pull request cannot be merged because the source contains more branches than the target.", + ), + ( + MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, + "This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.", + ), + ( + MergeFailureReason.TARGET_IS_LOCKED, + "This pull request cannot be merged because the target repository is locked by user:123.", + ), + ( + MergeFailureReason.MISSING_TARGET_REF, + "This pull request cannot be merged because the target reference `ref_name` is missing.", + ), + ( + MergeFailureReason.MISSING_SOURCE_REF, + "This pull request cannot be merged because the source reference `ref_name` is missing.", + ), + ( + MergeFailureReason.SUBREPO_MERGE_FAILED, + "This pull request cannot be merged because of conflicts related to sub repositories.", + ), + ], +) def test_merge_response_message(mr_type, expected_msg): - merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6') + merge_ref = Reference("type", "ref_name", "6126b7bfcc82ad2d3deaee22af926b082ce54cc6") metadata = { - 'unresolved_files': 'CONFLICT_FILE', - 'exception': "CRASH", - 'target': 'some-repo', - 'merge_commit': 'merge_commit', - 'target_ref': merge_ref, - 'source_ref': merge_ref, - 'heads': ','.join(['a', 'b', 'c']), - 'locked_by': 'user:123' + "unresolved_files": "CONFLICT_FILE", + "exception": "CRASH", + "target": "some-repo", + "merge_commit": "merge_commit", + "target_ref": merge_ref, + "source_ref": merge_ref, + "heads": ",".join(["a", "b", "c"]), + "locked_by": "user:123", } merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata) 
@@ -573,30 +526,28 @@ def merge_extras(request, user_regular): """ extras = { - 'ip': '127.0.0.1', - 'username': user_regular.username, - 'user_id': user_regular.user_id, - 'action': 'push', - 'repository': 'fake_target_repo_name', - 'scm': 'git', - 'config': request.config.getini('pyramid_config'), - 'repo_store': '', - 'make_lock': None, - 'locked_by': [None, None, None], - 'server_url': 'http://test.example.com:5000', - 'hooks': ['push', 'pull'], - 'is_shadow_repo': False, + "ip": "127.0.0.1", + "username": user_regular.username, + "user_id": user_regular.user_id, + "action": "push", + "repository": "fake_target_repo_name", + "scm": "git", + "config": request.config.getini("pyramid_config"), + "repo_store": "", + "make_lock": None, + "locked_by": [None, None, None], + "server_url": "http://test.example.com:5000", + "hooks": ["push", "pull"], + "is_shadow_repo": False, } return extras -@pytest.mark.usefixtures('config_stub') +@pytest.mark.usefixtures("config_stub") class TestUpdateCommentHandling(object): - - @pytest.fixture(autouse=True, scope='class') + @pytest.fixture(autouse=True, scope="class") def enable_outdated_comments(self, request, baseapp): - config_patch = mock.patch.dict( - 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True}) + config_patch = mock.patch.dict("rhodecode.CONFIG", {"rhodecode_use_outdated_comments": True}) config_patch.start() @request.addfinalizer @@ -605,206 +556,194 @@ class TestUpdateCommentHandling(object): def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util): commits = [ - {'message': 'a'}, - {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]}, - {'message': 'c', 'added': [FileNode(b'file_c', b'test_content\n')]}, + {"message": "a"}, + {"message": "b", "added": [FileNode(b"file_b", b"test_content\n")]}, + {"message": "c", "added": [FileNode(b"file_c", b"test_content\n")]}, ] - pull_request = pr_util.create_pull_request( - commits=commits, target_head='a', source_head='b', revisions=['b']) - 
pr_util.create_inline_comment(file_path='file_b') - pr_util.add_one_commit(head='c') + pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"]) + pr_util.create_inline_comment(file_path="file_b") + pr_util.add_one_commit(head="c") assert_inline_comments(pull_request, visible=1, outdated=0) def test_comment_stays_unflagged_on_change_above(self, pr_util): - original_content = b''.join((b'line %d\n' % x for x in range(1, 11))) - updated_content = b'new_line_at_top\n' + original_content + original_content = b"".join((b"line %d\n" % x for x in range(1, 11))) + updated_content = b"new_line_at_top\n" + original_content commits = [ - {'message': 'a'}, - {'message': 'b', 'added': [FileNode(b'file_b', original_content)]}, - {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]}, + {"message": "a"}, + {"message": "b", "added": [FileNode(b"file_b", original_content)]}, + {"message": "c", "changed": [FileNode(b"file_b", updated_content)]}, ] - pull_request = pr_util.create_pull_request( - commits=commits, target_head='a', source_head='b', revisions=['b']) + pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"]) with outdated_comments_patcher(): - comment = pr_util.create_inline_comment( - line_no=u'n8', file_path='file_b') - pr_util.add_one_commit(head='c') + comment = pr_util.create_inline_comment(line_no="n8", file_path="file_b") + pr_util.add_one_commit(head="c") assert_inline_comments(pull_request, visible=1, outdated=0) - assert comment.line_no == u'n9' + assert comment.line_no == "n9" def test_comment_stays_unflagged_on_change_below(self, pr_util): - original_content = b''.join([b'line %d\n' % x for x in range(10)]) - updated_content = original_content + b'new_line_at_end\n' + original_content = b"".join([b"line %d\n" % x for x in range(10)]) + updated_content = original_content + b"new_line_at_end\n" commits = [ - {'message': 'a'}, - {'message': 'b', 
'added': [FileNode(b'file_b', original_content)]}, - {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]}, + {"message": "a"}, + {"message": "b", "added": [FileNode(b"file_b", original_content)]}, + {"message": "c", "changed": [FileNode(b"file_b", updated_content)]}, ] - pull_request = pr_util.create_pull_request( - commits=commits, target_head='a', source_head='b', revisions=['b']) - pr_util.create_inline_comment(file_path='file_b') - pr_util.add_one_commit(head='c') + pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"]) + pr_util.create_inline_comment(file_path="file_b") + pr_util.add_one_commit(head="c") assert_inline_comments(pull_request, visible=1, outdated=0) - @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9']) + @pytest.mark.parametrize("line_no", ["n4", "o4", "n10", "o9"]) def test_comment_flagged_on_change_around_context(self, pr_util, line_no): - base_lines = [b'line %d\n' % x for x in range(1, 13)] + base_lines = [b"line %d\n" % x for x in range(1, 13)] change_lines = list(base_lines) - change_lines.insert(6, b'line 6a added\n') + change_lines.insert(6, b"line 6a added\n") # Changes on the last line of sight update_lines = list(change_lines) - update_lines[0] = b'line 1 changed\n' - update_lines[-1] = b'line 12 changed\n' + update_lines[0] = b"line 1 changed\n" + update_lines[-1] = b"line 12 changed\n" def file_b(lines): - return FileNode(b'file_b', b''.join(lines)) + return FileNode(b"file_b", b"".join(lines)) commits = [ - {'message': 'a', 'added': [file_b(base_lines)]}, - {'message': 'b', 'changed': [file_b(change_lines)]}, - {'message': 'c', 'changed': [file_b(update_lines)]}, + {"message": "a", "added": [file_b(base_lines)]}, + {"message": "b", "changed": [file_b(change_lines)]}, + {"message": "c", "changed": [file_b(update_lines)]}, ] - pull_request = pr_util.create_pull_request( - commits=commits, target_head='a', source_head='b', revisions=['b']) - 
pr_util.create_inline_comment(line_no=line_no, file_path='file_b') + pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"]) + pr_util.create_inline_comment(line_no=line_no, file_path="file_b") with outdated_comments_patcher(): - pr_util.add_one_commit(head='c') + pr_util.add_one_commit(head="c") assert_inline_comments(pull_request, visible=0, outdated=1) - @pytest.mark.parametrize("change, content", [ - ('changed', b'changed\n'), - ('removed', b''), - ], ids=['changed', b'removed']) + @pytest.mark.parametrize( + "change, content", + [ + ("changed", b"changed\n"), + ("removed", b""), + ], + ids=["changed", b"removed"], + ) def test_comment_flagged_on_change(self, pr_util, change, content): commits = [ - {'message': 'a'}, - {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]}, - {'message': 'c', change: [FileNode(b'file_b', content)]}, + {"message": "a"}, + {"message": "b", "added": [FileNode(b"file_b", b"test_content\n")]}, + {"message": "c", change: [FileNode(b"file_b", content)]}, ] - pull_request = pr_util.create_pull_request( - commits=commits, target_head='a', source_head='b', revisions=['b']) - pr_util.create_inline_comment(file_path='file_b') + pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"]) + pr_util.create_inline_comment(file_path="file_b") with outdated_comments_patcher(): - pr_util.add_one_commit(head='c') + pr_util.add_one_commit(head="c") assert_inline_comments(pull_request, visible=0, outdated=1) -@pytest.mark.usefixtures('config_stub') +@pytest.mark.usefixtures("config_stub") class TestUpdateChangedFiles(object): - def test_no_changes_on_unchanged_diff(self, pr_util): commits = [ - {'message': 'a'}, - {'message': 'b', - 'added': [FileNode(b'file_b', b'test_content b\n')]}, - {'message': 'c', - 'added': [FileNode(b'file_c', b'test_content c\n')]}, + {"message": "a"}, + {"message": "b", "added": [FileNode(b"file_b", 
b"test_content b\n")]}, + {"message": "c", "added": [FileNode(b"file_c", b"test_content c\n")]}, ] # open a PR from a to b, adding file_b pull_request = pr_util.create_pull_request( - commits=commits, target_head='a', source_head='b', revisions=['b'], - name_suffix='per-file-review') + commits=commits, target_head="a", source_head="b", revisions=["b"], name_suffix="per-file-review" + ) # modify PR adding new file file_c - pr_util.add_one_commit(head='c') + pr_util.add_one_commit(head="c") - assert_pr_file_changes( - pull_request, - added=['file_c'], - modified=[], - removed=[]) + assert_pr_file_changes(pull_request, added=["file_c"], modified=[], removed=[]) def test_modify_and_undo_modification_diff(self, pr_util): commits = [ - {'message': 'a'}, - {'message': 'b', - 'added': [FileNode(b'file_b', b'test_content b\n')]}, - {'message': 'c', - 'changed': [FileNode(b'file_b', b'test_content b modified\n')]}, - {'message': 'd', - 'changed': [FileNode(b'file_b', b'test_content b\n')]}, + {"message": "a"}, + {"message": "b", "added": [FileNode(b"file_b", b"test_content b\n")]}, + {"message": "c", "changed": [FileNode(b"file_b", b"test_content b modified\n")]}, + {"message": "d", "changed": [FileNode(b"file_b", b"test_content b\n")]}, ] # open a PR from a to b, adding file_b pull_request = pr_util.create_pull_request( - commits=commits, target_head='a', source_head='b', revisions=['b'], - name_suffix='per-file-review') + commits=commits, target_head="a", source_head="b", revisions=["b"], name_suffix="per-file-review" + ) # modify PR modifying file file_b - pr_util.add_one_commit(head='c') + pr_util.add_one_commit(head="c") - assert_pr_file_changes( - pull_request, - added=[], - modified=['file_b'], - removed=[]) + assert_pr_file_changes(pull_request, added=[], modified=["file_b"], removed=[]) # move the head again to d, which rollbacks change, # meaning we should indicate no changes - pr_util.add_one_commit(head='d') + pr_util.add_one_commit(head="d") - 
assert_pr_file_changes( - pull_request, - added=[], - modified=[], - removed=[]) + assert_pr_file_changes(pull_request, added=[], modified=[], removed=[]) def test_updated_all_files_in_pr(self, pr_util): commits = [ - {'message': 'a'}, - {'message': 'b', 'added': [ - FileNode(b'file_a', b'test_content a\n'), - FileNode(b'file_b', b'test_content b\n'), - FileNode(b'file_c', b'test_content c\n')]}, - {'message': 'c', 'changed': [ - FileNode(b'file_a', b'test_content a changed\n'), - FileNode(b'file_b', b'test_content b changed\n'), - FileNode(b'file_c', b'test_content c changed\n')]}, + {"message": "a"}, + { + "message": "b", + "added": [ + FileNode(b"file_a", b"test_content a\n"), + FileNode(b"file_b", b"test_content b\n"), + FileNode(b"file_c", b"test_content c\n"), + ], + }, + { + "message": "c", + "changed": [ + FileNode(b"file_a", b"test_content a changed\n"), + FileNode(b"file_b", b"test_content b changed\n"), + FileNode(b"file_c", b"test_content c changed\n"), + ], + }, ] # open a PR from a to b, changing 3 files pull_request = pr_util.create_pull_request( - commits=commits, target_head='a', source_head='b', revisions=['b'], - name_suffix='per-file-review') - - pr_util.add_one_commit(head='c') + commits=commits, target_head="a", source_head="b", revisions=["b"], name_suffix="per-file-review" + ) - assert_pr_file_changes( - pull_request, - added=[], - modified=['file_a', 'file_b', 'file_c'], - removed=[]) + pr_util.add_one_commit(head="c") + + assert_pr_file_changes(pull_request, added=[], modified=["file_a", "file_b", "file_c"], removed=[]) def test_updated_and_removed_all_files_in_pr(self, pr_util): commits = [ - {'message': 'a'}, - {'message': 'b', 'added': [ - FileNode(b'file_a', b'test_content a\n'), - FileNode(b'file_b', b'test_content b\n'), - FileNode(b'file_c', b'test_content c\n')]}, - {'message': 'c', 'removed': [ - FileNode(b'file_a', b'test_content a changed\n'), - FileNode(b'file_b', b'test_content b changed\n'), - FileNode(b'file_c', 
b'test_content c changed\n')]}, + {"message": "a"}, + { + "message": "b", + "added": [ + FileNode(b"file_a", b"test_content a\n"), + FileNode(b"file_b", b"test_content b\n"), + FileNode(b"file_c", b"test_content c\n"), + ], + }, + { + "message": "c", + "removed": [ + FileNode(b"file_a", b"test_content a changed\n"), + FileNode(b"file_b", b"test_content b changed\n"), + FileNode(b"file_c", b"test_content c changed\n"), + ], + }, ] # open a PR from a to b, removing 3 files pull_request = pr_util.create_pull_request( - commits=commits, target_head='a', source_head='b', revisions=['b'], - name_suffix='per-file-review') - - pr_util.add_one_commit(head='c') + commits=commits, target_head="a", source_head="b", revisions=["b"], name_suffix="per-file-review" + ) - assert_pr_file_changes( - pull_request, - added=[], - modified=[], - removed=['file_a', 'file_b', 'file_c']) + pr_util.add_one_commit(head="c") + + assert_pr_file_changes(pull_request, added=[], modified=[], removed=["file_a", "file_b", "file_c"]) def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub): @@ -866,8 +805,7 @@ def test_update_adds_a_comment_to_the_pu .. 
|under_review| replace:: *"Under Review"*""" ).format(commit_id[:12]) - pull_request_comments = sorted( - pull_request.comments, key=lambda c: c.modified_at) + pull_request_comments = sorted(pull_request.comments, key=lambda c: c.modified_at) update_comment = pull_request_comments[-1] assert update_comment.text == expected_message @@ -890,8 +828,8 @@ def test_create_version_from_snapshot_up version = PullRequestModel()._create_version_from_snapshot(pull_request) # Check attributes - assert version.title == pr_util.create_parameters['title'] - assert version.description == pr_util.create_parameters['description'] + assert version.title == pr_util.create_parameters["title"] + assert version.description == pr_util.create_parameters["description"] assert version.status == PullRequest.STATUS_CLOSED # versions get updated created_on @@ -899,11 +837,11 @@ def test_create_version_from_snapshot_up assert version.updated_on == updated_on assert version.user_id == pull_request.user_id - assert version.revisions == pr_util.create_parameters['revisions'] + assert version.revisions == pr_util.create_parameters["revisions"] assert version.source_repo == pr_util.source_repository - assert version.source_ref == pr_util.create_parameters['source_ref'] + assert version.source_ref == pr_util.create_parameters["source_ref"] assert version.target_repo == pr_util.target_repository - assert version.target_ref == pr_util.create_parameters['target_ref'] + assert version.target_ref == pr_util.create_parameters["target_ref"] assert version._last_merge_source_rev == pull_request._last_merge_source_rev assert version._last_merge_target_rev == pull_request._last_merge_target_rev assert version.last_merge_status == pull_request.last_merge_status @@ -921,15 +859,9 @@ def test_link_comments_to_version_only_u Session().commit() # Expect that only the new comment is linked to version2 - assert ( - comment_unlinked.pull_request_version_id == - version2.pull_request_version_id) - assert ( - 
comment_linked.pull_request_version_id == - version1.pull_request_version_id) - assert ( - comment_unlinked.pull_request_version_id != - comment_linked.pull_request_version_id) + assert comment_unlinked.pull_request_version_id == version2.pull_request_version_id + assert comment_linked.pull_request_version_id == version1.pull_request_version_id + assert comment_unlinked.pull_request_version_id != comment_linked.pull_request_version_id def test_calculate_commits(): @@ -945,35 +877,26 @@ def test_calculate_commits(): def assert_inline_comments(pull_request, visible=None, outdated=None): if visible is not None: inline_comments = CommentsModel().get_inline_comments( - pull_request.target_repo.repo_id, pull_request=pull_request) - inline_cnt = len(CommentsModel().get_inline_comments_as_list( - inline_comments)) + pull_request.target_repo.repo_id, pull_request=pull_request + ) + inline_cnt = len(CommentsModel().get_inline_comments_as_list(inline_comments)) assert inline_cnt == visible if outdated is not None: - outdated_comments = CommentsModel().get_outdated_comments( - pull_request.target_repo.repo_id, pull_request) + outdated_comments = CommentsModel().get_outdated_comments(pull_request.target_repo.repo_id, pull_request) assert len(outdated_comments) == outdated -def assert_pr_file_changes( - pull_request, added=None, modified=None, removed=None): +def assert_pr_file_changes(pull_request, added=None, modified=None, removed=None): pr_versions = PullRequestModel().get_versions(pull_request) # always use first version, ie original PR to calculate changes pull_request_version = pr_versions[0] - old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs( - pull_request, pull_request_version) - file_changes = PullRequestModel()._calculate_file_changes( - old_diff_data, new_diff_data) + old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(pull_request, pull_request_version) + file_changes = 
PullRequestModel()._calculate_file_changes(old_diff_data, new_diff_data) - assert added == file_changes.added, \ - 'expected added:%s vs value:%s' % (added, file_changes.added) - assert modified == file_changes.modified, \ - 'expected modified:%s vs value:%s' % (modified, file_changes.modified) - assert removed == file_changes.removed, \ - 'expected removed:%s vs value:%s' % (removed, file_changes.removed) + assert added == file_changes.added, "expected added:%s vs value:%s" % (added, file_changes.added) + assert modified == file_changes.modified, "expected modified:%s vs value:%s" % (modified, file_changes.modified) + assert removed == file_changes.removed, "expected removed:%s vs value:%s" % (removed, file_changes.removed) def outdated_comments_patcher(use_outdated=True): - return mock.patch.object( - CommentsModel, 'use_outdated_comments', - return_value=use_outdated) + return mock.patch.object(CommentsModel, "use_outdated_comments", return_value=use_outdated) diff --git a/rhodecode/tests/models/test_repo_groups.py b/rhodecode/tests/models/test_repo_groups.py --- a/rhodecode/tests/models/test_repo_groups.py +++ b/rhodecode/tests/models/test_repo_groups.py @@ -23,7 +23,7 @@ from sqlalchemy.exc import IntegrityErro import pytest from rhodecode.tests import TESTS_TMP_PATH -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.model.repo_group import RepoGroupModel from rhodecode.model.repo import RepoModel diff --git a/rhodecode/tests/models/test_user_group_permissions_on_repo_groups.py b/rhodecode/tests/models/test_user_group_permissions_on_repo_groups.py --- a/rhodecode/tests/models/test_user_group_permissions_on_repo_groups.py +++ b/rhodecode/tests/models/test_user_group_permissions_on_repo_groups.py @@ -28,7 +28,7 @@ from rhodecode.model.user_group import U from rhodecode.tests.models.common import ( _create_project_tree, check_tree_perms, _get_perms, _check_expected_count, expected_count, 
_destroy_project_tree) -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture fixture = Fixture() diff --git a/rhodecode/tests/models/test_user_groups.py b/rhodecode/tests/models/test_user_groups.py --- a/rhodecode/tests/models/test_user_groups.py +++ b/rhodecode/tests/models/test_user_groups.py @@ -22,7 +22,7 @@ import pytest from rhodecode.model.db import User from rhodecode.tests import TEST_USER_REGULAR_LOGIN -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.model.user_group import UserGroupModel from rhodecode.model.meta import Session diff --git a/rhodecode/tests/models/test_users.py b/rhodecode/tests/models/test_users.py --- a/rhodecode/tests/models/test_users.py +++ b/rhodecode/tests/models/test_users.py @@ -27,7 +27,7 @@ from rhodecode.model.user import UserMod from rhodecode.model.user_group import UserGroupModel from rhodecode.model.repo import RepoModel from rhodecode.model.repo_group import RepoGroupModel -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture from rhodecode.lib.str_utils import safe_str diff --git a/rhodecode/tests/other/test_validators.py b/rhodecode/tests/other/test_validators.py --- a/rhodecode/tests/other/test_validators.py +++ b/rhodecode/tests/other/test_validators.py @@ -32,11 +32,11 @@ from rhodecode.model.meta import Session from rhodecode.model.repo_group import RepoGroupModel from rhodecode.model.db import ChangesetStatus, Repository from rhodecode.model.changeset_status import ChangesetStatusModel -from rhodecode.tests.fixture import Fixture +from rhodecode.tests.fixtures.rc_fixture import Fixture fixture = Fixture() -pytestmark = pytest.mark.usefixtures('baseapp') +pytestmark = pytest.mark.usefixtures("baseapp") @pytest.fixture() diff --git a/rhodecode/tests/rhodecode.ini b/rhodecode/tests/rhodecode.ini --- a/rhodecode/tests/rhodecode.ini +++ 
b/rhodecode/tests/rhodecode.ini @@ -111,7 +111,7 @@ app.base_url = http://rhodecode.local app.service_api.host = http://rhodecode.local:10020 ; Secret for Service API authentication. -app.service_api.token = +app.service_api.token = secret4 ; Unique application ID. Should be a random unique string for security. app_instance_uuid = rc-production @@ -351,7 +351,7 @@ archive_cache.objectstore.retry_attempts ; and served from the cache during subsequent requests for the same archive of ; the repository. This path is important to be shared across filesystems and with ; RhodeCode and vcsserver -archive_cache.filesystem.store_dir = %(here)s/rc-tests/archive_cache +archive_cache.filesystem.store_dir = %(here)s/.rc-test-data/archive_cache ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb archive_cache.filesystem.cache_size_gb = 2 @@ -406,7 +406,7 @@ celery.task_store_eager_result = true ; Default cache dir for caches. Putting this into a ramdisk can boost performance. ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space -cache_dir = %(here)s/rc-test-data +cache_dir = %(here)s/.rc-test-data ; ********************************************* ; `sql_cache_short` cache for heavy SQL queries @@ -435,7 +435,7 @@ rc_cache.cache_repo_longterm.max_size = rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace rc_cache.cache_general.expiration_time = 43200 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set -rc_cache.cache_general.arguments.filename = %(here)s/rc-tests/cache-backend/cache_general_db +rc_cache.cache_general.arguments.filename = %(here)s/.rc-test-data/cache-backend/cache_general_db ; alternative `cache_general` redis backend with distributed lock #rc_cache.cache_general.backend = dogpile.cache.rc.redis @@ -454,6 +454,10 @@ rc_cache.cache_general.arguments.filenam ; auto-renew lock to prevent stale locks, slower but safer. 
Use only if problems happen #rc_cache.cache_general.arguments.lock_auto_renewal = true +; prefix for redis keys used for this cache backend, the final key is constructed using {custom-prefix}{key} +#rc_cache.cache_general.arguments.key_prefix = custom-prefix- + + ; ************************************************* ; `cache_perms` cache for permission tree, auth TTL ; for simplicity use rc.file_namespace backend, @@ -462,7 +466,7 @@ rc_cache.cache_general.arguments.filenam rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace rc_cache.cache_perms.expiration_time = 0 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set -rc_cache.cache_perms.arguments.filename = %(here)s/rc-tests/cache-backend/cache_perms_db +rc_cache.cache_perms.arguments.filename = %(here)s/.rc-test-data/cache-backend/cache_perms_db ; alternative `cache_perms` redis backend with distributed lock #rc_cache.cache_perms.backend = dogpile.cache.rc.redis @@ -481,6 +485,10 @@ rc_cache.cache_perms.arguments.filename ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen #rc_cache.cache_perms.arguments.lock_auto_renewal = true +; prefix for redis keys used for this cache backend, the final key is constructed using {custom-prefix}{key} +#rc_cache.cache_perms.arguments.key_prefix = custom-prefix- + + ; *************************************************** ; `cache_repo` cache for file tree, Readme, RSS FEEDS ; for simplicity use rc.file_namespace backend, @@ -489,7 +497,7 @@ rc_cache.cache_perms.arguments.filename rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace rc_cache.cache_repo.expiration_time = 2592000 ; file cache store path. 
Defaults to `cache_dir =` value or tempdir if both values are not set -rc_cache.cache_repo.arguments.filename = %(here)s/rc-tests/cache-backend/cache_repo_db +rc_cache.cache_repo.arguments.filename = %(here)s/.rc-test-data/cache-backend/cache_repo_db ; alternative `cache_repo` redis backend with distributed lock #rc_cache.cache_repo.backend = dogpile.cache.rc.redis @@ -508,6 +516,10 @@ rc_cache.cache_repo.arguments.filename = ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen #rc_cache.cache_repo.arguments.lock_auto_renewal = true +; prefix for redis keys used for this cache backend, the final key is constructed using {custom-prefix}{key} +#rc_cache.cache_repo.arguments.key_prefix = custom-prefix- + + ; ############## ; BEAKER SESSION ; ############## @@ -516,7 +528,7 @@ rc_cache.cache_repo.arguments.filename = ; types are file, ext:redis, ext:database, ext:memcached ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session beaker.session.type = file -beaker.session.data_dir = %(here)s/rc-tests/data/sessions +beaker.session.data_dir = %(here)s/.rc-test-data/data/sessions ; Redis based sessions #beaker.session.type = ext:redis @@ -532,7 +544,7 @@ beaker.session.data_dir = %(here)s/rc-te beaker.session.key = rhodecode beaker.session.secret = test-rc-uytcxaz -beaker.session.lock_dir = %(here)s/rc-tests/data/sessions/lock +beaker.session.lock_dir = %(here)s/.rc-test-data/data/sessions/lock ; Secure encrypted cookie. 
Requires AES and AES python libraries ; you must disable beaker.session.secret to use this @@ -564,7 +576,7 @@ beaker.session.secure = false ; WHOOSH Backend, doesn't require additional services to run ; it works good with few dozen repos search.module = rhodecode.lib.index.whoosh -search.location = %(here)s/rc-tests/data/index +search.location = %(here)s/.rc-test-data/data/index ; #################### ; CHANNELSTREAM CONFIG @@ -584,7 +596,7 @@ channelstream.server = channelstream:980 ; see Nginx/Apache configuration examples in our docs channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream channelstream.secret = ENV_GENERATED -channelstream.history.location = %(here)s/rc-tests/channelstream_history +channelstream.history.location = %(here)s/.rc-test-data/channelstream_history ; Internal application path that Javascript uses to connect into. ; If you use proxy-prefix the prefix should be added before /_channelstream @@ -601,7 +613,7 @@ channelstream.proxy_path = /_channelstre ; pymysql is an alternative driver for MySQL, use in case of problems with default one #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode -sqlalchemy.db1.url = sqlite:///%(here)s/rc-tests/rhodecode_test.db?timeout=30 +sqlalchemy.db1.url = sqlite:///%(here)s/.rc-test-data/rhodecode_test.db?timeout=30 ; see sqlalchemy docs for other advanced settings ; print the sql statements to output @@ -737,7 +749,7 @@ ssh.generate_authorized_keyfile = true ; Path to the authorized_keys file where the generate entries are placed. ; It is possible to have multiple key files specified in `sshd_config` e.g. ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode -ssh.authorized_keys_file_path = %(here)s/rc-tests/authorized_keys_rhodecode +ssh.authorized_keys_file_path = %(here)s/.rc-test-data/authorized_keys_rhodecode ; Command to execute the SSH wrapper. The binary is available in the ; RhodeCode installation directory. 
diff --git a/rhodecode/tests/run_full_tests.sh b/rhodecode/tests/run_full_tests.sh deleted file mode 100755 --- a/rhodecode/tests/run_full_tests.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -set -e - -## MIGRATIONS AND DB TESTS ## -echo "DATABASE CREATION TESTS" -rhodecode/tests/database/test_creation.sh - -echo "DATABASE MIGRATIONS TESTS" -rhodecode/tests/database/test_migration.sh - -## TEST VCS OPERATIONS ## -echo "VCS FUNCTIONAL TESTS" -rhodecode/tests/test_vcs_operations.sh - -## TOX TESTS ## -echo "TOX TESTS" -tox -r --develop diff --git a/rhodecode/tests/server_utils.py b/rhodecode/tests/server_utils.py --- a/rhodecode/tests/server_utils.py +++ b/rhodecode/tests/server_utils.py @@ -24,13 +24,13 @@ import tempfile import pytest import subprocess import logging -from urllib.request import urlopen -from urllib.error import URLError +import requests import configparser from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS from rhodecode.tests.utils import is_url_reachable +from rhodecode.tests import console_printer log = logging.getLogger(__name__) @@ -49,7 +49,7 @@ def get_host_url(pyramid_config): def assert_no_running_instance(url): if is_url_reachable(url): - print(f"Hint: Usually this means another instance of server " + console_printer(f"Hint: Usually this means another instance of server " f"is running in the background at {url}.") pytest.fail(f"Port is not free at {url}, cannot start server at") @@ -58,8 +58,9 @@ class ServerBase(object): _args = [] log_file_name = 'NOT_DEFINED.log' status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' + console_marker = " :warning: [green]pytest-setup[/green] " - def __init__(self, config_file, log_file): + def __init__(self, config_file, log_file, env): self.config_file = config_file config = configparser.ConfigParser() config.read(config_file) @@ -69,10 +70,10 @@ class ServerBase(object): self._args = [] self.log_file = log_file or os.path.join( tempfile.gettempdir(), self.log_file_name) + 
self.env = env self.process = None self.server_out = None - log.info("Using the {} configuration:{}".format( - self.__class__.__name__, config_file)) + log.info(f"Using the {self.__class__.__name__} configuration:{config_file}") if not os.path.isfile(config_file): raise RuntimeError(f'Failed to get config at {config_file}') @@ -110,18 +111,17 @@ class ServerBase(object): while time.time() - start < timeout: try: - urlopen(status_url) + requests.get(status_url) break - except URLError: + except requests.exceptions.ConnectionError: time.sleep(0.2) else: pytest.fail( - "Starting the {} failed or took more than {} " - "seconds. cmd: `{}`".format( - self.__class__.__name__, timeout, self.command)) + f"Starting the {self.__class__.__name__} failed or took more than {timeout} seconds." + f"cmd: `{self.command}`" + ) - log.info('Server of {} ready at url {}'.format( - self.__class__.__name__, status_url)) + log.info(f'Server of {self.__class__.__name__} ready at url {status_url}') def shutdown(self): self.process.kill() @@ -130,7 +130,7 @@ class ServerBase(object): def get_log_file_with_port(self): log_file = list(self.log_file.partition('.log')) - log_file.insert(1, get_port(self.config_file)) + log_file.insert(1, f'-{get_port(self.config_file)}') log_file = ''.join(log_file) return log_file @@ -140,11 +140,12 @@ class RcVCSServer(ServerBase): Represents a running VCSServer instance. 
""" - log_file_name = 'rc-vcsserver.log' + log_file_name = 'rhodecode-vcsserver.log' status_url_tmpl = 'http://{host}:{port}/status' - def __init__(self, config_file, log_file=None, workers='3'): - super(RcVCSServer, self).__init__(config_file, log_file) + def __init__(self, config_file, log_file=None, workers='3', env=None, info_prefix=''): + super(RcVCSServer, self).__init__(config_file, log_file, env) + self.info_prefix = info_prefix self._args = [ 'gunicorn', '--bind', self.bind_addr, @@ -164,9 +165,10 @@ class RcVCSServer(ServerBase): host_url = self.host_url() assert_no_running_instance(host_url) - print(f'rhodecode-vcsserver starting at: {host_url}') - print(f'rhodecode-vcsserver command: {self.command}') - print(f'rhodecode-vcsserver logfile: {self.log_file}') + console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-vcsserver starting at: {host_url}') + console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-vcsserver command: {self.command}') + console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-vcsserver logfile: {self.log_file}') + console_printer() self.process = subprocess.Popen( self._args, bufsize=0, env=env, @@ -178,11 +180,12 @@ class RcWebServer(ServerBase): Represents a running RCE web server used as a test fixture. 
""" - log_file_name = 'rc-web.log' + log_file_name = 'rhodecode-ce.log' status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' - def __init__(self, config_file, log_file=None, workers='2'): - super(RcWebServer, self).__init__(config_file, log_file) + def __init__(self, config_file, log_file=None, workers='2', env=None, info_prefix=''): + super(RcWebServer, self).__init__(config_file, log_file, env) + self.info_prefix = info_prefix self._args = [ 'gunicorn', '--bind', self.bind_addr, @@ -195,7 +198,8 @@ class RcWebServer(ServerBase): def start(self): env = os.environ.copy() - env['RC_NO_TMP_PATH'] = '1' + if self.env: + env.update(self.env) self.log_file = self.get_log_file_with_port() self.server_out = open(self.log_file, 'w') @@ -203,9 +207,10 @@ class RcWebServer(ServerBase): host_url = self.host_url() assert_no_running_instance(host_url) - print(f'rhodecode-web starting at: {host_url}') - print(f'rhodecode-web command: {self.command}') - print(f'rhodecode-web logfile: {self.log_file}') + console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-ce starting at: {host_url}') + console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-ce command: {self.command}') + console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-ce logfile: {self.log_file}') + console_printer() self.process = subprocess.Popen( self._args, bufsize=0, env=env, @@ -229,3 +234,44 @@ class RcWebServer(ServerBase): } params.update(**kwargs) return params['user'], params['passwd'] + +class CeleryServer(ServerBase): + log_file_name = 'rhodecode-celery.log' + status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' + + def __init__(self, config_file, log_file=None, workers='2', env=None, info_prefix=''): + super(CeleryServer, self).__init__(config_file, log_file, env) + self.info_prefix = info_prefix + self._args = \ + ['celery', + '--no-color', + '--app=rhodecode.lib.celerylib.loader', + 'worker', + '--autoscale=4,2', + '--max-tasks-per-child=30', + '--task-events', + 
'--loglevel=DEBUG', + '--ini=' + self.config_file] + + def start(self): + env = os.environ.copy() + env['RC_NO_TEST_ENV'] = '1' + + self.log_file = self.get_log_file_with_port() + self.server_out = open(self.log_file, 'w') + + host_url = "Celery" #self.host_url() + #assert_no_running_instance(host_url) + + console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-celery starting at: {host_url}') + console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-celery command: {self.command}') + console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-celery logfile: {self.log_file}') + console_printer() + + self.process = subprocess.Popen( + self._args, bufsize=0, env=env, + stdout=self.server_out, stderr=self.server_out) + + + def wait_until_ready(self, timeout=30): + time.sleep(2) diff --git a/rhodecode/tests/test_vcs_operations.sh b/rhodecode/tests/test_vcs_operations.sh deleted file mode 100755 --- a/rhodecode/tests/test_vcs_operations.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/sh -set -e - -SQLITE_DB_PATH=/mnt/hgfs/marcink-shared/workspace-python/rhodecode -RC_LOG=/tmp/rc.log -INI_FILE=test.ini -TEST_DB_NAME=rhodecode_test - - -for databaseName in p m s; do - # set the different DBs - if [ "$databaseName" = "s" ]; then - echo "sqlite" - rhodecode-config --filename=$INI_FILE --update '[app:main]sqlalchemy.db1.url=sqlite:///'$SQLITE_DB_PATH/$TEST_DB_NAME'.sqlite' - elif [ "$databaseName" = "p" ]; then - echo "postgres" - rhodecode-config --filename=$INI_FILE --update '[app:main]sqlalchemy.db1.url=postgresql://postgres:qweqwe@localhost/'$TEST_DB_NAME'' - elif [ "$databaseName" = "m" ]; then - echo "mysql" - rhodecode-config --filename=$INI_FILE --update '[app:main]sqlalchemy.db1.url=mysql://root:qweqwe@localhost/'$TEST_DB_NAME'' - fi - - # running just VCS tests - RC_NO_TMP_PATH=1 py.test \ - rhodecode/tests/other/test_vcs_operations.py - -done diff --git a/rhodecode/tests/utils.py b/rhodecode/tests/utils.py --- a/rhodecode/tests/utils.py +++ 
b/rhodecode/tests/utils.py @@ -36,24 +36,29 @@ from webtest.app import TestResponse, Te import pytest -try: - import rc_testdata -except ImportError: - raise ImportError('Failed to import rc_testdata, ' - 'please make sure this package is installed from requirements_test.txt') - from rhodecode.model.db import User, Repository from rhodecode.model.meta import Session from rhodecode.model.scm import ScmModel from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository from rhodecode.lib.vcs.backends.base import EmptyCommit -from rhodecode.tests import login_user_session +from rhodecode.tests import login_user_session, console_printer +from rhodecode.authentication import AuthenticationPluginRegistry +from rhodecode.model.settings import SettingsModel log = logging.getLogger(__name__) -def print_to_func(value, print_to=sys.stderr): - print(value, file=print_to) +def console_printer_utils(msg): + console_printer(f" :white_check_mark: [green]test-utils[/green] {msg}") + + +def get_rc_testdata(): + try: + import rc_testdata + except ImportError: + raise ImportError('Failed to import rc_testdata, ' + 'please make sure this package is installed from requirements_test.txt') + return rc_testdata class CustomTestResponse(TestResponse): @@ -73,7 +78,6 @@ class CustomTestResponse(TestResponse): assert string in res """ print_body = kw.pop('print_body', False) - print_to = kw.pop('print_to', sys.stderr) if 'no' in kw: no = kw['no'] @@ -89,18 +93,18 @@ class CustomTestResponse(TestResponse): for s in strings: if s not in self: - print_to_func(f"Actual response (no {s!r}):", print_to=print_to) - print_to_func(f"body output saved as `{f}`", print_to=print_to) + console_printer_utils(f"Actual response (no {s!r}):") + console_printer_utils(f"body output saved as `{f}`") if print_body: - print_to_func(str(self), print_to=print_to) + console_printer_utils(str(self)) raise IndexError(f"Body does not contain string {s!r}, body output saved as {f}") for no_s in no: if no_s 
in self: - print_to_func(f"Actual response (has {no_s!r})", print_to=print_to) - print_to_func(f"body output saved as `{f}`", print_to=print_to) + console_printer_utils(f"Actual response (has {no_s!r})") + console_printer_utils(f"body output saved as `{f}`") if print_body: - print_to_func(str(self), print_to=print_to) + console_printer_utils(str(self)) raise IndexError(f"Body contains bad string {no_s!r}, body output saved as {f}") def assert_response(self): @@ -209,6 +213,7 @@ def extract_git_repo_from_dump(dump_name """Create git repo `repo_name` from dump `dump_name`.""" repos_path = ScmModel().repos_path target_path = os.path.join(repos_path, repo_name) + rc_testdata = get_rc_testdata() rc_testdata.extract_git_dump(dump_name, target_path) return target_path @@ -217,6 +222,7 @@ def extract_hg_repo_from_dump(dump_name, """Create hg repo `repo_name` from dump `dump_name`.""" repos_path = ScmModel().repos_path target_path = os.path.join(repos_path, repo_name) + rc_testdata = get_rc_testdata() rc_testdata.extract_hg_dump(dump_name, target_path) return target_path @@ -245,6 +251,7 @@ def _load_svn_dump_into_repo(dump_name, Currently the dumps are in rc_testdata. They might later on be integrated with the main repository once they stabilize more. """ + rc_testdata = get_rc_testdata() dump = rc_testdata.load_svn_dump(dump_name) load_dump = subprocess.Popen( ['svnadmin', 'load', repo_path], @@ -254,9 +261,7 @@ def _load_svn_dump_into_repo(dump_name, if load_dump.returncode != 0: log.error("Output of load_dump command: %s", out) log.error("Error output of load_dump command: %s", err) - raise Exception( - 'Failed to load dump "%s" into repository at path "%s".' 
- % (dump_name, repo_path)) + raise Exception(f'Failed to load dump "{dump_name}" into repository at path "{repo_path}".') class AssertResponse(object): @@ -492,3 +497,54 @@ def permission_update_data_generator(csr ('perm_del_member_type_{}'.format(obj_id), obj_type), ]) return form_data + + + +class AuthPluginManager: + + def cleanup(self): + self._enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode']) + + def enable(self, plugins_list, override=None): + return self._enable_plugins(plugins_list, override) + + @classmethod + def _enable_plugins(cls, plugins_list, override: object = None): + override = override or {} + params = { + 'auth_plugins': ','.join(plugins_list), + } + + # helper translate some names to others, to fix settings code + name_map = { + 'token': 'authtoken' + } + log.debug('enable_auth_plugins: enabling following auth-plugins: %s', plugins_list) + + for module in plugins_list: + plugin_name = module.partition('#')[-1] + if plugin_name in name_map: + plugin_name = name_map[plugin_name] + enabled_plugin = f'auth_{plugin_name}_enabled' + cache_ttl = f'auth_{plugin_name}_cache_ttl' + + # default params that are needed for each plugin, + # `enabled` and `cache_ttl` + params.update({ + enabled_plugin: True, + cache_ttl: 0 + }) + if override.get: + params.update(override.get(module, {})) + + validated_params = params + + for k, v in validated_params.items(): + setting = SettingsModel().create_or_update_setting(k, v) + Session().add(setting) + Session().commit() + + AuthenticationPluginRegistry.invalidate_auth_plugins_cache(hard=True) + + enabled_plugins = SettingsModel().get_auth_plugins() + assert plugins_list == enabled_plugins diff --git a/rhodecode/tests/vcs/__init__.py b/rhodecode/tests/vcs/__init__.py --- a/rhodecode/tests/vcs/__init__.py +++ b/rhodecode/tests/vcs/__init__.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify diff --git 
a/rhodecode/tests/vcs/conftest.py b/rhodecode/tests/vcs/conftest.py --- a/rhodecode/tests/vcs/conftest.py +++ b/rhodecode/tests/vcs/conftest.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -32,8 +31,7 @@ from rhodecode.tests.utils import check_ @pytest.fixture() -def vcs_repository_support( - request, backend_alias, baseapp, _vcs_repo_container): +def vcs_repository_support(request, backend_alias, baseapp, _vcs_repo_container): """ Provide a test repository for the test run. @@ -63,7 +61,7 @@ def vcs_repository_support( return backend_alias, repo -@pytest.fixture(scope='class') +@pytest.fixture(scope="class") def _vcs_repo_container(request): """ Internal fixture intended to help support class based scoping on demand. @@ -73,13 +71,12 @@ def _vcs_repo_container(request): def _create_vcs_repo_container(request): repo_container = VcsRepoContainer() - if not request.config.getoption('--keep-tmp-path'): + if not request.config.getoption("--keep-tmp-path"): request.addfinalizer(repo_container.cleanup) return repo_container class VcsRepoContainer(object): - def __init__(self): self._cleanup_paths = [] self._repos = {} @@ -98,14 +95,14 @@ class VcsRepoContainer(object): def _should_create_repo_per_test(cls): - return getattr(cls, 'recreate_repo_per_test', False) + return getattr(cls, "recreate_repo_per_test", False) def _create_empty_repository(cls, backend_alias=None): Backend = get_backend(backend_alias or cls.backend_alias) repo_path = get_new_dir(str(time.time())) repo = Backend(repo_path, create=True) - if hasattr(cls, '_get_commits'): + if hasattr(cls, "_get_commits"): commits = cls._get_commits() cls.tip = _add_commits_to_repo(repo, commits) @@ -127,7 +124,7 @@ def config(): specific content is required. 
""" config = Config() - config.set('section-a', 'a-1', 'value-a-1') + config.set("section-a", "a-1", "value-a-1") return config @@ -136,24 +133,24 @@ def _add_commits_to_repo(repo, commits): tip = None for commit in commits: - for node in commit.get('added', []): + for node in commit.get("added", []): if not isinstance(node, FileNode): node = FileNode(safe_bytes(node.path), content=node.content) imc.add(node) - for node in commit.get('changed', []): + for node in commit.get("changed", []): if not isinstance(node, FileNode): node = FileNode(safe_bytes(node.path), content=node.content) imc.change(node) - for node in commit.get('removed', []): + for node in commit.get("removed", []): imc.remove(FileNode(safe_bytes(node.path))) tip = imc.commit( - message=str(commit['message']), - author=str(commit['author']), - date=commit['date'], - branch=commit.get('branch') + message=str(commit["message"]), + author=str(commit["author"]), + date=commit["date"], + branch=commit.get("branch"), ) return tip @@ -183,16 +180,15 @@ def generate_repo_with_commits(vcs_repo) start_date = datetime.datetime(2010, 1, 1, 20) for x in range(num): yield { - 'message': 'Commit %d' % x, - 'author': 'Joe Doe ', - 'date': start_date + datetime.timedelta(hours=12 * x), - 'added': [ - FileNode(b'file_%d.txt' % x, content=b'Foobar %d' % x), + "message": "Commit %d" % x, + "author": "Joe Doe ", + "date": start_date + datetime.timedelta(hours=12 * x), + "added": [ + FileNode(b"file_%d.txt" % x, content=b"Foobar %d" % x), ], - 'modified': [ - FileNode(b'file_%d.txt' % x, - content=b'Foobar %d modified' % (x-1)), - ] + "modified": [ + FileNode(b"file_%d.txt" % x, content=b"Foobar %d modified" % (x - 1)), + ], } def commit_maker(num=5): @@ -231,34 +227,33 @@ class BackendTestMixin(object): created before every single test. Defaults to ``True``. 
""" + recreate_repo_per_test = True @classmethod def _get_commits(cls): commits = [ { - 'message': 'Initial commit', - 'author': 'Joe Doe ', - 'date': datetime.datetime(2010, 1, 1, 20), - 'added': [ - FileNode(b'foobar', content=b'Foobar'), - FileNode(b'foobar2', content=b'Foobar II'), - FileNode(b'foo/bar/baz', content=b'baz here!'), + "message": "Initial commit", + "author": "Joe Doe ", + "date": datetime.datetime(2010, 1, 1, 20), + "added": [ + FileNode(b"foobar", content=b"Foobar"), + FileNode(b"foobar2", content=b"Foobar II"), + FileNode(b"foo/bar/baz", content=b"baz here!"), ], }, { - 'message': 'Changes...', - 'author': 'Jane Doe ', - 'date': datetime.datetime(2010, 1, 1, 21), - 'added': [ - FileNode(b'some/new.txt', content=b'news...'), + "message": "Changes...", + "author": "Jane Doe ", + "date": datetime.datetime(2010, 1, 1, 21), + "added": [ + FileNode(b"some/new.txt", content=b"news..."), ], - 'changed': [ - FileNode(b'foobar', b'Foobar I'), + "changed": [ + FileNode(b"foobar", b"Foobar I"), ], - 'removed': [], + "removed": [], }, ] return commits - - diff --git a/rhodecode/tests/vcs/test_archives.py b/rhodecode/tests/vcs/test_archives.py --- a/rhodecode/tests/vcs/test_archives.py +++ b/rhodecode/tests/vcs/test_archives.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -43,121 +42,120 @@ def d_cache_config(): @pytest.mark.usefixtures("vcs_repository_support") class TestArchives(BackendTestMixin): - @classmethod def _get_commits(cls): start_date = datetime.datetime(2010, 1, 1, 20) yield { - 'message': 'Initial Commit', - 'author': 'Joe Doe ', - 'date': start_date + datetime.timedelta(hours=12), - 'added': [ - FileNode(b'executable_0o100755', b'mode_755', mode=0o100755), - FileNode(b'executable_0o100500', b'mode_500', mode=0o100500), - FileNode(b'not_executable', b'mode_644', mode=0o100644), + "message": "Initial Commit", + "author": "Joe Doe ", + "date": start_date + 
datetime.timedelta(hours=12), + "added": [ + FileNode(b"executable_0o100755", b"mode_755", mode=0o100755), + FileNode(b"executable_0o100500", b"mode_500", mode=0o100500), + FileNode(b"not_executable", b"mode_644", mode=0o100644), ], } for x in range(5): yield { - 'message': 'Commit %d' % x, - 'author': 'Joe Doe ', - 'date': start_date + datetime.timedelta(hours=12 * x), - 'added': [ - FileNode(b'%d/file_%d.txt' % (x, x), content=b'Foobar %d' % x), + "message": "Commit %d" % x, + "author": "Joe Doe ", + "date": start_date + datetime.timedelta(hours=12 * x), + "added": [ + FileNode(b"%d/file_%d.txt" % (x, x), content=b"Foobar %d" % x), ], } - @pytest.mark.parametrize('compressor', ['gz', 'bz2']) + @pytest.mark.parametrize("compressor", ["gz", "bz2"]) def test_archive_tar(self, compressor, tmpdir, tmp_path, d_cache_config): - - archive_node = tmp_path / 'archive-node' + archive_node = tmp_path / "archive-node" archive_node.touch() archive_lnk = self.tip.archive_repo( - str(archive_node), kind=f't{compressor}', archive_dir_name='repo', cache_config=d_cache_config) + str(archive_node), kind=f"t{compressor}", archive_dir_name="repo", cache_config=d_cache_config + ) out_dir = tmpdir - out_file = tarfile.open(str(archive_lnk), f'r|{compressor}') + out_file = tarfile.open(str(archive_lnk), f"r|{compressor}") out_file.extractall(out_dir) out_file.close() for x in range(5): - node_path = '%d/file_%d.txt' % (x, x) - with open(os.path.join(out_dir, 'repo/' + node_path), 'rb') as f: + node_path = "%d/file_%d.txt" % (x, x) + with open(os.path.join(out_dir, "repo/" + node_path), "rb") as f: file_content = f.read() assert file_content == self.tip.get_node(node_path).content shutil.rmtree(out_dir) - @pytest.mark.parametrize('compressor', ['gz', 'bz2']) + @pytest.mark.parametrize("compressor", ["gz", "bz2"]) def test_archive_tar_symlink(self, compressor): - pytest.skip('Not supported') + pytest.skip("Not supported") - @pytest.mark.parametrize('compressor', ['gz', 'bz2']) + 
@pytest.mark.parametrize("compressor", ["gz", "bz2"]) def test_archive_tar_file_modes(self, compressor, tmpdir, tmp_path, d_cache_config): - archive_node = tmp_path / 'archive-node' + archive_node = tmp_path / "archive-node" archive_node.touch() archive_lnk = self.tip.archive_repo( - str(archive_node), kind='t{}'.format(compressor), archive_dir_name='repo', cache_config=d_cache_config) + str(archive_node), kind="t{}".format(compressor), archive_dir_name="repo", cache_config=d_cache_config + ) out_dir = tmpdir - out_file = tarfile.open(str(archive_lnk), 'r|{}'.format(compressor)) + out_file = tarfile.open(str(archive_lnk), "r|{}".format(compressor)) out_file.extractall(out_dir) out_file.close() def dest(inp): return os.path.join(out_dir, "repo/" + inp) - assert oct(os.stat(dest('not_executable')).st_mode) == '0o100644' + assert oct(os.stat(dest("not_executable")).st_mode) == "0o100644" def test_archive_zip(self, tmp_path, d_cache_config): - archive_node = tmp_path / 'archive-node' - archive_node.touch() - - archive_lnk = self.tip.archive_repo(str(archive_node), kind='zip', - archive_dir_name='repo', cache_config=d_cache_config) - zip_file = zipfile.ZipFile(str(archive_lnk)) - - for x in range(5): - node_path = '%d/file_%d.txt' % (x, x) - data = zip_file.read(f'repo/{node_path}') - - decompressed = io.BytesIO() - decompressed.write(data) - assert decompressed.getvalue() == \ - self.tip.get_node(node_path).content - decompressed.close() - - def test_archive_zip_with_metadata(self, tmp_path, d_cache_config): - archive_node = tmp_path / 'archive-node' + archive_node = tmp_path / "archive-node" archive_node.touch() archive_lnk = self.tip.archive_repo( - str(archive_node), kind='zip', - archive_dir_name='repo', write_metadata=True, cache_config=d_cache_config) + str(archive_node), kind="zip", archive_dir_name="repo", cache_config=d_cache_config + ) + zip_file = zipfile.ZipFile(str(archive_lnk)) + + for x in range(5): + node_path = "%d/file_%d.txt" % (x, x) + data = 
zip_file.read(f"repo/{node_path}") + + decompressed = io.BytesIO() + decompressed.write(data) + assert decompressed.getvalue() == self.tip.get_node(node_path).content + decompressed.close() + + def test_archive_zip_with_metadata(self, tmp_path, d_cache_config): + archive_node = tmp_path / "archive-node" + archive_node.touch() + + archive_lnk = self.tip.archive_repo( + str(archive_node), kind="zip", archive_dir_name="repo", write_metadata=True, cache_config=d_cache_config + ) zip_file = zipfile.ZipFile(str(archive_lnk)) - metafile = zip_file.read('repo/.archival.txt') + metafile = zip_file.read("repo/.archival.txt") raw_id = ascii_bytes(self.tip.raw_id) - assert b'commit_id:%b' % raw_id in metafile + assert b"commit_id:%b" % raw_id in metafile for x in range(5): - node_path = '%d/file_%d.txt' % (x, x) - data = zip_file.read(f'repo/{node_path}') + node_path = "%d/file_%d.txt" % (x, x) + data = zip_file.read(f"repo/{node_path}") decompressed = io.BytesIO() decompressed.write(data) - assert decompressed.getvalue() == \ - self.tip.get_node(node_path).content + assert decompressed.getvalue() == self.tip.get_node(node_path).content decompressed.close() def test_archive_wrong_kind(self, tmp_path, d_cache_config): - archive_node = tmp_path / 'archive-node' + archive_node = tmp_path / "archive-node" archive_node.touch() with pytest.raises(ImproperArchiveTypeError): - self.tip.archive_repo(str(archive_node), kind='wrong kind', cache_config=d_cache_config) + self.tip.archive_repo(str(archive_node), kind="wrong kind", cache_config=d_cache_config) @pytest.fixture() @@ -167,8 +165,8 @@ def base_commit(): """ commit = base.BaseCommit() commit.repository = mock.Mock() - commit.repository.name = 'fake_repo' - commit.short_id = 'fake_id' + commit.repository.name = "fake_repo" + commit.short_id = "fake_id" return commit @@ -180,19 +178,17 @@ def test_validate_archive_prefix_enforce def test_validate_archive_prefix_empty_prefix(base_commit): # TODO: johbo: Should raise a ValueError 
here. with pytest.raises(VCSError): - base_commit._validate_archive_prefix('') + base_commit._validate_archive_prefix("") def test_validate_archive_prefix_with_leading_slash(base_commit): # TODO: johbo: Should raise a ValueError here. with pytest.raises(VCSError): - base_commit._validate_archive_prefix('/any') + base_commit._validate_archive_prefix("/any") def test_validate_archive_prefix_falls_back_to_repository_name(base_commit): prefix = base_commit._validate_archive_prefix(None) - expected_prefix = base_commit._ARCHIVE_PREFIX_TEMPLATE.format( - repo_name='fake_repo', - short_id='fake_id') + expected_prefix = base_commit._ARCHIVE_PREFIX_TEMPLATE.format(repo_name="fake_repo", short_id="fake_id") assert isinstance(prefix, str) assert prefix == expected_prefix diff --git a/rhodecode/tests/vcs/test_branches.py b/rhodecode/tests/vcs/test_branches.py --- a/rhodecode/tests/vcs/test_branches.py +++ b/rhodecode/tests/vcs/test_branches.py @@ -64,18 +64,14 @@ class TestBranches(BackendTestMixin): def test_new_head(self): tip = self.repo.get_commit() - self.imc.add( - FileNode(b"docs/index.txt", content=b"Documentation\n") - ) + self.imc.add(FileNode(b"docs/index.txt", content=b"Documentation\n")) foobar_tip = self.imc.commit( message="New branch: foobar", author="joe ", branch="foobar", parents=[tip], ) - self.imc.change( - FileNode(b"docs/index.txt", content=b"Documentation\nand more...\n") - ) + self.imc.change(FileNode(b"docs/index.txt", content=b"Documentation\nand more...\n")) assert foobar_tip.branch == "foobar" newtip = self.imc.commit( message="At foobar_tip branch", @@ -96,21 +92,15 @@ class TestBranches(BackendTestMixin): @pytest.mark.backends("git", "hg") def test_branch_with_slash_in_name(self): self.imc.add(FileNode(b"extrafile", content=b"Some data\n")) - self.imc.commit( - "Branch with a slash!", author="joe ", branch="issue/123" - ) + self.imc.commit("Branch with a slash!", author="joe ", branch="issue/123") assert "issue/123" in self.repo.branches 
@pytest.mark.backends("git", "hg") def test_branch_with_slash_in_name_and_similar_without(self): self.imc.add(FileNode(b"extrafile", content=b"Some data\n")) - self.imc.commit( - "Branch with a slash!", author="joe ", branch="issue/123" - ) + self.imc.commit("Branch with a slash!", author="joe ", branch="issue/123") self.imc.add(FileNode(b"extrafile II", content=b"Some data\n")) - self.imc.commit( - "Branch without a slash...", author="joe ", branch="123" - ) + self.imc.commit("Branch without a slash...", author="joe ", branch="123") assert "issue/123" in self.repo.branches assert "123" in self.repo.branches diff --git a/rhodecode/tests/vcs/test_client_http.py b/rhodecode/tests/vcs/test_client_http.py --- a/rhodecode/tests/vcs/test_client_http.py +++ b/rhodecode/tests/vcs/test_client_http.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -28,9 +27,7 @@ from rhodecode.lib.vcs import client_htt def is_new_connection(logger, level, message): - return ( - logger == 'requests.packages.urllib3.connectionpool' and - message.startswith('Starting new HTTP')) + return logger == "requests.packages.urllib3.connectionpool" and message.startswith("Starting new HTTP") @pytest.fixture() @@ -54,7 +51,7 @@ def stub_fail_session(): """ session = mock.Mock() post = session.post() - post.content = msgpack.packb({'error': '500'}) + post.content = msgpack.packb({"error": "500"}) post.status_code = 500 session.reset_mock() @@ -89,44 +86,37 @@ def test_uses_persistent_http_connection for x in range(5): remote_call(normal=True, closed=False) - new_connections = [ - r for r in caplog.record_tuples if is_new_connection(*r)] + new_connections = [r for r in caplog.record_tuples if is_new_connection(*r)] assert len(new_connections) <= 1 def test_repo_maker_uses_session_for_classmethods(stub_session_factory): - repo_maker = client_http.RemoteVCSMaker( - 'server_and_port', 'endpoint', 'test_dummy_scm', 
stub_session_factory) + repo_maker = client_http.RemoteVCSMaker("server_and_port", "endpoint", "test_dummy_scm", stub_session_factory) repo_maker.example_call() - stub_session_factory().post.assert_called_with( - 'http://server_and_port/endpoint', data=mock.ANY) + stub_session_factory().post.assert_called_with("http://server_and_port/endpoint", data=mock.ANY) -def test_repo_maker_uses_session_for_instance_methods( - stub_session_factory, config): - repo_maker = client_http.RemoteVCSMaker( - 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory) - repo = repo_maker('stub_path', 'stub_repo_id', config) +def test_repo_maker_uses_session_for_instance_methods(stub_session_factory, config): + repo_maker = client_http.RemoteVCSMaker("server_and_port", "endpoint", "test_dummy_scm", stub_session_factory) + repo = repo_maker("stub_path", "stub_repo_id", config) repo.example_call() - stub_session_factory().post.assert_called_with( - 'http://server_and_port/endpoint', data=mock.ANY) + stub_session_factory().post.assert_called_with("http://server_and_port/endpoint", data=mock.ANY) -@mock.patch('rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory') -@mock.patch('rhodecode.lib.vcs.connection') -def test_connect_passes_in_the_same_session( - connection, session_factory_class, stub_session): +@mock.patch("rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory") +@mock.patch("rhodecode.lib.vcs.connection") +def test_connect_passes_in_the_same_session(connection, session_factory_class, stub_session): session_factory = session_factory_class.return_value session_factory.return_value = stub_session - vcs.connect_http('server_and_port') + vcs.connect_http("server_and_port") -def test_repo_maker_uses_session_that_throws_error( - stub_session_failing_factory, config): +def test_repo_maker_uses_session_that_throws_error(stub_session_failing_factory, config): repo_maker = client_http.RemoteVCSMaker( - 'server_and_port', 'endpoint', 'test_dummy_scm', 
stub_session_failing_factory) - repo = repo_maker('stub_path', 'stub_repo_id', config) + "server_and_port", "endpoint", "test_dummy_scm", stub_session_failing_factory + ) + repo = repo_maker("stub_path", "stub_repo_id", config) with pytest.raises(exceptions.HttpVCSCommunicationError): repo.example_call() diff --git a/rhodecode/tests/vcs/test_commits.py b/rhodecode/tests/vcs/test_commits.py --- a/rhodecode/tests/vcs/test_commits.py +++ b/rhodecode/tests/vcs/test_commits.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -23,27 +22,31 @@ import time import pytest from rhodecode.lib.str_utils import safe_bytes -from rhodecode.lib.vcs.backends.base import ( - CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit) +from rhodecode.lib.vcs.backends.base import CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit from rhodecode.lib.vcs.exceptions import ( - BranchDoesNotExistError, CommitDoesNotExistError, - RepositoryError, EmptyRepositoryError) + BranchDoesNotExistError, + CommitDoesNotExistError, + RepositoryError, + EmptyRepositoryError, +) from rhodecode.lib.vcs.nodes import ( - FileNode, AddedFileNodesGenerator, - ChangedFileNodesGenerator, RemovedFileNodesGenerator) + FileNode, + AddedFileNodesGenerator, + ChangedFileNodesGenerator, + RemovedFileNodesGenerator, +) from rhodecode.tests import get_new_dir from rhodecode.tests.vcs.conftest import BackendTestMixin class TestBaseChangeset(object): - def test_is_deprecated(self): from rhodecode.lib.vcs.backends.base import BaseChangeset + pytest.deprecated_call(BaseChangeset) class TestEmptyCommit(object): - def test_branch_without_alias_returns_none(self): commit = EmptyCommit() assert commit.branch is None @@ -58,29 +61,28 @@ class TestCommitsInNonEmptyRepo(BackendT start_date = datetime.datetime(2010, 1, 1, 20) for x in range(5): yield { - 'message': 'Commit %d' % x, - 'author': 'Joe Doe ', - 'date': start_date + 
datetime.timedelta(hours=12 * x), - 'added': [ - FileNode(b'file_%d.txt' % x, - content=b'Foobar %d' % x), + "message": "Commit %d" % x, + "author": "Joe Doe ", + "date": start_date + datetime.timedelta(hours=12 * x), + "added": [ + FileNode(b"file_%d.txt" % x, content=b"Foobar %d" % x), ], } def test_walk_returns_empty_list_in_case_of_file(self): - result = list(self.tip.walk('file_0.txt')) + result = list(self.tip.walk("file_0.txt")) assert result == [] @pytest.mark.backends("git", "hg") def test_new_branch(self): - self.imc.add(FileNode(b'docs/index.txt', content=b'Documentation\n')) + self.imc.add(FileNode(b"docs/index.txt", content=b"Documentation\n")) foobar_tip = self.imc.commit( - message='New branch: foobar', - author='joe ', - branch='foobar', + message="New branch: foobar", + author="joe ", + branch="foobar", ) - assert 'foobar' in self.repo.branches - assert foobar_tip.branch == 'foobar' + assert "foobar" in self.repo.branches + assert foobar_tip.branch == "foobar" # 'foobar' should be the only branch that contains the new commit branch = list(self.repo.branches.values()) assert branch[0] != branch[1] @@ -89,18 +91,14 @@ class TestCommitsInNonEmptyRepo(BackendT def test_new_head_in_default_branch(self): tip = self.repo.get_commit() - self.imc.add( - FileNode(b"docs/index.txt", content=b"Documentation\n") - ) + self.imc.add(FileNode(b"docs/index.txt", content=b"Documentation\n")) foobar_tip = self.imc.commit( message="New branch: foobar", author="joe ", branch="foobar", parents=[tip], ) - self.imc.change( - FileNode(b"docs/index.txt", content=b"Documentation\nand more...\n") - ) + self.imc.change(FileNode(b"docs/index.txt", content=b"Documentation\nand more...\n")) assert foobar_tip.branch == "foobar" newtip = self.imc.commit( message="At foobar_tip branch", @@ -132,51 +130,55 @@ class TestCommitsInNonEmptyRepo(BackendT :return: """ DEFAULT_BRANCH = self.repo.DEFAULT_BRANCH_NAME - TEST_BRANCH = 'docs' + TEST_BRANCH = "docs" org_tip = 
self.repo.get_commit() - self.imc.add(FileNode(b'readme.txt', content=b'Document\n')) + self.imc.add(FileNode(b"readme.txt", content=b"Document\n")) initial = self.imc.commit( - message='Initial commit', - author='joe ', + message="Initial commit", + author="joe ", parents=[org_tip], - branch=DEFAULT_BRANCH,) + branch=DEFAULT_BRANCH, + ) - self.imc.add(FileNode(b'newdoc.txt', content=b'foobar\n')) + self.imc.add(FileNode(b"newdoc.txt", content=b"foobar\n")) docs_branch_commit1 = self.imc.commit( - message='New branch: docs', - author='joe ', + message="New branch: docs", + author="joe ", parents=[initial], - branch=TEST_BRANCH,) + branch=TEST_BRANCH, + ) - self.imc.add(FileNode(b'newdoc2.txt', content=b'foobar2\n')) + self.imc.add(FileNode(b"newdoc2.txt", content=b"foobar2\n")) docs_branch_commit2 = self.imc.commit( - message='New branch: docs2', - author='joe ', + message="New branch: docs2", + author="joe ", parents=[docs_branch_commit1], - branch=TEST_BRANCH,) + branch=TEST_BRANCH, + ) - self.imc.add(FileNode(b'newfile', content=b'hello world\n')) + self.imc.add(FileNode(b"newfile", content=b"hello world\n")) self.imc.commit( - message='Back in default branch', - author='joe ', + message="Back in default branch", + author="joe ", parents=[initial], - branch=DEFAULT_BRANCH,) + branch=DEFAULT_BRANCH, + ) default_branch_commits = self.repo.get_commits(branch_name=DEFAULT_BRANCH) assert docs_branch_commit1 not in list(default_branch_commits) assert docs_branch_commit2 not in list(default_branch_commits) docs_branch_commits = self.repo.get_commits( - start_id=self.repo.commit_ids[0], end_id=self.repo.commit_ids[-1], - branch_name=TEST_BRANCH) + start_id=self.repo.commit_ids[0], end_id=self.repo.commit_ids[-1], branch_name=TEST_BRANCH + ) assert docs_branch_commit1 in list(docs_branch_commits) assert docs_branch_commit2 in list(docs_branch_commits) @pytest.mark.backends("svn") def test_get_commits_respects_branch_name_svn(self, vcsbackend_svn): - repo = 
vcsbackend_svn['svn-simple-layout'] - commits = repo.get_commits(branch_name='trunk') + repo = vcsbackend_svn["svn-simple-layout"] + commits = repo.get_commits(branch_name="trunk") commit_indexes = [c.idx for c in commits] assert commit_indexes == [1, 2, 3, 7, 12, 15] @@ -214,13 +216,10 @@ class TestCommits(BackendTestMixin): start_date = datetime.datetime(2010, 1, 1, 20) for x in range(5): yield { - 'message': 'Commit %d' % x, - 'author': 'Joe Doe ', - 'date': start_date + datetime.timedelta(hours=12 * x), - 'added': [ - FileNode(b'file_%d.txt' % x, - content=b'Foobar %d' % x) - ], + "message": "Commit %d" % x, + "author": "Joe Doe ", + "date": start_date + datetime.timedelta(hours=12 * x), + "added": [FileNode(b"file_%d.txt" % x, content=b"Foobar %d" % x)], } def test_simple(self): @@ -231,11 +230,11 @@ class TestCommits(BackendTestMixin): tip = self.repo.get_commit() # json.dumps(tip) uses .__json__() method data = tip.__json__() - assert 'branch' in data - assert data['revision'] + assert "branch" in data + assert data["revision"] def test_retrieve_tip(self): - tip = self.repo.get_commit('tip') + tip = self.repo.get_commit("tip") assert tip == self.repo.get_commit() def test_invalid(self): @@ -259,34 +258,34 @@ class TestCommits(BackendTestMixin): def test_size(self): tip = self.repo.get_commit() - size = 5 * len('Foobar N') # Size of 5 files + size = 5 * len("Foobar N") # Size of 5 files assert tip.size == size def test_size_at_commit(self): tip = self.repo.get_commit() - size = 5 * len('Foobar N') # Size of 5 files + size = 5 * len("Foobar N") # Size of 5 files assert self.repo.size_at_commit(tip.raw_id) == size def test_size_at_first_commit(self): commit = self.repo[0] - size = len('Foobar N') # Size of 1 file + size = len("Foobar N") # Size of 1 file assert self.repo.size_at_commit(commit.raw_id) == size def test_author(self): tip = self.repo.get_commit() - assert_text_equal(tip.author, 'Joe Doe ') + assert_text_equal(tip.author, "Joe Doe ") def 
test_author_name(self): tip = self.repo.get_commit() - assert_text_equal(tip.author_name, 'Joe Doe') + assert_text_equal(tip.author_name, "Joe Doe") def test_author_email(self): tip = self.repo.get_commit() - assert_text_equal(tip.author_email, 'joe.doe@example.com') + assert_text_equal(tip.author_email, "joe.doe@example.com") def test_message(self): tip = self.repo.get_commit() - assert_text_equal(tip.message, 'Commit 4') + assert_text_equal(tip.message, "Commit 4") def test_diff(self): tip = self.repo.get_commit() @@ -296,7 +295,7 @@ class TestCommits(BackendTestMixin): def test_prev(self): tip = self.repo.get_commit() prev_commit = tip.prev() - assert prev_commit.message == 'Commit 3' + assert prev_commit.message == "Commit 3" def test_prev_raises_on_first_commit(self): commit = self.repo.get_commit(commit_idx=0) @@ -311,7 +310,7 @@ class TestCommits(BackendTestMixin): def test_next(self): commit = self.repo.get_commit(commit_idx=2) next_commit = commit.next() - assert next_commit.message == 'Commit 3' + assert next_commit.message == "Commit 3" def test_next_raises_on_tip(self): commit = self.repo.get_commit() @@ -320,36 +319,36 @@ class TestCommits(BackendTestMixin): def test_get_path_commit(self): commit = self.repo.get_commit() - commit.get_path_commit('file_4.txt') - assert commit.message == 'Commit 4' + commit.get_path_commit("file_4.txt") + assert commit.message == "Commit 4" def test_get_filenodes_generator(self): tip = self.repo.get_commit() filepaths = [node.path for node in tip.get_filenodes_generator()] - assert filepaths == ['file_%d.txt' % x for x in range(5)] + assert filepaths == ["file_%d.txt" % x for x in range(5)] def test_get_file_annotate(self): file_added_commit = self.repo.get_commit(commit_idx=3) - annotations = list(file_added_commit.get_file_annotate('file_3.txt')) + annotations = list(file_added_commit.get_file_annotate("file_3.txt")) line_no, commit_id, commit_loader, line = annotations[0] assert line_no == 1 assert commit_id == 
file_added_commit.raw_id assert commit_loader() == file_added_commit - assert b'Foobar 3' in line + assert b"Foobar 3" in line def test_get_file_annotate_does_not_exist(self): file_added_commit = self.repo.get_commit(commit_idx=2) # TODO: Should use a specific exception class here? with pytest.raises(Exception): - list(file_added_commit.get_file_annotate('file_3.txt')) + list(file_added_commit.get_file_annotate("file_3.txt")) def test_get_file_annotate_tip(self): tip = self.repo.get_commit() commit = self.repo.get_commit(commit_idx=3) - expected_values = list(commit.get_file_annotate('file_3.txt')) - annotations = list(tip.get_file_annotate('file_3.txt')) + expected_values = list(commit.get_file_annotate("file_3.txt")) + annotations = list(tip.get_file_annotate("file_3.txt")) # Note: Skip index 2 because the loader function is not the same for idx in (0, 1, 3): @@ -398,7 +397,7 @@ class TestCommits(BackendTestMixin): repo = self.Backend(repo_path, create=True) with pytest.raises(EmptyRepositoryError): - list(repo.get_commits(start_id='foobar')) + list(repo.get_commits(start_id="foobar")) def test_get_commits_respects_hidden(self): commits = self.repo.get_commits(show_hidden=True) @@ -424,8 +423,7 @@ class TestCommits(BackendTestMixin): def test_get_commits_respects_start_date_with_branch(self): start_date = datetime.datetime(2010, 1, 2) - commits = self.repo.get_commits( - start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME) + commits = self.repo.get_commits(start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME) assert isinstance(commits, CollectionGenerator) # Should be 4 commits after 2010-01-02 00:00:00 assert len(commits) == 4 @@ -435,8 +433,7 @@ class TestCommits(BackendTestMixin): def test_get_commits_respects_start_date_and_end_date(self): start_date = datetime.datetime(2010, 1, 2) end_date = datetime.datetime(2010, 1, 3) - commits = self.repo.get_commits(start_date=start_date, - end_date=end_date) + commits = 
self.repo.get_commits(start_date=start_date, end_date=end_date) assert isinstance(commits, CollectionGenerator) assert len(commits) == 2 for c in commits: @@ -459,23 +456,22 @@ class TestCommits(BackendTestMixin): assert list(commit_ids) == list(reversed(self.repo.commit_ids)) def test_get_commits_slice_generator(self): - commits = self.repo.get_commits( - branch_name=self.repo.DEFAULT_BRANCH_NAME) + commits = self.repo.get_commits(branch_name=self.repo.DEFAULT_BRANCH_NAME) assert isinstance(commits, CollectionGenerator) commit_slice = list(commits[1:3]) assert len(commit_slice) == 2 def test_get_commits_raise_commitdoesnotexist_for_wrong_start(self): with pytest.raises(CommitDoesNotExistError): - list(self.repo.get_commits(start_id='foobar')) + list(self.repo.get_commits(start_id="foobar")) def test_get_commits_raise_commitdoesnotexist_for_wrong_end(self): with pytest.raises(CommitDoesNotExistError): - list(self.repo.get_commits(end_id='foobar')) + list(self.repo.get_commits(end_id="foobar")) def test_get_commits_raise_branchdoesnotexist_for_wrong_branch_name(self): with pytest.raises(BranchDoesNotExistError): - list(self.repo.get_commits(branch_name='foobar')) + list(self.repo.get_commits(branch_name="foobar")) def test_get_commits_raise_repositoryerror_for_wrong_start_end(self): start_id = self.repo.commit_ids[-1] @@ -498,13 +494,16 @@ class TestCommits(BackendTestMixin): assert commit1 is not None assert commit2 is not None assert 1 != commit1 - assert 'string' != commit1 + assert "string" != commit1 -@pytest.mark.parametrize("filename, expected", [ - ("README.rst", False), - ("README", True), -]) +@pytest.mark.parametrize( + "filename, expected", + [ + ("README.rst", False), + ("README", True), + ], +) def test_commit_is_link(vcsbackend, filename, expected): commit = vcsbackend.repo.get_commit() link_status = commit.is_link(filename) @@ -519,75 +518,74 @@ class TestCommitsChanges(BackendTestMixi def _get_commits(cls): return [ { - 'message': 'Initial', - 
'author': 'Joe Doe ', - 'date': datetime.datetime(2010, 1, 1, 20), - 'added': [ - FileNode(b'foo/bar', content=b'foo'), - FileNode(safe_bytes('foo/bał'), content=b'foo'), - FileNode(b'foobar', content=b'foo'), - FileNode(b'qwe', content=b'foo'), + "message": "Initial", + "author": "Joe Doe ", + "date": datetime.datetime(2010, 1, 1, 20), + "added": [ + FileNode(b"foo/bar", content=b"foo"), + FileNode(safe_bytes("foo/bał"), content=b"foo"), + FileNode(b"foobar", content=b"foo"), + FileNode(b"qwe", content=b"foo"), ], }, { - 'message': 'Massive changes', - 'author': 'Joe Doe ', - 'date': datetime.datetime(2010, 1, 1, 22), - 'added': [FileNode(b'fallout', content=b'War never changes')], - 'changed': [ - FileNode(b'foo/bar', content=b'baz'), - FileNode(b'foobar', content=b'baz'), + "message": "Massive changes", + "author": "Joe Doe ", + "date": datetime.datetime(2010, 1, 1, 22), + "added": [FileNode(b"fallout", content=b"War never changes")], + "changed": [ + FileNode(b"foo/bar", content=b"baz"), + FileNode(b"foobar", content=b"baz"), ], - 'removed': [FileNode(b'qwe')], + "removed": [FileNode(b"qwe")], }, ] def test_initial_commit(self, local_dt_to_utc): commit = self.repo.get_commit(commit_idx=0) assert set(commit.added) == { - commit.get_node('foo/bar'), - commit.get_node('foo/bał'), - commit.get_node('foobar'), - commit.get_node('qwe') + commit.get_node("foo/bar"), + commit.get_node("foo/bał"), + commit.get_node("foobar"), + commit.get_node("qwe"), } assert set(commit.changed) == set() assert set(commit.removed) == set() - assert set(commit.affected_files) == {'foo/bar', 'foo/bał', 'foobar', 'qwe'} - assert commit.date == local_dt_to_utc( - datetime.datetime(2010, 1, 1, 20, 0)) + assert set(commit.affected_files) == {"foo/bar", "foo/bał", "foobar", "qwe"} + assert commit.date == local_dt_to_utc(datetime.datetime(2010, 1, 1, 20, 0)) def test_head_added(self): commit = self.repo.get_commit() assert isinstance(commit.added, AddedFileNodesGenerator) - assert 
set(commit.added) == {commit.get_node('fallout')} + assert set(commit.added) == {commit.get_node("fallout")} assert isinstance(commit.changed, ChangedFileNodesGenerator) - assert set(commit.changed) == {commit.get_node('foo/bar'), commit.get_node('foobar')} + assert set(commit.changed) == {commit.get_node("foo/bar"), commit.get_node("foobar")} assert isinstance(commit.removed, RemovedFileNodesGenerator) assert len(commit.removed) == 1 - assert list(commit.removed)[0].path == 'qwe' + assert list(commit.removed)[0].path == "qwe" def test_get_filemode(self): commit = self.repo.get_commit() - assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bar') + assert FILEMODE_DEFAULT == commit.get_file_mode("foo/bar") def test_get_filemode_non_ascii(self): commit = self.repo.get_commit() - assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bał') - assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bał') + assert FILEMODE_DEFAULT == commit.get_file_mode("foo/bał") + assert FILEMODE_DEFAULT == commit.get_file_mode("foo/bał") def test_get_path_history(self): commit = self.repo.get_commit() - history = commit.get_path_history('foo/bar') + history = commit.get_path_history("foo/bar") assert len(history) == 2 def test_get_path_history_with_limit(self): commit = self.repo.get_commit() - history = commit.get_path_history('foo/bar', limit=1) + history = commit.get_path_history("foo/bar", limit=1) assert len(history) == 1 def test_get_path_history_first_commit(self): commit = self.repo[0] - history = commit.get_path_history('foo/bar') + history = commit.get_path_history("foo/bar") assert len(history) == 1 diff --git a/rhodecode/tests/vcs/test_config.py b/rhodecode/tests/vcs/test_config.py --- a/rhodecode/tests/vcs/test_config.py +++ b/rhodecode/tests/vcs/test_config.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -21,14 +20,17 @@ import pytest def test_get_existing_value(config): - value = 
config.get('section-a', 'a-1') - assert value == 'value-a-1' + value = config.get("section-a", "a-1") + assert value == "value-a-1" -@pytest.mark.parametrize('section, option', [ - ('section-a', 'does-not-exist'), - ('does-not-exist', 'does-not-exist'), -]) +@pytest.mark.parametrize( + "section, option", + [ + ("section-a", "does-not-exist"), + ("does-not-exist", "does-not-exist"), + ], +) def test_get_unset_value_returns_none(config, section, option): value = config.get(section, option) assert value is None @@ -41,11 +43,11 @@ def test_allows_to_create_a_copy(config) def test_changes_in_the_copy_dont_affect_the_original(config): clone = config.copy() - clone.set('section-a', 'a-2', 'value-a-2') - assert set(config.serialize()) == {('section-a', 'a-1', 'value-a-1')} + clone.set("section-a", "a-2", "value-a-2") + assert set(config.serialize()) == {("section-a", "a-1", "value-a-1")} def test_changes_in_the_original_dont_affect_the_copy(config): clone = config.copy() - config.set('section-a', 'a-2', 'value-a-2') - assert set(clone.serialize()) == {('section-a', 'a-1', 'value-a-1')} + config.set("section-a", "a-2", "value-a-2") + assert set(clone.serialize()) == {("section-a", "a-1", "value-a-1")} diff --git a/rhodecode/tests/vcs/test_diff.py b/rhodecode/tests/vcs/test_diff.py --- a/rhodecode/tests/vcs/test_diff.py +++ b/rhodecode/tests/vcs/test_diff.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -25,7 +24,6 @@ from rhodecode.tests.vcs.conftest import class TestGetDiffValidation: - def test_raises_on_string_input(self, vcsbackend): repo = vcsbackend.repo with pytest.raises(TypeError): @@ -54,63 +52,58 @@ class TestGetDiffValidation: def test_supports_path1_parameter(self, vcsbackend): repo = vcsbackend.repo commit = repo[1] - repo.get_diff( - repo.EMPTY_COMMIT, commit, - path='vcs/__init__.py', path1='vcs/__init__.py') + repo.get_diff(repo.EMPTY_COMMIT, commit, 
path="vcs/__init__.py", path1="vcs/__init__.py") @pytest.mark.backends("git", "hg") def test_raises_value_error_if_paths_not_supported(self, vcsbackend): repo = vcsbackend.repo commit = repo[1] with pytest.raises(ValueError): - repo.get_diff( - repo.EMPTY_COMMIT, commit, - path='trunk/example.py', path1='branches/argparse/example.py') + repo.get_diff(repo.EMPTY_COMMIT, commit, path="trunk/example.py", path1="branches/argparse/example.py") @pytest.mark.usefixtures("vcs_repository_support") class TestRepositoryGetDiff(BackendTestMixin): - recreate_repo_per_test = False @classmethod def _get_commits(cls): commits = [ { - 'message': 'Initial commit', - 'author': 'Joe Doe ', - 'date': datetime.datetime(2010, 1, 1, 20), - 'added': [ - FileNode(b'foobar', content=b'foobar'), - FileNode(b'foobar2', content=b'foobar2'), + "message": "Initial commit", + "author": "Joe Doe ", + "date": datetime.datetime(2010, 1, 1, 20), + "added": [ + FileNode(b"foobar", content=b"foobar"), + FileNode(b"foobar2", content=b"foobar2"), ], }, { - 'message': 'Changed foobar, added foobar3', - 'author': 'Jane Doe ', - 'date': datetime.datetime(2010, 1, 1, 21), - 'added': [ - FileNode(b'foobar3', content=b'foobar3'), + "message": "Changed foobar, added foobar3", + "author": "Jane Doe ", + "date": datetime.datetime(2010, 1, 1, 21), + "added": [ + FileNode(b"foobar3", content=b"foobar3"), ], - 'changed': [ - FileNode(b'foobar', b'FOOBAR'), + "changed": [ + FileNode(b"foobar", b"FOOBAR"), ], }, { - 'message': 'Removed foobar, changed foobar3', - 'author': 'Jane Doe ', - 'date': datetime.datetime(2010, 1, 1, 22), - 'changed': [ - FileNode(b'foobar3', content=b'FOOBAR\nFOOBAR\nFOOBAR\n'), + "message": "Removed foobar, changed foobar3", + "author": "Jane Doe ", + "date": datetime.datetime(2010, 1, 1, 22), + "changed": [ + FileNode(b"foobar3", content=b"FOOBAR\nFOOBAR\nFOOBAR\n"), ], - 'removed': [FileNode(b'foobar')], + "removed": [FileNode(b"foobar")], }, { - 'message': 'Whitespace changes', - 'author': 
'Jane Doe ', - 'date': datetime.datetime(2010, 1, 1, 23), - 'changed': [ - FileNode(b'foobar3', content=b'FOOBAR \nFOOBAR\nFOOBAR\n'), + "message": "Whitespace changes", + "author": "Jane Doe ", + "date": datetime.datetime(2010, 1, 1, 23), + "changed": [ + FileNode(b"foobar3", content=b"FOOBAR \nFOOBAR\nFOOBAR\n"), ], }, ] @@ -130,28 +123,24 @@ class TestRepositoryGetDiff(BackendTestM assert diff.raw.tobytes() == self.third_commit_diffs[self.repo.alias] def test_ignore_whitespace(self): - diff = self.repo.get_diff( - self.repo[2], self.repo[3], ignore_whitespace=True) - assert b'@@' not in diff.raw.tobytes() + diff = self.repo.get_diff(self.repo[2], self.repo[3], ignore_whitespace=True) + assert b"@@" not in diff.raw.tobytes() def test_only_one_file(self): - diff = self.repo.get_diff( - self.repo.EMPTY_COMMIT, self.repo[0], path='foobar') - assert b'foobar2' not in diff.raw.tobytes() + diff = self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[0], path="foobar") + assert b"foobar2" not in diff.raw.tobytes() def test_context_parameter(self): first_commit = self.repo.get_commit(commit_idx=0) - diff = self.repo.get_diff( - self.repo.EMPTY_COMMIT, first_commit, context=2) + diff = self.repo.get_diff(self.repo.EMPTY_COMMIT, first_commit, context=2) assert diff.raw.tobytes() == self.first_commit_diffs[self.repo.alias] def test_context_only_one_file(self): - diff = self.repo.get_diff( - self.repo.EMPTY_COMMIT, self.repo[0], path='foobar', context=2) + diff = self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[0], path="foobar", context=2) assert diff.raw.tobytes() == self.first_commit_one_file[self.repo.alias] first_commit_diffs = { - 'git': br"""diff --git a/foobar b/foobar + "git": rb"""diff --git a/foobar b/foobar new file mode 100644 index 0000000..f6ea049 --- /dev/null @@ -168,7 +157,7 @@ index 0000000..e8c9d6b +foobar2 \ No newline at end of file """, - 'hg': br"""diff --git a/foobar b/foobar + "hg": rb"""diff --git a/foobar b/foobar new file mode 100644 --- 
/dev/null +++ b/foobar @@ -183,7 +172,7 @@ new file mode 100644 +foobar2 \ No newline at end of file """, - 'svn': b"""Index: foobar + "svn": b"""Index: foobar =================================================================== diff --git a/foobar b/foobar new file mode 10644 @@ -205,7 +194,7 @@ new file mode 10644 } second_commit_diffs = { - 'git': br"""diff --git a/foobar b/foobar + "git": rb"""diff --git a/foobar b/foobar index f6ea049..389865b 100644 --- a/foobar +++ b/foobar @@ -223,7 +212,7 @@ index 0000000..c11c37d +foobar3 \ No newline at end of file """, - 'hg': br"""diff --git a/foobar b/foobar + "hg": rb"""diff --git a/foobar b/foobar --- a/foobar +++ b/foobar @@ -1,1 +1,1 @@ @@ -239,7 +228,7 @@ new file mode 100644 +foobar3 \ No newline at end of file """, - 'svn': b"""Index: foobar + "svn": b"""Index: foobar =================================================================== diff --git a/foobar b/foobar --- a/foobar\t(revision 1) @@ -262,7 +251,7 @@ new file mode 10644 } third_commit_diffs = { - 'git': br"""diff --git a/foobar b/foobar + "git": rb"""diff --git a/foobar b/foobar deleted file mode 100644 index 389865b..0000000 --- a/foobar @@ -281,7 +270,7 @@ index c11c37d..f932447 100644 +FOOBAR +FOOBAR """, - 'hg': br"""diff --git a/foobar b/foobar + "hg": rb"""diff --git a/foobar b/foobar deleted file mode 100644 --- a/foobar +++ /dev/null @@ -298,7 +287,7 @@ diff --git a/foobar3 b/foobar3 +FOOBAR +FOOBAR """, - 'svn': b"""Index: foobar + "svn": b"""Index: foobar =================================================================== diff --git a/foobar b/foobar deleted file mode 10644 @@ -322,7 +311,7 @@ diff --git a/foobar3 b/foobar3 } first_commit_one_file = { - 'git': br"""diff --git a/foobar b/foobar + "git": rb"""diff --git a/foobar b/foobar new file mode 100644 index 0000000..f6ea049 --- /dev/null @@ -331,7 +320,7 @@ index 0000000..f6ea049 +foobar \ No newline at end of file """, - 'hg': br"""diff --git a/foobar b/foobar + "hg": rb"""diff --git 
a/foobar b/foobar new file mode 100644 --- /dev/null +++ b/foobar @@ -339,7 +328,7 @@ new file mode 100644 +foobar \ No newline at end of file """, - 'svn': b"""Index: foobar + "svn": b"""Index: foobar =================================================================== diff --git a/foobar b/foobar new file mode 10644 @@ -353,13 +342,11 @@ new file mode 10644 class TestSvnGetDiff(object): - - @pytest.mark.parametrize('path, path1', [ - ('trunk/example.py', 'tags/v0.2/example.py'), - ('trunk', 'tags/v0.2') - ], ids=['file', 'dir']) + @pytest.mark.parametrize( + "path, path1", [("trunk/example.py", "tags/v0.2/example.py"), ("trunk", "tags/v0.2")], ids=["file", "dir"] + ) def test_diff_to_tagged_version(self, vcsbackend_svn, path, path1): - repo = vcsbackend_svn['svn-simple-layout'] + repo = vcsbackend_svn["svn-simple-layout"] commit1 = repo[-2] commit2 = repo[-1] diff = repo.get_diff(commit1, commit2, path=path, path1=path1) @@ -386,7 +373,7 @@ diff --git a/example.py b/example.py ''' def test_diff_of_moved_directory(self, vcsbackend_svn): - repo = vcsbackend_svn['svn-move-directory'] + repo = vcsbackend_svn["svn-move-directory"] diff = repo.get_diff(repo[0], repo[1]) # TODO: johbo: Think about supporting svn directory nodes # a little bit better, source is here like a file @@ -408,7 +395,6 @@ new file mode 10644 @pytest.mark.usefixtures("vcs_repository_support") class TestGetDiffBinary(BackendTestMixin): - recreate_repo_per_test = False # Note: "Fake" PNG files, has the correct magic as prefix @@ -419,26 +405,29 @@ class TestGetDiffBinary(BackendTestMixin def _get_commits(): commits = [ { - 'message': 'Add binary file image.png', - 'author': 'Joe Doe ', - 'date': datetime.datetime(2010, 1, 1, 20), - 'added': [ - FileNode(b'image.png', content=TestGetDiffBinary.BINARY), - ]}, + "message": "Add binary file image.png", + "author": "Joe Doe ", + "date": datetime.datetime(2010, 1, 1, 20), + "added": [ + FileNode(b"image.png", content=TestGetDiffBinary.BINARY), + ], + }, { 
- 'message': 'Modify image.png', - 'author': 'Joe Doe ', - 'date': datetime.datetime(2010, 1, 1, 21), - 'changed': [ - FileNode(b'image.png', content=TestGetDiffBinary.BINARY2), - ]}, + "message": "Modify image.png", + "author": "Joe Doe ", + "date": datetime.datetime(2010, 1, 1, 21), + "changed": [ + FileNode(b"image.png", content=TestGetDiffBinary.BINARY2), + ], + }, { - 'message': 'Remove image.png', - 'author': 'Joe Doe ', - 'date': datetime.datetime(2010, 1, 1, 21), - 'removed': [ - FileNode(b'image.png'), - ]}, + "message": "Remove image.png", + "author": "Joe Doe ", + "date": datetime.datetime(2010, 1, 1, 21), + "removed": [ + FileNode(b"image.png"), + ], + }, ] return commits @@ -446,7 +435,7 @@ class TestGetDiffBinary(BackendTestMixin diff = self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[0]) expected = { - 'git': b"""diff --git a/image.png b/image.png + "git": b"""diff --git a/image.png b/image.png new file mode 100644 index 0000000000000000000000000000000000000000..28380fd4a25c58be1b68b523ba2a314f4459ee9c GIT binary patch @@ -457,7 +446,7 @@ literal 0 Hc$@', - 'date': datetime.datetime(2010, 1, 1, 20), - 'added': nodes, + "message": "Initial commit", + "author": "Joe Doe ", + "date": datetime.datetime(2010, 1, 1, 20), + "added": nodes, }, ] return commits diff --git a/rhodecode/tests/vcs/test_getitem.py b/rhodecode/tests/vcs/test_getitem.py --- a/rhodecode/tests/vcs/test_getitem.py +++ b/rhodecode/tests/vcs/test_getitem.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -28,28 +27,30 @@ from rhodecode.tests.vcs.conftest import @pytest.mark.usefixtures("vcs_repository_support") class TestGetitem(BackendTestMixin): - @classmethod def _get_commits(cls): start_date = datetime.datetime(2010, 1, 1, 20) for x in range(5): yield { - 'message': 'Commit %d' % x, - 'author': 'Joe Doe ', - 'date': start_date + datetime.timedelta(hours=12 * x), - 'added': [ - 
FileNode(b'file_%d.txt' % x, content='Foobar %d' % x), + "message": "Commit %d" % x, + "author": "Joe Doe ", + "date": start_date + datetime.timedelta(hours=12 * x), + "added": [ + FileNode(b"file_%d.txt" % x, content="Foobar %d" % x), ], } def test_last_item_is_tip(self): assert self.repo[-1] == self.repo.get_commit() - @pytest.mark.parametrize("offset, message", [ - (-1, 'Commit 4'), - (-2, 'Commit 3'), - (-5, 'Commit 0'), - ]) + @pytest.mark.parametrize( + "offset, message", + [ + (-1, "Commit 4"), + (-2, "Commit 3"), + (-5, "Commit 0"), + ], + ) def test_negative_offset_fetches_correct_commit(self, offset, message): assert self.repo[offset].message == message diff --git a/rhodecode/tests/vcs/test_getslice.py b/rhodecode/tests/vcs/test_getslice.py --- a/rhodecode/tests/vcs/test_getslice.py +++ b/rhodecode/tests/vcs/test_getslice.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -25,17 +24,16 @@ from rhodecode.tests.vcs.conftest import @pytest.mark.usefixtures("vcs_repository_support") class TestGetslice(BackendTestMixin): - @classmethod def _get_commits(cls): start_date = datetime.datetime(2010, 1, 1, 20) for x in range(5): yield { - 'message': 'Commit %d' % x, - 'author': 'Joe Doe ', - 'date': start_date + datetime.timedelta(hours=12 * x), - 'added': [ - FileNode(b'file_%d.txt' % x, content='Foobar %d' % x), + "message": "Commit %d" % x, + "author": "Joe Doe ", + "date": start_date + datetime.timedelta(hours=12 * x), + "added": [ + FileNode(b"file_%d.txt" % x, content="Foobar %d" % x), ], } @@ -43,34 +41,24 @@ class TestGetslice(BackendTestMixin): assert list(self.repo[-1:])[0] == self.repo.get_commit() def test__getslice__respects_start_index(self): - assert list(self.repo[2:]) == \ - [self.repo.get_commit(commit_id) - for commit_id in self.repo.commit_ids[2:]] + assert list(self.repo[2:]) == [self.repo.get_commit(commit_id) for commit_id in self.repo.commit_ids[2:]] def 
test__getslice__respects_negative_start_index(self): - assert list(self.repo[-2:]) == \ - [self.repo.get_commit(commit_id) - for commit_id in self.repo.commit_ids[-2:]] + assert list(self.repo[-2:]) == [self.repo.get_commit(commit_id) for commit_id in self.repo.commit_ids[-2:]] def test__getslice__respects_end_index(self): - assert list(self.repo[:2]) == \ - [self.repo.get_commit(commit_id) - for commit_id in self.repo.commit_ids[:2]] + assert list(self.repo[:2]) == [self.repo.get_commit(commit_id) for commit_id in self.repo.commit_ids[:2]] def test__getslice__respects_negative_end_index(self): - assert list(self.repo[:-2]) == \ - [self.repo.get_commit(commit_id) - for commit_id in self.repo.commit_ids[:-2]] + assert list(self.repo[:-2]) == [self.repo.get_commit(commit_id) for commit_id in self.repo.commit_ids[:-2]] def test__getslice__start_grater_than_end(self): assert list(self.repo[10:0]) == [] def test__getslice__negative_iteration(self): - assert list(self.repo[::-1]) == \ - [self.repo.get_commit(commit_id) - for commit_id in self.repo.commit_ids[::-1]] + assert list(self.repo[::-1]) == [self.repo.get_commit(commit_id) for commit_id in self.repo.commit_ids[::-1]] def test__getslice__iterate_even(self): - assert list(self.repo[0:10:2]) == \ - [self.repo.get_commit(commit_id) - for commit_id in self.repo.commit_ids[0:10:2]] + assert list(self.repo[0:10:2]) == [ + self.repo.get_commit(commit_id) for commit_id in self.repo.commit_ids[0:10:2] + ] diff --git a/rhodecode/tests/vcs/test_git.py b/rhodecode/tests/vcs/test_git.py --- a/rhodecode/tests/vcs/test_git.py +++ b/rhodecode/tests/vcs/test_git.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -27,12 +26,9 @@ import pytest from rhodecode.lib.utils import make_db_config from rhodecode.lib.vcs.backends.base import Reference -from rhodecode.lib.vcs.backends.git import ( - GitRepository, GitCommit, discover_git_version) -from 
rhodecode.lib.vcs.exceptions import ( - RepositoryError, VCSError, NodeDoesNotExistError) -from rhodecode.lib.vcs.nodes import ( - NodeKind, FileNode, DirNode, NodeState, SubModuleNode) +from rhodecode.lib.vcs.backends.git import GitRepository, GitCommit, discover_git_version +from rhodecode.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError +from rhodecode.lib.vcs.nodes import NodeKind, FileNode, DirNode, NodeState, SubModuleNode from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir from rhodecode.tests.vcs.conftest import BackendTestMixin @@ -40,7 +36,7 @@ from rhodecode.tests.vcs.conftest import pytestmark = pytest.mark.backends("git") -DIFF_FROM_REMOTE = br"""diff --git a/foobar b/foobar +DIFF_FROM_REMOTE = rb"""diff --git a/foobar b/foobar new file mode 100644 index 0000000..f6ea049 --- /dev/null @@ -64,7 +60,6 @@ def callable_get_diff(*args, **kwargs): class TestGitRepository(object): - @pytest.fixture(autouse=True) def prepare(self, request, baseapp): self.repo = GitRepository(TEST_GIT_REPO, bare=True) @@ -74,9 +69,8 @@ class TestGitRepository(object): """ Return a non bare clone of the base repo. """ - clone_path = str(tmpdir.join('clone-repo')) - repo_clone = GitRepository( - clone_path, create=True, src_url=self.repo.path, bare=False) + clone_path = str(tmpdir.join("clone-repo")) + repo_clone = GitRepository(clone_path, create=True, src_url=self.repo.path, bare=False) return repo_clone @@ -84,20 +78,18 @@ class TestGitRepository(object): """ Return a non bare empty repo. 
""" - clone_path = str(tmpdir.join('empty-repo')) + clone_path = str(tmpdir.join("empty-repo")) return GitRepository(clone_path, create=True, bare=bare) def test_wrong_repo_path(self): - wrong_repo_path = '/tmp/errorrepo_git' + wrong_repo_path = "/tmp/errorrepo_git" with pytest.raises(RepositoryError): GitRepository(wrong_repo_path) def test_repo_clone(self, tmp_path_factory): repo = GitRepository(TEST_GIT_REPO) - clone_path = '{}_{}'.format(tmp_path_factory.mktemp('_'), TEST_GIT_REPO_CLONE) - repo_clone = GitRepository( - clone_path, - src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True) + clone_path = f"{tmp_path_factory.mktemp('_')}_{TEST_GIT_REPO_CLONE}" + repo_clone = GitRepository(clone_path, src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True) assert len(repo.commit_ids) == len(repo_clone.commit_ids) # Checking hashes of commits should be enough @@ -107,48 +99,42 @@ class TestGitRepository(object): def test_repo_clone_without_create(self): with pytest.raises(RepositoryError): - GitRepository( - TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO) + GitRepository(TEST_GIT_REPO_CLONE + "_wo_create", src_url=TEST_GIT_REPO) def test_repo_clone_with_update(self, tmp_path_factory): repo = GitRepository(TEST_GIT_REPO) - clone_path = '{}_{}_update'.format(tmp_path_factory.mktemp('_'), TEST_GIT_REPO_CLONE) + clone_path = "{}_{}_update".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE) - repo_clone = GitRepository( - clone_path, - create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True) + repo_clone = GitRepository(clone_path, create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True) assert len(repo.commit_ids) == len(repo_clone.commit_ids) # check if current workdir was updated - fpath = os.path.join(clone_path, 'MANIFEST.in') + fpath = os.path.join(clone_path, "MANIFEST.in") assert os.path.isfile(fpath) def test_repo_clone_without_update(self, tmp_path_factory): repo = GitRepository(TEST_GIT_REPO) - clone_path = 
'{}_{}_without_update'.format(tmp_path_factory.mktemp('_'), TEST_GIT_REPO_CLONE) - repo_clone = GitRepository( - clone_path, - create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False) + clone_path = "{}_{}_without_update".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE) + repo_clone = GitRepository(clone_path, create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False) assert len(repo.commit_ids) == len(repo_clone.commit_ids) # check if current workdir was *NOT* updated - fpath = os.path.join(clone_path, 'MANIFEST.in') + fpath = os.path.join(clone_path, "MANIFEST.in") # Make sure it's not bare repo assert not repo_clone.bare assert not os.path.isfile(fpath) def test_repo_clone_into_bare_repo(self, tmp_path_factory): repo = GitRepository(TEST_GIT_REPO) - clone_path = '{}_{}_bare.git'.format(tmp_path_factory.mktemp('_'), TEST_GIT_REPO_CLONE) - repo_clone = GitRepository( - clone_path, create=True, src_url=repo.path, bare=True) + clone_path = "{}_{}_bare.git".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE) + repo_clone = GitRepository(clone_path, create=True, src_url=repo.path, bare=True) assert repo_clone.bare def test_create_repo_is_not_bare_by_default(self): - repo = GitRepository(get_new_dir('not-bare-by-default'), create=True) + repo = GitRepository(get_new_dir("not-bare-by-default"), create=True) assert not repo.bare def test_create_bare_repo(self): - repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True) + repo = GitRepository(get_new_dir("bare-repo"), create=True, bare=True) assert repo.bare def test_update_server_info(self): @@ -167,37 +153,38 @@ class TestGitRepository(object): def test_commit_ids(self): # there are 112 commits (by now) # so we can assume they would be available from now on - subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3', - '38b5fe81f109cb111f549bfe9bb6b267e10bc557', - 'fa6600f6848800641328adbf7811fd2372c02ab2', - '102607b09cdd60e2793929c4f90478be29f85a17', - 
'49d3fd156b6f7db46313fac355dca1a0b94a0017', - '2d1028c054665b962fa3d307adfc923ddd528038', - 'd7e0d30fbcae12c90680eb095a4f5f02505ce501', - 'ff7ca51e58c505fec0dd2491de52c622bb7a806b', - 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f', - '8430a588b43b5d6da365400117c89400326e7992', - 'd955cd312c17b02143c04fa1099a352b04368118', - 'f67b87e5c629c2ee0ba58f85197e423ff28d735b', - 'add63e382e4aabc9e1afdc4bdc24506c269b7618', - 'f298fe1189f1b69779a4423f40b48edf92a703fc', - 'bd9b619eb41994cac43d67cf4ccc8399c1125808', - '6e125e7c890379446e98980d8ed60fba87d0f6d1', - 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd', - '0b05e4ed56c802098dfc813cbe779b2f49e92500', - '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e', - '45223f8f114c64bf4d6f853e3c35a369a6305520', - 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e', - 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68', - '27d48942240f5b91dfda77accd2caac94708cc7d', - '622f0eb0bafd619d2560c26f80f09e3b0b0d78af', - 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'} + subset = { + "c1214f7e79e02fc37156ff215cd71275450cffc3", + "38b5fe81f109cb111f549bfe9bb6b267e10bc557", + "fa6600f6848800641328adbf7811fd2372c02ab2", + "102607b09cdd60e2793929c4f90478be29f85a17", + "49d3fd156b6f7db46313fac355dca1a0b94a0017", + "2d1028c054665b962fa3d307adfc923ddd528038", + "d7e0d30fbcae12c90680eb095a4f5f02505ce501", + "ff7ca51e58c505fec0dd2491de52c622bb7a806b", + "dd80b0f6cf5052f17cc738c2951c4f2070200d7f", + "8430a588b43b5d6da365400117c89400326e7992", + "d955cd312c17b02143c04fa1099a352b04368118", + "f67b87e5c629c2ee0ba58f85197e423ff28d735b", + "add63e382e4aabc9e1afdc4bdc24506c269b7618", + "f298fe1189f1b69779a4423f40b48edf92a703fc", + "bd9b619eb41994cac43d67cf4ccc8399c1125808", + "6e125e7c890379446e98980d8ed60fba87d0f6d1", + "d4a54db9f745dfeba6933bf5b1e79e15d0af20bd", + "0b05e4ed56c802098dfc813cbe779b2f49e92500", + "191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e", + "45223f8f114c64bf4d6f853e3c35a369a6305520", + "ca1eb7957a54bce53b12d1a51b13452f95bc7c7e", + "f5ea29fc42ef67a2a5a7aecff10e1566699acd68", + 
"27d48942240f5b91dfda77accd2caac94708cc7d", + "622f0eb0bafd619d2560c26f80f09e3b0b0d78af", + "e686b958768ee96af8029fe19c6050b1a8dd3b2b", + } assert subset.issubset(set(self.repo.commit_ids)) def test_slicing(self): # 4 1 5 10 95 - for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), - (10, 20, 10), (5, 100, 95)]: + for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), (10, 20, 10), (5, 100, 95)]: commit_ids = list(self.repo[sfrom:sto]) assert len(commit_ids) == size assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom) @@ -214,8 +201,8 @@ class TestGitRepository(object): def test_tags(self): # TODO: Need more tests here - assert 'v0.1.1' in self.repo.tags - assert 'v0.1.2' in self.repo.tags + assert "v0.1.1" in self.repo.tags + assert "v0.1.2" in self.repo.tags for __, commit_id in self.repo.tags.items(): assert isinstance(self.repo.get_commit(commit_id), GitCommit) @@ -229,37 +216,34 @@ class TestGitRepository(object): init_commit = self.repo.get_commit(commit_id) init_author = init_commit.author - assert init_commit.message == 'initial import\n' - assert init_author == 'Marcin Kuzminski ' + assert init_commit.message == "initial import\n" + assert init_author == "Marcin Kuzminski " assert init_author == init_commit.committer - for path in ('vcs/__init__.py', - 'vcs/backends/BaseRepository.py', - 'vcs/backends/__init__.py'): + for path in ("vcs/__init__.py", "vcs/backends/BaseRepository.py", "vcs/backends/__init__.py"): assert isinstance(init_commit.get_node(path), FileNode) - for path in ('', 'vcs', 'vcs/backends'): + for path in ("", "vcs", "vcs/backends"): assert isinstance(init_commit.get_node(path), DirNode) with pytest.raises(NodeDoesNotExistError): - init_commit.get_node(path='foobar') + init_commit.get_node(path="foobar") - node = init_commit.get_node('vcs/') - assert hasattr(node, 'kind') + node = init_commit.get_node("vcs/") + assert hasattr(node, "kind") assert node.kind == NodeKind.DIR - node = init_commit.get_node('vcs') - assert 
hasattr(node, 'kind') + node = init_commit.get_node("vcs") + assert hasattr(node, "kind") assert node.kind == NodeKind.DIR - node = init_commit.get_node('vcs/__init__.py') - assert hasattr(node, 'kind') + node = init_commit.get_node("vcs/__init__.py") + assert hasattr(node, "kind") assert node.kind == NodeKind.FILE def test_not_existing_commit(self): with pytest.raises(RepositoryError): - self.repo.get_commit('f' * 40) + self.repo.get_commit("f" * 40) def test_commit10(self): - commit10 = self.repo.get_commit(self.repo.commit_ids[9]) README = """=== VCS @@ -273,7 +257,7 @@ Introduction TODO: To be written... """ - node = commit10.get_node('README.rst') + node = commit10.get_node("README.rst") assert node.kind == NodeKind.FILE assert node.str_content == README @@ -283,39 +267,39 @@ TODO: To be written... def test_checkout_with_create(self, tmpdir): repo_clone = self.get_clone_repo(tmpdir) - new_branch = 'new_branch' - assert repo_clone._current_branch() == 'master' - assert set(repo_clone.branches) == {'master'} + new_branch = "new_branch" + assert repo_clone._current_branch() == "master" + assert set(repo_clone.branches) == {"master"} repo_clone._checkout(new_branch, create=True) # Branches is a lazy property so we need to recrete the Repo object. 
repo_clone = GitRepository(repo_clone.path) - assert set(repo_clone.branches) == {'master', new_branch} + assert set(repo_clone.branches) == {"master", new_branch} assert repo_clone._current_branch() == new_branch def test_checkout(self, tmpdir): repo_clone = self.get_clone_repo(tmpdir) - repo_clone._checkout('new_branch', create=True) - repo_clone._checkout('master') + repo_clone._checkout("new_branch", create=True) + repo_clone._checkout("master") - assert repo_clone._current_branch() == 'master' + assert repo_clone._current_branch() == "master" def test_checkout_same_branch(self, tmpdir): repo_clone = self.get_clone_repo(tmpdir) - repo_clone._checkout('master') - assert repo_clone._current_branch() == 'master' + repo_clone._checkout("master") + assert repo_clone._current_branch() == "master" def test_checkout_branch_already_exists(self, tmpdir): repo_clone = self.get_clone_repo(tmpdir) with pytest.raises(RepositoryError): - repo_clone._checkout('master', create=True) + repo_clone._checkout("master", create=True) def test_checkout_bare_repo(self): with pytest.raises(RepositoryError): - self.repo._checkout('master') + self.repo._checkout("master") def test_current_branch_bare_repo(self): with pytest.raises(RepositoryError): @@ -326,8 +310,8 @@ TODO: To be written... assert repo._current_branch() is None def test_local_clone(self, tmp_path_factory): - clone_path = str(tmp_path_factory.mktemp('test-local-clone')) - self.repo._local_clone(clone_path, 'master') + clone_path = str(tmp_path_factory.mktemp("test-local-clone")) + self.repo._local_clone(clone_path, "master") repo_clone = GitRepository(clone_path) assert self.repo.commit_ids == repo_clone.commit_ids @@ -338,23 +322,23 @@ TODO: To be written... 
# Create a new branch in source repo new_branch_commit = source_repo.commit_ids[-3] source_repo._checkout(new_branch_commit) - source_repo._checkout('new_branch', create=True) + source_repo._checkout("new_branch", create=True) - clone_path = str(tmpdir.join('git-clone-path-1')) - source_repo._local_clone(clone_path, 'new_branch') + clone_path = str(tmpdir.join("git-clone-path-1")) + source_repo._local_clone(clone_path, "new_branch") repo_clone = GitRepository(clone_path) - assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids + assert source_repo.commit_ids[: -3 + 1] == repo_clone.commit_ids - clone_path = str(tmpdir.join('git-clone-path-2')) - source_repo._local_clone(clone_path, 'master') + clone_path = str(tmpdir.join("git-clone-path-2")) + source_repo._local_clone(clone_path, "master") repo_clone = GitRepository(clone_path) assert source_repo.commit_ids == repo_clone.commit_ids def test_local_clone_fails_if_target_exists(self): with pytest.raises(RepositoryError): - self.repo._local_clone(self.repo.path, 'master') + self.repo._local_clone(self.repo.path, "master") def test_local_fetch(self, tmpdir): target_repo = self.get_empty_repo(tmpdir) @@ -364,30 +348,30 @@ TODO: To be written... 
master_commit = source_repo.commit_ids[-1] new_branch_commit = source_repo.commit_ids[-3] source_repo._checkout(new_branch_commit) - source_repo._checkout('new_branch', create=True) + source_repo._checkout("new_branch", create=True) - target_repo._local_fetch(source_repo.path, 'new_branch') + target_repo._local_fetch(source_repo.path, "new_branch") assert target_repo._last_fetch_heads() == [new_branch_commit] - target_repo._local_fetch(source_repo.path, 'master') + target_repo._local_fetch(source_repo.path, "master") assert target_repo._last_fetch_heads() == [master_commit] def test_local_fetch_from_bare_repo(self, tmpdir): target_repo = self.get_empty_repo(tmpdir) - target_repo._local_fetch(self.repo.path, 'master') + target_repo._local_fetch(self.repo.path, "master") master_commit = self.repo.commit_ids[-1] assert target_repo._last_fetch_heads() == [master_commit] def test_local_fetch_from_same_repo(self): with pytest.raises(ValueError): - self.repo._local_fetch(self.repo.path, 'master') + self.repo._local_fetch(self.repo.path, "master") def test_local_fetch_branch_does_not_exist(self, tmpdir): target_repo = self.get_empty_repo(tmpdir) with pytest.raises(RepositoryError): - target_repo._local_fetch(self.repo.path, 'new_branch') + target_repo._local_fetch(self.repo.path, "new_branch") def test_local_pull(self, tmpdir): target_repo = self.get_empty_repo(tmpdir) @@ -397,19 +381,19 @@ TODO: To be written... 
master_commit = source_repo.commit_ids[-1] new_branch_commit = source_repo.commit_ids[-3] source_repo._checkout(new_branch_commit) - source_repo._checkout('new_branch', create=True) + source_repo._checkout("new_branch", create=True) - target_repo._local_pull(source_repo.path, 'new_branch') + target_repo._local_pull(source_repo.path, "new_branch") target_repo = GitRepository(target_repo.path) assert target_repo.head == new_branch_commit - target_repo._local_pull(source_repo.path, 'master') + target_repo._local_pull(source_repo.path, "master") target_repo = GitRepository(target_repo.path) assert target_repo.head == master_commit def test_local_pull_in_bare_repo(self): with pytest.raises(RepositoryError): - self.repo._local_pull(self.repo.path, 'master') + self.repo._local_pull(self.repo.path, "master") def test_local_merge(self, tmpdir): target_repo = self.get_empty_repo(tmpdir) @@ -419,159 +403,144 @@ TODO: To be written... master_commit = source_repo.commit_ids[-1] new_branch_commit = source_repo.commit_ids[-3] source_repo._checkout(new_branch_commit) - source_repo._checkout('new_branch', create=True) + source_repo._checkout("new_branch", create=True) # This is required as one cannot do a -ff-only merge in an empty repo. - target_repo._local_pull(source_repo.path, 'new_branch') + target_repo._local_pull(source_repo.path, "new_branch") - target_repo._local_fetch(source_repo.path, 'master') - merge_message = 'Merge message\n\nDescription:...' - user_name = 'Albert Einstein' - user_email = 'albert@einstein.com' - target_repo._local_merge(merge_message, user_name, user_email, - target_repo._last_fetch_heads()) + target_repo._local_fetch(source_repo.path, "master") + merge_message = "Merge message\n\nDescription:..." 
+ user_name = "Albert Einstein" + user_email = "albert@einstein.com" + target_repo._local_merge(merge_message, user_name, user_email, target_repo._last_fetch_heads()) target_repo = GitRepository(target_repo.path) assert target_repo.commit_ids[-2] == master_commit last_commit = target_repo.get_commit(target_repo.head) assert last_commit.message.strip() == merge_message - assert last_commit.author == '%s <%s>' % (user_name, user_email) + assert last_commit.author == "%s <%s>" % (user_name, user_email) - assert not os.path.exists( - os.path.join(target_repo.path, '.git', 'MERGE_HEAD')) + assert not os.path.exists(os.path.join(target_repo.path, ".git", "MERGE_HEAD")) def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git): target_repo = vcsbackend_git.create_repo(number_of_commits=1) - vcsbackend_git.ensure_file(b'README', b'I will conflict with you!!!') + vcsbackend_git.ensure_file(b"README", b"I will conflict with you!!!") - target_repo._local_fetch(self.repo.path, 'master') + target_repo._local_fetch(self.repo.path, "master") with pytest.raises(RepositoryError): - target_repo._local_merge( - 'merge_message', 'user name', 'user@name.com', - target_repo._last_fetch_heads()) + target_repo._local_merge("merge_message", "user name", "user@name.com", target_repo._last_fetch_heads()) # Check we are not left in an intermediate merge state - assert not os.path.exists( - os.path.join(target_repo.path, '.git', 'MERGE_HEAD')) + assert not os.path.exists(os.path.join(target_repo.path, ".git", "MERGE_HEAD")) def test_local_merge_into_empty_repo(self, tmpdir): target_repo = self.get_empty_repo(tmpdir) # This is required as one cannot do a -ff-only merge in an empty repo. 
- target_repo._local_fetch(self.repo.path, 'master') + target_repo._local_fetch(self.repo.path, "master") with pytest.raises(RepositoryError): - target_repo._local_merge( - 'merge_message', 'user name', 'user@name.com', - target_repo._last_fetch_heads()) + target_repo._local_merge("merge_message", "user name", "user@name.com", target_repo._last_fetch_heads()) def test_local_merge_in_bare_repo(self): with pytest.raises(RepositoryError): - self.repo._local_merge( - 'merge_message', 'user name', 'user@name.com', None) + self.repo._local_merge("merge_message", "user name", "user@name.com", None) def test_local_push_non_bare(self, tmpdir): target_repo = self.get_empty_repo(tmpdir) - pushed_branch = 'pushed_branch' - self.repo._local_push('master', target_repo.path, pushed_branch) + pushed_branch = "pushed_branch" + self.repo._local_push("master", target_repo.path, pushed_branch) # Fix the HEAD of the target repo, or otherwise GitRepository won't # report any branches. - with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f: - f.write('ref: refs/heads/%s' % pushed_branch) + with open(os.path.join(target_repo.path, ".git", "HEAD"), "w") as f: + f.write("ref: refs/heads/%s" % pushed_branch) target_repo = GitRepository(target_repo.path) - assert (target_repo.branches[pushed_branch] == - self.repo.branches['master']) + assert target_repo.branches[pushed_branch] == self.repo.branches["master"] def test_local_push_bare(self, tmpdir): target_repo = self.get_empty_repo(tmpdir, bare=True) - pushed_branch = 'pushed_branch' - self.repo._local_push('master', target_repo.path, pushed_branch) + pushed_branch = "pushed_branch" + self.repo._local_push("master", target_repo.path, pushed_branch) # Fix the HEAD of the target repo, or otherwise GitRepository won't # report any branches. 
- with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f: - f.write('ref: refs/heads/%s' % pushed_branch) + with open(os.path.join(target_repo.path, "HEAD"), "w") as f: + f.write("ref: refs/heads/%s" % pushed_branch) target_repo = GitRepository(target_repo.path) - assert (target_repo.branches[pushed_branch] == - self.repo.branches['master']) + assert target_repo.branches[pushed_branch] == self.repo.branches["master"] def test_local_push_non_bare_target_branch_is_checked_out(self, tmpdir): target_repo = self.get_clone_repo(tmpdir) - pushed_branch = 'pushed_branch' + pushed_branch = "pushed_branch" # Create a new branch in source repo new_branch_commit = target_repo.commit_ids[-3] target_repo._checkout(new_branch_commit) target_repo._checkout(pushed_branch, create=True) - self.repo._local_push('master', target_repo.path, pushed_branch) + self.repo._local_push("master", target_repo.path, pushed_branch) target_repo = GitRepository(target_repo.path) - assert (target_repo.branches[pushed_branch] == - self.repo.branches['master']) + assert target_repo.branches[pushed_branch] == self.repo.branches["master"] def test_local_push_raises_exception_on_conflict(self, vcsbackend_git): target_repo = vcsbackend_git.create_repo(number_of_commits=1) with pytest.raises(RepositoryError): - self.repo._local_push('master', target_repo.path, 'master') + self.repo._local_push("master", target_repo.path, "master") def test_hooks_can_be_enabled_via_env_variable_for_local_push(self, tmpdir): target_repo = self.get_empty_repo(tmpdir, bare=True) - with mock.patch.object(self.repo, 'run_git_command') as run_mock: - self.repo._local_push( - 'master', target_repo.path, 'master', enable_hooks=True) - env = run_mock.call_args[1]['extra_env'] - assert 'RC_SKIP_HOOKS' not in env + with mock.patch.object(self.repo, "run_git_command") as run_mock: + self.repo._local_push("master", target_repo.path, "master", enable_hooks=True) + env = run_mock.call_args[1]["extra_env"] + assert "RC_SKIP_HOOKS" not 
in env def _add_failing_hook(self, repo_path, hook_name, bare=False): - path_components = ( - ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name]) + path_components = ["hooks", hook_name] if bare else [".git", "hooks", hook_name] hook_path = os.path.join(repo_path, *path_components) - with open(hook_path, 'w') as f: + with open(hook_path, "w") as f: script_lines = [ - '#!%s' % sys.executable, - 'import os', - 'import sys', + "#!%s" % sys.executable, + "import os", + "import sys", 'if os.environ.get("RC_SKIP_HOOKS"):', - ' sys.exit(0)', - 'sys.exit(1)', + " sys.exit(0)", + "sys.exit(1)", ] - f.write('\n'.join(script_lines)) + f.write("\n".join(script_lines)) os.chmod(hook_path, 0o755) def test_local_push_does_not_execute_hook(self, tmpdir): target_repo = self.get_empty_repo(tmpdir) - pushed_branch = 'pushed_branch' - self._add_failing_hook(target_repo.path, 'pre-receive') - self.repo._local_push('master', target_repo.path, pushed_branch) + pushed_branch = "pushed_branch" + self._add_failing_hook(target_repo.path, "pre-receive") + self.repo._local_push("master", target_repo.path, pushed_branch) # Fix the HEAD of the target repo, or otherwise GitRepository won't # report any branches. 
- with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f: - f.write('ref: refs/heads/%s' % pushed_branch) + with open(os.path.join(target_repo.path, ".git", "HEAD"), "w") as f: + f.write("ref: refs/heads/%s" % pushed_branch) target_repo = GitRepository(target_repo.path) - assert (target_repo.branches[pushed_branch] == - self.repo.branches['master']) + assert target_repo.branches[pushed_branch] == self.repo.branches["master"] def test_local_push_executes_hook(self, tmpdir): target_repo = self.get_empty_repo(tmpdir, bare=True) - self._add_failing_hook(target_repo.path, 'pre-receive', bare=True) + self._add_failing_hook(target_repo.path, "pre-receive", bare=True) with pytest.raises(RepositoryError): - self.repo._local_push( - 'master', target_repo.path, 'master', enable_hooks=True) + self.repo._local_push("master", target_repo.path, "master", enable_hooks=True) def test_maybe_prepare_merge_workspace(self): workspace = self.repo._maybe_prepare_merge_workspace( - 2, 'pr2', Reference('branch', 'master', 'unused'), - Reference('branch', 'master', 'unused')) + 2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "master", "unused") + ) assert os.path.isdir(workspace) workspace_repo = GitRepository(workspace) @@ -579,14 +548,14 @@ TODO: To be written... 
# Calling it a second time should also succeed workspace = self.repo._maybe_prepare_merge_workspace( - 2, 'pr2', Reference('branch', 'master', 'unused'), - Reference('branch', 'master', 'unused')) + 2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "master", "unused") + ) assert os.path.isdir(workspace) def test_maybe_prepare_merge_workspace_different_refs(self): workspace = self.repo._maybe_prepare_merge_workspace( - 2, 'pr2', Reference('branch', 'master', 'unused'), - Reference('branch', 'develop', 'unused')) + 2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "develop", "unused") + ) assert os.path.isdir(workspace) workspace_repo = GitRepository(workspace) @@ -594,48 +563,47 @@ TODO: To be written... # Calling it a second time should also succeed workspace = self.repo._maybe_prepare_merge_workspace( - 2, 'pr2', Reference('branch', 'master', 'unused'), - Reference('branch', 'develop', 'unused')) + 2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "develop", "unused") + ) assert os.path.isdir(workspace) def test_cleanup_merge_workspace(self): workspace = self.repo._maybe_prepare_merge_workspace( - 2, 'pr3', Reference('branch', 'master', 'unused'), - Reference('branch', 'master', 'unused')) - self.repo.cleanup_merge_workspace(2, 'pr3') + 2, "pr3", Reference("branch", "master", "unused"), Reference("branch", "master", "unused") + ) + self.repo.cleanup_merge_workspace(2, "pr3") assert not os.path.exists(workspace) def test_cleanup_merge_workspace_invalid_workspace_id(self): # No assert: because in case of an inexistent workspace this function # should still succeed. 
- self.repo.cleanup_merge_workspace(1, 'pr4') + self.repo.cleanup_merge_workspace(1, "pr4") def test_set_refs(self): - test_ref = 'refs/test-refs/abcde' - test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623' + test_ref = "refs/test-refs/abcde" + test_commit_id = "ecb86e1f424f2608262b130db174a7dfd25a6623" self.repo.set_refs(test_ref, test_commit_id) - stdout, _ = self.repo.run_git_command(['show-ref']) + stdout, _ = self.repo.run_git_command(["show-ref"]) assert test_ref in stdout assert test_commit_id in stdout def test_remove_ref(self): - test_ref = 'refs/test-refs/abcde' - test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623' + test_ref = "refs/test-refs/abcde" + test_commit_id = "ecb86e1f424f2608262b130db174a7dfd25a6623" self.repo.set_refs(test_ref, test_commit_id) - stdout, _ = self.repo.run_git_command(['show-ref']) + stdout, _ = self.repo.run_git_command(["show-ref"]) assert test_ref in stdout assert test_commit_id in stdout self.repo.remove_ref(test_ref) - stdout, _ = self.repo.run_git_command(['show-ref']) + stdout, _ = self.repo.run_git_command(["show-ref"]) assert test_ref not in stdout assert test_commit_id not in stdout class TestGitCommit(object): - @pytest.fixture(autouse=True) def prepare(self): self.repo = GitRepository(TEST_GIT_REPO) @@ -643,11 +611,11 @@ class TestGitCommit(object): def test_default_commit(self): tip = self.repo.get_commit() assert tip == self.repo.get_commit(None) - assert tip == self.repo.get_commit('tip') + assert tip == self.repo.get_commit("tip") def test_root_node(self): tip = self.repo.get_commit() - assert tip.root is tip.get_node('') + assert tip.root is tip.get_node("") def test_lazy_fetch(self): """ @@ -655,7 +623,7 @@ class TestGitCommit(object): the commit. This test is somewhat hard to write as order of tests is a key here. Written by running command after command in a shell. 
""" - commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc' + commit_id = "2a13f185e4525f9d4b59882791a2d397b90d5ddc" assert commit_id in self.repo.commit_ids commit = self.repo.get_commit(commit_id) assert len(commit.nodes) == 0 @@ -665,31 +633,29 @@ class TestGitCommit(object): # accessing root.nodes updates commit.nodes assert len(commit.nodes) == 9 - docs = root.get_node('docs') + docs = root.get_node("docs") # we haven't yet accessed anything new as docs dir was already cached assert len(commit.nodes) == 9 assert len(docs.nodes) == 8 # accessing docs.nodes updates commit.nodes assert len(commit.nodes) == 17 - assert docs is commit.get_node('docs') + assert docs is commit.get_node("docs") assert docs is root.nodes[0] assert docs is root.dirs[0] - assert docs is commit.get_node('docs') + assert docs is commit.get_node("docs") def test_nodes_with_commit(self): - commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc' + commit_id = "2a13f185e4525f9d4b59882791a2d397b90d5ddc" commit = self.repo.get_commit(commit_id) root = commit.root - docs = root.get_node('docs') - assert docs is commit.get_node('docs') - api = docs.get_node('api') - assert api is commit.get_node('docs/api') - index = api.get_node('index.rst') - assert index is commit.get_node('docs/api/index.rst') - assert index is commit.get_node('docs')\ - .get_node('api')\ - .get_node('index.rst') + docs = root.get_node("docs") + assert docs is commit.get_node("docs") + api = docs.get_node("api") + assert api is commit.get_node("docs/api") + index = api.get_node("index.rst") + assert index is commit.get_node("docs/api/index.rst") + assert index is commit.get_node("docs").get_node("api").get_node("index.rst") def test_branch_and_tags(self): """ @@ -716,19 +682,12 @@ class TestGitCommit(object): def test_file_size(self): to_check = ( - ('c1214f7e79e02fc37156ff215cd71275450cffc3', - 'vcs/backends/BaseRepository.py', 502), - ('d7e0d30fbcae12c90680eb095a4f5f02505ce501', - 'vcs/backends/hg.py', 854), - 
('6e125e7c890379446e98980d8ed60fba87d0f6d1', - 'setup.py', 1068), - - ('d955cd312c17b02143c04fa1099a352b04368118', - 'vcs/backends/base.py', 2921), - ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e', - 'vcs/backends/base.py', 3936), - ('f50f42baeed5af6518ef4b0cb2f1423f3851a941', - 'vcs/backends/base.py', 6189), + ("c1214f7e79e02fc37156ff215cd71275450cffc3", "vcs/backends/BaseRepository.py", 502), + ("d7e0d30fbcae12c90680eb095a4f5f02505ce501", "vcs/backends/hg.py", 854), + ("6e125e7c890379446e98980d8ed60fba87d0f6d1", "setup.py", 1068), + ("d955cd312c17b02143c04fa1099a352b04368118", "vcs/backends/base.py", 2921), + ("ca1eb7957a54bce53b12d1a51b13452f95bc7c7e", "vcs/backends/base.py", 3936), + ("f50f42baeed5af6518ef4b0cb2f1423f3851a941", "vcs/backends/base.py", 6189), ) for commit_id, path, size in to_check: node = self.repo.get_commit(commit_id).get_node(path) @@ -736,80 +695,77 @@ class TestGitCommit(object): assert node.size == size def test_file_history_from_commits(self): - node = self.repo[10].get_node('setup.py') + node = self.repo[10].get_node("setup.py") commit_ids = [commit.raw_id for commit in node.history] - assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids + assert ["ff7ca51e58c505fec0dd2491de52c622bb7a806b"] == commit_ids - node = self.repo[20].get_node('setup.py') + node = self.repo[20].get_node("setup.py") node_ids = [commit.raw_id for commit in node.history] - assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e', - 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids + assert ["191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"] == node_ids # special case we check history from commit that has this particular # file changed this means we check if it's included as well - node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \ - .get_node('setup.py') + node = self.repo.get_commit("191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e").get_node("setup.py") node_ids = [commit.raw_id for commit in 
node.history] - assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e', - 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids + assert ["191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"] == node_ids def test_file_history(self): # we can only check if those commits are present in the history # as we cannot update this test every time file is changed files = { - 'setup.py': [ - '54386793436c938cff89326944d4c2702340037d', - '51d254f0ecf5df2ce50c0b115741f4cf13985dab', - '998ed409c795fec2012b1c0ca054d99888b22090', - '5e0eb4c47f56564395f76333f319d26c79e2fb09', - '0115510b70c7229dbc5dc49036b32e7d91d23acd', - '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e', - '2a13f185e4525f9d4b59882791a2d397b90d5ddc', - '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e', - 'ff7ca51e58c505fec0dd2491de52c622bb7a806b', + "setup.py": [ + "54386793436c938cff89326944d4c2702340037d", + "51d254f0ecf5df2ce50c0b115741f4cf13985dab", + "998ed409c795fec2012b1c0ca054d99888b22090", + "5e0eb4c47f56564395f76333f319d26c79e2fb09", + "0115510b70c7229dbc5dc49036b32e7d91d23acd", + "7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e", + "2a13f185e4525f9d4b59882791a2d397b90d5ddc", + "191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e", + "ff7ca51e58c505fec0dd2491de52c622bb7a806b", ], - 'vcs/nodes.py': [ - '33fa3223355104431402a888fa77a4e9956feb3e', - 'fa014c12c26d10ba682fadb78f2a11c24c8118e1', - 'e686b958768ee96af8029fe19c6050b1a8dd3b2b', - 'ab5721ca0a081f26bf43d9051e615af2cc99952f', - 'c877b68d18e792a66b7f4c529ea02c8f80801542', - '4313566d2e417cb382948f8d9d7c765330356054', - '6c2303a793671e807d1cfc70134c9ca0767d98c2', - '54386793436c938cff89326944d4c2702340037d', - '54000345d2e78b03a99d561399e8e548de3f3203', - '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b', - '2d03ca750a44440fb5ea8b751176d1f36f8e8f46', - '2a08b128c206db48c2f0b8f70df060e6db0ae4f8', - '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b', - 'ac71e9503c2ca95542839af0ce7b64011b72ea7c', - '12669288fd13adba2a9b7dd5b870cc23ffab92d2', - 
'5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382', - '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5', - '5eab1222a7cd4bfcbabc218ca6d04276d4e27378', - 'f50f42baeed5af6518ef4b0cb2f1423f3851a941', - 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25', - 'f15c21f97864b4f071cddfbf2750ec2e23859414', - 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade', - 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b', - '84dec09632a4458f79f50ddbbd155506c460b4f9', - '0115510b70c7229dbc5dc49036b32e7d91d23acd', - '2a13f185e4525f9d4b59882791a2d397b90d5ddc', - '3bf1c5868e570e39569d094f922d33ced2fa3b2b', - 'b8d04012574729d2c29886e53b1a43ef16dd00a1', - '6970b057cffe4aab0a792aa634c89f4bebf01441', - 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f', - 'ff7ca51e58c505fec0dd2491de52c622bb7a806b', + "vcs/nodes.py": [ + "33fa3223355104431402a888fa77a4e9956feb3e", + "fa014c12c26d10ba682fadb78f2a11c24c8118e1", + "e686b958768ee96af8029fe19c6050b1a8dd3b2b", + "ab5721ca0a081f26bf43d9051e615af2cc99952f", + "c877b68d18e792a66b7f4c529ea02c8f80801542", + "4313566d2e417cb382948f8d9d7c765330356054", + "6c2303a793671e807d1cfc70134c9ca0767d98c2", + "54386793436c938cff89326944d4c2702340037d", + "54000345d2e78b03a99d561399e8e548de3f3203", + "1c6b3677b37ea064cb4b51714d8f7498f93f4b2b", + "2d03ca750a44440fb5ea8b751176d1f36f8e8f46", + "2a08b128c206db48c2f0b8f70df060e6db0ae4f8", + "30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b", + "ac71e9503c2ca95542839af0ce7b64011b72ea7c", + "12669288fd13adba2a9b7dd5b870cc23ffab92d2", + "5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382", + "12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5", + "5eab1222a7cd4bfcbabc218ca6d04276d4e27378", + "f50f42baeed5af6518ef4b0cb2f1423f3851a941", + "d7e390a45f6aa96f04f5e7f583ad4f867431aa25", + "f15c21f97864b4f071cddfbf2750ec2e23859414", + "e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade", + "ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b", + "84dec09632a4458f79f50ddbbd155506c460b4f9", + "0115510b70c7229dbc5dc49036b32e7d91d23acd", + "2a13f185e4525f9d4b59882791a2d397b90d5ddc", + "3bf1c5868e570e39569d094f922d33ced2fa3b2b", + 
"b8d04012574729d2c29886e53b1a43ef16dd00a1", + "6970b057cffe4aab0a792aa634c89f4bebf01441", + "dd80b0f6cf5052f17cc738c2951c4f2070200d7f", + "ff7ca51e58c505fec0dd2491de52c622bb7a806b", ], - 'vcs/backends/git.py': [ - '4cf116ad5a457530381135e2f4c453e68a1b0105', - '9a751d84d8e9408e736329767387f41b36935153', - 'cb681fb539c3faaedbcdf5ca71ca413425c18f01', - '428f81bb652bcba8d631bce926e8834ff49bdcc6', - '180ab15aebf26f98f714d8c68715e0f05fa6e1c7', - '2b8e07312a2e89e92b90426ab97f349f4bce2a3a', - '50e08c506174d8645a4bb517dd122ac946a0f3bf', - '54000345d2e78b03a99d561399e8e548de3f3203', + "vcs/backends/git.py": [ + "4cf116ad5a457530381135e2f4c453e68a1b0105", + "9a751d84d8e9408e736329767387f41b36935153", + "cb681fb539c3faaedbcdf5ca71ca413425c18f01", + "428f81bb652bcba8d631bce926e8834ff49bdcc6", + "180ab15aebf26f98f714d8c68715e0f05fa6e1c7", + "2b8e07312a2e89e92b90426ab97f349f4bce2a3a", + "50e08c506174d8645a4bb517dd122ac946a0f3bf", + "54000345d2e78b03a99d561399e8e548de3f3203", ], } for path, commit_ids in files.items(): @@ -817,79 +773,79 @@ class TestGitCommit(object): node_ids = [commit.raw_id for commit in node.history] assert set(commit_ids).issubset(set(node_ids)), ( "We assumed that %s is subset of commit_ids for which file %s " - "has been changed, and history of that node returned: %s" - % (commit_ids, path, node_ids)) + "has been changed, and history of that node returned: %s" % (commit_ids, path, node_ids) + ) def test_file_annotate(self): files = { - 'vcs/backends/__init__.py': { - 'c1214f7e79e02fc37156ff215cd71275450cffc3': { - 'lines_no': 1, - 'commits': [ - 'c1214f7e79e02fc37156ff215cd71275450cffc3', + "vcs/backends/__init__.py": { + "c1214f7e79e02fc37156ff215cd71275450cffc3": { + "lines_no": 1, + "commits": [ + "c1214f7e79e02fc37156ff215cd71275450cffc3", ], }, - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': { - 'lines_no': 21, - 'commits': [ - '49d3fd156b6f7db46313fac355dca1a0b94a0017', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - 
'16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647": { + "lines_no": 21, + "commits": [ + "49d3fd156b6f7db46313fac355dca1a0b94a0017", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", ], }, - 'e29b67bd158580fc90fc5e9111240b90e6e86064': { - 'lines_no': 32, - 
'commits': [ - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '5eab1222a7cd4bfcbabc218ca6d04276d4e27378', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '992f38217b979d0b0987d0bae3cc26dac85d9b19', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '54000345d2e78b03a99d561399e8e548de3f3203', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '78c3f0c23b7ee935ec276acb8b8212444c33c396', - '992f38217b979d0b0987d0bae3cc26dac85d9b19', - '992f38217b979d0b0987d0bae3cc26dac85d9b19', - '992f38217b979d0b0987d0bae3cc26dac85d9b19', - '992f38217b979d0b0987d0bae3cc26dac85d9b19', - '2a13f185e4525f9d4b59882791a2d397b90d5ddc', - '992f38217b979d0b0987d0bae3cc26dac85d9b19', - '78c3f0c23b7ee935ec276acb8b8212444c33c396', - '992f38217b979d0b0987d0bae3cc26dac85d9b19', - '992f38217b979d0b0987d0bae3cc26dac85d9b19', - '992f38217b979d0b0987d0bae3cc26dac85d9b19', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '992f38217b979d0b0987d0bae3cc26dac85d9b19', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '992f38217b979d0b0987d0bae3cc26dac85d9b19', - '992f38217b979d0b0987d0bae3cc26dac85d9b19', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', - '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647', + "e29b67bd158580fc90fc5e9111240b90e6e86064": { + "lines_no": 32, + "commits": [ + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "5eab1222a7cd4bfcbabc218ca6d04276d4e27378", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "992f38217b979d0b0987d0bae3cc26dac85d9b19", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "54000345d2e78b03a99d561399e8e548de3f3203", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + 
"16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "78c3f0c23b7ee935ec276acb8b8212444c33c396", + "992f38217b979d0b0987d0bae3cc26dac85d9b19", + "992f38217b979d0b0987d0bae3cc26dac85d9b19", + "992f38217b979d0b0987d0bae3cc26dac85d9b19", + "992f38217b979d0b0987d0bae3cc26dac85d9b19", + "2a13f185e4525f9d4b59882791a2d397b90d5ddc", + "992f38217b979d0b0987d0bae3cc26dac85d9b19", + "78c3f0c23b7ee935ec276acb8b8212444c33c396", + "992f38217b979d0b0987d0bae3cc26dac85d9b19", + "992f38217b979d0b0987d0bae3cc26dac85d9b19", + "992f38217b979d0b0987d0bae3cc26dac85d9b19", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "992f38217b979d0b0987d0bae3cc26dac85d9b19", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "992f38217b979d0b0987d0bae3cc26dac85d9b19", + "992f38217b979d0b0987d0bae3cc26dac85d9b19", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", + "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647", ], }, }, @@ -903,37 +859,32 @@ class TestGitCommit(object): l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)] assert l1_1 == l1_2 l1 = l1_1 - l2 = files[fname][commit_id]['commits'] + l2 = files[fname][commit_id]["commits"] assert l1 == l2, ( "The lists of commit_ids for %s@commit_id %s" "from annotation list should match each other, " - "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2)) + "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2) + ) def test_files_state(self): """ Tests state of FileNodes. 
""" - node = self.repo\ - .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\ - .get_node('vcs/utils/diffs.py') + node = self.repo.get_commit("e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0").get_node("vcs/utils/diffs.py") assert node.state, NodeState.ADDED assert node.added assert not node.changed assert not node.not_changed assert not node.removed - node = self.repo\ - .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\ - .get_node('.hgignore') + node = self.repo.get_commit("33fa3223355104431402a888fa77a4e9956feb3e").get_node(".hgignore") assert node.state, NodeState.CHANGED assert not node.added assert node.changed assert not node.not_changed assert not node.removed - node = self.repo\ - .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\ - .get_node('setup.py') + node = self.repo.get_commit("e29b67bd158580fc90fc5e9111240b90e6e86064").get_node("setup.py") assert node.state, NodeState.NOT_CHANGED assert not node.added assert not node.changed @@ -942,48 +893,38 @@ class TestGitCommit(object): # If node has REMOVED state then trying to fetch it would raise # CommitError exception - commit = self.repo.get_commit( - 'fa6600f6848800641328adbf7811fd2372c02ab2') - path = 'vcs/backends/BaseRepository.py' + commit = self.repo.get_commit("fa6600f6848800641328adbf7811fd2372c02ab2") + path = "vcs/backends/BaseRepository.py" with pytest.raises(NodeDoesNotExistError): commit.get_node(path) # but it would be one of ``removed`` (commit's attribute) assert path in [rf.path for rf in commit.removed] - commit = self.repo.get_commit( - '54386793436c938cff89326944d4c2702340037d') - changed = [ - 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py', - 'vcs/nodes.py'] + commit = self.repo.get_commit("54386793436c938cff89326944d4c2702340037d") + changed = ["setup.py", "tests/test_nodes.py", "vcs/backends/hg.py", "vcs/nodes.py"] assert set(changed) == set([f.path for f in commit.changed]) def test_unicode_branch_refs(self): unicode_branches = { - 'refs/heads/unicode': 
'6c0ce52b229aa978889e91b38777f800e85f330b', - u'refs/heads/uniçö∂e': 'ürl', + "refs/heads/unicode": "6c0ce52b229aa978889e91b38777f800e85f330b", + "refs/heads/uniçö∂e": "ürl", } - with mock.patch( - ("rhodecode.lib.vcs.backends.git.repository" - ".GitRepository._refs"), - unicode_branches): + with mock.patch(("rhodecode.lib.vcs.backends.git.repository" ".GitRepository._refs"), unicode_branches): branches = self.repo.branches - assert 'unicode' in branches - assert 'uniçö∂e' in branches + assert "unicode" in branches + assert "uniçö∂e" in branches def test_unicode_tag_refs(self): unicode_tags = { - 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b', - u'refs/tags/uniçö∂e': '6c0ce52b229aa978889e91b38777f800e85f330b', + "refs/tags/unicode": "6c0ce52b229aa978889e91b38777f800e85f330b", + "refs/tags/uniçö∂e": "6c0ce52b229aa978889e91b38777f800e85f330b", } - with mock.patch( - ("rhodecode.lib.vcs.backends.git.repository" - ".GitRepository._refs"), - unicode_tags): + with mock.patch(("rhodecode.lib.vcs.backends.git.repository" ".GitRepository._refs"), unicode_tags): tags = self.repo.tags - assert 'unicode' in tags - assert 'uniçö∂e' in tags + assert "unicode" in tags + assert "uniçö∂e" in tags def test_commit_message_is_unicode(self): for commit in self.repo: @@ -995,190 +936,186 @@ class TestGitCommit(object): def test_repo_files_content_types(self): commit = self.repo.get_commit() - for node in commit.get_node('/'): + for node in commit.get_node("/"): if node.is_file(): assert type(node.content) == bytes assert type(node.str_content) == str def test_wrong_path(self): # There is 'setup.py' in the root dir but not there: - path = 'foo/bar/setup.py' + path = "foo/bar/setup.py" tip = self.repo.get_commit() with pytest.raises(VCSError): tip.get_node(path) - @pytest.mark.parametrize("author_email, commit_id", [ - ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'), - ('lukasz.balcerzak@python-center.pl', - 
'ff7ca51e58c505fec0dd2491de52c622bb7a806b'), - ('none@none', '8430a588b43b5d6da365400117c89400326e7992'), - ]) + @pytest.mark.parametrize( + "author_email, commit_id", + [ + ("marcin@python-blog.com", "c1214f7e79e02fc37156ff215cd71275450cffc3"), + ("lukasz.balcerzak@python-center.pl", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"), + ("none@none", "8430a588b43b5d6da365400117c89400326e7992"), + ], + ) def test_author_email(self, author_email, commit_id): commit = self.repo.get_commit(commit_id) assert author_email == commit.author_email - @pytest.mark.parametrize("author, commit_id", [ - ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'), - ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'), - ('marcink', '8430a588b43b5d6da365400117c89400326e7992'), - ]) + @pytest.mark.parametrize( + "author, commit_id", + [ + ("Marcin Kuzminski", "c1214f7e79e02fc37156ff215cd71275450cffc3"), + ("Lukasz Balcerzak", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"), + ("marcink", "8430a588b43b5d6da365400117c89400326e7992"), + ], + ) def test_author_username(self, author, commit_id): commit = self.repo.get_commit(commit_id) assert author == commit.author_name class TestLargeFileRepo(object): - def test_large_file(self, backend_git): conf = make_db_config() - repo = backend_git.create_test_repo('largefiles', conf) + git_largefiles_store = conf.get("vcs_git_lfs", "store_location") + + repo = backend_git.create_test_repo("largefiles", conf) tip = repo.scm_instance().get_commit() + node = tip.get_node("1MB.zip") + # extract stored LF node into the origin cache - lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store') + repo_lfs_store: str = os.path.join(repo.repo_path, repo.repo_name, "lfs_store") - oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf' - oid_path = os.path.join(lfs_store, oid) - # Todo: oid path depends on LFSOidStorage.store_suffix. 
Once it will be changed update below line accordingly - oid_destination = os.path.join( - conf.get('vcs_git_lfs', 'store_location'), f'objects/{oid[:2]}/{oid[2:4]}/{oid}') + oid: str = "7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf" + # where the OID actually is INSIDE the repo... + oid_path = os.path.join(repo_lfs_store, oid) - os.makedirs(os.path.dirname(oid_destination)) + # Note: oid path depends on LFSOidStore.store_suffix. Once it will be changed update below line accordingly + oid_destination = os.path.join(git_largefiles_store, f"objects/{oid[:2]}/{oid[2:4]}/{oid}") + shutil.copy(oid_path, oid_destination) - node = tip.get_node('1MB.zip') - lf_node = node.get_largefile_node() assert lf_node.is_largefile() is True assert lf_node.size == 1024000 - assert lf_node.name == '1MB.zip' + assert lf_node.name == "1MB.zip" @pytest.mark.usefixtures("vcs_repository_support") class TestGitSpecificWithRepo(BackendTestMixin): - @classmethod def _get_commits(cls): return [ { - 'message': 'Initial', - 'author': 'Joe Doe ', - 'date': datetime.datetime(2010, 1, 1, 20), - 'added': [ - FileNode(b'foobar/static/js/admin/base.js', content=b'base'), - FileNode(b'foobar/static/admin', content=b'admin', mode=0o120000), # this is a link - FileNode(b'foo', content=b'foo'), + "message": "Initial", + "author": "Joe Doe ", + "date": datetime.datetime(2010, 1, 1, 20), + "added": [ + FileNode(b"foobar/static/js/admin/base.js", content=b"base"), + FileNode(b"foobar/static/admin", content=b"admin", mode=0o120000), # this is a link + FileNode(b"foo", content=b"foo"), ], }, { - 'message': 'Second', - 'author': 'Joe Doe ', - 'date': datetime.datetime(2010, 1, 1, 22), - 'added': [ - FileNode(b'foo2', content=b'foo2'), + "message": "Second", + "author": "Joe Doe ", + "date": datetime.datetime(2010, 1, 1, 22), + "added": [ + FileNode(b"foo2", content=b"foo2"), ], }, ] def test_paths_slow_traversing(self): commit = self.repo.get_commit() - assert 
commit.get_node('foobar').get_node('static').get_node('js')\ - .get_node('admin').get_node('base.js').content == b'base' + assert ( + commit.get_node("foobar").get_node("static").get_node("js").get_node("admin").get_node("base.js").content + == b"base" + ) def test_paths_fast_traversing(self): commit = self.repo.get_commit() - assert commit.get_node('foobar/static/js/admin/base.js').content == b'base' + assert commit.get_node("foobar/static/js/admin/base.js").content == b"base" def test_get_diff_runs_git_command_with_hashes(self): comm1 = self.repo[0] comm2 = self.repo[1] - with mock.patch.object(self.repo, '_remote', return_value=mock.Mock()) as remote_mock: + with mock.patch.object(self.repo, "_remote", return_value=mock.Mock()) as remote_mock: remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff) self.repo.get_diff(comm1, comm2) remote_mock.diff.assert_called_once_with( - comm1.raw_id, comm2.raw_id, - file_filter=None, opt_ignorews=False, context=3) + comm1.raw_id, comm2.raw_id, file_filter=None, opt_ignorews=False, context=3 + ) def test_get_diff_runs_git_command_with_str_hashes(self): comm2 = self.repo[1] - with mock.patch.object(self.repo, '_remote', return_value=mock.Mock()) as remote_mock: + with mock.patch.object(self.repo, "_remote", return_value=mock.Mock()) as remote_mock: remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff) self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2) remote_mock.diff.assert_called_once_with( - self.repo.EMPTY_COMMIT.raw_id, comm2.raw_id, - file_filter=None, opt_ignorews=False, context=3) + self.repo.EMPTY_COMMIT.raw_id, comm2.raw_id, file_filter=None, opt_ignorews=False, context=3 + ) def test_get_diff_runs_git_command_with_path_if_its_given(self): comm1 = self.repo[0] comm2 = self.repo[1] - with mock.patch.object(self.repo, '_remote', return_value=mock.Mock()) as remote_mock: + with mock.patch.object(self.repo, "_remote", return_value=mock.Mock()) as remote_mock: remote_mock.diff = 
mock.MagicMock(side_effect=callable_get_diff) - self.repo.get_diff(comm1, comm2, 'foo') + self.repo.get_diff(comm1, comm2, "foo") remote_mock.diff.assert_called_once_with( - self.repo._lookup_commit(0), comm2.raw_id, - file_filter='foo', opt_ignorews=False, context=3) + self.repo._lookup_commit(0), comm2.raw_id, file_filter="foo", opt_ignorews=False, context=3 + ) @pytest.mark.usefixtures("vcs_repository_support") class TestGitRegression(BackendTestMixin): - @classmethod def _get_commits(cls): return [ { - 'message': 'Initial', - 'author': 'Joe Doe ', - 'date': datetime.datetime(2010, 1, 1, 20), - 'added': [ - FileNode(b'bot/__init__.py', content=b'base'), - FileNode(b'bot/templates/404.html', content=b'base'), - FileNode(b'bot/templates/500.html', content=b'base'), + "message": "Initial", + "author": "Joe Doe ", + "date": datetime.datetime(2010, 1, 1, 20), + "added": [ + FileNode(b"bot/__init__.py", content=b"base"), + FileNode(b"bot/templates/404.html", content=b"base"), + FileNode(b"bot/templates/500.html", content=b"base"), ], }, { - 'message': 'Second', - 'author': 'Joe Doe ', - 'date': datetime.datetime(2010, 1, 1, 22), - 'added': [ - FileNode(b'bot/build/migrations/1.py', content=b'foo2'), - FileNode(b'bot/build/migrations/2.py', content=b'foo2'), - FileNode(b'bot/build/static/templates/f.html', content=b'foo2'), - FileNode(b'bot/build/static/templates/f1.html', content=b'foo2'), - FileNode(b'bot/build/templates/err.html', content=b'foo2'), - FileNode(b'bot/build/templates/err2.html', content=b'foo2'), + "message": "Second", + "author": "Joe Doe ", + "date": datetime.datetime(2010, 1, 1, 22), + "added": [ + FileNode(b"bot/build/migrations/1.py", content=b"foo2"), + FileNode(b"bot/build/migrations/2.py", content=b"foo2"), + FileNode(b"bot/build/static/templates/f.html", content=b"foo2"), + FileNode(b"bot/build/static/templates/f1.html", content=b"foo2"), + FileNode(b"bot/build/templates/err.html", content=b"foo2"), + FileNode(b"bot/build/templates/err2.html", 
content=b"foo2"), ], }, ] - @pytest.mark.parametrize("path, expected_paths", [ - ('bot', [ - 'bot/build', - 'bot/templates', - 'bot/__init__.py']), - ('bot/build', [ - 'bot/build/migrations', - 'bot/build/static', - 'bot/build/templates']), - ('bot/build/static', [ - 'bot/build/static/templates']), - ('bot/build/static/templates', [ - 'bot/build/static/templates/f.html', - 'bot/build/static/templates/f1.html']), - ('bot/build/templates', [ - 'bot/build/templates/err.html', - 'bot/build/templates/err2.html']), - ('bot/templates/', [ - 'bot/templates/404.html', - 'bot/templates/500.html']), - ]) + @pytest.mark.parametrize( + "path, expected_paths", + [ + ("bot", ["bot/build", "bot/templates", "bot/__init__.py"]), + ("bot/build", ["bot/build/migrations", "bot/build/static", "bot/build/templates"]), + ("bot/build/static", ["bot/build/static/templates"]), + ("bot/build/static/templates", ["bot/build/static/templates/f.html", "bot/build/static/templates/f1.html"]), + ("bot/build/templates", ["bot/build/templates/err.html", "bot/build/templates/err2.html"]), + ("bot/templates/", ["bot/templates/404.html", "bot/templates/500.html"]), + ], + ) def test_similar_paths(self, path, expected_paths): commit = self.repo.get_commit() paths = [n.path for n in commit.get_nodes(path)] @@ -1186,122 +1123,120 @@ class TestGitRegression(BackendTestMixin class TestDiscoverGitVersion(object): - def test_returns_git_version(self, baseapp): version = discover_git_version() assert version def test_returns_empty_string_without_vcsserver(self): mock_connection = mock.Mock() - mock_connection.discover_git_version = mock.Mock( - side_effect=Exception) - with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection): + mock_connection.discover_git_version = mock.Mock(side_effect=Exception) + with mock.patch("rhodecode.lib.vcs.connection.Git", mock_connection): version = discover_git_version() - assert version == '' + assert version == "" class TestGetSubmoduleUrl(object): def 
test_submodules_file_found(self): - commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1) + commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1) node = mock.Mock() - with mock.patch.object( - commit, 'get_node', return_value=node) as get_node_mock: + with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock: node.str_content = ( - '[submodule "subrepo1"]\n' - '\tpath = subrepo1\n' - '\turl = https://code.rhodecode.com/dulwich\n' + '[submodule "subrepo1"]\n' "\tpath = subrepo1\n" "\turl = https://code.rhodecode.com/dulwich\n" ) - result = commit._get_submodule_url('subrepo1') - get_node_mock.assert_called_once_with('.gitmodules') - assert result == 'https://code.rhodecode.com/dulwich' + result = commit._get_submodule_url("subrepo1") + get_node_mock.assert_called_once_with(".gitmodules") + assert result == "https://code.rhodecode.com/dulwich" def test_complex_submodule_path(self): - commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1) + commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1) node = mock.Mock() - with mock.patch.object( - commit, 'get_node', return_value=node) as get_node_mock: + with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock: node.str_content = ( '[submodule "complex/subrepo/path"]\n' - '\tpath = complex/subrepo/path\n' - '\turl = https://code.rhodecode.com/dulwich\n' + "\tpath = complex/subrepo/path\n" + "\turl = https://code.rhodecode.com/dulwich\n" ) - result = commit._get_submodule_url('complex/subrepo/path') - get_node_mock.assert_called_once_with('.gitmodules') - assert result == 'https://code.rhodecode.com/dulwich' + result = commit._get_submodule_url("complex/subrepo/path") + get_node_mock.assert_called_once_with(".gitmodules") + assert result == "https://code.rhodecode.com/dulwich" def test_submodules_file_not_found(self): - commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1) - with mock.patch.object( - commit, 
'get_node', side_effect=NodeDoesNotExistError): - result = commit._get_submodule_url('complex/subrepo/path') + commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1) + with mock.patch.object(commit, "get_node", side_effect=NodeDoesNotExistError): + result = commit._get_submodule_url("complex/subrepo/path") assert result is None def test_path_not_found(self): - commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1) + commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1) node = mock.Mock() - with mock.patch.object( - commit, 'get_node', return_value=node) as get_node_mock: + with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock: node.str_content = ( - '[submodule "subrepo1"]\n' - '\tpath = subrepo1\n' - '\turl = https://code.rhodecode.com/dulwich\n' + '[submodule "subrepo1"]\n' "\tpath = subrepo1\n" "\turl = https://code.rhodecode.com/dulwich\n" ) - result = commit._get_submodule_url('subrepo2') - get_node_mock.assert_called_once_with('.gitmodules') + result = commit._get_submodule_url("subrepo2") + get_node_mock.assert_called_once_with(".gitmodules") assert result is None def test_returns_cached_values(self): - commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1) + commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1) node = mock.Mock() - with mock.patch.object( - commit, 'get_node', return_value=node) as get_node_mock: + with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock: node.str_content = ( - '[submodule "subrepo1"]\n' - '\tpath = subrepo1\n' - '\turl = https://code.rhodecode.com/dulwich\n' + '[submodule "subrepo1"]\n' "\tpath = subrepo1\n" "\turl = https://code.rhodecode.com/dulwich\n" ) for _ in range(3): - commit._get_submodule_url('subrepo1') - get_node_mock.assert_called_once_with('.gitmodules') + commit._get_submodule_url("subrepo1") + get_node_mock.assert_called_once_with(".gitmodules") def test_get_node_returns_a_link(self): 
repository = mock.Mock() - repository.alias = 'git' - commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1) - submodule_url = 'https://code.rhodecode.com/dulwich' - get_id_patch = mock.patch.object( - commit, '_get_tree_id_for_path', return_value=(1, 'link')) - get_submodule_patch = mock.patch.object( - commit, '_get_submodule_url', return_value=submodule_url) + repository.alias = "git" + commit = GitCommit(repository=repository, raw_id="abcdef12", idx=1) + submodule_url = "https://code.rhodecode.com/dulwich" + get_id_patch = mock.patch.object(commit, "_get_tree_id_for_path", return_value=(1, "link")) + get_submodule_patch = mock.patch.object(commit, "_get_submodule_url", return_value=submodule_url) with get_id_patch, get_submodule_patch as submodule_mock: - node = commit.get_node('/abcde') + node = commit.get_node("/abcde") - submodule_mock.assert_called_once_with('/abcde') + submodule_mock.assert_called_once_with("/abcde") assert type(node) == SubModuleNode assert node.url == submodule_url def test_get_nodes_returns_links(self): repository = mock.MagicMock() - repository.alias = 'git' - repository._remote.tree_items.return_value = [ - ('subrepo', 'stat', 1, 'link') - ] - commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1) - submodule_url = 'https://code.rhodecode.com/dulwich' - get_id_patch = mock.patch.object( - commit, '_get_tree_id_for_path', return_value=(1, 'tree')) - get_submodule_patch = mock.patch.object( - commit, '_get_submodule_url', return_value=submodule_url) + repository.alias = "git" + repository._remote.tree_items.return_value = [("subrepo", "stat", 1, "link")] + commit = GitCommit(repository=repository, raw_id="abcdef12", idx=1) + submodule_url = "https://code.rhodecode.com/dulwich" + get_id_patch = mock.patch.object(commit, "_get_tree_id_for_path", return_value=(1, "tree")) + get_submodule_patch = mock.patch.object(commit, "_get_submodule_url", return_value=submodule_url) with get_id_patch, get_submodule_patch as 
submodule_mock: - nodes = commit.get_nodes('/abcde') + nodes = commit.get_nodes("/abcde") - submodule_mock.assert_called_once_with('/abcde/subrepo') + submodule_mock.assert_called_once_with("/abcde/subrepo") assert len(nodes) == 1 assert type(nodes[0]) == SubModuleNode assert nodes[0].url == submodule_url + + +class TestGetShadowInstance(object): + + @pytest.fixture() + def repo(self, vcsbackend_git): + _git_repo = vcsbackend_git.repo + + mock.patch.object(_git_repo, "config", mock.Mock()) + connection_mock = mock.Mock(unsafe=True, name="connection.Hg") + + mock.patch("rhodecode.lib.vcs.connection.Git", connection_mock) + return _git_repo + + def test_getting_shadow_instance_copies_config(self, repo): + shadow = repo.get_shadow_instance(repo.path) + assert shadow.config.serialize() == repo.config.serialize() diff --git a/rhodecode/tests/vcs/test_hg.py b/rhodecode/tests/vcs/test_hg.py --- a/rhodecode/tests/vcs/test_hg.py +++ b/rhodecode/tests/vcs/test_hg.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -25,11 +24,9 @@ import pytest from rhodecode.lib.str_utils import safe_bytes from rhodecode.lib.utils import make_db_config from rhodecode.lib.vcs import backends -from rhodecode.lib.vcs.backends.base import ( - Reference, MergeResponse, MergeFailureReason) +from rhodecode.lib.vcs.backends.base import Reference, MergeResponse, MergeFailureReason from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit -from rhodecode.lib.vcs.exceptions import ( - RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError) +from rhodecode.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator @@ -44,12 +41,13 @@ def repo_path_generator(): i = 0 while True: i += 1 - yield 
'%s-%d' % (TEST_HG_REPO_CLONE, i) + yield "%s-%d" % (TEST_HG_REPO_CLONE, i) + REPO_PATH_GENERATOR = repo_path_generator() -@pytest.fixture(scope='class', autouse=True) +@pytest.fixture(scope="class", autouse=True) def repo(request, baseapp): repo = MercurialRepository(TEST_HG_REPO) if request.cls: @@ -58,7 +56,6 @@ def repo(request, baseapp): class TestMercurialRepository(object): - # pylint: disable=protected-access def get_clone_repo(self): @@ -66,8 +63,7 @@ class TestMercurialRepository(object): Return a clone of the base repo. """ clone_path = next(REPO_PATH_GENERATOR) - repo_clone = MercurialRepository( - clone_path, create=True, src_url=self.repo.path) + repo_clone = MercurialRepository(clone_path, create=True, src_url=self.repo.path) return repo_clone @@ -78,40 +74,39 @@ class TestMercurialRepository(object): return MercurialRepository(next(REPO_PATH_GENERATOR), create=True) def test_wrong_repo_path(self): - wrong_repo_path = '/tmp/errorrepo_hg' + wrong_repo_path = "/tmp/errorrepo_hg" with pytest.raises(RepositoryError): MercurialRepository(wrong_repo_path) def test_unicode_path_repo(self): with pytest.raises(VCSError): - MercurialRepository('iShouldFail') + MercurialRepository("iShouldFail") def test_unicode_commit_id(self): with pytest.raises(CommitDoesNotExistError): - self.repo.get_commit('unicode-commit-id') + self.repo.get_commit("unicode-commit-id") with pytest.raises(CommitDoesNotExistError): - self.repo.get_commit('unícøde-spéçial-chäråcter-commit-id') + self.repo.get_commit("unícøde-spéçial-chäråcter-commit-id") def test_unicode_bookmark(self): - self.repo.bookmark('unicode-bookmark') - self.repo.bookmark('unícøde-spéçial-chäråcter-bookmark') + self.repo.bookmark("unicode-bookmark") + self.repo.bookmark("unícøde-spéçial-chäråcter-bookmark") def test_unicode_branch(self): with pytest.raises(KeyError): - assert self.repo.branches['unicode-branch'] + assert self.repo.branches["unicode-branch"] with pytest.raises(KeyError): - assert 
self.repo.branches['unícøde-spéçial-chäråcter-branch'] + assert self.repo.branches["unícøde-spéçial-chäråcter-branch"] def test_repo_clone(self): if os.path.exists(TEST_HG_REPO_CLONE): self.fail( - 'Cannot test mercurial clone repo as location %s already ' - 'exists. You should manually remove it first.' - % TEST_HG_REPO_CLONE) + "Cannot test mercurial clone repo as location %s already " + "exists. You should manually remove it first." % TEST_HG_REPO_CLONE + ) repo = MercurialRepository(TEST_HG_REPO) - repo_clone = MercurialRepository(TEST_HG_REPO_CLONE, - src_url=TEST_HG_REPO) + repo_clone = MercurialRepository(TEST_HG_REPO_CLONE, create=True, src_url=TEST_HG_REPO) assert len(repo.commit_ids) == len(repo_clone.commit_ids) # Checking hashes of commits should be enough for commit in repo.get_commits(): @@ -121,72 +116,80 @@ class TestMercurialRepository(object): def test_repo_clone_with_update(self): repo = MercurialRepository(TEST_HG_REPO) repo_clone = MercurialRepository( - TEST_HG_REPO_CLONE + '_w_update', - src_url=TEST_HG_REPO, do_workspace_checkout=True) + TEST_HG_REPO_CLONE + "_w_update", create=True, src_url=TEST_HG_REPO, do_workspace_checkout=True + ) assert len(repo.commit_ids) == len(repo_clone.commit_ids) # check if current workdir was updated - assert os.path.isfile( - os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in')) + assert os.path.isfile(os.path.join(TEST_HG_REPO_CLONE + "_w_update", "MANIFEST.in")) def test_repo_clone_without_update(self): repo = MercurialRepository(TEST_HG_REPO) repo_clone = MercurialRepository( - TEST_HG_REPO_CLONE + '_wo_update', - src_url=TEST_HG_REPO, do_workspace_checkout=False) + TEST_HG_REPO_CLONE + "_wo_update", create=True, src_url=TEST_HG_REPO, do_workspace_checkout=False + ) assert len(repo.commit_ids) == len(repo_clone.commit_ids) - assert not os.path.isfile( - os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in')) + assert not os.path.isfile(os.path.join(TEST_HG_REPO_CLONE + "_wo_update", 
"MANIFEST.in")) def test_commit_ids(self): # there are 21 commits at bitbucket now # so we can assume they would be available from now on - subset = {'b986218ba1c9b0d6a259fac9b050b1724ed8e545', '3d8f361e72ab303da48d799ff1ac40d5ac37c67e', - '6cba7170863a2411822803fa77a0a264f1310b35', '56349e29c2af3ac913b28bde9a2c6154436e615b', - '2dda4e345facb0ccff1a191052dd1606dba6781d', '6fff84722075f1607a30f436523403845f84cd9e', - '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb', - 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', 'be90031137367893f1c406e0a8683010fd115b79', - 'db8e58be770518cbb2b1cdfa69146e47cd481481', '84478366594b424af694a6c784cb991a16b87c21', - '17f8e105dddb9f339600389c6dc7175d395a535c', '20a662e756499bde3095ffc9bc0643d1def2d0eb', - '2e319b85e70a707bba0beff866d9f9de032aa4f9', '786facd2c61deb9cf91e9534735124fb8fc11842', - '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', 'aa6a0de05b7612707db567078e130a6cd114a9a7', - 'eada5a770da98ab0dd7325e29d00e0714f228d09' - } + subset = { + "b986218ba1c9b0d6a259fac9b050b1724ed8e545", + "3d8f361e72ab303da48d799ff1ac40d5ac37c67e", + "6cba7170863a2411822803fa77a0a264f1310b35", + "56349e29c2af3ac913b28bde9a2c6154436e615b", + "2dda4e345facb0ccff1a191052dd1606dba6781d", + "6fff84722075f1607a30f436523403845f84cd9e", + "7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7", + "3803844fdbd3b711175fc3da9bdacfcd6d29a6fb", + "dc5d2c0661b61928834a785d3e64a3f80d3aad9c", + "be90031137367893f1c406e0a8683010fd115b79", + "db8e58be770518cbb2b1cdfa69146e47cd481481", + "84478366594b424af694a6c784cb991a16b87c21", + "17f8e105dddb9f339600389c6dc7175d395a535c", + "20a662e756499bde3095ffc9bc0643d1def2d0eb", + "2e319b85e70a707bba0beff866d9f9de032aa4f9", + "786facd2c61deb9cf91e9534735124fb8fc11842", + "94593d2128d38210a2fcd1aabff6dda0d6d9edf8", + "aa6a0de05b7612707db567078e130a6cd114a9a7", + "eada5a770da98ab0dd7325e29d00e0714f228d09", + } assert subset.issubset(set(self.repo.commit_ids)) # check if we have the proper order of commits org 
= [ - 'b986218ba1c9b0d6a259fac9b050b1724ed8e545', - '3d8f361e72ab303da48d799ff1ac40d5ac37c67e', - '6cba7170863a2411822803fa77a0a264f1310b35', - '56349e29c2af3ac913b28bde9a2c6154436e615b', - '2dda4e345facb0ccff1a191052dd1606dba6781d', - '6fff84722075f1607a30f436523403845f84cd9e', - '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', - '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb', - 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', - 'be90031137367893f1c406e0a8683010fd115b79', - 'db8e58be770518cbb2b1cdfa69146e47cd481481', - '84478366594b424af694a6c784cb991a16b87c21', - '17f8e105dddb9f339600389c6dc7175d395a535c', - '20a662e756499bde3095ffc9bc0643d1def2d0eb', - '2e319b85e70a707bba0beff866d9f9de032aa4f9', - '786facd2c61deb9cf91e9534735124fb8fc11842', - '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', - 'aa6a0de05b7612707db567078e130a6cd114a9a7', - 'eada5a770da98ab0dd7325e29d00e0714f228d09', - '2c1885c735575ca478bf9e17b0029dca68824458', - 'd9bcd465040bf869799b09ad732c04e0eea99fe9', - '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7', - '4fb8326d78e5120da2c7468dcf7098997be385da', - '62b4a097164940bd66030c4db51687f3ec035eed', - '536c1a19428381cfea92ac44985304f6a8049569', - '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4', - '9bb326a04ae5d98d437dece54be04f830cf1edd9', - 'f8940bcb890a98c4702319fbe36db75ea309b475', - 'ff5ab059786ebc7411e559a2cc309dfae3625a3b', - '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08', - 'ee87846a61c12153b51543bf860e1026c6d3dcba', + "b986218ba1c9b0d6a259fac9b050b1724ed8e545", + "3d8f361e72ab303da48d799ff1ac40d5ac37c67e", + "6cba7170863a2411822803fa77a0a264f1310b35", + "56349e29c2af3ac913b28bde9a2c6154436e615b", + "2dda4e345facb0ccff1a191052dd1606dba6781d", + "6fff84722075f1607a30f436523403845f84cd9e", + "7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7", + "3803844fdbd3b711175fc3da9bdacfcd6d29a6fb", + "dc5d2c0661b61928834a785d3e64a3f80d3aad9c", + "be90031137367893f1c406e0a8683010fd115b79", + "db8e58be770518cbb2b1cdfa69146e47cd481481", + "84478366594b424af694a6c784cb991a16b87c21", + 
"17f8e105dddb9f339600389c6dc7175d395a535c", + "20a662e756499bde3095ffc9bc0643d1def2d0eb", + "2e319b85e70a707bba0beff866d9f9de032aa4f9", + "786facd2c61deb9cf91e9534735124fb8fc11842", + "94593d2128d38210a2fcd1aabff6dda0d6d9edf8", + "aa6a0de05b7612707db567078e130a6cd114a9a7", + "eada5a770da98ab0dd7325e29d00e0714f228d09", + "2c1885c735575ca478bf9e17b0029dca68824458", + "d9bcd465040bf869799b09ad732c04e0eea99fe9", + "469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7", + "4fb8326d78e5120da2c7468dcf7098997be385da", + "62b4a097164940bd66030c4db51687f3ec035eed", + "536c1a19428381cfea92ac44985304f6a8049569", + "965e8ab3c44b070cdaa5bf727ddef0ada980ecc4", + "9bb326a04ae5d98d437dece54be04f830cf1edd9", + "f8940bcb890a98c4702319fbe36db75ea309b475", + "ff5ab059786ebc7411e559a2cc309dfae3625a3b", + "6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08", + "ee87846a61c12153b51543bf860e1026c6d3dcba", ] assert org == self.repo.commit_ids[:31] @@ -197,8 +200,7 @@ class TestMercurialRepository(object): def test_slicing(self): # 4 1 5 10 95 - for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), - (10, 20, 10), (5, 100, 95)]: + for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), (10, 20, 10), (5, 100, 95)]: indexes = list(self.repo[sfrom:sto]) assert len(indexes) == size assert indexes[0] == self.repo.get_commit(commit_idx=sfrom) @@ -208,64 +210,64 @@ class TestMercurialRepository(object): # TODO: Need more tests here # active branches - assert 'default' in self.repo.branches - assert 'stable' in self.repo.branches + assert "default" in self.repo.branches + assert "stable" in self.repo.branches # closed - assert 'git' in self.repo._get_branches(closed=True) - assert 'web' in self.repo._get_branches(closed=True) + assert "git" in self.repo._get_branches(closed=True) + assert "web" in self.repo._get_branches(closed=True) for name, id in self.repo.branches.items(): assert isinstance(self.repo.get_commit(id), MercurialCommit) def test_tip_in_tags(self): # tip is always a tag - assert 'tip' in 
self.repo.tags + assert "tip" in self.repo.tags def test_tip_commit_in_tags(self): tip = self.repo.get_commit() - assert self.repo.tags['tip'] == tip.raw_id + assert self.repo.tags["tip"] == tip.raw_id def test_initial_commit(self): init_commit = self.repo.get_commit(commit_idx=0) init_author = init_commit.author - assert init_commit.message == 'initial import' - assert init_author == 'Marcin Kuzminski ' + assert init_commit.message == "initial import" + assert init_author == "Marcin Kuzminski " assert init_author == init_commit.committer - assert sorted(init_commit._file_paths) == sorted([ - 'vcs/__init__.py', - 'vcs/backends/BaseRepository.py', - 'vcs/backends/__init__.py', - ]) - assert sorted(init_commit._dir_paths) == sorted( - ['', 'vcs', 'vcs/backends']) + assert sorted(init_commit._file_paths) == sorted( + [ + "vcs/__init__.py", + "vcs/backends/BaseRepository.py", + "vcs/backends/__init__.py", + ] + ) + assert sorted(init_commit._dir_paths) == sorted(["", "vcs", "vcs/backends"]) - assert init_commit._dir_paths + init_commit._file_paths == \ - init_commit._paths + assert init_commit._dir_paths + init_commit._file_paths == init_commit._paths with pytest.raises(NodeDoesNotExistError): - init_commit.get_node(path='foobar') + init_commit.get_node(path="foobar") - node = init_commit.get_node('vcs/') - assert hasattr(node, 'kind') + node = init_commit.get_node("vcs/") + assert hasattr(node, "kind") assert node.kind == NodeKind.DIR - node = init_commit.get_node('vcs') - assert hasattr(node, 'kind') + node = init_commit.get_node("vcs") + assert hasattr(node, "kind") assert node.kind == NodeKind.DIR - node = init_commit.get_node('vcs/__init__.py') - assert hasattr(node, 'kind') + node = init_commit.get_node("vcs/__init__.py") + assert hasattr(node, "kind") assert node.kind == NodeKind.FILE def test_not_existing_commit(self): # rawid with pytest.raises(RepositoryError): - self.repo.get_commit('abcd' * 10) + self.repo.get_commit("abcd" * 10) # shortid with 
pytest.raises(RepositoryError): - self.repo.get_commit('erro' * 4) + self.repo.get_commit("erro" * 4) # numeric with pytest.raises(RepositoryError): self.repo.get_commit(commit_idx=self.repo.count() + 1) @@ -289,7 +291,7 @@ Introduction TODO: To be written... """ - node = commit10.get_node('README.rst') + node = commit10.get_node("README.rst") assert node.kind == NodeKind.FILE assert node.str_content == README @@ -308,77 +310,73 @@ TODO: To be written... repo_clone = self.get_clone_repo() branches = repo_clone.branches - repo_clone._update('default') - assert branches['default'] == repo_clone._identify() - repo_clone._update('stable') - assert branches['stable'] == repo_clone._identify() + repo_clone._update("default") + assert branches["default"] == repo_clone._identify() + repo_clone._update("stable") + assert branches["stable"] == repo_clone._identify() def test_local_pull_branch(self): target_repo = self.get_empty_repo() source_repo = self.get_clone_repo() - default = Reference( - 'branch', 'default', source_repo.branches['default']) + default = Reference("branch", "default", source_repo.branches["default"]) target_repo._local_pull(source_repo.path, default) target_repo = MercurialRepository(target_repo.path) - assert (target_repo.branches['default'] == - source_repo.branches['default']) + assert target_repo.branches["default"] == source_repo.branches["default"] - stable = Reference('branch', 'stable', source_repo.branches['stable']) + stable = Reference("branch", "stable", source_repo.branches["stable"]) target_repo._local_pull(source_repo.path, stable) target_repo = MercurialRepository(target_repo.path) - assert target_repo.branches['stable'] == source_repo.branches['stable'] + assert target_repo.branches["stable"] == source_repo.branches["stable"] def test_local_pull_bookmark(self): target_repo = self.get_empty_repo() source_repo = self.get_clone_repo() - commits = list(source_repo.get_commits(branch_name='default')) + commits = 
list(source_repo.get_commits(branch_name="default")) foo1_id = commits[-5].raw_id - foo1 = Reference('book', 'foo1', foo1_id) + foo1 = Reference("book", "foo1", foo1_id) source_repo._update(foo1_id) - source_repo.bookmark('foo1') + source_repo.bookmark("foo1") foo2_id = commits[-3].raw_id - foo2 = Reference('book', 'foo2', foo2_id) + foo2 = Reference("book", "foo2", foo2_id) source_repo._update(foo2_id) - source_repo.bookmark('foo2') + source_repo.bookmark("foo2") target_repo._local_pull(source_repo.path, foo1) target_repo = MercurialRepository(target_repo.path) - assert target_repo.branches['default'] == commits[-5].raw_id + assert target_repo.branches["default"] == commits[-5].raw_id target_repo._local_pull(source_repo.path, foo2) target_repo = MercurialRepository(target_repo.path) - assert target_repo.branches['default'] == commits[-3].raw_id + assert target_repo.branches["default"] == commits[-3].raw_id def test_local_pull_commit(self): target_repo = self.get_empty_repo() source_repo = self.get_clone_repo() - commits = list(source_repo.get_commits(branch_name='default')) + commits = list(source_repo.get_commits(branch_name="default")) commit_id = commits[-5].raw_id - commit = Reference('rev', commit_id, commit_id) + commit = Reference("rev", commit_id, commit_id) target_repo._local_pull(source_repo.path, commit) target_repo = MercurialRepository(target_repo.path) - assert target_repo.branches['default'] == commit_id + assert target_repo.branches["default"] == commit_id commit_id = commits[-3].raw_id - commit = Reference('rev', commit_id, commit_id) + commit = Reference("rev", commit_id, commit_id) target_repo._local_pull(source_repo.path, commit) target_repo = MercurialRepository(target_repo.path) - assert target_repo.branches['default'] == commit_id + assert target_repo.branches["default"] == commit_id def test_local_pull_from_same_repo(self): - reference = Reference('branch', 'default', None) + reference = Reference("branch", "default", None) with 
pytest.raises(ValueError): self.repo._local_pull(self.repo.path, reference) - def test_validate_pull_reference_raises_on_missing_reference( - self, vcsbackend_hg): + def test_validate_pull_reference_raises_on_missing_reference(self, vcsbackend_hg): target_repo = vcsbackend_hg.create_repo(number_of_commits=1) - reference = Reference( - 'book', 'invalid_reference', 'a' * 40) + reference = Reference("book", "invalid_reference", "a" * 40) with pytest.raises(CommitDoesNotExistError): target_repo._validate_pull_reference(reference) @@ -387,51 +385,48 @@ TODO: To be written... assert set(self.repo._heads()) == set(self.repo.branches.values()) def test_ancestor(self): - commits = [ - c.raw_id for c in self.repo.get_commits(branch_name='default')] + commits = [c.raw_id for c in self.repo.get_commits(branch_name="default")] assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5] assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5] def test_local_push(self): target_repo = self.get_empty_repo() - revisions = list(self.repo.get_commits(branch_name='default')) + revisions = list(self.repo.get_commits(branch_name="default")) revision = revisions[-5].raw_id self.repo._local_push(revision, target_repo.path) target_repo = MercurialRepository(target_repo.path) - assert target_repo.branches['default'] == revision + assert target_repo.branches["default"] == revision def test_hooks_can_be_enabled_for_local_push(self): - revision = 'deadbeef' - repo_path = 'test_group/test_repo' - with mock.patch.object(self.repo, '_remote') as remote_mock: + revision = "deadbeef" + repo_path = "test_group/test_repo" + with mock.patch.object(self.repo, "_remote") as remote_mock: self.repo._local_push(revision, repo_path, enable_hooks=True) - remote_mock.push.assert_called_once_with( - [revision], repo_path, hooks=True, push_branches=False) + remote_mock.push.assert_called_once_with([revision], repo_path, hooks=True, push_branches=False) def test_local_merge(self, 
vcsbackend_hg): target_repo = vcsbackend_hg.create_repo(number_of_commits=1) source_repo = vcsbackend_hg.clone_repo(target_repo) - vcsbackend_hg.add_file(target_repo, b'README_MERGE1', b'Version 1') + vcsbackend_hg.add_file(target_repo, b"README_MERGE1", b"Version 1") target_repo = MercurialRepository(target_repo.path) - target_rev = target_repo.branches['default'] - target_ref = Reference( - type='branch', name='default', commit_id=target_rev) - vcsbackend_hg.add_file(source_repo, b'README_MERGE2', b'Version 2') + target_rev = target_repo.branches["default"] + target_ref = Reference(type="branch", name="default", commit_id=target_rev) + vcsbackend_hg.add_file(source_repo, b"README_MERGE2", b"Version 2") source_repo = MercurialRepository(source_repo.path) - source_rev = source_repo.branches['default'] - source_ref = Reference( - type='branch', name='default', commit_id=source_rev) + source_rev = source_repo.branches["default"] + source_ref = Reference(type="branch", name="default", commit_id=source_rev) target_repo._local_pull(source_repo.path, source_ref) - merge_message = 'Merge message\n\nDescription:...' - user_name = 'Albert Einstein' - user_email = 'albert@einstein.com' + merge_message = "Merge message\n\nDescription:..." + user_name = "Albert Einstein" + user_email = "albert@einstein.com" merge_commit_id, needs_push = target_repo._local_merge( - target_ref, merge_message, user_name, user_email, source_ref) + target_ref, merge_message, user_name, user_email, source_ref + ) assert needs_push target_repo = MercurialRepository(target_repo.path) @@ -439,30 +434,28 @@ TODO: To be written... 
assert target_repo.commit_ids[-2] == source_rev last_commit = target_repo.get_commit(merge_commit_id) assert last_commit.message.strip() == merge_message - assert last_commit.author == '%s <%s>' % (user_name, user_email) + assert last_commit.author == "%s <%s>" % (user_name, user_email) - assert not os.path.exists( - os.path.join(target_repo.path, '.hg', 'merge', 'state')) + assert not os.path.exists(os.path.join(target_repo.path, ".hg", "merge", "state")) def test_local_merge_source_is_fast_forward(self, vcsbackend_hg): target_repo = vcsbackend_hg.create_repo(number_of_commits=1) source_repo = vcsbackend_hg.clone_repo(target_repo) - target_rev = target_repo.branches['default'] - target_ref = Reference( - type='branch', name='default', commit_id=target_rev) - vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2') + target_rev = target_repo.branches["default"] + target_ref = Reference(type="branch", name="default", commit_id=target_rev) + vcsbackend_hg.add_file(source_repo, "README_MERGE2", "Version 2") source_repo = MercurialRepository(source_repo.path) - source_rev = source_repo.branches['default'] - source_ref = Reference( - type='branch', name='default', commit_id=source_rev) + source_rev = source_repo.branches["default"] + source_ref = Reference(type="branch", name="default", commit_id=source_rev) target_repo._local_pull(source_repo.path, source_ref) - merge_message = 'Merge message\n\nDescription:...' - user_name = 'Albert Einstein' - user_email = 'albert@einstein.com' + merge_message = "Merge message\n\nDescription:..." + user_name = "Albert Einstein" + user_email = "albert@einstein.com" merge_commit_id, needs_push = target_repo._local_merge( - target_ref, merge_message, user_name, user_email, source_ref) + target_ref, merge_message, user_name, user_email, source_ref + ) assert merge_commit_id == source_rev assert needs_push @@ -470,70 +463,62 @@ TODO: To be written... 
assert target_repo.commit_ids[-2] == target_rev assert target_repo.commit_ids[-1] == source_rev - assert not os.path.exists( - os.path.join(target_repo.path, '.hg', 'merge', 'state')) + assert not os.path.exists(os.path.join(target_repo.path, ".hg", "merge", "state")) def test_local_merge_source_is_integrated(self, vcsbackend_hg): target_repo = vcsbackend_hg.create_repo(number_of_commits=1) - target_rev = target_repo.branches['default'] - target_ref = Reference( - type='branch', name='default', commit_id=target_rev) + target_rev = target_repo.branches["default"] + target_ref = Reference(type="branch", name="default", commit_id=target_rev) - merge_message = 'Merge message\n\nDescription:...' - user_name = 'Albert Einstein' - user_email = 'albert@einstein.com' + merge_message = "Merge message\n\nDescription:..." + user_name = "Albert Einstein" + user_email = "albert@einstein.com" merge_commit_id, needs_push = target_repo._local_merge( - target_ref, merge_message, user_name, user_email, target_ref) + target_ref, merge_message, user_name, user_email, target_ref + ) assert merge_commit_id == target_rev assert not needs_push target_repo = MercurialRepository(target_repo.path) assert target_repo.commit_ids[-1] == target_rev - assert not os.path.exists( - os.path.join(target_repo.path, '.hg', 'merge', 'state')) + assert not os.path.exists(os.path.join(target_repo.path, ".hg", "merge", "state")) def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg): target_repo = vcsbackend_hg.create_repo(number_of_commits=1) source_repo = vcsbackend_hg.clone_repo(target_repo) - vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1') + vcsbackend_hg.add_file(target_repo, "README_MERGE", "Version 1") target_repo = MercurialRepository(target_repo.path) - target_rev = target_repo.branches['default'] - target_ref = Reference( - type='branch', name='default', commit_id=target_rev) - vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2') + target_rev = 
target_repo.branches["default"] + target_ref = Reference(type="branch", name="default", commit_id=target_rev) + vcsbackend_hg.add_file(source_repo, "README_MERGE", "Version 2") source_repo = MercurialRepository(source_repo.path) - source_rev = source_repo.branches['default'] - source_ref = Reference( - type='branch', name='default', commit_id=source_rev) + source_rev = source_repo.branches["default"] + source_ref = Reference(type="branch", name="default", commit_id=source_rev) target_repo._local_pull(source_repo.path, source_ref) with pytest.raises(RepositoryError): - target_repo._local_merge( - target_ref, 'merge_message', 'user name', 'user@name.com', - source_ref) + target_repo._local_merge(target_ref, "merge_message", "user name", "user@name.com", source_ref) # Check we are not left in an intermediate merge state - assert not os.path.exists( - os.path.join(target_repo.path, '.hg', 'merge', 'state')) + assert not os.path.exists(os.path.join(target_repo.path, ".hg", "merge", "state")) def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg): commits = [ - {'message': 'a'}, - {'message': 'b', 'branch': 'b'}, + {"message": "a"}, + {"message": "b", "branch": "b"}, ] repo = backend_hg.create_repo(commits) commit_ids = backend_hg.commit_ids - target_ref = Reference( - type='branch', name='default', commit_id=commit_ids['a']) - source_ref = Reference( - type='branch', name='b', commit_id=commit_ids['b']) - merge_message = 'Merge message\n\nDescription:...' - user_name = 'Albert Einstein' - user_email = 'albert@einstein.com' + target_ref = Reference(type="branch", name="default", commit_id=commit_ids["a"]) + source_ref = Reference(type="branch", name="b", commit_id=commit_ids["b"]) + merge_message = "Merge message\n\nDescription:..." 
+ user_name = "Albert Einstein" + user_email = "albert@einstein.com" vcs_repo = repo.scm_instance() merge_commit_id, needs_push = vcs_repo._local_merge( - target_ref, merge_message, user_name, user_email, source_ref) + target_ref, merge_message, user_name, user_email, source_ref + ) assert merge_commit_id != source_ref.commit_id assert needs_push is True commit = vcs_repo.get_commit(merge_commit_id) @@ -541,63 +526,62 @@ TODO: To be written... assert commit.message == merge_message def test_maybe_prepare_merge_workspace(self): - workspace = self.repo._maybe_prepare_merge_workspace( - 1, 'pr2', 'unused', 'unused2') + workspace = self.repo._maybe_prepare_merge_workspace(1, "pr2", "unused", "unused2") assert os.path.isdir(workspace) workspace_repo = MercurialRepository(workspace) assert workspace_repo.branches == self.repo.branches # Calling it a second time should also succeed - workspace = self.repo._maybe_prepare_merge_workspace( - 1, 'pr2', 'unused', 'unused2') + workspace = self.repo._maybe_prepare_merge_workspace(1, "pr2", "unused", "unused2") assert os.path.isdir(workspace) def test_cleanup_merge_workspace(self): - workspace = self.repo._maybe_prepare_merge_workspace( - 1, 'pr3', 'unused', 'unused2') + workspace = self.repo._maybe_prepare_merge_workspace(1, "pr3", "unused", "unused2") assert os.path.isdir(workspace) - self.repo.cleanup_merge_workspace(1, 'pr3') + self.repo.cleanup_merge_workspace(1, "pr3") assert not os.path.exists(workspace) def test_cleanup_merge_workspace_invalid_workspace_id(self): # No assert: because in case of an inexistent workspace this function # should still succeed. 
- self.repo.cleanup_merge_workspace(1, 'pr4') + self.repo.cleanup_merge_workspace(1, "pr4") def test_merge_target_is_bookmark(self, vcsbackend_hg): target_repo = vcsbackend_hg.create_repo(number_of_commits=1) source_repo = vcsbackend_hg.clone_repo(target_repo) - vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1') - vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2') + vcsbackend_hg.add_file(target_repo, "README_MERGE1", "Version 1") + vcsbackend_hg.add_file(source_repo, "README_MERGE2", "Version 2") imc = source_repo.in_memory_commit - imc.add(FileNode(b'file_x', content=source_repo.name)) - imc.commit( - message='Automatic commit from repo merge test', - author='Automatic ') + imc.add(FileNode(b"file_x", content=source_repo.name)) + imc.commit(message="Automatic commit from repo merge test", author="Automatic ") target_commit = target_repo.get_commit() source_commit = source_repo.get_commit() default_branch = target_repo.DEFAULT_BRANCH_NAME - bookmark_name = 'bookmark' + bookmark_name = "bookmark" target_repo._update(default_branch) target_repo.bookmark(bookmark_name) - target_ref = Reference('book', bookmark_name, target_commit.raw_id) - source_ref = Reference('branch', default_branch, source_commit.raw_id) - workspace_id = 'test-merge' + target_ref = Reference("book", bookmark_name, target_commit.raw_id) + source_ref = Reference("branch", default_branch, source_commit.raw_id) + workspace_id = "test-merge" repo_id = repo_id_generator(target_repo.path) merge_response = target_repo.merge( - repo_id, workspace_id, target_ref, source_repo, source_ref, - 'test user', 'test@rhodecode.com', 'merge message 1', - dry_run=False) - expected_merge_response = MergeResponse( - True, True, merge_response.merge_ref, - MergeFailureReason.NONE) + repo_id, + workspace_id, + target_ref, + source_repo, + source_ref, + "test user", + "test@rhodecode.com", + "merge message 1", + dry_run=False, + ) + expected_merge_response = MergeResponse(True, True, 
merge_response.merge_ref, MergeFailureReason.NONE) assert merge_response == expected_merge_response - target_repo = backends.get_backend(vcsbackend_hg.alias)( - target_repo.path) + target_repo = backends.get_backend(vcsbackend_hg.alias)(target_repo.path) target_commits = list(target_repo.get_commits()) commit_ids = [c.raw_id for c in target_commits[:-1]] assert source_ref.commit_id in commit_ids @@ -605,43 +589,43 @@ TODO: To be written... merge_commit = target_commits[-1] assert merge_commit.raw_id == merge_response.merge_ref.commit_id - assert merge_commit.message.strip() == 'merge message 1' - assert merge_commit.author == 'test user ' + assert merge_commit.message.strip() == "merge message 1" + assert merge_commit.author == "test user " # Check the bookmark was updated in the target repo - assert ( - target_repo.bookmarks[bookmark_name] == - merge_response.merge_ref.commit_id) + assert target_repo.bookmarks[bookmark_name] == merge_response.merge_ref.commit_id def test_merge_source_is_bookmark(self, vcsbackend_hg): target_repo = vcsbackend_hg.create_repo(number_of_commits=1) source_repo = vcsbackend_hg.clone_repo(target_repo) imc = source_repo.in_memory_commit - imc.add(FileNode(b'file_x', content=source_repo.name)) - imc.commit( - message='Automatic commit from repo merge test', - author='Automatic ') + imc.add(FileNode(b"file_x", content=source_repo.name)) + imc.commit(message="Automatic commit from repo merge test", author="Automatic ") target_commit = target_repo.get_commit() source_commit = source_repo.get_commit() default_branch = target_repo.DEFAULT_BRANCH_NAME - bookmark_name = 'bookmark' - target_ref = Reference('branch', default_branch, target_commit.raw_id) + bookmark_name = "bookmark" + target_ref = Reference("branch", default_branch, target_commit.raw_id) source_repo._update(default_branch) source_repo.bookmark(bookmark_name) - source_ref = Reference('book', bookmark_name, source_commit.raw_id) - workspace_id = 'test-merge' + source_ref = 
Reference("book", bookmark_name, source_commit.raw_id) + workspace_id = "test-merge" repo_id = repo_id_generator(target_repo.path) merge_response = target_repo.merge( - repo_id, workspace_id, target_ref, source_repo, source_ref, - 'test user', 'test@rhodecode.com', 'merge message 1', - dry_run=False) - expected_merge_response = MergeResponse( - True, True, merge_response.merge_ref, - MergeFailureReason.NONE) + repo_id, + workspace_id, + target_ref, + source_repo, + source_ref, + "test user", + "test@rhodecode.com", + "merge message 1", + dry_run=False, + ) + expected_merge_response = MergeResponse(True, True, merge_response.merge_ref, MergeFailureReason.NONE) assert merge_response == expected_merge_response - target_repo = backends.get_backend(vcsbackend_hg.alias)( - target_repo.path) + target_repo = backends.get_backend(vcsbackend_hg.alias)(target_repo.path) target_commits = list(target_repo.get_commits()) commit_ids = [c.raw_id for c in target_commits] assert source_ref.commit_id == commit_ids[-1] @@ -650,78 +634,89 @@ TODO: To be written... 
def test_merge_target_has_multiple_heads(self, vcsbackend_hg): target_repo = vcsbackend_hg.create_repo(number_of_commits=2) source_repo = vcsbackend_hg.clone_repo(target_repo) - vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1') - vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2') + vcsbackend_hg.add_file(target_repo, "README_MERGE1", "Version 1") + vcsbackend_hg.add_file(source_repo, "README_MERGE2", "Version 2") # add an extra head to the target repo imc = target_repo.in_memory_commit - imc.add(FileNode(b'file_x', content='foo')) + imc.add(FileNode(b"file_x", content="foo")) commits = list(target_repo.get_commits()) imc.commit( - message='Automatic commit from repo merge test', - author='Automatic ', parents=commits[0:1]) + message="Automatic commit from repo merge test", + author="Automatic ", + parents=commits[0:1], + ) target_commit = target_repo.get_commit() source_commit = source_repo.get_commit() default_branch = target_repo.DEFAULT_BRANCH_NAME target_repo._update(default_branch) - target_ref = Reference('branch', default_branch, target_commit.raw_id) - source_ref = Reference('branch', default_branch, source_commit.raw_id) - workspace_id = 'test-merge' + target_ref = Reference("branch", default_branch, target_commit.raw_id) + source_ref = Reference("branch", default_branch, source_commit.raw_id) + workspace_id = "test-merge" - assert len(target_repo._heads(branch='default')) == 2 - heads = target_repo._heads(branch='default') + assert len(target_repo._heads(branch="default")) == 2 + heads = target_repo._heads(branch="default") expected_merge_response = MergeResponse( - False, False, None, - MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, - metadata={'heads': heads}) + False, False, None, MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, metadata={"heads": heads} + ) repo_id = repo_id_generator(target_repo.path) merge_response = target_repo.merge( - repo_id, workspace_id, target_ref, source_repo, source_ref, - 'test user', 
'test@rhodecode.com', 'merge message 1', - dry_run=False) + repo_id, + workspace_id, + target_ref, + source_repo, + source_ref, + "test user", + "test@rhodecode.com", + "merge message 1", + dry_run=False, + ) assert merge_response == expected_merge_response def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg): target_repo = vcsbackend_hg.create_repo(number_of_commits=1) source_repo = vcsbackend_hg.clone_repo(target_repo) - vcsbackend_hg.add_file(target_repo, b'README_MERGE1', b'Version 1') - vcsbackend_hg.add_file(source_repo, b'README_MERGE2', b'Version 2') + vcsbackend_hg.add_file(target_repo, b"README_MERGE1", b"Version 1") + vcsbackend_hg.add_file(source_repo, b"README_MERGE2", b"Version 2") imc = source_repo.in_memory_commit - imc.add(FileNode(b'file_x', content=safe_bytes(source_repo.name))) - imc.commit( - message='Automatic commit from repo merge test', - author='Automatic ') + imc.add(FileNode(b"file_x", content=safe_bytes(source_repo.name))) + imc.commit(message="Automatic commit from repo merge test", author="Automatic ") target_commit = target_repo.get_commit() source_commit = source_repo.get_commit() - vcsbackend_hg.add_file(source_repo, b'LICENSE', b'LICENSE Info') + vcsbackend_hg.add_file(source_repo, b"LICENSE", b"LICENSE Info") default_branch = target_repo.DEFAULT_BRANCH_NAME - bookmark_name = 'bookmark' + bookmark_name = "bookmark" source_repo._update(default_branch) source_repo.bookmark(bookmark_name) - target_ref = Reference('branch', default_branch, target_commit.raw_id) - source_ref = Reference('book', bookmark_name, source_commit.raw_id) + target_ref = Reference("branch", default_branch, target_commit.raw_id) + source_ref = Reference("book", bookmark_name, source_commit.raw_id) repo_id = repo_id_generator(target_repo.path) - workspace_id = 'test-merge' + workspace_id = "test-merge" merge_response = target_repo.merge( - repo_id, workspace_id, target_ref, source_repo, source_ref, - 'test user', 'test@rhodecode.com', 'merge 
message 1', - dry_run=False, use_rebase=True) + repo_id, + workspace_id, + target_ref, + source_repo, + source_ref, + "test user", + "test@rhodecode.com", + "merge message 1", + dry_run=False, + use_rebase=True, + ) - expected_merge_response = MergeResponse( - True, True, merge_response.merge_ref, - MergeFailureReason.NONE) + expected_merge_response = MergeResponse(True, True, merge_response.merge_ref, MergeFailureReason.NONE) assert merge_response == expected_merge_response - target_repo = backends.get_backend(vcsbackend_hg.alias)( - target_repo.path) + target_repo = backends.get_backend(vcsbackend_hg.alias)(target_repo.path) last_commit = target_repo.get_commit() assert last_commit.message == source_commit.message assert last_commit.author == source_commit.author @@ -736,27 +731,28 @@ TODO: To be written... class TestGetShadowInstance(object): @pytest.fixture() - def repo(self, vcsbackend_hg, monkeypatch): - repo = vcsbackend_hg.repo - monkeypatch.setattr(repo, 'config', mock.Mock()) - monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock()) - return repo + def repo(self, vcsbackend_hg): + _hg_repo = vcsbackend_hg.repo + connection_mock = mock.Mock(unsafe=True, name="connection.Hg") + mock.patch("rhodecode.lib.vcs.connection.Hg", connection_mock) + return _hg_repo - def test_passes_config(self, repo): + def test_getting_shadow_instance_copies_config(self, repo): shadow = repo.get_shadow_instance(repo.path) - assert shadow.config == repo.config.copy() + assert shadow.config.serialize() == repo.config.serialize() - def test_disables_hooks(self, repo): + def test_disables_hooks_section(self, repo): + repo.config.set('hooks', 'foo', 'val') shadow = repo.get_shadow_instance(repo.path) - shadow.config.clear_section.assert_called_once_with('hooks') + assert not shadow.config.items('hooks') def test_allows_to_keep_hooks(self, repo): + repo.config.set('hooks', 'foo', 'val') shadow = repo.get_shadow_instance(repo.path, enable_hooks=True) - assert not 
shadow.config.clear_section.called + assert shadow.config.items('hooks') class TestMercurialCommit(object): - def _test_equality(self, commit): idx = commit.idx assert commit == self.repo.get_commit(commit_idx=idx) @@ -768,15 +764,15 @@ class TestMercurialCommit(object): self._test_equality(commit) def test_default_commit(self): - tip = self.repo.get_commit('tip') + tip = self.repo.get_commit("tip") assert tip == self.repo.get_commit() assert tip == self.repo.get_commit(commit_id=None) assert tip == self.repo.get_commit(commit_idx=None) assert tip == list(self.repo[-1:])[0] def test_root_node(self): - tip = self.repo.get_commit('tip') - assert tip.root is tip.get_node('') + tip = self.repo.get_commit("tip") + assert tip.root is tip.get_node("") def test_lazy_fetch(self): """ @@ -792,44 +788,43 @@ class TestMercurialCommit(object): # accessing root.nodes updates commit.nodes assert len(commit.nodes) == 9 - docs = root.get_node('docs') + docs = root.get_node("docs") # we haven't yet accessed anything new as docs dir was already cached assert len(commit.nodes) == 9 assert len(docs.nodes) == 8 # accessing docs.nodes updates commit.nodes assert len(commit.nodes) == 17 - assert docs is commit.get_node('docs') + assert docs is commit.get_node("docs") assert docs is root.nodes[0] assert docs is root.dirs[0] - assert docs is commit.get_node('docs') + assert docs is commit.get_node("docs") def test_nodes_with_commit(self): commit = self.repo.get_commit(commit_idx=45) root = commit.root - docs = root.get_node('docs') - assert docs is commit.get_node('docs') - api = docs.get_node('api') - assert api is commit.get_node('docs/api') - index = api.get_node('index.rst') - assert index is commit.get_node('docs/api/index.rst') - assert index is commit.get_node( - 'docs').get_node('api').get_node('index.rst') + docs = root.get_node("docs") + assert docs is commit.get_node("docs") + api = docs.get_node("api") + assert api is commit.get_node("docs/api") + index = 
api.get_node("index.rst") + assert index is commit.get_node("docs/api/index.rst") + assert index is commit.get_node("docs").get_node("api").get_node("index.rst") def test_branch_and_tags(self): commit0 = self.repo.get_commit(commit_idx=0) - assert commit0.branch == 'default' + assert commit0.branch == "default" assert commit0.tags == [] commit10 = self.repo.get_commit(commit_idx=10) - assert commit10.branch == 'default' + assert commit10.branch == "default" assert commit10.tags == [] commit44 = self.repo.get_commit(commit_idx=44) - assert commit44.branch == 'web' + assert commit44.branch == "web" - tip = self.repo.get_commit('tip') - assert 'tip' in tip.tags + tip = self.repo.get_commit("tip") + assert "tip" in tip.tags def test_bookmarks(self): commit0 = self.repo.get_commit(commit_idx=0) @@ -842,46 +837,84 @@ class TestMercurialCommit(object): def test_file_size(self): to_check = ( - (10, 'setup.py', 1068), - (20, 'setup.py', 1106), - (60, 'setup.py', 1074), - - (10, 'vcs/backends/base.py', 2921), - (20, 'vcs/backends/base.py', 3936), - (60, 'vcs/backends/base.py', 6189), + (10, "setup.py", 1068), + (20, "setup.py", 1106), + (60, "setup.py", 1074), + (10, "vcs/backends/base.py", 2921), + (20, "vcs/backends/base.py", 3936), + (60, "vcs/backends/base.py", 6189), ) for idx, path, size in to_check: self._test_file_size(idx, path, size) def test_file_history_from_commits(self): - node = self.repo[10].get_node('setup.py') + node = self.repo[10].get_node("setup.py") commit_ids = [commit.raw_id for commit in node.history] - assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids + assert ["3803844fdbd3b711175fc3da9bdacfcd6d29a6fb"] == commit_ids - node = self.repo[20].get_node('setup.py') + node = self.repo[20].get_node("setup.py") node_ids = [commit.raw_id for commit in node.history] - assert ['eada5a770da98ab0dd7325e29d00e0714f228d09', - '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids + assert ["eada5a770da98ab0dd7325e29d00e0714f228d09", 
"3803844fdbd3b711175fc3da9bdacfcd6d29a6fb"] == node_ids # special case we check history from commit that has this particular # file changed this means we check if it's included as well - node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\ - .get_node('setup.py') + node = self.repo.get_commit("eada5a770da98ab0dd7325e29d00e0714f228d09").get_node("setup.py") node_ids = [commit.raw_id for commit in node.history] - assert ['eada5a770da98ab0dd7325e29d00e0714f228d09', - '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids + assert ["eada5a770da98ab0dd7325e29d00e0714f228d09", "3803844fdbd3b711175fc3da9bdacfcd6d29a6fb"] == node_ids def test_file_history(self): # we can only check if those commits are present in the history # as we cannot update this test every time file is changed files = { - 'setup.py': [7, 18, 45, 46, 47, 69, 77], - 'vcs/nodes.py': [ - 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76], - 'vcs/backends/hg.py': [ - 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30, - 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54, - 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82], + "setup.py": [7, 18, 45, 46, 47, 69, 77], + "vcs/nodes.py": [7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76], + "vcs/backends/hg.py": [ + 4, + 5, + 6, + 11, + 12, + 13, + 14, + 15, + 16, + 21, + 22, + 23, + 26, + 27, + 28, + 30, + 31, + 33, + 35, + 36, + 37, + 38, + 39, + 40, + 41, + 44, + 45, + 47, + 48, + 49, + 53, + 54, + 55, + 58, + 60, + 61, + 67, + 68, + 69, + 70, + 73, + 77, + 78, + 79, + 82, + ], } for path, indexes in files.items(): tip = self.repo.get_commit(commit_idx=indexes[-1]) @@ -889,72 +922,105 @@ class TestMercurialCommit(object): node_indexes = [commit.idx for commit in node.history] assert set(indexes).issubset(set(node_indexes)), ( "We assumed that %s is subset of commits for which file %s " - "has been changed, and history of that node returned: %s" - % (indexes, path, node_indexes)) + "has been 
changed, and history of that node returned: %s" % (indexes, path, node_indexes) + ) def test_file_annotate(self): files = { - 'vcs/backends/__init__.py': { + "vcs/backends/__init__.py": { 89: { - 'lines_no': 31, - 'commits': [ - 32, 32, 61, 32, 32, 37, 32, 32, 32, 44, - 37, 37, 37, 37, 45, 37, 44, 37, 37, 37, - 32, 32, 32, 32, 37, 32, 37, 37, 32, - 32, 32 - ] + "lines_no": 31, + "commits": [ + 32, + 32, + 61, + 32, + 32, + 37, + 32, + 32, + 32, + 44, + 37, + 37, + 37, + 37, + 45, + 37, + 44, + 37, + 37, + 37, + 32, + 32, + 32, + 32, + 37, + 32, + 37, + 37, + 32, + 32, + 32, + ], + }, + 20: {"lines_no": 1, "commits": [4]}, + 55: { + "lines_no": 31, + "commits": [ + 32, + 32, + 45, + 32, + 32, + 37, + 32, + 32, + 32, + 44, + 37, + 37, + 37, + 37, + 45, + 37, + 44, + 37, + 37, + 37, + 32, + 32, + 32, + 32, + 37, + 32, + 37, + 37, + 32, + 32, + 32, + ], + }, + }, + "vcs/exceptions.py": { + 89: { + "lines_no": 18, + "commits": [16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 16, 16, 18, 18, 18], }, 20: { - 'lines_no': 1, - 'commits': [4] + "lines_no": 18, + "commits": [16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 16, 16, 18, 18, 18], }, 55: { - 'lines_no': 31, - 'commits': [ - 32, 32, 45, 32, 32, 37, 32, 32, 32, 44, - 37, 37, 37, 37, 45, 37, 44, 37, 37, 37, - 32, 32, 32, 32, 37, 32, 37, 37, 32, - 32, 32 - ] - } - }, - 'vcs/exceptions.py': { - 89: { - 'lines_no': 18, - 'commits': [ - 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 17, 16, 16, 18, 18, 18 - ] + "lines_no": 18, + "commits": [16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 16, 16, 18, 18, 18], }, - 20: { - 'lines_no': 18, - 'commits': [ - 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 17, 16, 16, 18, 18, 18 - ] - }, - 55: { - 'lines_no': 18, - 'commits': [ - 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, - 17, 16, 16, 18, 18, 18 - ] - } }, - 'MANIFEST.in': { - 89: { - 'lines_no': 5, - 'commits': [7, 7, 7, 71, 71] - }, - 20: { - 'lines_no': 3, - 'commits': [7, 7, 7] - }, - 55: { - 
'lines_no': 3, - 'commits': [7, 7, 7] - } - } + "MANIFEST.in": { + 89: {"lines_no": 5, "commits": [7, 7, 7, 71, 71]}, + 20: {"lines_no": 3, "commits": [7, 7, 7]}, + 55: {"lines_no": 3, "commits": [7, 7, 7]}, + }, } for fname, commit_dict in files.items(): @@ -963,13 +1029,13 @@ class TestMercurialCommit(object): l1_1 = [x[1] for x in commit.get_file_annotate(fname)] l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)] assert l1_1 == l1_2 - l1 = l1_2 = [ - x[2]().idx for x in commit.get_file_annotate(fname)] - l2 = files[fname][idx]['commits'] + l1 = l1_2 = [x[2]().idx for x in commit.get_file_annotate(fname)] + l2 = files[fname][idx]["commits"] assert l1 == l2, ( "The lists of commit for %s@commit_id%s" "from annotation list should match each other," - "got \n%s \nvs \n%s " % (fname, idx, l1, l2)) + "got \n%s \nvs \n%s " % (fname, idx, l1, l2) + ) def test_commit_state(self): """ @@ -981,28 +1047,52 @@ class TestMercurialCommit(object): # changed: 13 # added: 20 # removed: 1 - changed = set([ - '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst', - 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py', - 'vcs/__init__.py', 'vcs/backends/__init__.py', - 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py', - 'vcs/utils/__init__.py']) + changed = set( + [ + ".hgignore", + "README.rst", + "docs/conf.py", + "docs/index.rst", + "setup.py", + "tests/test_hg.py", + "tests/test_nodes.py", + "vcs/__init__.py", + "vcs/backends/__init__.py", + "vcs/backends/base.py", + "vcs/backends/hg.py", + "vcs/nodes.py", + "vcs/utils/__init__.py", + ] + ) - added = set([ - 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst', - 'docs/api/index.rst', 'docs/api/nodes.rst', - 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst', - 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg', - 'vcs/utils/baseui_config.py', 'vcs/utils/web.py', - 'vcs/web/__init__.py', 'vcs/web/exceptions.py', - 'vcs/web/simplevcs/__init__.py', 
'vcs/web/simplevcs/exceptions.py', - 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py', - 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py', - 'vcs/web/simplevcs/views.py']) + added = set( + [ + "docs/api/backends/hg.rst", + "docs/api/backends/index.rst", + "docs/api/index.rst", + "docs/api/nodes.rst", + "docs/api/web/index.rst", + "docs/api/web/simplevcs.rst", + "docs/installation.rst", + "docs/quickstart.rst", + "setup.cfg", + "vcs/utils/baseui_config.py", + "vcs/utils/web.py", + "vcs/web/__init__.py", + "vcs/web/exceptions.py", + "vcs/web/simplevcs/__init__.py", + "vcs/web/simplevcs/exceptions.py", + "vcs/web/simplevcs/middleware.py", + "vcs/web/simplevcs/models.py", + "vcs/web/simplevcs/settings.py", + "vcs/web/simplevcs/utils.py", + "vcs/web/simplevcs/views.py", + ] + ) - removed = set(['docs/api.rst']) + removed = set(["docs/api.rst"]) - commit64 = self.repo.get_commit('46ad32a4f974') + commit64 = self.repo.get_commit("46ad32a4f974") assert set((node.path for node in commit64.added)) == added assert set((node.path for node in commit64.changed)) == changed assert set((node.path for node in commit64.removed)) == removed @@ -1012,10 +1102,9 @@ class TestMercurialCommit(object): # changed: 13 # added: 20 # removed: 1 - commit88 = self.repo.get_commit('b090f22d27d6') + commit88 = self.repo.get_commit("b090f22d27d6") assert set((node.path for node in commit88.added)) == set() - assert set((node.path for node in commit88.changed)) == \ - set(['.hgignore']) + assert set((node.path for node in commit88.changed)) == set([".hgignore"]) assert set((node.path for node in commit88.removed)) == set() # @@ -1025,24 +1114,25 @@ class TestMercurialCommit(object): # changed: 4 ['vcs/web/simplevcs/models.py', ...] 
# removed: 1 ['vcs/utils/web.py'] commit85 = self.repo.get_commit(commit_idx=85) - assert set((node.path for node in commit85.added)) == set([ - 'vcs/utils/diffs.py', - 'vcs/web/simplevcs/views/diffs.py']) - assert set((node.path for node in commit85.changed)) == set([ - 'vcs/web/simplevcs/models.py', - 'vcs/web/simplevcs/utils.py', - 'vcs/web/simplevcs/views/__init__.py', - 'vcs/web/simplevcs/views/repository.py', - ]) - assert set((node.path for node in commit85.removed)) == \ - set(['vcs/utils/web.py']) + assert set((node.path for node in commit85.added)) == set( + ["vcs/utils/diffs.py", "vcs/web/simplevcs/views/diffs.py"] + ) + assert set((node.path for node in commit85.changed)) == set( + [ + "vcs/web/simplevcs/models.py", + "vcs/web/simplevcs/utils.py", + "vcs/web/simplevcs/views/__init__.py", + "vcs/web/simplevcs/views/repository.py", + ] + ) + assert set((node.path for node in commit85.removed)) == set(["vcs/utils/web.py"]) def test_files_state(self): """ Tests state of FileNodes. 
""" commit = self.repo.get_commit(commit_idx=85) - node = commit.get_node('vcs/utils/diffs.py') + node = commit.get_node("vcs/utils/diffs.py") assert node.state, NodeState.ADDED assert node.added assert not node.changed @@ -1050,7 +1140,7 @@ class TestMercurialCommit(object): assert not node.removed commit = self.repo.get_commit(commit_idx=88) - node = commit.get_node('.hgignore') + node = commit.get_node(".hgignore") assert node.state, NodeState.CHANGED assert not node.added assert node.changed @@ -1058,7 +1148,7 @@ class TestMercurialCommit(object): assert not node.removed commit = self.repo.get_commit(commit_idx=85) - node = commit.get_node('setup.py') + node = commit.get_node("setup.py") assert node.state, NodeState.NOT_CHANGED assert not node.added assert not node.changed @@ -1068,7 +1158,7 @@ class TestMercurialCommit(object): # If node has REMOVED state then trying to fetch it would raise # CommitError exception commit = self.repo.get_commit(commit_idx=2) - path = 'vcs/backends/BaseRepository.py' + path = "vcs/backends/BaseRepository.py" with pytest.raises(NodeDoesNotExistError): commit.get_node(path) # but it would be one of ``removed`` (commit's attribute) @@ -1084,57 +1174,53 @@ class TestMercurialCommit(object): def test_repo_files_content_type(self): test_commit = self.repo.get_commit(commit_idx=100) - for node in test_commit.get_node('/'): + for node in test_commit.get_node("/"): if node.is_file(): assert type(node.content) == bytes assert type(node.str_content) == str def test_wrong_path(self): # There is 'setup.py' in the root dir but not there: - path = 'foo/bar/setup.py' + path = "foo/bar/setup.py" with pytest.raises(VCSError): self.repo.get_commit().get_node(path) def test_author_email(self): - assert 'marcin@python-blog.com' == \ - self.repo.get_commit('b986218ba1c9').author_email - assert 'lukasz.balcerzak@python-center.pl' == \ - self.repo.get_commit('3803844fdbd3').author_email - assert '' == self.repo.get_commit('84478366594b').author_email + 
assert "marcin@python-blog.com" == self.repo.get_commit("b986218ba1c9").author_email + assert "lukasz.balcerzak@python-center.pl" == self.repo.get_commit("3803844fdbd3").author_email + assert "" == self.repo.get_commit("84478366594b").author_email def test_author_username(self): - assert 'Marcin Kuzminski' == \ - self.repo.get_commit('b986218ba1c9').author_name - assert 'Lukasz Balcerzak' == \ - self.repo.get_commit('3803844fdbd3').author_name - assert 'marcink' == \ - self.repo.get_commit('84478366594b').author_name + assert "Marcin Kuzminski" == self.repo.get_commit("b986218ba1c9").author_name + assert "Lukasz Balcerzak" == self.repo.get_commit("3803844fdbd3").author_name + assert "marcink" == self.repo.get_commit("84478366594b").author_name class TestLargeFileRepo(object): - def test_large_file(self, backend_hg): - repo = backend_hg.create_test_repo('largefiles', make_db_config()) + conf = make_db_config() + hg_largefiles_store = conf.get("largefiles", "usercache") + repo = backend_hg.create_test_repo("largefiles", conf) tip = repo.scm_instance().get_commit() - node = tip.get_node('.hglf/thisfileislarge') + node = tip.get_node(".hglf/thisfileislarge") lf_node = node.get_largefile_node() assert lf_node.is_largefile() is True assert lf_node.size == 1024000 - assert lf_node.name == '.hglf/thisfileislarge' + assert lf_node.name == ".hglf/thisfileislarge" class TestGetBranchName(object): def test_returns_ref_name_when_type_is_branch(self): - ref = self._create_ref('branch', 'fake-name') + ref = self._create_ref("branch", "fake-name") result = self.repo._get_branch_name(ref) assert result == ref.name @pytest.mark.parametrize("type_", ("book", "tag")) def test_queries_remote_when_type_is_not_branch(self, type_): - ref = self._create_ref(type_, 'wrong-fake-name') + ref = self._create_ref(type_, "wrong-fake-name") with mock.patch.object(self.repo, "_remote") as remote_mock: remote_mock.ctx_branch.return_value = "fake-name" result = self.repo._get_branch_name(ref) @@ 
-1144,7 +1230,7 @@ class TestGetBranchName(object): def _create_ref(self, type_, name): ref = mock.Mock() ref.type = type_ - ref.name = 'wrong-fake-name' + ref.name = "wrong-fake-name" ref.commit_id = "deadbeef" return ref @@ -1153,8 +1239,7 @@ class TestIsTheSameBranch(object): def test_returns_true_when_branches_are_equal(self): source_ref = mock.Mock(name="source-ref") target_ref = mock.Mock(name="target-ref") - branch_name_patcher = mock.patch.object( - self.repo, "_get_branch_name", return_value="default") + branch_name_patcher = mock.patch.object(self.repo, "_get_branch_name", return_value="default") with branch_name_patcher as branch_name_mock: result = self.repo._is_the_same_branch(source_ref, target_ref) @@ -1171,8 +1256,7 @@ class TestIsTheSameBranch(object): def side_effect(ref): return ref.name - branch_name_patcher = mock.patch.object( - self.repo, "_get_branch_name", side_effect=side_effect) + branch_name_patcher = mock.patch.object(self.repo, "_get_branch_name", side_effect=side_effect) with branch_name_patcher as branch_name_mock: result = self.repo._is_the_same_branch(source_ref, target_ref) diff --git a/rhodecode/tests/vcs/test_hg_vcsserver_cache_invalidation.py b/rhodecode/tests/vcs/test_hg_vcsserver_cache_invalidation.py --- a/rhodecode/tests/vcs/test_hg_vcsserver_cache_invalidation.py +++ b/rhodecode/tests/vcs/test_hg_vcsserver_cache_invalidation.py @@ -1,5 +1,3 @@ - - # Copyright (C) 2016-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -35,45 +33,47 @@ class TestMercurialRemoteRepoInvalidatio """ # Default reference used as a dummy during tests. - default_ref = Reference('branch', 'default', None) + default_ref = Reference("branch", "default", None) # Methods of vcsserver.hg.HgRemote that are "writing" operations. 
writing_methods = [ - 'bookmark', - 'commit', - 'merge', - 'pull', - 'pull_cmd', - 'rebase', - 'strip', - 'tag', + "bookmark", + "commit", + "merge", + "pull", + "pull_cmd", + "rebase", + "strip", + "tag", ] - @pytest.mark.parametrize('method_name, method_args', [ - ('_local_merge', [default_ref, None, None, None, default_ref]), - ('_local_pull', ['', default_ref]), - ('bookmark', [None]), - ('pull', ['', default_ref]), - ('remove_tag', ['mytag', None]), - ('strip', [None]), - ('tag', ['newtag', None]), - ]) - def test_method_invokes_invalidate_on_remote_repo( - self, method_name, method_args, backend_hg): + @pytest.mark.parametrize( + "method_name, method_args", + [ + ("_local_merge", [default_ref, None, None, None, default_ref]), + ("_local_pull", ["", default_ref]), + ("bookmark", [None]), + ("pull", ["", default_ref]), + ("remove_tag", ["mytag", None]), + ("strip", [None]), + ("tag", ["newtag", None]), + ], + ) + def test_method_invokes_invalidate_on_remote_repo(self, method_name, method_args, backend_hg): """ Check that the listed methods are invalidating the VCSServer cache after invoking a writing method of their remote repository object. """ - tags = {'mytag': 'mytag-id'} + tags = {"mytag": "mytag-id"} def add_tag(name, raw_id, *args, **kwds): tags[name] = raw_id repo = backend_hg.repo.scm_instance() - with patch.object(repo, '_remote') as remote: + with patch.object(repo, "_remote") as remote: repo.tags = tags - remote.lookup.return_value = ('commit-id', 'commit-idx') + remote.lookup.return_value = ("commit-id", "commit-idx") remote.tags.return_value = tags remote._get_tags.return_value = tags remote.is_empty.return_value = False @@ -98,6 +98,7 @@ class TestMercurialRemoteRepoInvalidatio references. 
""" from rhodecode.model.pull_request import PullRequestModel + repo_id = pull_request.target_repo.repo_id target_vcs = pull_request.target_repo.scm_instance() target_ref = pull_request.target_ref_parts @@ -107,7 +108,8 @@ class TestMercurialRemoteRepoInvalidatio pr = PullRequestModel() workspace_id = pr._workspace_id(pull_request) shadow_repository_path = target_vcs._maybe_prepare_merge_workspace( - repo_id, workspace_id, target_ref, source_ref) + repo_id, workspace_id, target_ref, source_ref + ) shadow_repo = target_vcs.get_shadow_instance(shadow_repository_path, cache=True) # This will populate the cache of the mercurial repository object @@ -116,7 +118,7 @@ class TestMercurialRemoteRepoInvalidatio return shadow_repo, source_ref, target_ref - @pytest.mark.backends('hg') + @pytest.mark.backends("hg") def test_commit_does_not_exist_error_happens(self, pr_util, app): """ This test is somewhat special. It does not really test the system @@ -132,18 +134,18 @@ class TestMercurialRemoteRepoInvalidatio source_vcs = pull_request.source_repo.scm_instance() shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(pull_request) - initial_cache_uid = shadow_repo._remote._wire['context'] - initial_commit_ids = shadow_repo._remote.get_all_commit_ids('visible') + initial_cache_uid = shadow_repo._remote._wire["context"] + initial_commit_ids = shadow_repo._remote.get_all_commit_ids("visible") # Pull from target and source references but without invalidation of # RemoteRepo objects and without VCSServer caching of mercurial repository objects. - with patch.object(shadow_repo._remote, 'invalidate_vcs_cache'): + with patch.object(shadow_repo._remote, "invalidate_vcs_cache"): # NOTE: Do not use patch.dict() to disable the cache because it # restores the WHOLE dict and not only the patched keys. 
- shadow_repo._remote._wire['cache'] = False + shadow_repo._remote._wire["cache"] = False shadow_repo._local_pull(target_vcs.path, target_ref) shadow_repo._local_pull(source_vcs.path, source_ref) - shadow_repo._remote._wire['cache'] = True + shadow_repo._remote._wire["cache"] = True # Try to lookup the target_ref in shadow repo. This should work because # test_repo_maker_uses_session_for_instance_methods @@ -153,14 +155,14 @@ class TestMercurialRemoteRepoInvalidatio # we ensure that call context has not changed, this is what # `invalidate_vcs_cache` does - assert initial_cache_uid == shadow_repo._remote._wire['context'] + assert initial_cache_uid == shadow_repo._remote._wire["context"] # If we try to lookup all commits. # repo commit cache doesn't get invalidated. (Due to patched # invalidation and caching above). - assert initial_commit_ids == shadow_repo._remote.get_all_commit_ids('visible') + assert initial_commit_ids == shadow_repo._remote.get_all_commit_ids("visible") - @pytest.mark.backends('hg') + @pytest.mark.backends("hg") def test_commit_does_not_exist_error_does_not_happen(self, pr_util, app): """ This test simulates a pull request merge in which the pull operations @@ -179,10 +181,10 @@ class TestMercurialRemoteRepoInvalidatio # of RemoteRepo objects. # NOTE: Do not use patch.dict() to disable the cache because it # restores the WHOLE dict and not only the patched keys. - shadow_repo._remote._wire['cache'] = False + shadow_repo._remote._wire["cache"] = False shadow_repo._local_pull(target_vcs.path, target_ref) shadow_repo._local_pull(source_vcs.path, source_ref) - shadow_repo._remote._wire['cache'] = True + shadow_repo._remote._wire["cache"] = True # Try to lookup the target and source references in shadow repo. 
This # should work because the RemoteRepo object gets invalidated during the diff --git a/rhodecode/tests/vcs/test_inmemory.py b/rhodecode/tests/vcs/test_inmemory.py --- a/rhodecode/tests/vcs/test_inmemory.py +++ b/rhodecode/tests/vcs/test_inmemory.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -20,15 +19,21 @@ """ Tests so called "in memory commits" commit API of vcs. """ + import datetime import pytest from rhodecode.lib.str_utils import safe_bytes, safe_str from rhodecode.lib.vcs.exceptions import ( - EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyExistsError, - NodeAlreadyRemovedError, NodeAlreadyChangedError, NodeDoesNotExistError, - NodeNotChangedError) + EmptyRepositoryError, + NodeAlreadyAddedError, + NodeAlreadyExistsError, + NodeAlreadyRemovedError, + NodeAlreadyChangedError, + NodeDoesNotExistError, + NodeNotChangedError, +) from rhodecode.lib.vcs.nodes import DirNode, FileNode from rhodecode.tests.vcs.conftest import BackendTestMixin @@ -36,19 +41,21 @@ from rhodecode.tests.vcs.conftest import @pytest.fixture() def nodes(): nodes = [ - FileNode(b'foobar', content=b'Foo & bar'), - FileNode(b'foobar2', content=b'Foo & bar, doubled!'), - FileNode(b'foo bar with spaces', content=b''), - FileNode(b'foo/bar/baz', content=b'Inside'), - FileNode(b'foo/bar/file.bin', content=( - b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00' - b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe' - b'\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' - b'\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00' - b'\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00' - b'\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff' - b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' - ) + FileNode(b"foobar", content=b"Foo & bar"), + FileNode(b"foobar2", content=b"Foo & bar, doubled!"), + FileNode(b"foo bar with spaces", content=b""), + 
FileNode(b"foo/bar/baz", content=b"Inside"), + FileNode( + b"foo/bar/file.bin", + content=( + b"\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00" + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe" + b"\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + b"\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00" + b"\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00" + b"\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff" + b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + ), ), ] return nodes @@ -80,14 +87,14 @@ class TestInMemoryCommit(BackendTestMixi def test_add_on_branch_hg(self, nodes): for node in nodes: self.imc.add(node) - self.commit(branch='stable') + self.commit(branch="stable") self.assert_successful_commit(nodes) @pytest.mark.backends("git") def test_add_on_branch_git(self, nodes): for node in nodes: self.imc.add(node) - self.commit(branch='stable') + self.commit(branch="stable") self.assert_successful_commit(nodes) def test_add_in_bulk(self, nodes): @@ -98,10 +105,8 @@ class TestInMemoryCommit(BackendTestMixi def test_add_non_ascii_files(self): nodes = [ - FileNode(safe_bytes('żółwik/zwierzątko_utf8_str'), - content=safe_bytes('ćććć')), - FileNode(safe_bytes('żółwik/zwierzątko_unicode'), - content=safe_bytes('ćććć')), + FileNode(safe_bytes("żółwik/zwierzątko_utf8_str"), content=safe_bytes("ćććć")), + FileNode(safe_bytes("żółwik/zwierzątko_unicode"), content=safe_bytes("ćććć")), ] for node in nodes: @@ -112,60 +117,56 @@ class TestInMemoryCommit(BackendTestMixi def commit(self, branch=None): self.old_commit_count = len(self.repo.commit_ids) - self.commit_message = 'Test commit with unicode: żółwik' - self.commit_author = f'{self.__class__.__name__} ' - self.commit = self.imc.commit( - message=self.commit_message, author=self.commit_author, - branch=branch) + self.commit_message = "Test commit with unicode: żółwik" + self.commit_author = f"{self.__class__.__name__} " + self.commit = 
self.imc.commit(message=self.commit_message, author=self.commit_author, branch=branch) def test_add_actually_adds_all_nodes_at_second_commit_too(self): to_add = [ - FileNode(b'foo/bar/image.png', content=b'\0'), - FileNode(b'foo/README.txt', content=b'readme!'), + FileNode(b"foo/bar/image.png", content=b"\0"), + FileNode(b"foo/README.txt", content=b"readme!"), ] self.imc.add(*to_add) - commit = self.imc.commit('Initial', 'joe doe ') - assert isinstance(commit.get_node('foo'), DirNode) - assert isinstance(commit.get_node('foo/bar'), DirNode) + commit = self.imc.commit("Initial", "joe doe ") + assert isinstance(commit.get_node("foo"), DirNode) + assert isinstance(commit.get_node("foo/bar"), DirNode) self.assert_nodes_in_commit(commit, to_add) # commit some more files again to_add = [ - FileNode(b'foo/bar/foobaz/bar', content=b'foo'), - FileNode(b'foo/bar/another/bar', content=b'foo'), - FileNode(b'foo/baz.txt', content=b'foo'), - FileNode(b'foobar/foobaz/file', content=b'foo'), - FileNode(b'foobar/barbaz', content=b'foo'), + FileNode(b"foo/bar/foobaz/bar", content=b"foo"), + FileNode(b"foo/bar/another/bar", content=b"foo"), + FileNode(b"foo/baz.txt", content=b"foo"), + FileNode(b"foobar/foobaz/file", content=b"foo"), + FileNode(b"foobar/barbaz", content=b"foo"), ] self.imc.add(*to_add) - commit = self.imc.commit('Another', 'joe doe ') + commit = self.imc.commit("Another", "joe doe ") self.assert_nodes_in_commit(commit, to_add) def test_add_raise_already_added(self): - node = FileNode(b'foobar', content=b'baz') + node = FileNode(b"foobar", content=b"baz") self.imc.add(node) with pytest.raises(NodeAlreadyAddedError): self.imc.add(node) def test_check_integrity_raise_already_exist(self): - node = FileNode(b'foobar', content=b'baz') + node = FileNode(b"foobar", content=b"baz") self.imc.add(node) - self.imc.commit(message='Added foobar', - author='Some Name ') + self.imc.commit(message="Added foobar", author="Some Name ") self.imc.add(node) with 
pytest.raises(NodeAlreadyExistsError): - self.imc.commit(message='new message', - author='Some Name ') + self.imc.commit(message="new message", author="Some Name ") def test_change(self): - self.imc.add(FileNode(b'foo/bar/baz', content=b'foo')) - self.imc.add(FileNode(b'foo/fbar', content=b'foobar')) - tip = self.imc.commit('Initial', 'joe doe ') + self.imc.add(FileNode(b"foo/bar/baz", content=b"foo")) + self.imc.add(FileNode(b"foo/fbar", content=b"foobar")) + tip = self.imc.commit("Initial", "joe doe ") # Change node's content - node = FileNode(b'foo/bar/baz', content=b'My **changed** content') + node = FileNode(b"foo/bar/baz", content=b"My **changed** content") self.imc.change(node) - self.imc.commit('Changed %s' % node.path, 'joe doe ') + self.imc.commit("Changed %s" % node.path, "joe doe ") newtip = self.repo.get_commit() assert tip != newtip @@ -174,28 +175,22 @@ class TestInMemoryCommit(BackendTestMixi def test_change_non_ascii(self): to_add = [ - FileNode(safe_bytes('żółwik/zwierzątko'), - content=safe_bytes('ćććć')), - FileNode(safe_bytes('żółwik/zwierzątko_uni'), - content=safe_bytes('ćććć')), + FileNode(safe_bytes("żółwik/zwierzątko"), content=safe_bytes("ćććć")), + FileNode(safe_bytes("żółwik/zwierzątko_uni"), content=safe_bytes("ćććć")), ] for node in to_add: self.imc.add(node) - tip = self.imc.commit('Initial', 'joe doe ') + tip = self.imc.commit("Initial", "joe doe ") # Change node's content - node = FileNode(safe_bytes('żółwik/zwierzątko'), - content=b'My **changed** content') + node = FileNode(safe_bytes("żółwik/zwierzątko"), content=b"My **changed** content") self.imc.change(node) - self.imc.commit('Changed %s' % safe_str(node.path), - author='joe doe ') + self.imc.commit("Changed %s" % safe_str(node.path), author="joe doe ") - node_uni = FileNode(safe_bytes('żółwik/zwierzątko_uni'), - content=b'My **changed** content') + node_uni = FileNode(safe_bytes("żółwik/zwierzątko_uni"), content=b"My **changed** content") self.imc.change(node_uni) - 
self.imc.commit('Changed %s' % safe_str(node_uni.path), - author='joe doe ') + self.imc.commit("Changed %s" % safe_str(node_uni.path), author="joe doe ") newtip = self.repo.get_commit() assert tip != newtip @@ -204,24 +199,24 @@ class TestInMemoryCommit(BackendTestMixi self.assert_nodes_in_commit(newtip, (node, node_uni)) def test_change_raise_empty_repository(self): - node = FileNode(b'foobar') + node = FileNode(b"foobar") with pytest.raises(EmptyRepositoryError): self.imc.change(node) def test_check_integrity_change_raise_node_does_not_exist(self): - node = FileNode(b'foobar', content=b'baz') + node = FileNode(b"foobar", content=b"baz") self.imc.add(node) - self.imc.commit(message='Added foobar', author='Some Name ') - node = FileNode(b'not-foobar', content=b'') + self.imc.commit(message="Added foobar", author="Some Name ") + node = FileNode(b"not-foobar", content=b"") self.imc.change(node) with pytest.raises(NodeDoesNotExistError): - self.imc.commit(message='Changed not existing node', author='Some Name ') + self.imc.commit(message="Changed not existing node", author="Some Name ") def test_change_raise_node_already_changed(self): - node = FileNode(b'foobar', content=b'baz') + node = FileNode(b"foobar", content=b"baz") self.imc.add(node) - self.imc.commit(message='Added foobar', author='Some Nam ') - node = FileNode(b'foobar', content=b'more baz') + self.imc.commit(message="Added foobar", author="Some Nam ") + node = FileNode(b"foobar", content=b"more baz") self.imc.change(node) with pytest.raises(NodeAlreadyChangedError): self.imc.change(node) @@ -233,14 +228,14 @@ class TestInMemoryCommit(BackendTestMixi self.imc.change(node) with pytest.raises(NodeNotChangedError): self.imc.commit( - message='Trying to mark node as changed without touching it', - author='Some Name ') + message="Trying to mark node as changed without touching it", author="Some Name " + ) def test_change_raise_node_already_removed(self): - node = FileNode(b'foobar', content=b'baz') + node = 
FileNode(b"foobar", content=b"baz") self.imc.add(node) - self.imc.commit(message='Added foobar', author='Some Name ') - self.imc.remove(FileNode(b'foobar')) + self.imc.commit(message="Added foobar", author="Some Name ") + self.imc.remove(FileNode(b"foobar")) with pytest.raises(NodeAlreadyRemovedError): self.imc.change(node) @@ -251,7 +246,7 @@ class TestInMemoryCommit(BackendTestMixi node = nodes[0] assert node.content == tip.get_node(node.path).content self.imc.remove(node) - self.imc.commit(message=f'Removed {node.path}', author='Some Name ') + self.imc.commit(message=f"Removed {node.path}", author="Some Name ") newtip = self.repo.get_commit() assert tip != newtip @@ -260,31 +255,27 @@ class TestInMemoryCommit(BackendTestMixi newtip.get_node(node.path) def test_remove_last_file_from_directory(self): - node = FileNode(b'omg/qwe/foo/bar', content=b'foobar') + node = FileNode(b"omg/qwe/foo/bar", content=b"foobar") self.imc.add(node) - self.imc.commit('added', author='joe doe ') + self.imc.commit("added", author="joe doe ") self.imc.remove(node) - tip = self.imc.commit('removed', 'joe doe ') + tip = self.imc.commit("removed", "joe doe ") with pytest.raises(NodeDoesNotExistError): - tip.get_node('omg/qwe/foo/bar') + tip.get_node("omg/qwe/foo/bar") def test_remove_raise_node_does_not_exist(self, nodes): self.imc.remove(nodes[0]) with pytest.raises(NodeDoesNotExistError): - self.imc.commit( - message='Trying to remove node at empty repository', - author='Some Name ') + self.imc.commit(message="Trying to remove node at empty repository", author="Some Name ") def test_check_integrity_remove_raise_node_does_not_exist(self, nodes): self.test_add(nodes) # Performs first commit - node = FileNode(b'no-such-file') + node = FileNode(b"no-such-file") self.imc.remove(node) with pytest.raises(NodeDoesNotExistError): - self.imc.commit( - message='Trying to remove not existing node', - author='Some Name ') + self.imc.commit(message="Trying to remove not existing node", author="Some 
Name ") def test_remove_raise_node_already_removed(self, nodes): self.test_add(nodes) # Performs first commit @@ -297,13 +288,13 @@ class TestInMemoryCommit(BackendTestMixi def test_remove_raise_node_already_changed(self, nodes): self.test_add(nodes) # Performs first commit - node = FileNode(nodes[0].bytes_path, content=b'Bending time') + node = FileNode(nodes[0].bytes_path, content=b"Bending time") self.imc.change(node) with pytest.raises(NodeAlreadyChangedError): self.imc.remove(node) def test_reset(self): - self.imc.add(FileNode(b'foo', content=b'bar')) + self.imc.add(FileNode(b"foo", content=b"bar")) # self.imc.change(FileNode(b'baz', content='new')) # self.imc.remove(FileNode(b'qwe')) self.imc.reset() @@ -313,11 +304,11 @@ class TestInMemoryCommit(BackendTestMixi N = 3 # number of commits to perform last = None for x in range(N): - fname = safe_bytes('file%s' % str(x).rjust(5, '0')) - content = safe_bytes('foobar\n' * x) + fname = safe_bytes("file%s" % str(x).rjust(5, "0")) + content = safe_bytes("foobar\n" * x) node = FileNode(fname, content=content) self.imc.add(node) - commit = self.imc.commit("Commit no. %s" % (x + 1), author='Vcs User ') + commit = self.imc.commit("Commit no. 
%s" % (x + 1), author="Vcs User ") assert last != commit last = commit @@ -329,12 +320,10 @@ class TestInMemoryCommit(BackendTestMixi assert len(repo.commit_ids) == N def test_date_attr(self, local_dt_to_utc): - node = FileNode(b'foobar.txt', content=b'Foobared!') + node = FileNode(b"foobar.txt", content=b"Foobared!") self.imc.add(node) date = datetime.datetime(1985, 1, 30, 1, 45) - commit = self.imc.commit( - "Committed at time when I was born ;-)", - author='Test User ', date=date) + commit = self.imc.commit("Committed at time when I was born ;-)", author="Test User ", date=date) assert commit.date == local_dt_to_utc(date) diff --git a/rhodecode/tests/vcs/test_nodes.py b/rhodecode/tests/vcs/test_nodes.py --- a/rhodecode/tests/vcs/test_nodes.py +++ b/rhodecode/tests/vcs/test_nodes.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -38,34 +37,34 @@ def binary_filenode(): b"\x10\x08\x06\x00\x00\x00\x1f??a\x00\x00\x00\x04gAMA\x00\x00\xaf?7" b"\x05\x8a?\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq?e<\x00" b"\x00\x025IDAT8?\xa5\x93?K\x94Q\x14\x87\x9f\xf7?Q\x1bs4?\x03\x9a" - b"\xa8?B\x02\x8b$\x10[U;i\x13?6h?&h[?\"\x14j?\xa2M\x7fB\x14F\x9aQ?&" - b"\x842?\x0b\x89\"\x82??!?\x9c!\x9c2l??{N\x8bW\x9dY\xb4\t/\x1c?=" - b"\x9b?}????\xa9*;9!?\x83\x91?[?\\v*?D\x04\'`EpNp\xa2X\'U?pVq\"Sw." + b'\xa8?B\x02\x8b$\x10[U;i\x13?6h?&h[?"\x14j?\xa2M\x7fB\x14F\x9aQ?&' + b'\x842?\x0b\x89"\x82??!?\x9c!\x9c2l??{N\x8bW\x9dY\xb4\t/\x1c?=' + b"\x9b?}????\xa9*;9!?\x83\x91?[?\\v*?D\x04'`EpNp\xa2X'U?pVq\"Sw." b"\x1e?\x08\x01D?jw????\xbc??7{|\x9b?\x89$\x01??W@\x15\x9c\x05q`Lt/" b"\x97?\x94\xa1d?\x18~?\x18?\x18W[%\xb0?\x83??\x14\x88\x8dB?\xa6H" - b"\tL\tl\x19>/\x01`\xac\xabx?\x9cl\nx\xb0\x98\x07\x95\x88D$\"q[" + b'\tL\tl\x19>/\x01`\xac\xabx?\x9cl\nx\xb0\x98\x07\x95\x88D$"q[' b"\x19?d\x00(o\n\xa0??\x7f\xb9\xa4?\x1bF\x1f\x8e\xac\xa8?j??eUU}?.?" - b"\x9f\x8cE??x\x94??\r\xbdtoJU5\"0N\x10U?\x00??V\t\x02\x9f\x81?U?" 
- b"\x00\x9eM\xae2?r\x9b7\x83\x82\x8aP3????.?&\"?\xb7ZP \x0cJ?\x80\x15T\x95\x9a\x00??S\x8c\r?\xa1" b"\x03\x07?\x96\x9b\xa7\xab=E??\xa4\xb3?\x19q??B\x91=\x8d??k?J" - b"\x0bV\"??\xf7x?\xa1\x00?\\.\x87\x87???\x02F@D\x99],??\x10#?X" + b'\x0bV"??\xf7x?\xa1\x00?\\.\x87\x87???\x02F@D\x99],??\x10#?X' b"\xb7=\xb9\x10?Z\x1by???cI??\x1ag?\x92\xbc?T?t[\x92\x81?<_\x17~" b"\x92\x88?H%?\x10Q\x02\x9f\n\x81qQ\x0bm?\x1bX?\xb1AK\xa6\x9e\xb9?u" - b"\xb2?1\xbe|/\x92M@\xa2!F?\xa9>\"\r\x92\x8e?>\x9a9Qv\x127?a" + b'\xb2?1\xbe|/\x92M@\xa2!F?\xa9>"\r\x92\x8e?>\x9a9Qv\x127?a' b"\xac?Y?8?:??]X???9\x80\xb7?u?\x0b#BZ\x8d=\x1d?p\x00\x00\x00\x00" - b"IEND\xaeB`\x82") + b"IEND\xaeB`\x82" + ) return FileNode(filename, content=data) + return node_maker class TestNodeBasics: - - @pytest.mark.parametrize("path", ['/foo', '/foo/bar']) - @pytest.mark.parametrize( - "kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"]) + @pytest.mark.parametrize("path", ["/foo", "/foo/bar"]) + @pytest.mark.parametrize("kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"]) def test_init_wrong_paths(self, path, kind): """ Cannot initialize Node objects with path with slash at the beginning. 
@@ -74,44 +73,46 @@ class TestNodeBasics: with pytest.raises(NodeError): Node(path, kind) - @pytest.mark.parametrize("path", ['path', 'some/path']) - @pytest.mark.parametrize( - "kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"]) + @pytest.mark.parametrize("path", ["path", "some/path"]) + @pytest.mark.parametrize("kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"]) def test_name(self, path, kind): path = safe_bytes(path) node = Node(path, kind) - assert node.name == 'path' + assert node.name == "path" def test_name_root(self): - node = Node(b'', NodeKind.DIR) - assert node.name == '' + node = Node(b"", NodeKind.DIR) + assert node.name == "" def test_root_node_cannot_be_file(self): with pytest.raises(NodeError): - Node(b'', NodeKind.FILE) + Node(b"", NodeKind.FILE) def test_kind_setter(self): - node = Node(b'', NodeKind.DIR) + node = Node(b"", NodeKind.DIR) with pytest.raises(NodeError): node.kind = NodeKind.FILE def test_compare_equal(self): - node1 = FileNode(b'test', content=b'') - node2 = FileNode(b'test', content=b'') + node1 = FileNode(b"test", content=b"") + node2 = FileNode(b"test", content=b"") assert node1 == node2 assert not node1 != node2 def test_compare_unequal(self): - node1 = FileNode(b'test', content=b'a') - node2 = FileNode(b'test', content=b'b') + node1 = FileNode(b"test", content=b"a") + node2 = FileNode(b"test", content=b"b") assert node1 != node2 assert not node1 == node2 - @pytest.mark.parametrize("node_path, expected_parent_path", [ - ('', b''), - ('some/path/', b'some/'), - ('some/longer/path/', b'some/longer/'), - ]) + @pytest.mark.parametrize( + "node_path, expected_parent_path", + [ + ("", b""), + ("some/path/", b"some/"), + ("some/longer/path/", b"some/longer/"), + ], + ) def test_parent_path_new(self, node_path, expected_parent_path): """ Tests if node's parent path are properly computed. 
@@ -119,11 +120,10 @@ class TestNodeBasics: node_path = safe_bytes(node_path) node = Node(node_path, NodeKind.DIR) parent_path = node.get_parent_path() - assert (parent_path.endswith(b'/') or - node.is_root() and parent_path == b'') + assert parent_path.endswith(b"/") or node.is_root() and parent_path == b"" assert parent_path == expected_parent_path - ''' + """ def _test_trailing_slash(self, path): if not path.endswith('/'): pytest.fail("Trailing slash tests needs paths to end with slash") @@ -134,22 +134,22 @@ class TestNodeBasics: def test_trailing_slash(self): for path in ('/', 'foo/', 'foo/bar/', 'foo/bar/biz/'): self._test_trailing_slash(path) - ''' + """ def test_is_file(self): - node = Node(b'any', NodeKind.FILE) + node = Node(b"any", NodeKind.FILE) assert node.is_file() - node = FileNode(b'any') + node = FileNode(b"any") assert node.is_file() with pytest.raises(AttributeError): node.nodes # noqa def test_is_dir(self): - node = Node(b'any_dir', NodeKind.DIR) + node = Node(b"any_dir", NodeKind.DIR) assert node.is_dir() - node = DirNode(b'any_dir') + node = DirNode(b"any_dir") assert node.is_dir() with pytest.raises(NodeError): @@ -157,14 +157,14 @@ class TestNodeBasics: def test_dir_node_iter(self): nodes = [ - DirNode(b'docs'), - DirNode(b'tests'), - FileNode(b'bar'), - FileNode(b'foo'), - FileNode(b'readme.txt'), - FileNode(b'setup.py'), + DirNode(b"docs"), + DirNode(b"tests"), + FileNode(b"bar"), + FileNode(b"foo"), + FileNode(b"readme.txt"), + FileNode(b"setup.py"), ] - dirnode = DirNode(b'', nodes=nodes) + dirnode = DirNode(b"", nodes=nodes) for node in dirnode: assert node == dirnode.get_node(node.path) @@ -172,15 +172,15 @@ class TestNodeBasics: """ Without link to commit nodes should raise NodeError. 
""" - node = FileNode(b'anything') + node = FileNode(b"anything") with pytest.raises(NodeError): node.state # noqa - node = DirNode(b'anything') + node = DirNode(b"anything") with pytest.raises(NodeError): node.state # noqa def test_file_node_stat(self): - node = FileNode(b'foobar', b'empty... almost') + node = FileNode(b"foobar", b"empty... almost") mode = node.mode # default should be 0100644 assert mode & stat.S_IRUSR assert mode & stat.S_IWUSR @@ -193,36 +193,36 @@ class TestNodeBasics: assert not mode & stat.S_IXOTH def test_file_node_is_executable(self): - node = FileNode(b'foobar', b'empty... almost', mode=0o100755) + node = FileNode(b"foobar", b"empty... almost", mode=0o100755) assert node.is_executable - node = FileNode(b'foobar', b'empty... almost', mode=0o100500) + node = FileNode(b"foobar", b"empty... almost", mode=0o100500) assert node.is_executable - node = FileNode(b'foobar', b'empty... almost', mode=0o100644) + node = FileNode(b"foobar", b"empty... almost", mode=0o100644) assert not node.is_executable def test_file_node_is_not_symlink(self): - node = FileNode(b'foobar', b'empty...') + node = FileNode(b"foobar", b"empty...") assert not node.is_link() def test_mimetype(self): - py_node = FileNode(b'test.py') - tar_node = FileNode(b'test.tar.gz') + py_node = FileNode(b"test.py") + tar_node = FileNode(b"test.tar.gz") - ext = 'CustomExtension' + ext = "CustomExtension" - my_node2 = FileNode(b'myfile2') + my_node2 = FileNode(b"myfile2") my_node2._mimetype = [ext] - my_node3 = FileNode(b'myfile3') + my_node3 = FileNode(b"myfile3") my_node3._mimetype = [ext, ext] - assert py_node.mimetype == 'text/x-python' - assert py_node.get_mimetype() == ('text/x-python', None) + assert py_node.mimetype == "text/x-python" + assert py_node.get_mimetype() == ("text/x-python", None) - assert tar_node.mimetype == 'application/x-tar' - assert tar_node.get_mimetype() == ('application/x-tar', 'gzip') + assert tar_node.mimetype == "application/x-tar" + assert 
tar_node.get_mimetype() == ("application/x-tar", "gzip") with pytest.raises(NodeError): my_node2.get_mimetype() @@ -232,47 +232,45 @@ class TestNodeBasics: def test_lines_counts(self): lines = [ - b'line1\n', - b'line2\n', - b'line3\n', - b'\n', - b'\n', - b'line4\n', + b"line1\n", + b"line2\n", + b"line3\n", + b"\n", + b"\n", + b"line4\n", ] - py_node = FileNode(b'test.py', b''.join(lines)) + py_node = FileNode(b"test.py", b"".join(lines)) assert (len(lines), len(lines)) == py_node.lines() assert (len(lines), len(lines) - 2) == py_node.lines(count_empty=True) def test_lines_no_newline(self): - py_node = FileNode(b'test.py', b'oneline') + py_node = FileNode(b"test.py", b"oneline") assert (1, 1) == py_node.lines() assert (1, 1) == py_node.lines(count_empty=True) class TestNodeContent(object): - def test_if_binary(self, binary_filenode): - filenode = binary_filenode(b'calendar.jpg') + filenode = binary_filenode(b"calendar.jpg") assert filenode.is_binary def test_binary_line_counts(self, binary_filenode): - tar_node = binary_filenode(b'archive.tar.gz') + tar_node = binary_filenode(b"archive.tar.gz") assert (0, 0) == tar_node.lines(count_empty=True) def test_binary_mimetype(self, binary_filenode): - tar_node = binary_filenode(b'archive.tar.gz') - assert tar_node.mimetype == 'application/x-tar' + tar_node = binary_filenode(b"archive.tar.gz") + assert tar_node.mimetype == "application/x-tar" @pytest.mark.usefixtures("vcs_repository_support") class TestNodesCommits(BackendTestMixin): - def test_node_last_commit(self, generate_repo_with_commits): repo = generate_repo_with_commits(20) last_commit = repo.get_commit() for x in range(3): - node = last_commit.get_node(f'file_{x}.txt') + node = last_commit.get_node(f"file_{x}.txt") assert node.last_commit == repo[x] diff --git a/rhodecode/tests/vcs/test_repository.py b/rhodecode/tests/vcs/test_repository.py --- a/rhodecode/tests/vcs/test_repository.py +++ b/rhodecode/tests/vcs/test_repository.py @@ -114,9 +114,7 @@ class 
TestRepositoryBase(BackendTestMixi assert len(self.repo.get_hook_location()) != 0 def test_last_change(self, local_dt_to_utc): - assert self.repo.last_change >= local_dt_to_utc( - datetime.datetime(2010, 1, 1, 21, 0) - ) + assert self.repo.last_change >= local_dt_to_utc(datetime.datetime(2010, 1, 1, 21, 0)) def test_last_change_in_empty_repository(self, vcsbackend, local_dt_to_utc): delta = datetime.timedelta(seconds=1) @@ -195,9 +193,7 @@ class TestRepositoryCompare: @pytest.mark.parametrize("merge", [True, False]) def test_compare_commits_of_same_repository(self, vcsbackend, merge): target_repo = vcsbackend.create_repo(number_of_commits=5) - target_repo.compare( - target_repo[1].raw_id, target_repo[3].raw_id, target_repo, merge=merge - ) + target_repo.compare(target_repo[1].raw_id, target_repo[3].raw_id, target_repo, merge=merge) @pytest.mark.xfail_backends("svn") @pytest.mark.parametrize("merge", [True, False]) @@ -209,9 +205,7 @@ class TestRepositoryCompare: vcsbackend.add_file(source_repo, b"newfile", b"somecontent") source_commit = source_repo.get_commit() - target_repo.compare( - target_repo[1].raw_id, source_repo[3].raw_id, source_repo, merge=merge - ) + target_repo.compare(target_repo[1].raw_id, source_repo[3].raw_id, source_repo, merge=merge) @pytest.mark.xfail_backends("svn") @pytest.mark.parametrize("merge", [True, False]) @@ -351,9 +345,7 @@ class TestRepositoryMerge(object): "merge message 1", dry_run=False, ) - expected_merge_response = MergeResponse( - True, True, merge_response.merge_ref, MergeFailureReason.NONE - ) + expected_merge_response = MergeResponse(True, True, merge_response.merge_ref, MergeFailureReason.NONE) assert merge_response == expected_merge_response target_repo = backends.get_backend(vcsbackend.alias)(self.target_repo.path) @@ -385,9 +377,7 @@ class TestRepositoryMerge(object): "merge message 2", dry_run=False, ) - expected_merge_response = MergeResponse( - True, True, merge_response.merge_ref, MergeFailureReason.NONE - ) + 
expected_merge_response = MergeResponse(True, True, merge_response.merge_ref, MergeFailureReason.NONE) assert merge_response == expected_merge_response target_repo = backends.get_backend(vcsbackend.alias)(self.target_repo.path) @@ -422,9 +412,9 @@ class TestRepositoryMerge(object): # Multiple merges may differ in their commit id. Therefore, we set the # commit id to `None` before comparing the merge responses. - merge_response.merge_ref.commit_id = 'abcdeabcde' + merge_response.merge_ref.commit_id = "abcdeabcde" - merge_response_update.merge_ref.commit_id = 'abcdeabcde' + merge_response_update.merge_ref.commit_id = "abcdeabcde" assert merge_response == merge_response_update assert merge_response.possible is True @@ -436,9 +426,7 @@ class TestRepositoryMerge(object): def test_merge_conflict(self, vcsbackend, dry_run): self.prepare_for_conflict(vcsbackend) - expected_merge_response = MergeResponse( - False, False, None, MergeFailureReason.MERGE_FAILED - ) + expected_merge_response = MergeResponse(False, False, None, MergeFailureReason.MERGE_FAILED) merge_response = self.target_repo.merge( self.repo_id, @@ -491,9 +479,7 @@ class TestRepositoryMerge(object): def test_merge_missing_source_reference(self, vcsbackend): self.prepare_for_success(vcsbackend) - source_ref = Reference( - self.source_ref.type, "not_existing", self.source_ref.commit_id - ) + source_ref = Reference(self.source_ref.type, "not_existing", self.source_ref.commit_id) expected_merge_response = MergeResponse( False, False, @@ -523,9 +509,7 @@ class TestRepositoryMerge(object): metadata={"exception": "ErrorForTest"}, ) - with mock.patch.object( - self.target_repo, "_merge_repo", side_effect=RepositoryError() - ): + with mock.patch.object(self.target_repo, "_merge_repo", side_effect=RepositoryError()): merge_response = self.target_repo.merge( self.repo_id, self.workspace_id, @@ -559,9 +543,7 @@ class TestRepositoryMerge(object): workspace_id = "test-errors-in-merge" repo_id = 
repo_id_generator(workspace_id) with pytest.raises(ValueError): - repo.merge( - repo_id, workspace_id, ref, self, ref, "user name", "user@email.com" - ) + repo.merge(repo_id, workspace_id, ref, self, ref, "user name", "user@email.com") @pytest.mark.usefixtures("vcs_repository_support") diff --git a/rhodecode/tests/vcs/test_svn.py b/rhodecode/tests/vcs/test_svn.py --- a/rhodecode/tests/vcs/test_svn.py +++ b/rhodecode/tests/vcs/test_svn.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -47,13 +46,13 @@ def head(repo): def test_init_fails_if_path_does_not_exist(): - path = os.path.join(TEST_DIR, 'i-do-not-exist') + path = os.path.join(TEST_DIR, "i-do-not-exist") with pytest.raises(VCSError): SubversionRepository(path) def test_init_fails_if_path_is_not_a_valid_repository(tmpdir): - path = str(tmpdir.mkdir('unicode ä')) + path = str(tmpdir.mkdir("unicode ä")) with pytest.raises(VCSError): SubversionRepository(path) @@ -61,17 +60,14 @@ def test_init_fails_if_path_is_not_a_val def test_repo_clone(vcsbackend, reposerver): source = vcsbackend.create_repo(number_of_commits=3) reposerver.serve(source) - repo = SubversionRepository( - vcsbackend.new_repo_path(), - create=True, - src_url=reposerver.url) + repo = SubversionRepository(vcsbackend.new_repo_path(), create=True, src_url=reposerver.url) assert source.commit_ids == repo.commit_ids assert source[0].message == repo[0].message def test_latest_commit(head): - assert head.raw_id == '393' + assert head.raw_id == "393" def test_commit_description(head): @@ -79,18 +75,22 @@ def test_commit_description(head): def test_commit_author(head): - assert head.author == 'marcin' + assert head.author == "marcin" -@pytest.mark.parametrize("filename, content, mime_type", [ - (b'test.txt', b'Text content\n', None), - (b'test.bin', b'\0 binary \0', 'application/octet-stream'), -], ids=['text', 'binary']) +@pytest.mark.parametrize( + "filename, content, 
mime_type", + [ + (b"test.txt", b"Text content\n", None), + (b"test.bin", b"\0 binary \0", "application/octet-stream"), + ], + ids=["text", "binary"], +) def test_sets_mime_type_correctly(vcsbackend, filename, content, mime_type): repo = vcsbackend.create_repo() vcsbackend.ensure_file(filename, content) file_properties = repo._remote.node_properties(filename, 1) - assert file_properties.get('svn:mime-type') == mime_type + assert file_properties.get("svn:mime-type") == mime_type def test_slice_access(repo): @@ -100,7 +100,7 @@ def test_slice_access(repo): end = start + page_size - 1 commits = list(repo[start:end]) - assert [commit.raw_id for commit in commits] == ['1', '2', '3', '4'] + assert [commit.raw_id for commit in commits] == ["1", "2", "3", "4"] def test_walk_changelog_page(repo): @@ -110,14 +110,14 @@ def test_walk_changelog_page(repo): end = start + page_size - 1 commits = list(repo[start:end]) - changelog = [ - 'r%s, %s, %s' % (c.raw_id, c.author, c.message) for c in commits] + changelog = ["r%s, %s, %s" % (c.raw_id, c.author, c.message) for c in commits] expexted_messages = [ - 'r1, marcin, initial import', - 'r2, marcin, hg ignore', - 'r3, marcin, Pip standards refactor', - 'r4, marcin, Base repository few new functions added'] + "r1, marcin, initial import", + "r2, marcin, hg ignore", + "r3, marcin, Pip standards refactor", + "r4, marcin, Base repository few new functions added", + ] assert changelog == expexted_messages @@ -128,68 +128,68 @@ def test_read_full_file_tree(head): def test_topnode_files_attribute(head): - topnode = head.get_node('') + topnode = head.get_node("") topnode.files - - -@pytest.mark.parametrize("filename, content, branch, mime_type", [ - ('branches/plain/test.txt', b'Text content\n', 'plain', None), - ('branches/uniçö∂e/test.bin', b'\0 binary \0', 'uniçö∂e', 'application/octet-stream'), -], ids=['text', 'binary']) +@pytest.mark.parametrize( + "filename, content, branch, mime_type", + [ + ("branches/plain/test.txt", b"Text 
content\n", "plain", None), + ("branches/uniçö∂e/test.bin", b"\0 binary \0", "uniçö∂e", "application/octet-stream"), + ], + ids=["text", "binary"], +) def test_unicode_refs(vcsbackend, filename, content, branch, mime_type): filename = safe_bytes(filename) repo = vcsbackend.create_repo() vcsbackend.ensure_file(filename, content) - with mock.patch(("rhodecode.lib.vcs.backends.svn.repository" - ".SubversionRepository._patterns_from_section"), - return_value=['branches/*']): - assert f'branches/{branch}' in repo.branches + with mock.patch( + ("rhodecode.lib.vcs.backends.svn.repository" ".SubversionRepository._patterns_from_section"), + return_value=["branches/*"], + ): + assert f"branches/{branch}" in repo.branches def test_compatible_version(monkeypatch, vcsbackend): - monkeypatch.setattr(settings, 'SVN_COMPATIBLE_VERSION', 'pre-1.8-compatible') + monkeypatch.setattr(settings, "SVN_COMPATIBLE_VERSION", "pre-1.8-compatible") path = vcsbackend.new_repo_path() SubversionRepository(path, create=True) - with open(f'{path}/db/format') as f: + with open(f"{path}/db/format") as f: first_line = f.readline().strip() - assert first_line == '4' + assert first_line == "4" def test_invalid_compatible_version(monkeypatch, vcsbackend): - monkeypatch.setattr(settings, 'SVN_COMPATIBLE_VERSION', 'i-am-an-invalid-setting') + monkeypatch.setattr(settings, "SVN_COMPATIBLE_VERSION", "i-am-an-invalid-setting") path = vcsbackend.new_repo_path() with pytest.raises(Exception): SubversionRepository(path, create=True) class TestSVNCommit(object): - @pytest.fixture(autouse=True) def prepare(self, repo): self.repo = repo def test_file_history_from_commits(self): - node = self.repo[10].get_node('setup.py') + node = self.repo[10].get_node("setup.py") commit_ids = [commit.raw_id for commit in node.history] - assert ['8'] == commit_ids + assert ["8"] == commit_ids - node = self.repo[20].get_node('setup.py') + node = self.repo[20].get_node("setup.py") node_ids = [commit.raw_id for commit in 
node.history] - assert ['18', - '8'] == node_ids + assert ["18", "8"] == node_ids # special case we check history from commit that has this particular # file changed this means we check if it's included as well - node = self.repo.get_commit('18').get_node('setup.py') + node = self.repo.get_commit("18").get_node("setup.py") node_ids = [commit.raw_id for commit in node.history] - assert ['18', - '8'] == node_ids + assert ["18", "8"] == node_ids def test_repo_files_content_type(self): test_commit = self.repo.get_commit(commit_idx=100) - for node in test_commit.get_node('/'): + for node in test_commit.get_node("/"): if node.is_file(): assert type(node.content) == bytes assert type(node.str_content) == str diff --git a/rhodecode/tests/vcs/test_tags.py b/rhodecode/tests/vcs/test_tags.py --- a/rhodecode/tests/vcs/test_tags.py +++ b/rhodecode/tests/vcs/test_tags.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -20,8 +19,7 @@ import pytest from rhodecode.tests.vcs.conftest import BackendTestMixin -from rhodecode.lib.vcs.exceptions import ( - TagAlreadyExistError, TagDoesNotExistError) +from rhodecode.lib.vcs.exceptions import TagAlreadyExistError, TagDoesNotExistError pytestmark = pytest.mark.backends("git", "hg") @@ -29,11 +27,10 @@ pytestmark = pytest.mark.backends("git", @pytest.mark.usefixtures("vcs_repository_support") class TestTags(BackendTestMixin): - def test_new_tag(self): tip = self.repo.get_commit() tagsize = len(self.repo.tags) - tag = self.repo.tag('last-commit', 'joe', tip.raw_id) + tag = self.repo.tag("last-commit", "joe", tip.raw_id) assert len(self.repo.tags) == tagsize + 1 for top, __, __ in tip.walk(): @@ -41,29 +38,29 @@ class TestTags(BackendTestMixin): def test_tag_already_exist(self): tip = self.repo.get_commit() - self.repo.tag('last-commit', 'joe', tip.raw_id) + self.repo.tag("last-commit", "joe", tip.raw_id) with pytest.raises(TagAlreadyExistError): - 
self.repo.tag('last-commit', 'joe', tip.raw_id) + self.repo.tag("last-commit", "joe", tip.raw_id) commit = self.repo.get_commit(commit_idx=0) with pytest.raises(TagAlreadyExistError): - self.repo.tag('last-commit', 'jane', commit.raw_id) + self.repo.tag("last-commit", "jane", commit.raw_id) def test_remove_tag(self): tip = self.repo.get_commit() - self.repo.tag('last-commit', 'joe', tip.raw_id) + self.repo.tag("last-commit", "joe", tip.raw_id) tagsize = len(self.repo.tags) - self.repo.remove_tag('last-commit', user='evil joe') + self.repo.remove_tag("last-commit", user="evil joe") assert len(self.repo.tags) == tagsize - 1 def test_remove_tag_which_does_not_exist(self): with pytest.raises(TagDoesNotExistError): - self.repo.remove_tag('last-commit', user='evil joe') + self.repo.remove_tag("last-commit", user="evil joe") def test_name_with_slash(self): - self.repo.tag('19/10/11', 'joe') - assert '19/10/11' in self.repo.tags - self.repo.tag('rel.11', 'joe') - assert 'rel.11' in self.repo.tags + self.repo.tag("19/10/11", "joe") + assert "19/10/11" in self.repo.tags + self.repo.tag("rel.11", "joe") + assert "rel.11" in self.repo.tags diff --git a/rhodecode/tests/vcs/test_utils.py b/rhodecode/tests/vcs/test_utils.py --- a/rhodecode/tests/vcs/test_utils.py +++ b/rhodecode/tests/vcs/test_utils.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -34,23 +33,20 @@ from rhodecode.lib.vcs.utils.paths impor @pytest.mark.usefixtures("baseapp") class TestPaths(object): - def _test_get_dirs_for_path(self, path, expected): """ Tests if get_dirs_for_path returns same as expected. 
""" expected = sorted(expected) result = sorted(get_dirs_for_path(path)) - assert result == expected, ( - "%s != %s which was expected result for path %s" - % (result, expected, path)) + assert result == expected, "%s != %s which was expected result for path %s" % (result, expected, path) def test_get_dirs_for_path(self): - path = 'foo/bar/baz/file' + path = "foo/bar/baz/file" paths_and_results = ( - ('foo/bar/baz/file', ['foo', 'foo/bar', 'foo/bar/baz']), - ('foo/bar/', ['foo', 'foo/bar']), - ('foo/bar', ['foo']), + ("foo/bar/baz/file", ["foo", "foo/bar", "foo/bar/baz"]), + ("foo/bar/", ["foo", "foo/bar"]), + ("foo/bar", ["foo"]), ) for path, expected in paths_and_results: self._test_get_dirs_for_path(path, expected) @@ -59,18 +55,17 @@ class TestPaths(object): new = tmpdir.strpath assert get_scms_for_path(new) == [] - os.mkdir(os.path.join(new, '.tux')) + os.mkdir(os.path.join(new, ".tux")) assert get_scms_for_path(new) == [] - os.mkdir(os.path.join(new, '.git')) - assert set(get_scms_for_path(new)) == set(['git']) + os.mkdir(os.path.join(new, ".git")) + assert set(get_scms_for_path(new)) == set(["git"]) - os.mkdir(os.path.join(new, '.hg')) - assert set(get_scms_for_path(new)) == set(['git', 'hg']) + os.mkdir(os.path.join(new, ".hg")) + assert set(get_scms_for_path(new)) == set(["git", "hg"]) class TestGetScm(object): - def test_existing_repository(self, vcs_repository_support): alias, repo = vcs_repository_support assert (alias, repo.path) == get_scm(repo.path) @@ -81,114 +76,101 @@ class TestGetScm(object): def test_get_scm_error_path(self): with pytest.raises(VCSError): - get_scm('err') + get_scm("err") def test_get_two_scms_for_path(self, tmpdir): multialias_repo_path = str(tmpdir) git_default_branch = GitRepository.DEFAULT_BRANCH_NAME - subprocess.check_call(['hg', 'init', multialias_repo_path]) - subprocess.check_call(['git', '-c', f'init.defaultBranch={git_default_branch}', 'init', multialias_repo_path]) + subprocess.check_call(["hg", "init", 
multialias_repo_path]) + subprocess.check_call(["git", "-c", f"init.defaultBranch={git_default_branch}", "init", multialias_repo_path]) with pytest.raises(VCSError): get_scm(multialias_repo_path) def test_ignores_svn_working_copy(self, tmpdir): - tmpdir.mkdir('.svn') + tmpdir.mkdir(".svn") with pytest.raises(VCSError): get_scm(tmpdir.strpath) class TestParseDatetime(object): - def test_datetime_text(self): - assert parse_datetime('2010-04-07 21:29:41') == \ - datetime.datetime(2010, 4, 7, 21, 29, 41) + assert parse_datetime("2010-04-07 21:29:41") == datetime.datetime(2010, 4, 7, 21, 29, 41) def test_no_seconds(self): - assert parse_datetime('2010-04-07 21:29') == \ - datetime.datetime(2010, 4, 7, 21, 29) + assert parse_datetime("2010-04-07 21:29") == datetime.datetime(2010, 4, 7, 21, 29) def test_date_only(self): - assert parse_datetime('2010-04-07') == \ - datetime.datetime(2010, 4, 7) + assert parse_datetime("2010-04-07") == datetime.datetime(2010, 4, 7) def test_another_format(self): - assert parse_datetime('04/07/10 21:29:41') == \ - datetime.datetime(2010, 4, 7, 21, 29, 41) + assert parse_datetime("04/07/10 21:29:41") == datetime.datetime(2010, 4, 7, 21, 29, 41) def test_now(self): - assert parse_datetime('now') - datetime.datetime.now() < \ - datetime.timedelta(seconds=1) + assert parse_datetime("now") - datetime.datetime.now() < datetime.timedelta(seconds=1) def test_today(self): today = datetime.date.today() - assert parse_datetime('today') == \ - datetime.datetime(*today.timetuple()[:3]) + assert parse_datetime("today") == datetime.datetime(*today.timetuple()[:3]) def test_yesterday(self): yesterday = datetime.date.today() - datetime.timedelta(days=1) - assert parse_datetime('yesterday') == \ - datetime.datetime(*yesterday.timetuple()[:3]) + assert parse_datetime("yesterday") == datetime.datetime(*yesterday.timetuple()[:3]) def test_tomorrow(self): tomorrow = datetime.date.today() + datetime.timedelta(days=1) args = tomorrow.timetuple()[:3] + (23, 59, 59) 
- assert parse_datetime('tomorrow') == datetime.datetime(*args) + assert parse_datetime("tomorrow") == datetime.datetime(*args) def test_days(self): timestamp = datetime.datetime.today() - datetime.timedelta(days=3) args = timestamp.timetuple()[:3] + (0, 0, 0, 0) expected = datetime.datetime(*args) - assert parse_datetime('3d') == expected - assert parse_datetime('3 d') == expected - assert parse_datetime('3 day') == expected - assert parse_datetime('3 days') == expected + assert parse_datetime("3d") == expected + assert parse_datetime("3 d") == expected + assert parse_datetime("3 day") == expected + assert parse_datetime("3 days") == expected def test_weeks(self): timestamp = datetime.datetime.today() - datetime.timedelta(days=3 * 7) args = timestamp.timetuple()[:3] + (0, 0, 0, 0) expected = datetime.datetime(*args) - assert parse_datetime('3w') == expected - assert parse_datetime('3 w') == expected - assert parse_datetime('3 week') == expected - assert parse_datetime('3 weeks') == expected + assert parse_datetime("3w") == expected + assert parse_datetime("3 w") == expected + assert parse_datetime("3 week") == expected + assert parse_datetime("3 weeks") == expected def test_mixed(self): - timestamp = ( - datetime.datetime.today() - datetime.timedelta(days=2 * 7 + 3)) + timestamp = datetime.datetime.today() - datetime.timedelta(days=2 * 7 + 3) args = timestamp.timetuple()[:3] + (0, 0, 0, 0) expected = datetime.datetime(*args) - assert parse_datetime('2w3d') == expected - assert parse_datetime('2w 3d') == expected - assert parse_datetime('2w 3 days') == expected - assert parse_datetime('2 weeks 3 days') == expected + assert parse_datetime("2w3d") == expected + assert parse_datetime("2w 3d") == expected + assert parse_datetime("2w 3 days") == expected + assert parse_datetime("2 weeks 3 days") == expected -@pytest.mark.parametrize("test_str, name, email", [ - ('Marcin Kuzminski ', - 'Marcin Kuzminski', 'marcin@python-works.com'), - ('Marcin Kuzminski Spaces < 
marcin@python-works.com >', - 'Marcin Kuzminski Spaces', 'marcin@python-works.com'), - ('Marcin Kuzminski ', - 'Marcin Kuzminski', 'marcin.kuzminski@python-works.com'), - ('mrf RFC_SPEC ', - 'mrf RFC_SPEC', 'marcin+kuzminski@python-works.com'), - ('username ', - 'username', 'user@email.com'), - ('username ', - '', 'justemail@mail.com'), - ('justname', - 'justname', ''), - ('Mr Double Name withemail@email.com ', - 'Mr Double Name', 'withemail@email.com'), -]) +@pytest.mark.parametrize( + "test_str, name, email", + [ + ("Marcin Kuzminski ", "Marcin Kuzminski", "marcin@python-works.com"), + ("Marcin Kuzminski Spaces < marcin@python-works.com >", "Marcin Kuzminski Spaces", "marcin@python-works.com"), + ( + "Marcin Kuzminski ", + "Marcin Kuzminski", + "marcin.kuzminski@python-works.com", + ), + ("mrf RFC_SPEC ", "mrf RFC_SPEC", "marcin+kuzminski@python-works.com"), + ("username ", "username", "user@email.com"), + ("username ", "", "justemail@mail.com"), + ("justname", "justname", ""), + ("Mr Double Name withemail@email.com ", "Mr Double Name", "withemail@email.com"), + ], +) class TestAuthorExtractors(object): - def test_author_email(self, test_str, name, email): assert email == author_email(test_str) diff --git a/rhodecode/tests/vcs/test_vcs.py b/rhodecode/tests/vcs/test_vcs.py --- a/rhodecode/tests/vcs/test_vcs.py +++ b/rhodecode/tests/vcs/test_vcs.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -20,6 +19,7 @@ """ Tests for main module's methods. """ + import os import tempfile import shutil @@ -48,7 +48,7 @@ def test_alias_detect(backend): def test_wrong_alias(): - alias = 'wrong_alias' + alias = "wrong_alias" with pytest.raises(VCSError): get_backend(alias) @@ -77,12 +77,13 @@ def test_get_vcs_instance_by_path_multip Test that ``get_vcs_instance_by_path`` returns None if a path is passed to a directory with multiple repositories. 
""" - empty_dir = tempfile.mkdtemp(prefix='pytest-empty-dir-') - os.mkdir(os.path.join(empty_dir, '.git')) - os.mkdir(os.path.join(empty_dir, '.hg')) + empty_dir = tempfile.mkdtemp(prefix="pytest-empty-dir-") + os.mkdir(os.path.join(empty_dir, ".git")) + os.mkdir(os.path.join(empty_dir, ".hg")) def fin(): shutil.rmtree(empty_dir) + request.addfinalizer(fin) repo = get_vcs_instance(empty_dir) @@ -90,39 +91,32 @@ def test_get_vcs_instance_by_path_multip assert repo is None -@mock.patch('rhodecode.lib.vcs.backends.get_scm') -@mock.patch('rhodecode.lib.vcs.backends.get_backend') -def test_get_vcs_instance_by_path_args_passed( - get_backend_mock, get_scm_mock, tmpdir, vcs_repo): +@mock.patch("rhodecode.lib.vcs.backends.get_scm") +@mock.patch("rhodecode.lib.vcs.backends.get_backend") +def test_get_vcs_instance_by_path_args_passed(get_backend_mock, get_scm_mock, tmpdir, vcs_repo): """ Test that the arguments passed to ``get_vcs_instance_by_path`` are forwarded to the vcs backend class. """ backend = mock.MagicMock() get_backend_mock.return_value = backend - args = ['these-are-test-args', 0, True, None] + args = ["these-are-test-args", 0, True, None] repo = vcs_repo.path get_vcs_instance(repo, *args) backend.assert_called_with(*args, repo_path=repo) -@mock.patch('rhodecode.lib.vcs.backends.get_scm') -@mock.patch('rhodecode.lib.vcs.backends.get_backend') -def test_get_vcs_instance_by_path_kwargs_passed( - get_backend_mock, get_scm_mock, vcs_repo): +@mock.patch("rhodecode.lib.vcs.backends.get_scm") +@mock.patch("rhodecode.lib.vcs.backends.get_backend") +def test_get_vcs_instance_by_path_kwargs_passed(get_backend_mock, get_scm_mock, vcs_repo): """ Test that the keyword arguments passed to ``get_vcs_instance_by_path`` are forwarded to the vcs backend class. 
""" backend = mock.MagicMock() get_backend_mock.return_value = backend - kwargs = { - 'foo': 'these-are-test-args', - 'bar': 0, - 'baz': True, - 'foobar': None - } + kwargs = {"foo": "these-are-test-args", "bar": 0, "baz": True, "foobar": None} repo = vcs_repo.path get_vcs_instance(repo, **kwargs) diff --git a/rhodecode/tests/vcs/utils.py b/rhodecode/tests/vcs/utils.py --- a/rhodecode/tests/vcs/utils.py +++ b/rhodecode/tests/vcs/utils.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -37,7 +36,7 @@ def run_command(cmd, args): """ Runs command on the system with given ``args``. """ - command = ' '.join((cmd, args)) + command = " ".join((cmd, args)) p = Popen(command, shell=True) status = os.waitpid(p.pid, 0)[1] return status @@ -51,12 +50,11 @@ def eprint(msg): Appends line break. """ sys.stderr.write(msg) - sys.stderr.write('\n') + sys.stderr.write("\n") # TODO: Revisit once we have CI running, if this is not helping us, remove it class SCMFetcher(object): - def __init__(self, alias, test_repo_path): """ :param clone_cmd: command which would clone remote repository; pass @@ -75,9 +73,8 @@ class SCMFetcher(object): Tries to fetch repository from remote path. 
""" remote = self.remote_repo - eprint( - "Fetching repository %s into %s" % (remote, self.test_repo_path)) - run_command(self.clone_cmd, '%s %s' % (remote, self.test_repo_path)) + eprint("Fetching repository %s into %s" % (remote, self.test_repo_path)) + run_command(self.clone_cmd, "%s %s" % (remote, self.test_repo_path)) def get_normalized_path(path): @@ -88,29 +85,29 @@ def get_normalized_path(path): """ if os.path.exists(path): dir, basename = os.path.split(path) - splitted_name = basename.split('.') + splitted_name = basename.split(".") if len(splitted_name) > 1: ext = splitted_name[-1] else: ext = None - name = '.'.join(splitted_name[:-1]) - matcher = re.compile(r'^.*-(\d{5})$') + name = ".".join(splitted_name[:-1]) + matcher = re.compile(r"^.*-(\d{5})$") start = 0 m = matcher.match(name) if not m: # Haven't append number yet so return first - newname = f'{name}-00000' + newname = f"{name}-00000" newpath = os.path.join(dir, newname) if ext: - newpath = '.'.join((newpath, ext)) + newpath = ".".join((newpath, ext)) return get_normalized_path(newpath) else: start = int(m.group(1)[-5:]) + 1 for x in range(start, 10000): - newname = name[:-5] + str(x).rjust(5, '0') + newname = name[:-5] + str(x).rjust(5, "0") newpath = os.path.join(dir, newname) if ext: - newpath = '.'.join((newpath, ext)) + newpath = ".".join((newpath, ext)) if not os.path.exists(newpath): return newpath raise VCSTestError("Couldn't compute new path for %s" % path) diff --git a/rhodecode/tests/vcs_operations/__init__.py b/rhodecode/tests/vcs_operations/__init__.py --- a/rhodecode/tests/vcs_operations/__init__.py +++ b/rhodecode/tests/vcs_operations/__init__.py @@ -1,5 +1,4 @@ - -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License, version 3 @@ -36,7 +35,6 @@ from rhodecode.lib.str_utils import safe from rhodecode.tests import 
GIT_REPO, HG_REPO, SVN_REPO DEBUG = True -RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log') REPO_GROUP = 'a_repo_group' HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}' GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}' diff --git a/rhodecode/tests/vcs_operations/conftest.py b/rhodecode/tests/vcs_operations/conftest.py --- a/rhodecode/tests/vcs_operations/conftest.py +++ b/rhodecode/tests/vcs_operations/conftest.py @@ -1,5 +1,4 @@ - -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License, version 3 @@ -19,22 +18,17 @@ """ py.test config for test suite for making push/pull operations. - -.. important:: - - You must have git >= 1.8.5 for tests to work fine. With 68b939b git started - to redirect things to stderr instead of stdout. """ import os -import tempfile -import textwrap + +import pyramid.paster import pytest import logging import requests from rhodecode import events -from rhodecode.lib.str_utils import safe_bytes +from rhodecode.lib.type_utils import AttributeDict from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \ UserToRepoBranchPermission, User from rhodecode.model.integration import IntegrationModel @@ -42,11 +36,13 @@ from rhodecode.model.db import Repositor from rhodecode.model.meta import Session from rhodecode.integrations.types.webhook import WebhookIntegrationType + from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO from rhodecode.tests.conftest import HTTPBIN_DOMAIN, HTTPBIN_POST -from rhodecode.tests.fixture import Fixture -from rhodecode.tests.server_utils import RcWebServer - +from rhodecode.tests.fixtures.rc_fixture import Fixture +from rhodecode.tests.fixtures.fixture_utils import backend_base +from rhodecode.tests.utils import set_anonymous_access, AuthPluginManager +from rhodecode.tests import console_printer REPO_GROUP = 'a_repo_group' 
HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}' @@ -62,39 +58,42 @@ def check_httpbin_connection(): response = requests.get(HTTPBIN_DOMAIN, timeout=5) return response.status_code == 200 except Exception as e: - print(e) + console_printer(e) return False +#overrides backend_N with init_pyramid_app instead of baseapp +@pytest.fixture() +def vcs_backend_git(request, init_pyramid_app, test_repo): + return backend_base(request, 'git', test_repo) + + +@pytest.fixture() +def vcs_backend_hg(request, init_pyramid_app, test_repo): + return backend_base(request, 'hg', test_repo) + + +@pytest.fixture() +def vcs_backend_svn(request, init_pyramid_app, test_repo): + return backend_base(request, 'svn', test_repo) + @pytest.fixture(scope="module") -def rcextensions(request, db_connection, tmpdir_factory): - """ - Installs a testing rcextensions pack to ensure they work as expected. +def tmp_storage_location(request, tmpdir_factory): """ - init_content = textwrap.dedent(""" - # Forward import the example rcextensions to make it - # active for our tests. 
- from rhodecode.tests.other.example_rcextensions import * - """) + Defines a module level storage_location, used mostly to define per-test persistent repo storage + shared across vcsserver, rhodecode and celery + """ - # Note: rcextensions are looked up based on the path of the ini file - root_path = tmpdir_factory.getbasetemp() - rcextensions_path = root_path.join('rcextensions') - init_path = rcextensions_path.join('__init__.py') - - if rcextensions_path.check(): - pytest.fail( - "Path for rcextensions already exists, please clean up before " - "test run this path: %s" % (rcextensions_path, )) - else: - request.addfinalizer(rcextensions_path.remove) - init_path.write_binary(safe_bytes(init_content), ensure=True) + dest = tmpdir_factory.mktemp('tmp_storage_location_', numbered=True) + log.info("Creating test TMP directory at %s", dest) + return dest @pytest.fixture(scope="module") -def repos(request, db_connection): +def repo_group_repos(request): """Create a copy of each test repo in a repo group.""" + fixture = Fixture() repo_group = fixture.create_repo_group(REPO_GROUP) repo_group_id = repo_group.group_id @@ -116,67 +115,117 @@ def repos(request, db_connection): fixture.destroy_repo_group(repo_group_id) -@pytest.fixture(scope="module") -def rc_web_server_config_modification(): - return [] +@pytest.fixture(scope='module') +def rcstack_vcsserver_factory(vcsserver_factory): + return vcsserver_factory + + +@pytest.fixture(scope='module') +def rcstack_celery_factory(celery_factory): + return celery_factory + + +@pytest.fixture(scope='module') +def rcstack_rhodecode_factory(rhodecode_factory): + return rhodecode_factory -@pytest.fixture(scope="module") -def rc_web_server_config_factory(testini_factory, rc_web_server_config_modification): - """ - Configuration file used for the fixture `rc_web_server`. 
- """ +@pytest.fixture(scope='module') +def init_pyramid_app(request, available_port_factory, ini_config_factory, rcstack_vcsserver_factory, tmp_storage_location): + from rhodecode.lib.config_utils import get_app_config + from rhodecode.config.middleware import make_pyramid_app - def factory(rcweb_port, vcsserver_port): - custom_params = [ - {'handler_console': {'level': 'DEBUG'}}, - {'server:main': {'port': rcweb_port}}, - {'app:main': {'vcs.server': 'localhost:%s' % vcsserver_port}} - ] - custom_params.extend(rc_web_server_config_modification) - return testini_factory(custom_params) - return factory + store_dir = tmp_storage_location + port = available_port_factory() + rcstack_vcsserver_factory( + request, + store_dir=store_dir, + port=port, + info_prefix='init-app-' + ) + + app_ini_config = ini_config_factory(store_dir) + + pyramid.paster.setup_logging(app_ini_config) + + settings = get_app_config(app_ini_config) + settings['startup.import_repos'] = True + settings['vcs.server'] = f'localhost:{port}' + settings['repo_store.path'] = str(store_dir) + pyramid_app = make_pyramid_app({'__file__': app_ini_config}, **settings) + + return pyramid_app -@pytest.fixture(scope="module") -def rc_web_server( - request, vcsserver_factory, available_port_factory, - rc_web_server_config_factory, repos, rcextensions): +@pytest.fixture(scope='module') +def rcstack(request, tmp_storage_location, rcextensions, available_port_factory, rcstack_vcsserver_factory, rcstack_celery_factory, rcstack_rhodecode_factory): """ - Run the web server as a subprocess. 
with its own instance of vcsserver + Runs minimal rcstack, i.e vcsserver, celery, rhodecode unpacks rcextensions and repos to a shared location """ - rcweb_port: int = available_port_factory() - log.info('Using rcweb ops test port %s', rcweb_port) + rcstack_data = AttributeDict() + store_dir = tmp_storage_location vcsserver_port: int = available_port_factory() - log.info('Using vcsserver ops test port %s', vcsserver_port) + vcsserver_log = os.path.join(tmp_storage_location, 'vcsserver.log') + + log.info('Using vcsserver test port %s and log %s', vcsserver_port, vcsserver_log) # start vcsserver + _factory = rcstack_vcsserver_factory( + request, + store_dir=store_dir, + port=vcsserver_port, + log_file=vcsserver_log, + overrides=( + {'handler_console': {'level': 'DEBUG'}}, + )) + rcstack_data.vcsserver_port = vcsserver_port + rcstack_data.vcsserver_log = _factory.log_file + - vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log') - vcsserver_factory( - request, vcsserver_port=vcsserver_port, - log_file=vcs_log, + celery_log = os.path.join(tmp_storage_location, 'celery.log') + + + log.info('Using celery log %s', celery_log) + # start celery + _factory = rcstack_celery_factory( + request, + store_dir=store_dir, + port=None, + log_file=celery_log, overrides=( - {'server:main': {'workers': 2}}, - {'server:main': {'graceful_timeout': 10}}, + {'handler_console': {'level': 'DEBUG'}}, + {'app:main': {'vcs.server': f'localhost:{vcsserver_port}'}}, + {'app:main': {'repo_store.path': store_dir}} )) - rc_log = os.path.join(tempfile.gettempdir(), 'rc_op_web.log') - rc_web_server_config = rc_web_server_config_factory( - rcweb_port=rcweb_port, - vcsserver_port=vcsserver_port) - server = RcWebServer(rc_web_server_config, log_file=rc_log) - server.start() + rcstack_data.celery_log = _factory.log_file + + rhodecode_port: int = available_port_factory() + rhodecode_log = os.path.join(tmp_storage_location, 'rhodecode.log') + + + log.info('Using rhodecode test port %s and log %s', 
rhodecode_port, rhodecode_port) - @request.addfinalizer - def cleanup(): - server.shutdown() + # start rhodecode + rc = rcstack_rhodecode_factory( + request, + store_dir=store_dir, + port=rhodecode_port, + log_file=rhodecode_log, + overrides=( + {'handler_console': {'level': 'DEBUG'}}, + {'app:main': {'vcs.server': f'localhost:{vcsserver_port}'}}, + {'app:main': {'repo_store.path': store_dir}} + )) - server.wait_until_ready() - return server + rcstack_data.rhodecode_port = rhodecode_port + rcstack_data.rhodecode_log = rc.log_file + + rc.rcstack_data = rcstack_data + return rc @pytest.fixture() -def disable_locking(baseapp): +def disable_locking(init_pyramid_app): r = Repository.get_by_repo_name(GIT_REPO) Repository.unlock(r) r.enable_locking = False @@ -191,6 +240,28 @@ def disable_locking(baseapp): @pytest.fixture() +def disable_anonymous_user(request, init_pyramid_app, db_connection): + set_anonymous_access(False) + + @request.addfinalizer + def cleanup(): + set_anonymous_access(True) + + +@pytest.fixture(scope='module') +def enable_auth_plugins(request, init_pyramid_app): + """ + Return a factory object that when called, allows to control which + authentication plugins are enabled. 
+ """ + + enabler = AuthPluginManager() + request.addfinalizer(enabler.cleanup) + + return enabler + + +@pytest.fixture() def fs_repo_only(request, rhodecode_fixtures): def fs_repo_fabric(repo_name, repo_type): rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type) diff --git a/rhodecode/tests/vcs_operations/test_vcs_calls_custom_auth_code_403.py b/rhodecode/tests/vcs_operations/test_vcs_calls_custom_auth_code_403.py --- a/rhodecode/tests/vcs_operations/test_vcs_calls_custom_auth_code_403.py +++ b/rhodecode/tests/vcs_operations/test_vcs_calls_custom_auth_code_403.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -32,25 +31,27 @@ from rhodecode.tests import (GIT_REPO, H from rhodecode.tests.vcs_operations import Command -@pytest.fixture(scope="module") -def rc_web_server_config_modification(): - return [ - {'app:main': {'auth_ret_code': '403'}}, - #{'app:main': {'auth_ret_code_detection': 'true'}}, - ] +custom_code = [ + {'app:main': {'auth_ret_code': '403'}}, +] - -@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") -class TestVCSOperationsOnCustomIniConfig(object): +@pytest.mark.parametrize('rcstack', custom_code, indirect=True) +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) +class TestVCSOperationsAuthCode403(object): - def test_clone_wrong_credentials_hg_ret_code(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!') - stdout, stderr = Command('/tmp').execute( + def test_clone_wrong_credentials_hg_ret_code(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO, passwd='bad!') + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'abort: HTTP Error 403: Forbidden' in stderr - def test_clone_wrong_credentials_git_ret_code(self, rc_web_server, tmpdir): - clone_url = 
rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!') - stdout, stderr = Command('/tmp').execute( + def test_clone_wrong_credentials_git_ret_code(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO, passwd='bad!') + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) assert 'The requested URL returned error: 403' in stderr diff --git a/rhodecode/tests/vcs_operations/test_vcs_calls_custom_auth_code_404.py b/rhodecode/tests/vcs_operations/test_vcs_calls_custom_auth_code_404.py --- a/rhodecode/tests/vcs_operations/test_vcs_calls_custom_auth_code_404.py +++ b/rhodecode/tests/vcs_operations/test_vcs_calls_custom_auth_code_404.py @@ -31,26 +31,27 @@ import pytest from rhodecode.tests import (GIT_REPO, HG_REPO) from rhodecode.tests.vcs_operations import Command - -@pytest.fixture(scope="module") -def rc_web_server_config_modification(): - return [ - {'app:main': {'auth_ret_code': '404'}}, - #{'app:main': {'auth_ret_code_detection': 'false'}}, - ] +custom_code = [ + {'app:main': {'auth_ret_code': '404'}}, +] - -@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") -class TestVCSOperationsOnCustomIniConfig(object): +@pytest.mark.parametrize('rcstack', custom_code, indirect=True) +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) +class TestVCSOperationsOnCustomAuthCode404(object): - def test_clone_wrong_credentials_hg_ret_code(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!') - stdout, stderr = Command('/tmp').execute( + def test_clone_wrong_credentials_hg_ret_code(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO, passwd='bad!') + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'abort: HTTP Error 404: Not Found' in stderr - def test_clone_wrong_credentials_git_ret_code(self, rc_web_server, tmpdir): - clone_url = 
rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!') - stdout, stderr = Command('/tmp').execute( + def test_clone_wrong_credentials_git_ret_code(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO, passwd='bad!') + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) assert 'not found' in stderr diff --git a/rhodecode/tests/vcs_operations/test_vcs_calls_custom_auth_code_bad_code.py b/rhodecode/tests/vcs_operations/test_vcs_calls_custom_auth_code_bad_code.py --- a/rhodecode/tests/vcs_operations/test_vcs_calls_custom_auth_code_bad_code.py +++ b/rhodecode/tests/vcs_operations/test_vcs_calls_custom_auth_code_bad_code.py @@ -32,25 +32,27 @@ from rhodecode.tests import (GIT_REPO, H from rhodecode.tests.vcs_operations import Command -@pytest.fixture(scope="module") -def rc_web_server_config_modification(): - return [ - {'app:main': {'auth_ret_code': '600'}}, - #{'app:main': {'auth_ret_code_detection': 'false'}}, - ] +custom_code = [ + {'app:main': {'auth_ret_code': '600'}}, +] - -@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") +@pytest.mark.parametrize('rcstack', custom_code, indirect=True) +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) class TestVCSOperationsOnCustomIniConfig(object): - def test_clone_wrong_credentials_hg_ret_code(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!') - stdout, stderr = Command('/tmp').execute( + def test_clone_wrong_credentials_hg_ret_code(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO, passwd='bad!') + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'abort: authorization failed' in stderr - def test_clone_wrong_credentials_git_ret_code(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!') - stdout, stderr = Command('/tmp').execute( 
+ def test_clone_wrong_credentials_git_ret_code(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO, passwd='bad!') + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) assert 'fatal: Authentication failed' in stderr diff --git a/rhodecode/tests/vcs_operations/test_vcs_calls_small_post_buffer.py b/rhodecode/tests/vcs_operations/test_vcs_calls_small_post_buffer.py --- a/rhodecode/tests/vcs_operations/test_vcs_calls_small_post_buffer.py +++ b/rhodecode/tests/vcs_operations/test_vcs_calls_small_post_buffer.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -20,13 +19,10 @@ """ Test suite for making push/pull operations, on specially modified INI files -.. important:: - - You must have git >= 1.8.5 for tests to work fine. With 68b939b git started - to redirect things to stderr instead of stdout. """ import os +import pytest from rhodecode.lib.vcs.backends.git.repository import GitRepository from rhodecode.lib.vcs.nodes import FileNode @@ -35,31 +31,39 @@ from rhodecode.tests.vcs_operations impo from .test_vcs_operations_git import _check_proper_clone, _check_proper_git_push -def test_git_clone_with_small_push_buffer(backend_git, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - cmd = Command('/tmp') - stdout, stderr = cmd.execute( - 'git -c http.postBuffer=1024 clone', clone_url, tmpdir.strpath) - _check_proper_clone(stdout, stderr, 'git') - cmd.assert_returncode_success() +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) +class TestVCSOperationsOnCustomIniConfig(object): + + def test_git_clone_with_small_push_buffer(self, vcs_backend_git, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + cmd = Command(tmpdir.strpath) + stdout, stderr = cmd.execute( + 'git -c http.postBuffer=1024 clone', clone_url, 
tmpdir.strpath) + _check_proper_clone(stdout, stderr, 'git') + cmd.assert_returncode_success() -def test_git_push_with_small_push_buffer(backend_git, rc_web_server, tmpdir): - empty_repo = backend_git.create_repo() + def test_git_push_with_small_push_buffer(self, vcs_backend_git, rcstack, tmpdir): + empty_repo = vcs_backend_git.create_repo() - clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name) + clone_url = rcstack.repo_clone_url(empty_repo.repo_name) - cmd = Command(tmpdir.strpath) - cmd.execute('git clone', clone_url) + cmd = Command(tmpdir.strpath) + cmd.execute('git clone', clone_url) - repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name)) - repo.in_memory_commit.add(FileNode(b'readme.md', content=b'## Hello')) - repo.in_memory_commit.commit( - message='Commit on branch Master', - author='Automatic test ', - branch='master') + repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name)) + repo.in_memory_commit.add(FileNode(b'readme.md', content=b'## Hello')) + repo.in_memory_commit.commit( + message='Commit on branch Master', + author='Automatic test ', + branch='master') - repo_cmd = Command(repo.path) - stdout, stderr = repo_cmd.execute( - f'git -c http.postBuffer=1024 push --verbose {clone_url} master') - _check_proper_git_push(stdout, stderr, branch='master') + repo_cmd = Command(repo.path) + stdout, stderr = repo_cmd.execute( + f'git -c http.postBuffer=1024 push --verbose {clone_url} master') + _check_proper_git_push(stdout, stderr, branch='master') diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_bad_client.py b/rhodecode/tests/vcs_operations/test_vcs_operations_bad_client.py new file mode 100644 --- /dev/null +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_bad_client.py @@ -0,0 +1,89 @@ +# Copyright (C) 2010-2023 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as 
published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +""" +Test suite for making push/pull operations, on specially modified INI files +""" + +import pytest + +from rhodecode.model.meta import Session +from rhodecode.model.settings import SettingsModel + +from rhodecode.tests import GIT_REPO, HG_REPO +from rhodecode.tests.vcs_operations import Command, _add_files_and_push + + +@pytest.fixture() +def bad_client_setter_factory(request): + def _factory(client_type, client_str_val): + # set allowed clients + setting = SettingsModel().create_or_update_setting(name=f"{client_type}_allowed_clients", val=client_str_val) + Session().add(setting) + Session().commit() + + @request.addfinalizer + def cleanup(): + setting2 = SettingsModel().create_or_update_setting(name=f"{client_type}_allowed_clients", val="*") + Session().add(setting2) + Session().commit() + + return _factory + + +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) +class TestVCSOperationsOnUsingBadClient(object): + def test_push_with_bad_client_repo_by_other_user_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath) + + # set allowed clients + setting = SettingsModel().create_or_update_setting(name=f"hg_allowed_clients", val="0.0.0") + 
Session().add(setting) + Session().commit() + + # push fails repo is locked by other user ! + push_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = _add_files_and_push("hg", tmpdir.strpath, clone_url=push_url) + msg = "Your hg client (ver=mercurial/proto-1.0 (Mercurial 6.7.4)) is forbidden by security rules" + assert msg in stderr + + def test_push_with_bad_client_repo_by_other_user_git(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath) + + # set allowed clients + setting = SettingsModel().create_or_update_setting(name=f"git_allowed_clients", val="0.0.0") + Session().add(setting) + Session().commit() + + # push fails repo is locked by other user! + push_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = _add_files_and_push("git", tmpdir.strpath, clone_url=push_url) + + err = "Your git client (ver=git/2.45.2) is forbidden by security rules" + assert err in stderr + + @pytest.mark.xfail(reason="Lack of proper SVN support of cloning") + def test_push_with_bad_client_repo_by_other_user_svn(self, rcstack, tmpdir): + raise NotImplementedError("lacks svn support") diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_branch_protection.py b/rhodecode/tests/vcs_operations/test_vcs_operations_branch_protection.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations_branch_protection.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_branch_protection.py @@ -27,7 +27,12 @@ from rhodecode.tests.vcs_operations impo Command, _check_proper_hg_push, _check_proper_git_push, _add_files_and_push) -@pytest.mark.usefixtures("disable_anonymous_user") +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) class TestVCSOperations(object): @pytest.mark.parametrize('username, password', [ @@ -41,13 +46,13 @@ class TestVCSOperations(object): 'branch.push_force', ]) def 
test_push_to_protected_branch_fails_with_message_hg( - self, rc_web_server, tmpdir, branch_perm, user_util, + self, rcstack, tmpdir, branch_perm, user_util, branch_permission_setter, username, password): repo = user_util.create_repo(repo_type='hg') repo_name = repo.repo_name branch_permission_setter(repo_name, username, permission=branch_perm) - clone_url = rc_web_server.repo_clone_url( + clone_url = rcstack.repo_clone_url( repo.repo_name, user=username, passwd=password) Command(os.path.dirname(tmpdir.strpath)).execute( 'hg clone', clone_url, tmpdir.strpath) @@ -58,8 +63,8 @@ class TestVCSOperations(object): _check_proper_hg_push(stdout, stderr) else: msg = f"Branch `default` changes rejected by rule `*`=>{branch_perm}" - assert msg in stdout - assert "transaction abort" in stdout + assert msg in stderr + #assert "transaction abort" in stdout @pytest.mark.parametrize('username, password', [ (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS), @@ -72,13 +77,13 @@ class TestVCSOperations(object): 'branch.push_force', ]) def test_push_to_protected_branch_fails_with_message_git( - self, rc_web_server, tmpdir, branch_perm, user_util, + self, rcstack, tmpdir, branch_perm, user_util, branch_permission_setter, username, password): repo = user_util.create_repo(repo_type='git') repo_name = repo.repo_name branch_permission_setter(repo_name, username, permission=branch_perm) - clone_url = rc_web_server.repo_clone_url( + clone_url = rcstack.repo_clone_url( repo.repo_name, user=username, passwd=password) Command(os.path.dirname(tmpdir.strpath)).execute( 'git clone', clone_url, tmpdir.strpath) diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_by_auth_tokens.py b/rhodecode/tests/vcs_operations/test_vcs_operations_by_auth_tokens.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations_by_auth_tokens.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_by_auth_tokens.py @@ -1,5 +1,4 @@ - -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode 
GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License, version 3 @@ -20,10 +19,6 @@ """ Test suite for making push/pull operations, on specially modified INI files -.. important:: - - You must have git >= 1.8.5 for tests to work fine. With 68b939b git started - to redirect things to stderr instead of stdout. """ import pytest @@ -36,10 +31,15 @@ from rhodecode.tests import (GIT_REPO, H from rhodecode.tests.vcs_operations import (Command, _check_proper_clone) -@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") -class TestVCSOperations(object): +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) +class TestVCSOperationsByAuthTokens: def test_clone_by_auth_token( - self, rc_web_server, tmpdir, user_util, enable_auth_plugins): + self, rcstack, tmpdir, user_util, enable_auth_plugins): enable_auth_plugins.enable([ 'egg:rhodecode-enterprise-ce#token', @@ -49,16 +49,16 @@ class TestVCSOperations(object): user = user_util.create_user() token = user.auth_tokens[1] - clone_url = rc_web_server.repo_clone_url( + clone_url = rcstack.repo_clone_url( HG_REPO, user=user.username, passwd=token) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) _check_proper_clone(stdout, stderr, 'hg') def test_clone_by_auth_token_expired( - self, rc_web_server, tmpdir, user_util, enable_auth_plugins): + self, rcstack, tmpdir, user_util, enable_auth_plugins): enable_auth_plugins.enable([ 'egg:rhodecode-enterprise-ce#token', 'egg:rhodecode-enterprise-ce#rhodecode' @@ -69,18 +69,18 @@ class TestVCSOperations(object): user.user_id, 'test-token', -10, AuthTokenModel.cls.ROLE_VCS) token = auth_token.api_key - clone_url = rc_web_server.repo_clone_url( + clone_url = rcstack.repo_clone_url( HG_REPO, user=user.username, passwd=token) - stdout, stderr = 
Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'abort: authorization failed' in stderr msg = 'reason: bad or inactive token.' - rc_web_server.assert_message_in_server_logs(msg) + rcstack.assert_message_in_server_logs(msg) def test_clone_by_auth_token_bad_role( - self, rc_web_server, tmpdir, user_util, enable_auth_plugins): + self, rcstack, tmpdir, user_util, enable_auth_plugins): enable_auth_plugins.enable([ 'egg:rhodecode-enterprise-ce#token', 'egg:rhodecode-enterprise-ce#rhodecode' @@ -91,15 +91,15 @@ class TestVCSOperations(object): user.user_id, 'test-token', -1, AuthTokenModel.cls.ROLE_API) token = auth_token.api_key - clone_url = rc_web_server.repo_clone_url( + clone_url = rcstack.repo_clone_url( HG_REPO, user=user.username, passwd=token) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'abort: authorization failed' in stderr def test_clone_by_auth_token_user_disabled( - self, rc_web_server, tmpdir, user_util, enable_auth_plugins): + self, rcstack, tmpdir, user_util, enable_auth_plugins): enable_auth_plugins.enable([ 'egg:rhodecode-enterprise-ce#token', 'egg:rhodecode-enterprise-ce#rhodecode' @@ -111,18 +111,18 @@ class TestVCSOperations(object): Session().commit() token = user.auth_tokens[1] - clone_url = rc_web_server.repo_clone_url( + clone_url = rcstack.repo_clone_url( HG_REPO, user=user.username, passwd=token) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'abort: authorization failed' in stderr msg = 'reason: account not active.' 
- rc_web_server.assert_message_in_server_logs(msg) + rcstack.assert_message_in_server_logs(msg) def test_clone_by_auth_token_with_scope( - self, rc_web_server, tmpdir, user_util, enable_auth_plugins): + self, rcstack, tmpdir, user_util, enable_auth_plugins): enable_auth_plugins.enable([ 'egg:rhodecode-enterprise-ce#token', 'egg:rhodecode-enterprise-ce#rhodecode' @@ -138,15 +138,15 @@ class TestVCSOperations(object): Session().add(auth_token) Session().commit() - clone_url = rc_web_server.repo_clone_url( + clone_url = rcstack.repo_clone_url( HG_REPO, user=user.username, passwd=token) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) _check_proper_clone(stdout, stderr, 'hg') def test_clone_by_auth_token_with_wrong_scope( - self, rc_web_server, tmpdir, user_util, enable_auth_plugins): + self, rcstack, tmpdir, user_util, enable_auth_plugins): enable_auth_plugins.enable([ 'egg:rhodecode-enterprise-ce#token', 'egg:rhodecode-enterprise-ce#rhodecode' @@ -162,13 +162,13 @@ class TestVCSOperations(object): Session().add(auth_token) Session().commit() - clone_url = rc_web_server.repo_clone_url( + clone_url = rcstack.repo_clone_url( HG_REPO, user=user.username, passwd=token) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'abort: authorization failed' in stderr msg = 'reason: bad or inactive token.' 
- rc_web_server.assert_message_in_server_logs(msg) + rcstack.assert_message_in_server_logs(msg) diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_clone.py b/rhodecode/tests/vcs_operations/test_vcs_operations_clone.py new file mode 100644 --- /dev/null +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_clone.py @@ -0,0 +1,63 @@ + +# Copyright (C) 2010-2023 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# This program is dual-licensed. 
If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + +""" +Test suite for making push/pull operations, on specially modified INI files +""" + +import pytest + +from rhodecode.tests import GIT_REPO, SVN_REPO, HG_REPO + +from rhodecode.tests.vcs_operations import (Command, _check_proper_clone) +from rhodecode.tests.vcs_operations.test_vcs_operations_svn import get_cli_flags + + +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) +class TestVCSOperationsClone: + + def test_clone_git_repo_by_admin(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + cmd = Command(tmpdir.strpath) + stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, 'git') + cmd.assert_returncode_success() + + def test_clone_hg_repo_by_admin(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO) + cmd = Command(tmpdir.strpath) + stdout, stderr = cmd.execute('hg clone', clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, 'hg') + cmd.assert_returncode_success() + + @pytest.mark.xfail(reason='Lack of proper SVN support of cloning') + def test_clone_svn_repo_by_admin(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(SVN_REPO) + username, password = rcstack.repo_clone_credentials() + flags, auth = get_cli_flags(username, password) + cmd = Command(tmpdir.strpath) + stdout, stderr = cmd.execute( + f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, 'svn') + cmd.assert_returncode_success() diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_force_push.py b/rhodecode/tests/vcs_operations/test_vcs_operations_force_push.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations_force_push.py +++ 
b/rhodecode/tests/vcs_operations/test_vcs_operations_force_push.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -27,12 +26,17 @@ from rhodecode.tests.vcs_operations impo _add_files, _add_files_and_push) -@pytest.mark.usefixtures("disable_anonymous_user") +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) class TestVCSOperations(object): - def test_push_force_hg(self, rc_web_server, tmpdir, user_util): + def test_push_force_hg(self, rcstack, tmpdir, user_util): repo = user_util.create_repo(repo_type='hg') - clone_url = rc_web_server.repo_clone_url(repo.repo_name) + clone_url = rcstack.repo_clone_url(repo.repo_name) Command(os.path.dirname(tmpdir.strpath)).execute( 'hg clone', clone_url, tmpdir.strpath) @@ -50,9 +54,9 @@ class TestVCSOperations(object): _check_proper_hg_push(stdout, stderr) - def test_push_force_git(self, rc_web_server, tmpdir, user_util): + def test_push_force_git(self, rcstack, tmpdir, user_util): repo = user_util.create_repo(repo_type='git') - clone_url = rc_web_server.repo_clone_url(repo.repo_name) + clone_url = rcstack.repo_clone_url(repo.repo_name) Command(os.path.dirname(tmpdir.strpath)).execute( 'git clone', clone_url, tmpdir.strpath) @@ -69,13 +73,12 @@ class TestVCSOperations(object): assert '(forced update)' in stderr def test_push_force_hg_blocked_by_branch_permissions( - self, rc_web_server, tmpdir, user_util, branch_permission_setter): + self, rcstack, tmpdir, user_util, branch_permission_setter): repo = user_util.create_repo(repo_type='hg') repo_name = repo.repo_name username = TEST_USER_ADMIN_LOGIN - branch_permission_setter(repo_name, username, permission='branch.push') - clone_url = rc_web_server.repo_clone_url(repo.repo_name) + clone_url = rcstack.repo_clone_url(repo.repo_name) Command(os.path.dirname(tmpdir.strpath)).execute( 'hg clone', clone_url, tmpdir.strpath) @@ 
-88,21 +91,21 @@ class TestVCSOperations(object): 'hg checkout -r 1 && hg commit -m "starting new head"') _add_files('hg', tmpdir.strpath, clone_url=clone_url) + branch_permission_setter(repo_name, username, permission='branch.push') stdout, stderr = Command(tmpdir.strpath).execute( f'hg push --verbose -f {clone_url}') - assert "Branch `default` changes rejected by rule `*`=>branch.push" in stdout - assert "FORCE PUSH FORBIDDEN" in stdout - assert "transaction abort" in stdout + assert "Branch `default` changes rejected by rule `*`=>branch.push" in stderr + assert "FORCE PUSH FORBIDDEN" in stderr def test_push_force_git_blocked_by_branch_permissions( - self, rc_web_server, tmpdir, user_util, branch_permission_setter): + self, rcstack, tmpdir, user_util, branch_permission_setter): repo = user_util.create_repo(repo_type='git') repo_name = repo.repo_name username = TEST_USER_ADMIN_LOGIN branch_permission_setter(repo_name, username, permission='branch.push') - clone_url = rc_web_server.repo_clone_url(repo.repo_name) + clone_url = rcstack.repo_clone_url(repo.repo_name) Command(os.path.dirname(tmpdir.strpath)).execute( 'git clone', clone_url, tmpdir.strpath) diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_git.py b/rhodecode/tests/vcs_operations/test_vcs_operations_git.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations_git.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_git.py @@ -25,8 +25,6 @@ Test suite for making push/pull operatio You must have git >= 1.8.5 for tests to work fine. With 68b939b git started to redirect things to stderr instead of stdout. 
""" - - import time import pytest @@ -42,41 +40,46 @@ from rhodecode.tests.vcs_operations impo _add_files_and_push, GIT_REPO_WITH_GROUP) -@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") -class TestVCSOperations(object): +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) +class TestVCSOperationsGit: - def test_clone_git_repo_by_admin(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - cmd = Command('/tmp') + def test_clone_git_repo_by_admin(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + cmd = Command(tmpdir.strpath) stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath) _check_proper_clone(stdout, stderr, 'git') cmd.assert_returncode_success() - def test_clone_git_repo_by_admin_with_git_suffix(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - cmd = Command('/tmp') + def test_clone_git_repo_by_admin_with_git_suffix(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + cmd = Command(tmpdir.strpath) stdout, stderr = cmd.execute('git clone', clone_url+".git", tmpdir.strpath) _check_proper_clone(stdout, stderr, 'git') cmd.assert_returncode_success() - def test_clone_git_repo_by_id_by_admin(self, rc_web_server, tmpdir): + def test_clone_git_repo_by_id_by_admin(self, rcstack, tmpdir): repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id - clone_url = rc_web_server.repo_clone_url('_%s' % repo_id) - cmd = Command('/tmp') + clone_url = rcstack.repo_clone_url('_%s' % repo_id) + cmd = Command(tmpdir.strpath) stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath) _check_proper_clone(stdout, stderr, 'git') cmd.assert_returncode_success() - def test_clone_git_repo_with_group_by_admin(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO_WITH_GROUP) - cmd = Command('/tmp') + def 
test_clone_git_repo_with_group_by_admin(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO_WITH_GROUP) + cmd = Command(tmpdir.strpath) stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath) _check_proper_clone(stdout, stderr, 'git') cmd.assert_returncode_success() - def test_clone_git_repo_shallow_by_admin(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - cmd = Command('/tmp') + def test_clone_git_repo_shallow_by_admin(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + cmd = Command(tmpdir.strpath) stdout, stderr = cmd.execute( 'git clone --depth=1', clone_url, tmpdir.strpath) @@ -84,65 +87,64 @@ class TestVCSOperations(object): assert 'Cloning into' in stderr cmd.assert_returncode_success() - - def test_clone_wrong_credentials_git(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!') - stdout, stderr = Command('/tmp').execute( + def test_clone_wrong_credentials_git(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO, passwd='bad!') + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) assert 'fatal: Authentication failed' in stderr - def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( + def test_clone_git_dir_as_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'HTTP Error 404: Not Found' in stderr - def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url('trololo') - stdout, stderr = Command('/tmp').execute( + def test_clone_non_existing_path_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url('trololo') + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, 
tmpdir.strpath) assert 'HTTP Error 404: Not Found' in stderr - def test_clone_non_existing_path_git(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url('trololo') - stdout, stderr = Command('/tmp').execute('git clone', clone_url) + def test_clone_non_existing_path_git(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url('trololo') + stdout, stderr = Command(tmpdir.strpath).execute('git clone', clone_url) assert 'not found' in stderr - def test_clone_git_with_slashes(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url('//' + GIT_REPO) - stdout, stderr = Command('/tmp').execute('git clone', clone_url) + def test_clone_git_with_slashes(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url('//' + GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute('git clone', clone_url) assert 'not found' in stderr def test_clone_existing_path_git_not_in_database( - self, rc_web_server, tmpdir, fs_repo_only): + self, rcstack, tmpdir, fs_repo_only): db_name = fs_repo_only('not-in-db-git', repo_type='git') - clone_url = rc_web_server.repo_clone_url(db_name) - stdout, stderr = Command('/tmp').execute( + clone_url = rcstack.repo_clone_url(db_name) + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) assert 'not found' in stderr def test_clone_existing_path_git_not_in_database_different_scm( - self, rc_web_server, tmpdir, fs_repo_only): + self, rcstack, tmpdir, fs_repo_only): db_name = fs_repo_only('not-in-db-hg', repo_type='hg') - clone_url = rc_web_server.repo_clone_url(db_name) - stdout, stderr = Command('/tmp').execute( + clone_url = rcstack.repo_clone_url(db_name) + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) assert 'not found' in stderr - def test_clone_non_existing_store_path_git(self, rc_web_server, tmpdir, user_util): + def test_clone_non_existing_store_path_git(self, rcstack, tmpdir, user_util): repo = 
user_util.create_repo(repo_type='git') - clone_url = rc_web_server.repo_clone_url(repo.repo_name) + clone_url = rcstack.repo_clone_url(repo.repo_name) # Damage repo by removing it's folder RepoModel()._delete_filesystem_repo(repo) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) assert 'not found' in stderr - def test_push_new_file_git(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( + def test_push_new_file_git(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) # commit some stuff into this repo @@ -151,37 +153,37 @@ class TestVCSOperations(object): _check_proper_git_push(stdout, stderr) - def test_push_wrong_credentials_git(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( + def test_push_wrong_credentials_git(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) - push_url = rc_web_server.repo_clone_url( + push_url = rcstack.repo_clone_url( GIT_REPO, user='bad', passwd='name') stdout, stderr = _add_files_and_push( 'git', tmpdir.strpath, clone_url=push_url) assert 'fatal: Authentication failed' in stderr - def test_push_back_to_wrong_url_git(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( + def test_push_back_to_wrong_url_git(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) stdout, stderr = _add_files_and_push( 'git', tmpdir.strpath, - clone_url=rc_web_server.repo_clone_url('not-existing')) + 
clone_url=rcstack.repo_clone_url('not-existing')) assert 'not found' in stderr - def test_ip_restriction_git(self, rc_web_server, tmpdir): + def test_ip_restriction_git(self, rcstack, tmpdir): user_model = UserModel() try: user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32') Session().commit() time.sleep(2) - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) msg = "The requested URL returned error: 403" assert msg in stderr @@ -193,7 +195,7 @@ class TestVCSOperations(object): time.sleep(2) - cmd = Command('/tmp') + cmd = Command(tmpdir.strpath) stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath) cmd.assert_returncode_success() _check_proper_clone(stdout, stderr, 'git') diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_hg.py b/rhodecode/tests/vcs_operations/test_vcs_operations_hg.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations_hg.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_hg.py @@ -1,5 +1,4 @@ - -# Copyright (C) 2010-2023 RhodeCode GmbH +# Copyright (C) 2010-2024 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License, version 3 @@ -19,11 +18,6 @@ """ Test suite for making push/pull operations, on specially modified INI files - -.. important:: - - You must have git >= 1.8.5 for tests to work fine. With 68b939b git started - to redirect things to stderr instead of stdout. 
""" @@ -42,96 +36,101 @@ from rhodecode.tests.vcs_operations impo Command, _check_proper_clone, _add_files_and_push, HG_REPO_WITH_GROUP) -@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") -class TestVCSOperations(object): +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) +class TestVCSOperationsHg(object): - def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( + def test_clone_hg_repo_by_admin(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) _check_proper_clone(stdout, stderr, 'hg') - def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( + def test_clone_hg_repo_by_admin_pull_protocol(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone --pull', clone_url, tmpdir.strpath) _check_proper_clone(stdout, stderr, 'hg') - def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( + def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone --pull --stream', clone_url, tmpdir.strpath) assert 'files to transfer,' in stdout assert 'transferred 1.' 
in stdout assert '114 files updated,' in stdout - def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir): + def test_clone_hg_repo_by_id_by_admin(self, rcstack, tmpdir): repo_id = Repository.get_by_repo_name(HG_REPO).repo_id - clone_url = rc_web_server.repo_clone_url('_%s' % repo_id) - stdout, stderr = Command('/tmp').execute( + clone_url = rcstack.repo_clone_url('_%s' % repo_id) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) _check_proper_clone(stdout, stderr, 'hg') - def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP) - stdout, stderr = Command('/tmp').execute( + def test_clone_hg_repo_with_group_by_admin(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO_WITH_GROUP) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) _check_proper_clone(stdout, stderr, 'hg') - def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!') - stdout, stderr = Command('/tmp').execute( + def test_clone_wrong_credentials_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO, passwd='bad!') + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'abort: authorization failed' in stderr - def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( + def test_clone_git_dir_as_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'HTTP Error 404: Not Found' in stderr - def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url('trololo') - stdout, stderr = Command('/tmp').execute( + def 
test_clone_non_existing_path_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url('trololo') + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'HTTP Error 404: Not Found' in stderr - def test_clone_hg_with_slashes(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url('//' + HG_REPO) - stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath) + def test_clone_hg_with_slashes(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url('//' + HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute('hg clone', clone_url, tmpdir.strpath) assert 'HTTP Error 404: Not Found' in stderr def test_clone_existing_path_hg_not_in_database( - self, rc_web_server, tmpdir, fs_repo_only): + self, rcstack, tmpdir, fs_repo_only): db_name = fs_repo_only('not-in-db-hg', repo_type='hg') - clone_url = rc_web_server.repo_clone_url(db_name) - stdout, stderr = Command('/tmp').execute( + clone_url = rcstack.repo_clone_url(db_name) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'HTTP Error 404: Not Found' in stderr def test_clone_existing_path_hg_not_in_database_different_scm( - self, rc_web_server, tmpdir, fs_repo_only): + self, rcstack, tmpdir, fs_repo_only): db_name = fs_repo_only('not-in-db-git', repo_type='git') - clone_url = rc_web_server.repo_clone_url(db_name) - stdout, stderr = Command('/tmp').execute( + clone_url = rcstack.repo_clone_url(db_name) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'HTTP Error 404: Not Found' in stderr - def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util): + def test_clone_non_existing_store_path_hg(self, rcstack, tmpdir, user_util): repo = user_util.create_repo() - clone_url = rc_web_server.repo_clone_url(repo.repo_name) + clone_url = rcstack.repo_clone_url(repo.repo_name) # Damage repo by removing it's folder 
RepoModel()._delete_filesystem_repo(repo) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'HTTP Error 404: Not Found' in stderr - def test_push_new_file_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( + def test_push_new_file_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) stdout, stderr = _add_files_and_push( @@ -140,7 +139,7 @@ class TestVCSOperations(object): assert 'pushing to' in stdout assert 'size summary' in stdout - def test_push_invalidates_cache(self, rc_web_server, tmpdir): + def test_push_invalidates_cache(self, rcstack, tmpdir): hg_repo = Repository.get_by_repo_name(HG_REPO) # init cache objects @@ -159,8 +158,8 @@ class TestVCSOperations(object): old_ids = [x.cache_state_uid for x in cache_keys] # clone to init cache - clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name) - stdout, stderr = Command('/tmp').execute( + clone_url = rcstack.repo_clone_url(hg_repo.repo_name) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) cache_keys = hg_repo.cache_keys @@ -180,37 +179,37 @@ class TestVCSOperations(object): new_ids = [x.cache_state_uid for x in cache_keys] assert new_ids != old_ids - def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( + def test_push_wrong_credentials_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) - push_url = rc_web_server.repo_clone_url( + push_url = rcstack.repo_clone_url( HG_REPO, user='bad', passwd='name') stdout, stderr = _add_files_and_push( 'hg', tmpdir.strpath, 
clone_url=push_url) assert 'abort: authorization failed' in stderr - def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( + def test_push_back_to_wrong_url_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) stdout, stderr = _add_files_and_push( 'hg', tmpdir.strpath, - clone_url=rc_web_server.repo_clone_url('not-existing')) + clone_url=rcstack.repo_clone_url('not-existing')) assert 'HTTP Error 404: Not Found' in stderr - def test_ip_restriction_hg(self, rc_web_server, tmpdir): + def test_ip_restriction_hg(self, rcstack, tmpdir): user_model = UserModel() try: user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32') Session().commit() time.sleep(2) - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) assert 'abort: HTTP Error 403: Forbidden' in stderr finally: @@ -221,6 +220,6 @@ class TestVCSOperations(object): time.sleep(2) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) _check_proper_clone(stdout, stderr, 'hg') diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_integrations_trigger.py b/rhodecode/tests/vcs_operations/test_vcs_operations_integrations_trigger.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations_integrations_trigger.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_integrations_trigger.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -19,11 +18,6 @@ """ Test suite for making push/pull operations, on specially modified INI files - -.. 
important:: - - You must have git >= 1.8.5 for tests to work fine. With 68b939b git started - to redirect things to stderr instead of stdout. """ import pytest @@ -37,32 +31,41 @@ from rhodecode.tests.vcs_operations.conf connection_available = pytest.mark.skipif( not check_httpbin_connection(), reason="No outside internet connection available") - -@pytest.mark.usefixtures("baseapp", "enable_webhook_push_integration") -class TestVCSOperationsOnCustomIniConfig(object): +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", + "enable_webhook_push_integration" +) +class TestVCSOperationsOnIntegrationsTrigger(object): - def test_push_with_webhook_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) + def test_push_with_webhook_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO) - Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath) + Command(tmpdir.strpath).execute('hg clone', clone_url, tmpdir.strpath) - push_url = rc_web_server.repo_clone_url(HG_REPO) + push_url = rcstack.repo_clone_url(HG_REPO) _add_files_and_push('hg', tmpdir.strpath, clone_url=push_url) - rc_log = rc_web_server.get_rc_log() - assert 'ERROR' not in rc_log - assert "executing task TASK:<@task: rhodecode.integrations.types.webhook.post_to_webhook" in rc_log - assert "handling event repo-push with integration = 1.8.5 for tests to work fine. With 68b939b git started - to redirect things to stderr instead of stdout. 
""" - import pytest from rhodecode.model.db import User, Repository from rhodecode.model.meta import Session from rhodecode.model.repo import RepoModel -from rhodecode.tests import ( - GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, - TEST_USER_REGULAR_PASS) -from rhodecode.tests.vcs_operations import ( - Command, _check_proper_clone, _check_proper_git_push, _add_files_and_push) +from rhodecode.tests import GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS +from rhodecode.tests.vcs_operations import Command, _check_proper_clone, _check_proper_git_push, _add_files_and_push -@pytest.fixture(scope="module") -def rc_web_server_config_modification(): - return [ - {'app:main': {'lock_ret_code': '423'}}, - ] +custom_code = [ + {'app:main': {'auth_ret_code': '423'}}, +] - -@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") -class TestVCSOperationsOnCustomIniConfig(object): - - def test_clone_and_create_lock_hg(self, rc_web_server, tmpdir): +@pytest.mark.parametrize('rcstack', custom_code, indirect=True) +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) +class TestVCSOperationsOnLockingRepos(object): + def test_clone_and_create_lock_hg(self, rcstack, tmpdir): # enable locking r = Repository.get_by_repo_name(HG_REPO) r.enable_locking = True Session().add(r) Session().commit() # clone - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath) # check if lock was made r = Repository.get_by_repo_name(HG_REPO) - assert r.locked[0] == User.get_by_username( - TEST_USER_ADMIN_LOGIN).user_id + assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id - def test_clone_and_create_lock_git(self, rc_web_server, 
tmpdir): + def test_clone_and_create_lock_git(self, rcstack, tmpdir): # enable locking r = Repository.get_by_repo_name(GIT_REPO) r.enable_locking = True Session().add(r) Session().commit() # clone - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( - 'git clone', clone_url, tmpdir.strpath) + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath) # check if lock was made r = Repository.get_by_repo_name(GIT_REPO) - assert r.locked[0] == User.get_by_username( - TEST_USER_ADMIN_LOGIN).user_id + assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id - def test_clone_after_repo_was_locked_hg(self, rc_web_server, tmpdir): + def test_clone_after_repo_was_locked_hg(self, rcstack, tmpdir): # lock repo r = Repository.get_by_repo_name(HG_REPO) Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id) # pull fails since repo is locked - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - msg = ("""abort: HTTP Error 423: Repository `%s` locked by user `%s`""" - % (HG_REPO, TEST_USER_ADMIN_LOGIN)) + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath) + msg = f"abort: HTTP Error 423: Repository `{HG_REPO}` locked by user `{TEST_USER_ADMIN_LOGIN}`" assert msg in stderr - def test_clone_after_repo_was_locked_git(self, rc_web_server, tmpdir): + def test_clone_after_repo_was_locked_git(self, rcstack, tmpdir): # lock repo r = Repository.get_by_repo_name(GIT_REPO) Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id) # pull fails since repo is locked - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( - 'git clone', clone_url, tmpdir.strpath) + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr 
= Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath) - lock_msg = ( - 'remote: ERROR: Repository `vcs_test_git` locked by user ' + - '`test_admin`. Reason:`lock_auto`') + lock_msg = "remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`" assert lock_msg in stderr - assert 'remote: Pre pull hook failed: aborting' in stderr - assert 'fatal: remote did not send all necessary objects' in stderr + assert "fatal: remote did not send all necessary objects" in stderr + assert "remote: Pre pull hook failed: aborting" in stderr - def test_push_on_locked_repo_by_other_user_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) + def test_push_on_locked_repo_by_other_user_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath) # lock repo r = Repository.get_by_repo_name(HG_REPO) # let this user actually push ! - RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, - perm='repository.write') + RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, perm="repository.write") Session().commit() Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id) # push fails repo is locked by other user ! 
- push_url = rc_web_server.repo_clone_url( - HG_REPO, - user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS) - stdout, stderr = _add_files_and_push( - 'hg', tmpdir.strpath, clone_url=push_url) - msg = ("""abort: HTTP Error 423: Repository `%s` locked by user `%s`""" - % (HG_REPO, TEST_USER_ADMIN_LOGIN)) + push_url = rcstack.repo_clone_url(HG_REPO, user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS) + stdout, stderr = _add_files_and_push("hg", tmpdir.strpath, clone_url=push_url) + msg = f"abort: HTTP Error 423: Repository `{HG_REPO}` locked by user `{TEST_USER_ADMIN_LOGIN}`" assert msg in stderr - def test_push_on_locked_repo_by_other_user_git( - self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( - 'git clone', clone_url, tmpdir.strpath) + def test_push_on_locked_repo_by_other_user_git(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath) # lock repo r = Repository.get_by_repo_name(GIT_REPO) # let this user actually push ! - RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, - perm='repository.write') + RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, perm="repository.write") Session().commit() Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id) # push fails repo is locked by other user! 
- push_url = rc_web_server.repo_clone_url( - GIT_REPO, - user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS) - stdout, stderr = _add_files_and_push( - 'git', tmpdir.strpath, clone_url=push_url) + push_url = rcstack.repo_clone_url(GIT_REPO, user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS) + stdout, stderr = _add_files_and_push("git", tmpdir.strpath, clone_url=push_url) - err = 'Repository `%s` locked by user `%s`' % ( - GIT_REPO, TEST_USER_ADMIN_LOGIN) + err = f"Repository `{GIT_REPO}` locked by user `{TEST_USER_ADMIN_LOGIN}`" # err = 'RPC failed; result=22, HTTP code = 423' assert err in stderr - def test_push_unlocks_repository_hg(self, rc_web_server, tmpdir): + def test_push_unlocks_repository_hg(self, rcstack, tmpdir): # enable locking r = Repository.get_by_repo_name(HG_REPO) r.enable_locking = True Session().add(r) Session().commit() - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - _check_proper_clone(stdout, stderr, 'hg') + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, "hg") # check for lock repo after clone r = Repository.get_by_repo_name(HG_REPO) @@ -177,16 +146,14 @@ class TestVCSOperationsOnCustomIniConfig assert r.locked[0] == uid # push is ok and repo is now unlocked - stdout, stderr = _add_files_and_push( - 'hg', tmpdir.strpath, clone_url=clone_url) - assert ('remote: Released lock on repo `%s`' % HG_REPO) in stdout + stdout, stderr = _add_files_and_push("hg", tmpdir.strpath, clone_url=clone_url) + assert f"remote: Released lock on repo `{HG_REPO}`" in stdout # we need to cleanup the Session Here ! 
Session.remove() r = Repository.get_by_repo_name(HG_REPO) assert r.locked == [None, None, None] - def test_push_unlocks_repository_git(self, rc_web_server, tmpdir): - + def test_push_unlocks_repository_git(self, rcstack, tmpdir): # Note: Did a first debugging session. Seems that # Repository.get_locking_state is called twice. The second call # has the action "pull" and does not reset the lock. @@ -197,19 +164,16 @@ class TestVCSOperationsOnCustomIniConfig Session().add(r) Session().commit() - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( - 'git clone', clone_url, tmpdir.strpath) - _check_proper_clone(stdout, stderr, 'git') + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath) + _check_proper_clone(stdout, stderr, "git") # check for lock repo after clone r = Repository.get_by_repo_name(GIT_REPO) - assert r.locked[0] == User.get_by_username( - TEST_USER_ADMIN_LOGIN).user_id + assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id # push is ok and repo is now unlocked - stdout, stderr = _add_files_and_push( - 'git', tmpdir.strpath, clone_url=clone_url) + stdout, stderr = _add_files_and_push("git", tmpdir.strpath, clone_url=clone_url) _check_proper_git_push(stdout, stderr) # assert ('remote: Released lock on repo `%s`' % GIT_REPO) in stdout diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_locking_custom_code.py b/rhodecode/tests/vcs_operations/test_vcs_operations_locking_custom_code.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations_locking_custom_code.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_locking_custom_code.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -26,102 +25,83 @@ Test suite for making push/pull operatio to redirect things to stderr instead of stdout. 
""" - import pytest from rhodecode.model.db import User, Repository from rhodecode.model.meta import Session from rhodecode.model.repo import RepoModel -from rhodecode.tests import ( - GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, - TEST_USER_REGULAR_PASS) +from rhodecode.tests import GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS from rhodecode.tests.vcs_operations import Command, _add_files_and_push +custom_code = [ + {'app:main': {'lock_ret_code': '400'}}, +] -@pytest.fixture(scope="module") -def rc_web_server_config_modification(): - return [ - {'app:main': {'lock_ret_code': '400'}}, - ] - - -@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") -class TestVCSOperationsOnCustomIniConfig(object): - - def test_clone_after_repo_was_locked_hg(self, rc_web_server, tmpdir): +@pytest.mark.parametrize('rcstack', custom_code, indirect=True) +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) +class TestVCSOperationsOnCustomLockingCode(object): + def test_clone_after_repo_was_locked_hg(self, rcstack, tmpdir): # lock repo r = Repository.get_by_repo_name(HG_REPO) Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id) # pull fails since repo is locked - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) - msg = ("""abort: HTTP Error 400: Repository `%s` locked by user `%s`""" - % (HG_REPO, TEST_USER_ADMIN_LOGIN)) + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath) + msg = f"abort: HTTP Error 400: Repository `{HG_REPO}` locked by user `{TEST_USER_ADMIN_LOGIN}`" assert msg in stderr - def test_clone_after_repo_was_locked_git(self, rc_web_server, tmpdir): + def test_clone_after_repo_was_locked_git(self, rcstack, tmpdir): # lock repo r = 
Repository.get_by_repo_name(GIT_REPO) Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id) # pull fails since repo is locked - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( - 'git clone', clone_url, tmpdir.strpath) + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath) - lock_msg = ( - 'remote: ERROR: Repository `vcs_test_git` locked by user ' + - '`test_admin`. Reason:`lock_auto`') + lock_msg = "remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`" assert lock_msg in stderr - assert 'remote: Pre pull hook failed: aborting' in stderr - assert 'fatal: remote did not send all necessary objects' in stderr + assert "remote: Pre pull hook failed: aborting" in stderr + assert "fatal: remote did not send all necessary objects" in stderr - def test_push_on_locked_repo_by_other_user_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( - 'hg clone', clone_url, tmpdir.strpath) + def test_push_on_locked_repo_by_other_user_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath) # lock repo r = Repository.get_by_repo_name(HG_REPO) # let this user actually push ! - RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, - perm='repository.write') + RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, perm="repository.write") Session().commit() Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id) # push fails repo is locked by other user ! 
- push_url = rc_web_server.repo_clone_url( - HG_REPO, - user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS) - stdout, stderr = _add_files_and_push( - 'hg', tmpdir.strpath, clone_url=push_url) - msg = ("""abort: HTTP Error 400: Repository `%s` locked by user `%s`""" - % (HG_REPO, TEST_USER_ADMIN_LOGIN)) + push_url = rcstack.repo_clone_url(HG_REPO, user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS) + stdout, stderr = _add_files_and_push("hg", tmpdir.strpath, clone_url=push_url) + msg = f"abort: HTTP Error 400: Repository `{HG_REPO}` locked by user `{TEST_USER_ADMIN_LOGIN}`" + assert msg in stderr - def test_push_on_locked_repo_by_other_user_git( - self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( - 'git clone', clone_url, tmpdir.strpath) + def test_push_on_locked_repo_by_other_user_git(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath) # lock repo r = Repository.get_by_repo_name(GIT_REPO) # let this user actually push ! - RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, - perm='repository.write') + RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, perm="repository.write") Session().commit() Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id) # push fails repo is locked by other user! 
- push_url = rc_web_server.repo_clone_url( - GIT_REPO, - user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS) - stdout, stderr = _add_files_and_push( - 'git', tmpdir.strpath, clone_url=push_url) + push_url = rcstack.repo_clone_url(GIT_REPO, user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS) + stdout, stderr = _add_files_and_push("git", tmpdir.strpath, clone_url=push_url) - err = 'Repository `%s` locked by user `%s`' % ( - GIT_REPO, TEST_USER_ADMIN_LOGIN) + err = f"Repository `{GIT_REPO}` locked by user `{TEST_USER_ADMIN_LOGIN}`" assert err in stderr diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_new_branch_push.py b/rhodecode/tests/vcs_operations/test_vcs_operations_new_branch_push.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations_new_branch_push.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_new_branch_push.py @@ -26,12 +26,16 @@ from rhodecode.tests.vcs_operations impo Command, _check_proper_hg_push, _check_proper_git_push, _add_files_and_push) -@pytest.mark.usefixtures("disable_anonymous_user") -class TestVCSOperations(object): +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", +) +class TestVCSOperationsNewBranchPush(object): - def test_push_new_branch_hg(self, rc_web_server, tmpdir, user_util): + def test_push_new_branch_hg(self, rcstack, tmpdir, user_util): repo = user_util.create_repo(repo_type='hg') - clone_url = rc_web_server.repo_clone_url(repo.repo_name) + clone_url = rcstack.repo_clone_url(repo.repo_name) Command(os.path.dirname(tmpdir.strpath)).execute( 'hg clone', clone_url, tmpdir.strpath) @@ -48,9 +52,9 @@ class TestVCSOperations(object): _check_proper_hg_push(stdout, stderr) - def test_push_new_branch_git(self, rc_web_server, tmpdir, user_util): + def test_push_new_branch_git(self, rcstack, tmpdir, user_util): repo = user_util.create_repo(repo_type='git') - clone_url = rc_web_server.repo_clone_url(repo.repo_name) + clone_url = 
rcstack.repo_clone_url(repo.repo_name) Command(os.path.dirname(tmpdir.strpath)).execute( 'git clone', clone_url, tmpdir.strpath) @@ -67,13 +71,13 @@ class TestVCSOperations(object): _check_proper_git_push(stdout, stderr, branch='dev') def test_push_new_branch_hg_with_branch_permissions_no_force_push( - self, rc_web_server, tmpdir, user_util, branch_permission_setter): + self, rcstack, tmpdir, user_util, branch_permission_setter): repo = user_util.create_repo(repo_type='hg') repo_name = repo.repo_name username = TEST_USER_ADMIN_LOGIN branch_permission_setter(repo_name, username, permission='branch.push') - clone_url = rc_web_server.repo_clone_url(repo.repo_name) + clone_url = rcstack.repo_clone_url(repo.repo_name) Command(os.path.dirname(tmpdir.strpath)).execute( 'hg clone', clone_url, tmpdir.strpath) @@ -91,13 +95,13 @@ class TestVCSOperations(object): _check_proper_hg_push(stdout, stderr) def test_push_new_branch_git_with_branch_permissions_no_force_push( - self, rc_web_server, tmpdir, user_util, branch_permission_setter): + self, rcstack, tmpdir, user_util, branch_permission_setter): repo = user_util.create_repo(repo_type='git') repo_name = repo.repo_name username = TEST_USER_ADMIN_LOGIN branch_permission_setter(repo_name, username, permission='branch.push') - clone_url = rc_web_server.repo_clone_url(repo.repo_name) + clone_url = rcstack.repo_clone_url(repo.repo_name) Command(os.path.dirname(tmpdir.strpath)).execute( 'git clone', clone_url, tmpdir.strpath) diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_rcextensions_push.py b/rhodecode/tests/vcs_operations/test_vcs_operations_rcextensions_push.py new file mode 100644 --- /dev/null +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_rcextensions_push.py @@ -0,0 +1,81 @@ +# Copyright (C) 2010-2023 RhodeCode GmbH +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License, version 3 +# (only), as published by the Free 
Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. +# +# This program is dual-licensed. If you wish to learn more about the +# RhodeCode Enterprise Edition, including its added features, Support services, +# and proprietary license terms, please see https://rhodecode.com/licenses/ + + +import os +import pytest + +from rhodecode.tests.fixtures.rcextensions_fixtures import store_rcextensions +from rhodecode.tests.vcs_operations import ( + Command, _check_proper_hg_push, _check_proper_git_push, + _add_files_and_push) + + +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) +class TestVCSOperationsWithRCExtensions(object): + + def test_push_when_rcextensions_fail_hg(self, rcstack, tmpdir, user_util, rcextensions, rcextensions_modification): + repo = user_util.create_repo(repo_type='hg') + clone_url = rcstack.repo_clone_url(repo.repo_name) + Command(os.path.dirname(tmpdir.strpath)).execute( + 'hg clone', clone_url, tmpdir.strpath) + + stdout, stderr = _add_files_and_push( + 'hg', tmpdir.strpath, clone_url=clone_url) + _check_proper_hg_push(stdout, stderr) + + mods = [ + ('_pre_push_hook', + """ + return HookResponse(1, 'HOOK_FAIL_TEST_HG') + """) + ] + rcstack_location = os.path.dirname(rcstack.config_file) + with rcextensions_modification(rcstack_location, mods): + stdout, stderr = _add_files_and_push( + 'hg', tmpdir.strpath, clone_url=clone_url) + assert 'HOOK_FAIL_TEST_HG' in stdout + + def test_push_when_rcextensions_fail_git(self, rcstack, tmpdir, user_util, rcextensions, rcextensions_modification): + repo = user_util.create_repo(repo_type='git') + clone_url =
rcstack.repo_clone_url(repo.repo_name) + Command(os.path.dirname(tmpdir.strpath)).execute( + 'git clone', clone_url, tmpdir.strpath) + + stdout, stderr = _add_files_and_push( + 'git', tmpdir.strpath, clone_url=clone_url) + _check_proper_git_push(stdout, stderr) + + mods = [ + ('_pre_push_hook', + """ + return HookResponse(1, 'HOOK_FAIL_TEST_GIT') + """) + ] + + rcstack_location = os.path.dirname(rcstack.config_file) + with rcextensions_modification(rcstack_location, mods): + stdout, stderr = _add_files_and_push( + 'git', tmpdir.strpath, clone_url=clone_url) + assert 'HOOK_FAIL_TEST_GIT' in stderr + diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_special.py b/rhodecode/tests/vcs_operations/test_vcs_operations_special.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations_special.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_special.py @@ -32,13 +32,16 @@ from rhodecode.tests.vcs_operations impo _add_files_and_push) -@pytest.mark.usefixtures("disable_locking") +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_locking", +) class TestVCSOperationsSpecial(object): - def test_git_sets_default_branch_if_not_master( - self, backend_git, tmpdir, rc_web_server): - empty_repo = backend_git.create_repo() - clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name) + def test_git_sets_default_branch_if_not_master(self, vcs_backend_git, tmpdir, rcstack): + empty_repo = vcs_backend_git.create_repo() + clone_url = rcstack.repo_clone_url(empty_repo.repo_name) cmd = Command(tmpdir.strpath) cmd.execute('git clone', clone_url) @@ -63,25 +66,24 @@ class TestVCSOperationsSpecial(object): # Doing an explicit commit in order to get latest user logs on MySQL Session().commit() - def test_git_fetches_from_remote_repository_with_annotated_tags( - self, backend_git, rc_web_server): + def test_git_fetches_from_remote_repository_with_annotated_tags(self, vcs_backend_git, rcstack): # Note: This is a test specific to the git 
backend. It checks the # integration of fetching from a remote repository which contains # annotated tags. # Dulwich shows this specific behavior only when # operating against a remote repository. - source_repo = backend_git['annotated-tag'] - target_vcs_repo = backend_git.create_repo().scm_instance() - target_vcs_repo.fetch(rc_web_server.repo_clone_url(source_repo.repo_name)) + source_repo = vcs_backend_git['annotated-tag'] + target_vcs_repo = vcs_backend_git.create_repo().scm_instance() + target_vcs_repo.fetch(rcstack.repo_clone_url(source_repo.repo_name)) - def test_git_push_shows_pull_request_refs(self, backend_git, rc_web_server, tmpdir): + def test_git_push_shows_pull_request_refs(self, vcs_backend_git, rcstack, tmpdir): """ test if remote info about refs is visible """ - empty_repo = backend_git.create_repo() + empty_repo = vcs_backend_git.create_repo() - clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name) + clone_url = rcstack.repo_clone_url(empty_repo.repo_name) cmd = Command(tmpdir.strpath) cmd.execute('git clone', clone_url) @@ -97,7 +99,7 @@ class TestVCSOperationsSpecial(object): stdout, stderr = repo_cmd.execute('git push --verbose origin master') _check_proper_git_push(stdout, stderr, branch='master') - ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=master' + ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=master' assert f'remote: RhodeCode: open pull request link: {ref}' in stderr assert 'remote: RhodeCode: push completed' in stderr @@ -129,14 +131,14 @@ class TestVCSOperationsSpecial(object): stdout, stderr = repo_cmd.execute('git push --verbose origin feature') _check_proper_git_push(stdout, stderr, branch='feature') - ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=feature' + ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=feature' assert f'remote: RhodeCode: open pull request link: {ref}' in stderr assert 
'remote: RhodeCode: push completed' in stderr - def test_hg_push_shows_pull_request_refs(self, backend_hg, rc_web_server, tmpdir): - empty_repo = backend_hg.create_repo() + def test_hg_push_shows_pull_request_refs(self, vcs_backend_hg, rcstack, tmpdir): + empty_repo = vcs_backend_hg.create_repo() - clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name) + clone_url = rcstack.repo_clone_url(empty_repo.repo_name) cmd = Command(tmpdir.strpath) cmd.execute('hg clone', clone_url) @@ -154,7 +156,7 @@ class TestVCSOperationsSpecial(object): stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url) _check_proper_hg_push(stdout, stderr, branch='default') - ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default' + ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default' assert f'remote: RhodeCode: open pull request link: {ref}' in stdout assert 'remote: RhodeCode: push completed' in stdout @@ -189,14 +191,14 @@ class TestVCSOperationsSpecial(object): stdout, stderr = repo_cmd.execute('hg push --new-branch --verbose', clone_url) _check_proper_hg_push(stdout, stderr, branch='feature') - ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=feature' + ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=feature' assert f'remote: RhodeCode: open pull request link: {ref}' in stdout assert 'remote: RhodeCode: push completed' in stdout - def test_hg_push_shows_pull_request_refs_book(self, backend_hg, rc_web_server, tmpdir): - empty_repo = backend_hg.create_repo() + def test_hg_push_shows_pull_request_refs_book(self, vcs_backend_hg, rcstack, tmpdir): + empty_repo = vcs_backend_hg.create_repo() - clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name) + clone_url = rcstack.repo_clone_url(empty_repo.repo_name) cmd = Command(tmpdir.strpath) cmd.execute('hg clone', clone_url) @@ -214,7 +216,7 @@ class TestVCSOperationsSpecial(object): stdout, stderr = 
repo_cmd.execute('hg push --verbose', clone_url) _check_proper_hg_push(stdout, stderr, branch='default') - ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default' + ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default' assert f'remote: RhodeCode: open pull request link: {ref}' in stdout assert 'remote: RhodeCode: push completed' in stdout @@ -232,23 +234,23 @@ class TestVCSOperationsSpecial(object): stdout, stderr = repo_cmd.execute('hg push -B feature2 --verbose', clone_url) _check_proper_hg_push(stdout, stderr, branch='default') - ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default' + ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default' assert f'remote: RhodeCode: open pull request link: {ref}' in stdout - ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?bookmark=feature2' + ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?bookmark=feature2' assert f'remote: RhodeCode: open pull request link: {ref}' in stdout assert 'remote: RhodeCode: push completed' in stdout assert 'exporting bookmark feature2' in stdout - def test_push_is_forbidden_on_archived_repo_hg(self, backend_hg, rc_web_server, tmpdir): - empty_repo = backend_hg.create_repo() + def test_push_is_forbidden_on_archived_repo_hg(self, vcs_backend_hg, rcstack, tmpdir): + empty_repo = vcs_backend_hg.create_repo() repo_name = empty_repo.repo_name repo = Repository.get_by_repo_name(repo_name) repo.archived = True Session().commit() - clone_url = rc_web_server.repo_clone_url(repo_name) - stdout, stderr = Command('/tmp').execute( + clone_url = rcstack.repo_clone_url(repo_name) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) stdout, stderr = _add_files_and_push( @@ -256,16 +258,16 @@ class TestVCSOperationsSpecial(object): assert 'abort: HTTP Error 403: Forbidden' in stderr - def 
test_push_is_forbidden_on_archived_repo_git(self, backend_git, rc_web_server, tmpdir): - empty_repo = backend_git.create_repo() + def test_push_is_forbidden_on_archived_repo_git(self, vcs_backend_git, rcstack, tmpdir): + empty_repo = vcs_backend_git.create_repo() repo_name = empty_repo.repo_name repo = Repository.get_by_repo_name(repo_name) repo.archived = True Session().commit() - clone_url = rc_web_server.repo_clone_url(repo_name) - stdout, stderr = Command('/tmp').execute( + clone_url = rcstack.repo_clone_url(repo_name) + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) stdout, stderr = _add_files_and_push( diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_svn.py b/rhodecode/tests/vcs_operations/test_vcs_operations_svn.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations_svn.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_svn.py @@ -49,97 +49,110 @@ def get_cli_flags(username, password): return flags, auth -@pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") -class TestVCSOperations(object): +@pytest.mark.usefixtures( + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", +) +class TestVCSOperationsSVN(object): - def test_clone_svn_repo_by_admin(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(SVN_REPO) - username, password = rc_web_server.repo_clone_credentials() + @pytest.mark.xfail(reason='Lack of proper SVN support of cloning') + def test_clone_svn_repo_by_admin(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(SVN_REPO) + username, password = rcstack.repo_clone_credentials() - cmd = Command('/tmp') + cmd = Command(tmpdir.strpath) flags, auth = get_cli_flags(username, password) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) _check_proper_clone(stdout, stderr, 'svn') 
cmd.assert_returncode_success() - def test_clone_svn_repo_by_id_by_admin(self, rc_web_server, tmpdir): + @pytest.mark.xfail(reason='Lack of proper SVN support of cloning') + def test_clone_svn_repo_by_id_by_admin(self, rcstack, tmpdir): repo_id = Repository.get_by_repo_name(SVN_REPO).repo_id - username, password = rc_web_server.repo_clone_credentials() + username, password = rcstack.repo_clone_credentials() - clone_url = rc_web_server.repo_clone_url('_%s' % repo_id) - cmd = Command('/tmp') + clone_url = rcstack.repo_clone_url('_%s' % repo_id) + cmd = Command(tmpdir.strpath) flags, auth = get_cli_flags(username, password) - stdout, stderr = Command('/tmp').execute( - f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) - - _check_proper_clone(stdout, stderr, 'svn') - cmd.assert_returncode_success() - - def test_clone_svn_repo_with_group_by_admin(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(SVN_REPO_WITH_GROUP) - username, password = rc_web_server.repo_clone_credentials() - - flags, auth = get_cli_flags(username, password) - - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) _check_proper_clone(stdout, stderr, 'svn') cmd.assert_returncode_success() - def test_clone_wrong_credentials_svn(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(SVN_REPO) - username, password = rc_web_server.repo_clone_credentials() + @pytest.mark.xfail(reason='Lack of proper SVN support of cloning') + def test_clone_svn_repo_with_group_by_admin(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(SVN_REPO_WITH_GROUP) + username, password = rcstack.repo_clone_credentials() + + flags, auth = get_cli_flags(username, password) + + stdout, stderr = Command(tmpdir.strpath).execute( + f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) + + _check_proper_clone(stdout, stderr, 'svn') + rcstack.assert_returncode_success() + + 
@pytest.mark.xfail(reason='Lack of proper SVN support of cloning') + def test_clone_wrong_credentials_svn(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(SVN_REPO) + username, password = rcstack.repo_clone_credentials() password = 'bad-password' flags, auth = get_cli_flags(username, password) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) assert 'fatal: Authentication failed' in stderr - def test_clone_svn_with_slashes(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url('//' + SVN_REPO) + @pytest.mark.xfail(reason='Lack of proper SVN support of cloning') + def test_clone_svn_with_slashes(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url('//' + SVN_REPO) username, password = '', '' flags, auth = get_cli_flags(username, password) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( f'svn checkout {flags} {auth}', clone_url) assert 'not found' in stderr + @pytest.mark.xfail(reason='Lack of proper SVN support of cloning') def test_clone_existing_path_svn_not_in_database( - self, rc_web_server, tmpdir, fs_repo_only): + self, rcstack, tmpdir, fs_repo_only): db_name = fs_repo_only('not-in-db-git', repo_type='git') - clone_url = rc_web_server.repo_clone_url(db_name) + clone_url = rcstack.repo_clone_url(db_name) username, password = '', '' flags, auth = get_cli_flags(username, password) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) assert 'not found' in stderr + @pytest.mark.xfail(reason='Lack of proper SVN support of cloning') def test_clone_existing_path_svn_not_in_database_different_scm( - self, rc_web_server, tmpdir, fs_repo_only): + self, rcstack, tmpdir, fs_repo_only): db_name = fs_repo_only('not-in-db-hg', repo_type='hg') - clone_url = 
rc_web_server.repo_clone_url(db_name) + clone_url = rcstack.repo_clone_url(db_name) username, password = '', '' flags, auth = get_cli_flags(username, password) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) assert 'not found' in stderr - def test_clone_non_existing_store_path_svn(self, rc_web_server, tmpdir, user_util): + @pytest.mark.xfail(reason='Lack of proper SVN support of cloning') + def test_clone_non_existing_store_path_svn(self, rcstack, tmpdir, user_util): repo = user_util.create_repo(repo_type='git') - clone_url = rc_web_server.repo_clone_url(repo.repo_name) + clone_url = rcstack.repo_clone_url(repo.repo_name) # Damage repo by removing it's folder RepoModel()._delete_filesystem_repo(repo) @@ -147,16 +160,17 @@ class TestVCSOperations(object): username, password = '', '' flags, auth = get_cli_flags(username, password) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) assert 'not found' in stderr - def test_push_new_file_svn(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(SVN_REPO) + @pytest.mark.xfail(reason='Lack of proper SVN support of cloning') + def test_push_new_file_svn(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(SVN_REPO) username, password = '', '' flags, auth = get_cli_flags(username, password) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) # commit some stuff into this repo @@ -165,37 +179,40 @@ class TestVCSOperations(object): _check_proper_svn_push(stdout, stderr) - def test_push_wrong_credentials_svn(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(SVN_REPO) + @pytest.mark.xfail(reason='Lack of proper SVN support of cloning') + def 
test_push_wrong_credentials_svn(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(SVN_REPO) - username, password = rc_web_server.repo_clone_credentials() + username, password = rcstack.repo_clone_credentials() flags, auth = get_cli_flags(username, password) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) - push_url = rc_web_server.repo_clone_url( + push_url = rcstack.repo_clone_url( SVN_REPO, user='bad', passwd='name') stdout, stderr = _add_files_and_push( 'svn', tmpdir.strpath, clone_url=push_url, username=username, password=password) assert 'fatal: Authentication failed' in stderr - def test_push_back_to_wrong_url_svn(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(SVN_REPO) + @pytest.mark.xfail(reason='Lack of proper SVN support of cloning') + def test_push_back_to_wrong_url_svn(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(SVN_REPO) username, password = '', '' flags, auth = get_cli_flags(username, password) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) stdout, stderr = _add_files_and_push( 'svn', tmpdir.strpath, - clone_url=rc_web_server.repo_clone_url('not-existing'), username=username, password=password) + clone_url=rcstack.repo_clone_url('not-existing'), username=username, password=password) assert 'not found' in stderr - def test_ip_restriction_svn(self, rc_web_server, tmpdir): + @pytest.mark.xfail(reason='Lack of proper SVN support of cloning') + def test_ip_restriction_svn(self, rcstack, tmpdir): user_model = UserModel() username, password = '', '' flags, auth = get_cli_flags(username, password) @@ -204,9 +221,9 @@ class TestVCSOperations(object): user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32') Session().commit() time.sleep(2) - clone_url = 
rc_web_server.repo_clone_url(SVN_REPO) + clone_url = rcstack.repo_clone_url(SVN_REPO) - stdout, stderr = Command('/tmp').execute( + stdout, stderr = Command(tmpdir.strpath).execute( f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) msg = "The requested URL returned error: 403" assert msg in stderr @@ -218,7 +235,7 @@ class TestVCSOperations(object): time.sleep(2) - cmd = Command('/tmp') + cmd = Command(tmpdir.strpath) stdout, stderr = cmd.execute(f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath) cmd.assert_returncode_success() _check_proper_clone(stdout, stderr, 'svn') diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations_tag_push.py b/rhodecode/tests/vcs_operations/test_vcs_operations_tag_push.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations_tag_push.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations_tag_push.py @@ -1,4 +1,3 @@ - # Copyright (C) 2010-2023 RhodeCode GmbH # # This program is free software: you can redistribute it and/or modify @@ -19,11 +18,6 @@ """ Test suite for making push/pull operations, on specially modified INI files - -.. important:: - - You must have git >= 1.8.5 for tests to work fine. With 68b939b git started - to redirect things to stderr instead of stdout. 
""" import pytest @@ -37,54 +31,59 @@ connection_available = pytest.mark.skipi not check_httpbin_connection(), reason="No outside internet connection available") + @pytest.mark.usefixtures( - "disable_locking", "disable_anonymous_user", - "enable_webhook_push_integration") -class TestVCSOperationsOnCustomIniConfig(object): + "init_pyramid_app", + "repo_group_repos", + "disable_anonymous_user", + "disable_locking", + "enable_webhook_push_integration" +) +class TestVCSOperationsTagPush(object): @connection_available - def test_push_tag_with_commit_hg(self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(HG_REPO) - stdout, stderr = Command('/tmp').execute( + def test_push_tag_with_commit_hg(self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(HG_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'hg clone', clone_url, tmpdir.strpath) - push_url = rc_web_server.repo_clone_url(HG_REPO) + push_url = rcstack.repo_clone_url(HG_REPO) _add_files_and_push( 'hg', tmpdir.strpath, clone_url=push_url, tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}]) - rc_log = rc_web_server.get_rc_log() - assert 'ERROR' not in rc_log - assert "{'name': 'v1.0.0'," in rc_log + celery_log = open(rcstack.rcstack_data.celery_log).read() + assert 'ERROR' not in celery_log + assert "{'name': 'v1.0.0'," in celery_log @connection_available def test_push_tag_with_commit_git( - self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( + self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) - push_url = rc_web_server.repo_clone_url(GIT_REPO) + push_url = rcstack.repo_clone_url(GIT_REPO) _add_files_and_push( 'git', tmpdir.strpath, clone_url=push_url, tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}]) - rc_log = rc_web_server.get_rc_log() - assert 'ERROR' not in rc_log - assert "{'name': 
'v1.0.0'," in rc_log + celery_log = open(rcstack.rcstack_data.celery_log).read() + assert 'ERROR' not in celery_log + assert "{'name': 'v1.0.0'," in celery_log @connection_available def test_push_tag_with_no_commit_git( - self, rc_web_server, tmpdir): - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( + self, rcstack, tmpdir): + clone_url = rcstack.repo_clone_url(GIT_REPO) + stdout, stderr = Command(tmpdir.strpath).execute( 'git clone', clone_url, tmpdir.strpath) - push_url = rc_web_server.repo_clone_url(GIT_REPO) + push_url = rcstack.repo_clone_url(GIT_REPO) _add_files_and_push( 'git', tmpdir.strpath, clone_url=push_url, tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}]) - rc_log = rc_web_server.get_rc_log() - assert 'ERROR' not in rc_log - assert "{'name': 'v1.0.0'," in rc_log + celery_log = open(rcstack.rcstack_data.celery_log).read() + assert 'ERROR' not in celery_log + assert "{'name': 'v1.0.0'," in celery_log diff --git a/rhodecode/tests/vcsserver_http.ini b/rhodecode/tests/vcsserver_http.ini --- a/rhodecode/tests/vcsserver_http.ini +++ b/rhodecode/tests/vcsserver_http.ini @@ -60,7 +60,7 @@ vcs.svn.redis_conn = redis://redis:6379/ ; Default cache dir for caches. Putting this into a ramdisk can boost performance. ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space -cache_dir = %(here)s/data +cache_dir = %(here)s/.rc-vcs-test-data ; *************************************** ; `repo_object` cache, default file based @@ -141,7 +141,7 @@ level = NOTSET handlers = console [logger_vcsserver] -level = INFO +level = DEBUG handlers = qualname = vcsserver propagate = 1