##// END OF EJS Templates
tests: fixed test suite for celery adoption
super-admin -
r5607:39b20522 default
parent child Browse files
Show More
@@ -0,0 +1,61 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import time
20 import logging
21
22 from rhodecode.lib.config_utils import get_app_config_lightweight
23
24 from rhodecode.lib.hook_daemon.base import Hooks
25 from rhodecode.lib.hook_daemon.hook_module import HooksModuleCallbackDaemon
26 from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon
27 from rhodecode.lib.type_utils import str2bool
28
29 log = logging.getLogger(__name__)
30
31
32
def prepare_callback_daemon(extras, protocol: str, txn_id=None):
    """
    Build the hooks callback daemon matching *protocol*.

    Supported protocols are ``'celery'`` (hooks dispatched through the celery
    broker configured in the ini file referenced by ``extras['config']``) and
    ``'local'`` (hooks executed in-process via the ``Hooks`` module).

    Mutates *extras* in place with the daemon connection config, the chosen
    protocol, the current timestamp and the transaction id.

    :param extras: dict of hook extras; must contain ``'config'`` for celery.
    :param protocol: ``'celery'`` or ``'local'``.
    :param txn_id: optional transaction id registered into *extras*.
    :returns: tuple of ``(callback_daemon, extras)``.
    :raises Exception: when *protocol* is not a supported value.
    """
    hooks_config = {}

    if protocol == 'celery':
        # celery needs broker/result-backend settings read from the ini file
        app_config = get_app_config_lightweight(extras['config'])
        broker_url = app_config.get('celery.broker_url')
        result_backend = app_config.get('celery.result_backend')

        hooks_config = {
            'broker_url': broker_url,
            'result_backend': result_backend,
        }
        callback_daemon = CeleryHooksCallbackDaemon(broker_url, result_backend)
    elif protocol == 'local':
        callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__)
    else:
        log.error('Unsupported callback daemon protocol "%s"', protocol)
        raise Exception('Unsupported callback daemon protocol.')

    extras['hooks_config'] = hooks_config
    extras['hooks_protocol'] = protocol
    extras['time'] = time.time()

    # register txn_id
    extras['txn_id'] = txn_id
    log.debug('Prepared a callback daemon: %s', callback_daemon.__class__.__name__)
    return callback_daemon, extras
@@ -0,0 +1,17 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,52 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import pytest
20 from rhodecode.tests.utils import CustomTestApp
21 from rhodecode.tests.fixtures.fixture_utils import plain_http_environ, plain_config_stub, plain_request_stub
22
23
@pytest.fixture(scope='function')
def request_stub():
    """Per-test (function scoped) pyramid request stub."""
    stub = plain_request_stub()
    return stub
27
28
@pytest.fixture(scope='function')
def config_stub(request, request_stub):
    """Per-test pyramid config stub bound to the ``request_stub`` fixture."""
    stub = plain_config_stub(request, request_stub)
    return stub
32
33
@pytest.fixture(scope='function')
def http_environ():
    """
    Extra HTTP environ keys for a single test.

    Consumed by the test application and by the pylons environment setup;
    tests relying on the "app" fixture may override this fixture to supply
    different values.
    """
    return plain_http_environ()
44
45
@pytest.fixture(scope='function')
def app(request, config_stub, http_environ, baseapp):
    """WSGI test app wrapping ``baseapp``; also injected as ``self.app`` for class-based tests."""
    test_app = CustomTestApp(baseapp, extra_environ=http_environ)
    requesting_class = request.cls
    if requesting_class:
        # class-based tests access the application via self.app
        requesting_class.app = test_app
    return test_app
@@ -0,0 +1,49 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import pytest
20 from rhodecode.tests.utils import CustomTestApp
21 from rhodecode.tests.fixtures.fixture_utils import plain_http_environ, plain_config_stub, plain_request_stub
22
23
@pytest.fixture(scope='module')
def module_request_stub():
    """Pyramid request stub shared by all tests in a module."""
    stub = plain_request_stub()
    return stub
27
28
@pytest.fixture(scope='module')
def module_config_stub(request, module_request_stub):
    """Module-scoped pyramid config stub bound to ``module_request_stub``."""
    stub = plain_config_stub(request, module_request_stub)
    return stub
32
33
@pytest.fixture(scope='module')
def module_http_environ():
    """
    Extra HTTP environ keys shared by every test in a module.

    Consumed by the test application and by the pylons environment setup;
    modules needing different values should override this fixture.
    """
    return plain_http_environ()
44
45
@pytest.fixture(scope='module')
def module_app(request, module_config_stub, module_http_environ, baseapp):
    """Module-scoped WSGI test application wrapping ``baseapp``."""
    return CustomTestApp(baseapp, extra_environ=module_http_environ)
@@ -0,0 +1,157 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import os
20 import shutil
21 import logging
22 import textwrap
23
24 import pytest
25
26 import rhodecode
27 import rhodecode.lib
28
29 from rhodecode.tests import console_printer
30
31 log = logging.getLogger(__name__)
32
33
def store_rcextensions(destination, force=False):
    """
    Copy the bundled ``rcextensions`` package into *destination*.

    With ``force=True`` any pre-existing copy is removed first; otherwise
    ``shutil.copytree`` raises if the target directory already exists.
    """
    from rhodecode.config import rcextensions
    source_path = rcextensions.__path__[0]

    # Note: rcextensions are looked up based on the path of the ini file
    target_path = os.path.join(destination, 'rcextensions')

    if force:
        shutil.rmtree(target_path, ignore_errors=True)

    shutil.copytree(source_path, target_path)
44
45
@pytest.fixture(scope="module")
def rcextensions(request, tmp_storage_location):
    """
    Installs a testing rcextensions pack to ensure they work as expected.
    """

    # Note: rcextensions are looked up based on the path of the ini file
    rcextensions_path = os.path.join(tmp_storage_location, 'rcextensions')

    if not os.path.exists(rcextensions_path):
        store_rcextensions(tmp_storage_location)
    else:
        # a stale pack would make hook-modification tests unreliable
        pytest.fail(
            f"Path for rcextensions already exists, please clean up before "
            f"test run this path: {rcextensions_path}")
61
62
@pytest.fixture(scope='function')
def rcextensions_present(request):
    """
    Factory fixture: returns a context-manager class that installs the
    bundled rcextensions pack at a given location on enter and removes
    it again on exit.
    """

    class RcExtensionsPresent:
        def __init__(self, rcextensions_location):
            # directory holding the ini file; the pack lands in a
            # 'rcextensions' sub-directory of it
            self.rcextensions_location = rcextensions_location

        def __enter__(self):
            self.store()

        def __exit__(self, exc_type, exc_val, exc_tb):
            # remove the pack even when the with-body raised
            self.cleanup()

        def store(self):
            # copy the packaged rcextensions next to the ini file
            store_rcextensions(self.rcextensions_location)

        def cleanup(self):
            shutil.rmtree(os.path.join(self.rcextensions_location, 'rcextensions'))

    return RcExtensionsPresent
83
84
@pytest.fixture(scope='function')
def rcextensions_modification(request):
    """
    Factory fixture: returns a context-manager class that rewrites the body
    of selected rcextensions hook functions on enter and restores the
    pristine pack from the package on exit.

    example usage::

        hook_name = '_pre_push_hook'
        code = '''
        raise OSError('failed')
        return HookResponse(1, 'FAILED')
        '''
        mods = [
            (hook_name, code),
        ]
        # rhodecode.ini file location, where rcextensions needs to live
        rcstack_location = os.path.dirname(rcstack.config_file)
        with rcextensions_modification(rcstack_location, mods):
            # do some stuff
    """

    class RcextensionsModification:
        def __init__(self, rcextensions_location, mods, create_if_missing=False, force_create=False):
            self.force_create = force_create
            self.create_if_missing = create_if_missing
            self.rcextensions_location = rcextensions_location
            self.mods = mods
            if not isinstance(mods, list):
                raise ValueError('mods must be a list of modifications')

        def __enter__(self):
            if self.create_if_missing:
                store_rcextensions(self.rcextensions_location, force=self.force_create)

            # apply each (hook_name, new_body) pair in order
            for hook_name, method_body in self.mods:
                self.modification(hook_name, method_body)

        def __exit__(self, exc_type, exc_val, exc_tb):
            # always restore, also when the with-body raised
            self.cleanup()

        def cleanup(self):
            # reset rcextensions to "bare" state from the package
            store_rcextensions(self.rcextensions_location, force=True)

        def modification(self, hook_name, method_body):
            # Rewrite the body of function *hook_name* inside
            # rcextensions/hooks.py with *method_body* (python source string).
            import ast

            rcextensions_path = os.path.join(self.rcextensions_location, 'rcextensions')

            # Load the code from hooks.py
            hooks_filename = os.path.join(rcextensions_path, 'hooks.py')
            with open(hooks_filename, "r") as file:
                tree = ast.parse(file.read())

            # Define new content for the function as a string
            new_code = textwrap.dedent(method_body)

            # Parse the new code to add it to the function
            new_body = ast.parse(new_code).body

            # Walk through the AST to find and modify the function.
            # NOTE: a hook_name matching no top-level function leaves
            # hooks.py unchanged (no error raised).
            for node in tree.body:
                if isinstance(node, ast.FunctionDef) and node.name == hook_name:
                    node.body = new_body  # Replace the function body with the new body

            # Compile the modified AST back to code
            # (syntax sanity check only; the compiled result is discarded)
            compile(tree, hooks_filename, "exec")

            # Write the updated code back to hooks.py
            with open(hooks_filename, "w") as file:
                file.write(ast.unparse(tree))  # Requires Python 3.9+

            console_printer(f" [green]rcextensions[/green] Updated the body of '{hooks_filename}' function '{hook_name}'")

    return RcextensionsModification
@@ -0,0 +1,17 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,17 b''
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,89 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 """
20 Test suite for making push/pull operations, on specially modified INI files
21 """
22
23 import pytest
24
25 from rhodecode.model.meta import Session
26 from rhodecode.model.settings import SettingsModel
27
28 from rhodecode.tests import GIT_REPO, HG_REPO
29 from rhodecode.tests.vcs_operations import Command, _add_files_and_push
30
31
@pytest.fixture()
def bad_client_setter_factory(request):
    """
    Factory that sets the ``<client_type>_allowed_clients`` setting to a
    given value and restores the permissive default (``*``) when the
    requesting test finishes.
    """

    def _persist_setting(name, value):
        # create-or-update a single rhodecode setting and commit it
        entry = SettingsModel().create_or_update_setting(name=name, val=value)
        Session().add(entry)
        Session().commit()

    def _factory(client_type, client_str_val):
        setting_name = f"{client_type}_allowed_clients"
        # set allowed clients
        _persist_setting(setting_name, client_str_val)

        def cleanup():
            # restore the default that allows every client again
            _persist_setting(setting_name, "*")

        request.addfinalizer(cleanup)

    return _factory
47
48
@pytest.mark.usefixtures(
    "init_pyramid_app",
    "repo_group_repos",
    "disable_anonymous_user",
    "disable_locking",
)
class TestVCSOperationsOnUsingBadClient:
    """
    Pushes must be rejected when the client's version does not match the
    ``<vcs>_allowed_clients`` setting.

    Uses the ``bad_client_setter_factory`` fixture so the restrictive
    setting is automatically restored to ``*`` after each test, instead of
    leaking into subsequent tests.
    """

    def test_push_with_bad_client_repo_by_other_user_hg(self, rcstack, tmpdir, bad_client_setter_factory):
        clone_url = rcstack.repo_clone_url(HG_REPO)
        stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath)

        # forbid every hg client version; restored by the factory finalizer
        bad_client_setter_factory("hg", "0.0.0")

        # push fails: the hg client version is forbidden by security rules
        push_url = rcstack.repo_clone_url(HG_REPO)
        stdout, stderr = _add_files_and_push("hg", tmpdir.strpath, clone_url=push_url)
        msg = "Your hg client (ver=mercurial/proto-1.0 (Mercurial 6.7.4)) is forbidden by security rules"
        assert msg in stderr

    def test_push_with_bad_client_repo_by_other_user_git(self, rcstack, tmpdir, bad_client_setter_factory):
        clone_url = rcstack.repo_clone_url(GIT_REPO)
        stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath)

        # forbid every git client version; restored by the factory finalizer
        bad_client_setter_factory("git", "0.0.0")

        # push fails: the git client version is forbidden by security rules
        push_url = rcstack.repo_clone_url(GIT_REPO)
        stdout, stderr = _add_files_and_push("git", tmpdir.strpath, clone_url=push_url)

        err = "Your git client (ver=git/2.45.2) is forbidden by security rules"
        assert err in stderr

    @pytest.mark.xfail(reason="Lack of proper SVN support of cloning")
    def test_push_with_bad_client_repo_by_other_user_svn(self, rcstack, tmpdir):
        raise NotImplementedError("lacks svn support")
@@ -0,0 +1,63 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 """
21 Test suite for making push/pull operations, on specially modified INI files
22 """
23
24 import pytest
25
26 from rhodecode.tests import GIT_REPO, SVN_REPO, HG_REPO
27
28 from rhodecode.tests.vcs_operations import (Command, _check_proper_clone)
29 from rhodecode.tests.vcs_operations.test_vcs_operations_svn import get_cli_flags
30
31
@pytest.mark.usefixtures(
    "init_pyramid_app",
    "repo_group_repos",
    "disable_anonymous_user",
    "disable_locking",
)
class TestVCSOperationsClone:
    """Smoke tests: an admin can clone each repo type from the test stack."""

    def test_clone_git_repo_by_admin(self, rcstack, tmpdir):
        command = Command(tmpdir.strpath)
        stdout, stderr = command.execute(
            'git clone', rcstack.repo_clone_url(GIT_REPO), tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'git')
        command.assert_returncode_success()

    def test_clone_hg_repo_by_admin(self, rcstack, tmpdir):
        command = Command(tmpdir.strpath)
        stdout, stderr = command.execute(
            'hg clone', rcstack.repo_clone_url(HG_REPO), tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'hg')
        command.assert_returncode_success()

    @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
    def test_clone_svn_repo_by_admin(self, rcstack, tmpdir):
        clone_url = rcstack.repo_clone_url(SVN_REPO)
        username, password = rcstack.repo_clone_credentials()
        flags, auth = get_cli_flags(username, password)
        command = Command(tmpdir.strpath)
        stdout, stderr = command.execute(
            f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
        _check_proper_clone(stdout, stderr, 'svn')
        command.assert_returncode_success()
@@ -0,0 +1,81 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19
20 import os
21 import pytest
22
23 from rhodecode.tests.fixtures.rcextensions_fixtures import store_rcextensions
24 from rhodecode.tests.vcs_operations import (
25 Command, _check_proper_hg_push, _check_proper_git_push,
26 _add_files_and_push)
27
28
@pytest.mark.usefixtures(
    "init_pyramid_app",
    "repo_group_repos",
    "disable_anonymous_user",
    "disable_locking",
)
class TestVCSOperationsWithRCExtensions(object):
    """
    Verify that a failing ``_pre_push_hook`` installed via rcextensions
    rejects hg and git pushes with the hook's message.
    """

    def test_push_when_rcextensions_fail_hg(self, rcstack, tmpdir, user_util, rcextensions, rcextensions_modification):
        repo = user_util.create_repo(repo_type='hg')
        clone_url = rcstack.repo_clone_url(repo.repo_name)
        Command(os.path.dirname(tmpdir.strpath)).execute(
            'hg clone', clone_url, tmpdir.strpath)

        # baseline: push succeeds with the unmodified rcextensions pack
        stdout, stderr = _add_files_and_push(
            'hg', tmpdir.strpath, clone_url=clone_url)
        _check_proper_hg_push(stdout, stderr)

        # replace _pre_push_hook body so it returns a failing HookResponse
        mods = [
            ('_pre_push_hook',
            """
            return HookResponse(1, 'HOOK_FAIL_TEST_HG')
            """)
        ]
        # rcextensions live next to the rhodecode.ini file
        rcstack_location = os.path.dirname(rcstack.config_file)
        with rcextensions_modification(rcstack_location, mods):
            stdout, stderr = _add_files_and_push(
                'hg', tmpdir.strpath, clone_url=clone_url)
            # hg reports the hook failure on stdout
            assert 'HOOK_FAIL_TEST_HG' in stdout

    def test_push_when_rcextensions_fail_git(self, rcstack, tmpdir, user_util, rcextensions, rcextensions_modification):
        repo = user_util.create_repo(repo_type='git')
        clone_url = rcstack.repo_clone_url(repo.repo_name)
        Command(os.path.dirname(tmpdir.strpath)).execute(
            'git clone', clone_url, tmpdir.strpath)

        # baseline: push succeeds with the unmodified rcextensions pack
        stdout, stderr = _add_files_and_push(
            'git', tmpdir.strpath, clone_url=clone_url)
        _check_proper_git_push(stdout, stderr)

        # replace _pre_push_hook body so it returns a failing HookResponse
        mods = [
            ('_pre_push_hook',
            """
            return HookResponse(1, 'HOOK_FAIL_TEST_GIT')
            """)
        ]

        # rcextensions live next to the rhodecode.ini file
        rcstack_location = os.path.dirname(rcstack.config_file)
        with rcextensions_modification(rcstack_location, mods):
            stdout, stderr = _add_files_and_push(
                'git', tmpdir.strpath, clone_url=clone_url)
            # git reports the hook failure on stderr
            assert 'HOOK_FAIL_TEST_GIT' in stderr
81
@@ -27,8 +27,11 b' from rhodecode.tests.conftest_common imp'
27
27
28
28
29 pytest_plugins = [
29 pytest_plugins = [
30 "rhodecode.tests.fixture_mods.fixture_pyramid",
30 "rhodecode.tests.fixtures.fixture_pyramid",
31 "rhodecode.tests.fixture_mods.fixture_utils",
31 "rhodecode.tests.fixtures.fixture_utils",
32 "rhodecode.tests.fixtures.function_scoped_baseapp",
33 "rhodecode.tests.fixtures.module_scoped_baseapp",
34 "rhodecode.tests.fixtures.rcextensions_fixtures",
32 ]
35 ]
33
36
34
37
@@ -65,8 +65,7 b' dependencies = {file = ["requirements.tx'
65 optional-dependencies.tests = {file = ["requirements_test.txt"]}
65 optional-dependencies.tests = {file = ["requirements_test.txt"]}
66
66
67 [tool.ruff]
67 [tool.ruff]
68
68 lint.select = [
69 select = [
70 # Pyflakes
69 # Pyflakes
71 "F",
70 "F",
72 # Pycodestyle
71 # Pycodestyle
@@ -75,16 +74,13 b' select = ['
75 # isort
74 # isort
76 "I001"
75 "I001"
77 ]
76 ]
78
77 lint.ignore = [
79 ignore = [
80 "E501", # line too long, handled by black
78 "E501", # line too long, handled by black
81 ]
79 ]
82
83 # Same as Black.
80 # Same as Black.
84 line-length = 120
81 line-length = 120
85
82
86 [tool.ruff.isort]
83 [tool.ruff.lint.isort]
87
88 known-first-party = ["rhodecode"]
84 known-first-party = ["rhodecode"]
89
85
90 [tool.ruff.format]
86 [tool.ruff.format]
@@ -4,8 +4,10 b' norecursedirs = rhodecode/public rhodeco'
4 cache_dir = /tmp/.pytest_cache
4 cache_dir = /tmp/.pytest_cache
5
5
6 pyramid_config = rhodecode/tests/rhodecode.ini
6 pyramid_config = rhodecode/tests/rhodecode.ini
7 vcsserver_protocol = http
7
8 vcsserver_config_http = rhodecode/tests/vcsserver_http.ini
8 vcsserver_config = rhodecode/tests/vcsserver_http.ini
9 rhodecode_config = rhodecode/tests/rhodecode.ini
10 celery_config = rhodecode/tests/rhodecode.ini
9
11
10 addopts =
12 addopts =
11 --pdbcls=IPython.terminal.debugger:TerminalPdb
13 --pdbcls=IPython.terminal.debugger:TerminalPdb
@@ -1,5 +1,4 b''
1
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -24,7 +24,7 b' from rhodecode.model.db import Gist'
24 from rhodecode.model.gist import GistModel
24 from rhodecode.model.gist import GistModel
25 from rhodecode.api.tests.utils import (
25 from rhodecode.api.tests.utils import (
26 build_data, api_call, assert_error, assert_ok, crash)
26 build_data, api_call, assert_error, assert_ok, crash)
27 from rhodecode.tests.fixture import Fixture
27 from rhodecode.tests.fixtures.rc_fixture import Fixture
28
28
29
29
30 @pytest.mark.usefixtures("testuser_api", "app")
30 @pytest.mark.usefixtures("testuser_api", "app")
@@ -27,7 +27,7 b' from rhodecode.model.user import UserMod'
27 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
27 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
28 from rhodecode.api.tests.utils import (
28 from rhodecode.api.tests.utils import (
29 build_data, api_call, assert_ok, assert_error, crash)
29 build_data, api_call, assert_ok, assert_error, crash)
30 from rhodecode.tests.fixture import Fixture
30 from rhodecode.tests.fixtures.rc_fixture import Fixture
31 from rhodecode.lib.ext_json import json
31 from rhodecode.lib.ext_json import json
32 from rhodecode.lib.str_utils import safe_str
32 from rhodecode.lib.str_utils import safe_str
33
33
@@ -26,7 +26,7 b' from rhodecode.model.user import UserMod'
26 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
26 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
27 from rhodecode.api.tests.utils import (
27 from rhodecode.api.tests.utils import (
28 build_data, api_call, assert_ok, assert_error, crash)
28 build_data, api_call, assert_ok, assert_error, crash)
29 from rhodecode.tests.fixture import Fixture
29 from rhodecode.tests.fixtures.rc_fixture import Fixture
30
30
31
31
32 fixture = Fixture()
32 fixture = Fixture()
@@ -26,7 +26,7 b' from rhodecode.tests import ('
26 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_EMAIL)
26 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_EMAIL)
27 from rhodecode.api.tests.utils import (
27 from rhodecode.api.tests.utils import (
28 build_data, api_call, assert_ok, assert_error, jsonify, crash)
28 build_data, api_call, assert_ok, assert_error, jsonify, crash)
29 from rhodecode.tests.fixture import Fixture
29 from rhodecode.tests.fixtures.rc_fixture import Fixture
30 from rhodecode.model.db import RepoGroup
30 from rhodecode.model.db import RepoGroup
31
31
32
32
@@ -25,7 +25,7 b' from rhodecode.model.user import UserMod'
25 from rhodecode.model.user_group import UserGroupModel
25 from rhodecode.model.user_group import UserGroupModel
26 from rhodecode.api.tests.utils import (
26 from rhodecode.api.tests.utils import (
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
28 from rhodecode.tests.fixture import Fixture
28 from rhodecode.tests.fixtures.rc_fixture import Fixture
29
29
30
30
31 @pytest.mark.usefixtures("testuser_api", "app")
31 @pytest.mark.usefixtures("testuser_api", "app")
@@ -28,7 +28,7 b' from rhodecode.model.user import UserMod'
28 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
28 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
29 from rhodecode.api.tests.utils import (
29 from rhodecode.api.tests.utils import (
30 build_data, api_call, assert_error, assert_ok, crash)
30 build_data, api_call, assert_error, assert_ok, crash)
31 from rhodecode.tests.fixture import Fixture
31 from rhodecode.tests.fixtures.rc_fixture import Fixture
32
32
33
33
34 fixture = Fixture()
34 fixture = Fixture()
@@ -25,8 +25,8 b' from rhodecode.model.scm import ScmModel'
25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
26 from rhodecode.api.tests.utils import (
26 from rhodecode.api.tests.utils import (
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
28 from rhodecode.tests.fixture import Fixture
28 from rhodecode.tests.fixtures.rc_fixture import Fixture
29 from rhodecode.tests.fixture_mods.fixture_utils import plain_http_host_only_stub
29 from rhodecode.tests.fixtures.fixture_utils import plain_http_host_only_stub
30
30
31 fixture = Fixture()
31 fixture = Fixture()
32
32
@@ -26,7 +26,7 b' import pytest'
26 from rhodecode.lib.str_utils import safe_str
26 from rhodecode.lib.str_utils import safe_str
27 from rhodecode.tests import *
27 from rhodecode.tests import *
28 from rhodecode.tests.routes import route_path
28 from rhodecode.tests.routes import route_path
29 from rhodecode.tests.fixture import FIXTURES
29 from rhodecode.tests.fixtures.rc_fixture import FIXTURES
30 from rhodecode.model.db import UserLog
30 from rhodecode.model.db import UserLog
31 from rhodecode.model.meta import Session
31 from rhodecode.model.meta import Session
32
32
@@ -20,7 +20,7 b''
20 import pytest
20 import pytest
21
21
22 from rhodecode.tests import TestController
22 from rhodecode.tests import TestController
23 from rhodecode.tests.fixture import Fixture
23 from rhodecode.tests.fixtures.rc_fixture import Fixture
24 from rhodecode.tests.routes import route_path
24 from rhodecode.tests.routes import route_path
25
25
26 fixture = Fixture()
26 fixture = Fixture()
@@ -37,7 +37,7 b' from rhodecode.model.user import UserMod'
37 from rhodecode.tests import (
37 from rhodecode.tests import (
38 login_user_session, assert_session_flash, TEST_USER_ADMIN_LOGIN,
38 login_user_session, assert_session_flash, TEST_USER_ADMIN_LOGIN,
39 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
39 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
40 from rhodecode.tests.fixture import Fixture, error_function
40 from rhodecode.tests.fixtures.rc_fixture import Fixture, error_function
41 from rhodecode.tests.utils import repo_on_filesystem
41 from rhodecode.tests.utils import repo_on_filesystem
42 from rhodecode.tests.routes import route_path
42 from rhodecode.tests.routes import route_path
43
43
@@ -27,7 +27,7 b' from rhodecode.model.meta import Session'
27 from rhodecode.model.repo_group import RepoGroupModel
27 from rhodecode.model.repo_group import RepoGroupModel
28 from rhodecode.tests import (
28 from rhodecode.tests import (
29 assert_session_flash, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH)
29 assert_session_flash, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH)
30 from rhodecode.tests.fixture import Fixture
30 from rhodecode.tests.fixtures.rc_fixture import Fixture
31 from rhodecode.tests.routes import route_path
31 from rhodecode.tests.routes import route_path
32
32
33
33
@@ -24,7 +24,7 b' from rhodecode.model.meta import Session'
24
24
25 from rhodecode.tests import (
25 from rhodecode.tests import (
26 TestController, assert_session_flash)
26 TestController, assert_session_flash)
27 from rhodecode.tests.fixture import Fixture
27 from rhodecode.tests.fixtures.rc_fixture import Fixture
28 from rhodecode.tests.routes import route_path
28 from rhodecode.tests.routes import route_path
29
29
30 fixture = Fixture()
30 fixture = Fixture()
@@ -28,7 +28,7 b' from rhodecode.model.user import UserMod'
28
28
29 from rhodecode.tests import (
29 from rhodecode.tests import (
30 TestController, TEST_USER_REGULAR_LOGIN, assert_session_flash)
30 TestController, TEST_USER_REGULAR_LOGIN, assert_session_flash)
31 from rhodecode.tests.fixture import Fixture
31 from rhodecode.tests.fixtures.rc_fixture import Fixture
32 from rhodecode.tests.routes import route_path
32 from rhodecode.tests.routes import route_path
33
33
34 fixture = Fixture()
34 fixture = Fixture()
@@ -22,7 +22,7 b' import pytest'
22 from rhodecode.model.db import User, UserSshKeys
22 from rhodecode.model.db import User, UserSshKeys
23
23
24 from rhodecode.tests import TestController, assert_session_flash
24 from rhodecode.tests import TestController, assert_session_flash
25 from rhodecode.tests.fixture import Fixture
25 from rhodecode.tests.fixtures.rc_fixture import Fixture
26 from rhodecode.tests.routes import route_path
26 from rhodecode.tests.routes import route_path
27
27
28 fixture = Fixture()
28 fixture = Fixture()
@@ -27,7 +27,7 b' from rhodecode.model.repo_group import R'
27 from rhodecode.model.db import Session, Repository, RepoGroup
27 from rhodecode.model.db import Session, Repository, RepoGroup
28
28
29 from rhodecode.tests import TestController, TEST_USER_ADMIN_LOGIN
29 from rhodecode.tests import TestController, TEST_USER_ADMIN_LOGIN
30 from rhodecode.tests.fixture import Fixture
30 from rhodecode.tests.fixtures.rc_fixture import Fixture
31 from rhodecode.tests.routes import route_path
31 from rhodecode.tests.routes import route_path
32
32
33 fixture = Fixture()
33 fixture = Fixture()
@@ -22,7 +22,7 b' from rhodecode.model.db import Repositor'
22 from rhodecode.lib.ext_json import json
22 from rhodecode.lib.ext_json import json
23
23
24 from rhodecode.tests import TestController
24 from rhodecode.tests import TestController
25 from rhodecode.tests.fixture import Fixture
25 from rhodecode.tests.fixtures.rc_fixture import Fixture
26 from rhodecode.tests.routes import route_path
26 from rhodecode.tests.routes import route_path
27
27
28 fixture = Fixture()
28 fixture = Fixture()
@@ -20,7 +20,7 b' import pytest'
20 from rhodecode.lib.ext_json import json
20 from rhodecode.lib.ext_json import json
21
21
22 from rhodecode.tests import TestController
22 from rhodecode.tests import TestController
23 from rhodecode.tests.fixture import Fixture
23 from rhodecode.tests.fixtures.rc_fixture import Fixture
24 from rhodecode.tests.routes import route_path
24 from rhodecode.tests.routes import route_path
25
25
26 fixture = Fixture()
26 fixture = Fixture()
@@ -40,7 +40,7 b' import pytest'
40 from rhodecode.lib.ext_json import json
40 from rhodecode.lib.ext_json import json
41
41
42 from rhodecode.tests import TestController
42 from rhodecode.tests import TestController
43 from rhodecode.tests.fixture import Fixture
43 from rhodecode.tests.fixtures.rc_fixture import Fixture
44 from rhodecode.tests.routes import route_path
44 from rhodecode.tests.routes import route_path
45
45
46 fixture = Fixture()
46 fixture = Fixture()
@@ -24,7 +24,7 b' from rhodecode.model.db import Repositor'
24 from rhodecode.model.meta import Session
24 from rhodecode.model.meta import Session
25 from rhodecode.model.settings import SettingsModel
25 from rhodecode.model.settings import SettingsModel
26 from rhodecode.tests import TestController
26 from rhodecode.tests import TestController
27 from rhodecode.tests.fixture import Fixture
27 from rhodecode.tests.fixtures.rc_fixture import Fixture
28 from rhodecode.tests.routes import route_path
28 from rhodecode.tests.routes import route_path
29
29
30
30
@@ -3,7 +3,7 b' import mock'
3
3
4 from rhodecode.lib.type_utils import AttributeDict
4 from rhodecode.lib.type_utils import AttributeDict
5 from rhodecode.model.meta import Session
5 from rhodecode.model.meta import Session
6 from rhodecode.tests.fixture import Fixture
6 from rhodecode.tests.fixtures.rc_fixture import Fixture
7 from rhodecode.tests.routes import route_path
7 from rhodecode.tests.routes import route_path
8 from rhodecode.model.settings import SettingsModel
8 from rhodecode.model.settings import SettingsModel
9
9
@@ -31,7 +31,7 b' from rhodecode.model.meta import Session'
31 from rhodecode.tests import (
31 from rhodecode.tests import (
32 assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN,
32 assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN,
33 no_newline_id_generator)
33 no_newline_id_generator)
34 from rhodecode.tests.fixture import Fixture
34 from rhodecode.tests.fixtures.rc_fixture import Fixture
35 from rhodecode.tests.routes import route_path
35 from rhodecode.tests.routes import route_path
36
36
37 fixture = Fixture()
37 fixture = Fixture()
@@ -22,7 +22,7 b' from rhodecode.lib import helpers as h'
22 from rhodecode.tests import (
22 from rhodecode.tests import (
23 TestController, clear_cache_regions,
23 TestController, clear_cache_regions,
24 TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
24 TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
25 from rhodecode.tests.fixture import Fixture
25 from rhodecode.tests.fixtures.rc_fixture import Fixture
26 from rhodecode.tests.utils import AssertResponse
26 from rhodecode.tests.utils import AssertResponse
27 from rhodecode.tests.routes import route_path
27 from rhodecode.tests.routes import route_path
28
28
@@ -22,7 +22,7 b' from rhodecode.apps._base import ADMIN_P'
22 from rhodecode.model.db import User
22 from rhodecode.model.db import User
23 from rhodecode.tests import (
23 from rhodecode.tests import (
24 TestController, assert_session_flash)
24 TestController, assert_session_flash)
25 from rhodecode.tests.fixture import Fixture
25 from rhodecode.tests.fixtures.rc_fixture import Fixture
26 from rhodecode.tests.routes import route_path
26 from rhodecode.tests.routes import route_path
27
27
28
28
@@ -23,7 +23,7 b' from rhodecode.model.db import User, Use'
23 from rhodecode.tests import (
23 from rhodecode.tests import (
24 TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_EMAIL,
24 TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_EMAIL,
25 assert_session_flash, TEST_USER_REGULAR_PASS)
25 assert_session_flash, TEST_USER_REGULAR_PASS)
26 from rhodecode.tests.fixture import Fixture
26 from rhodecode.tests.fixtures.rc_fixture import Fixture
27 from rhodecode.tests.routes import route_path
27 from rhodecode.tests.routes import route_path
28
28
29
29
@@ -21,7 +21,7 b' import pytest'
21 from rhodecode.tests import (
21 from rhodecode.tests import (
22 TestController, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
22 TestController, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
23 TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
23 TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
24 from rhodecode.tests.fixture import Fixture
24 from rhodecode.tests.fixtures.rc_fixture import Fixture
25 from rhodecode.tests.routes import route_path
25 from rhodecode.tests.routes import route_path
26
26
27 from rhodecode.model.db import Notification, User
27 from rhodecode.model.db import Notification, User
@@ -24,7 +24,7 b' from rhodecode.lib.auth import check_pas'
24 from rhodecode.model.meta import Session
24 from rhodecode.model.meta import Session
25 from rhodecode.model.user import UserModel
25 from rhodecode.model.user import UserModel
26 from rhodecode.tests import assert_session_flash, TestController
26 from rhodecode.tests import assert_session_flash, TestController
27 from rhodecode.tests.fixture import Fixture, error_function
27 from rhodecode.tests.fixtures.rc_fixture import Fixture, error_function
28 from rhodecode.tests.routes import route_path
28 from rhodecode.tests.routes import route_path
29
29
30 fixture = Fixture()
30 fixture = Fixture()
@@ -20,7 +20,7 b''
20 from rhodecode.tests import (
20 from rhodecode.tests import (
21 TestController, TEST_USER_ADMIN_LOGIN,
21 TestController, TEST_USER_ADMIN_LOGIN,
22 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
22 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
23 from rhodecode.tests.fixture import Fixture
23 from rhodecode.tests.fixtures.rc_fixture import Fixture
24 from rhodecode.tests.routes import route_path
24 from rhodecode.tests.routes import route_path
25
25
26 fixture = Fixture()
26 fixture = Fixture()
@@ -19,7 +19,7 b''
19
19
20 from rhodecode.model.db import User, Repository, UserFollowing
20 from rhodecode.model.db import User, Repository, UserFollowing
21 from rhodecode.tests import TestController, TEST_USER_ADMIN_LOGIN
21 from rhodecode.tests import TestController, TEST_USER_ADMIN_LOGIN
22 from rhodecode.tests.fixture import Fixture
22 from rhodecode.tests.fixtures.rc_fixture import Fixture
23 from rhodecode.tests.routes import route_path
23 from rhodecode.tests.routes import route_path
24
24
25 fixture = Fixture()
25 fixture = Fixture()
@@ -21,7 +21,7 b''
21 from rhodecode.model.db import User, UserSshKeys
21 from rhodecode.model.db import User, UserSshKeys
22
22
23 from rhodecode.tests import TestController, assert_session_flash
23 from rhodecode.tests import TestController, assert_session_flash
24 from rhodecode.tests.fixture import Fixture
24 from rhodecode.tests.fixtures.rc_fixture import Fixture
25 from rhodecode.tests.routes import route_path
25 from rhodecode.tests.routes import route_path
26
26
27 fixture = Fixture()
27 fixture = Fixture()
@@ -22,7 +22,7 b' import pytest'
22 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
22 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
23 from rhodecode.lib.vcs import nodes
23 from rhodecode.lib.vcs import nodes
24 from rhodecode.lib.vcs.backends.base import EmptyCommit
24 from rhodecode.lib.vcs.backends.base import EmptyCommit
25 from rhodecode.tests.fixture import Fixture
25 from rhodecode.tests.fixtures.rc_fixture import Fixture
26 from rhodecode.tests.utils import commit_change
26 from rhodecode.tests.utils import commit_change
27 from rhodecode.tests.routes import route_path
27 from rhodecode.tests.routes import route_path
28
28
@@ -166,14 +166,15 b' class TestSideBySideDiff(object):'
166 response.mustcontain('Collapse 2 commits')
166 response.mustcontain('Collapse 2 commits')
167 response.mustcontain('123 file changed')
167 response.mustcontain('123 file changed')
168
168
169 response.mustcontain(
169 response.mustcontain(f'r{commit1.idx}:{commit1.short_id}...r{commit2.idx}:{commit2.short_id}')
170 'r%s:%s...r%s:%s' % (
171 commit1.idx, commit1.short_id, commit2.idx, commit2.short_id))
172
170
173 response.mustcontain(f_path)
171 response.mustcontain(f_path)
174
172
175 @pytest.mark.xfail(reason='GIT does not handle empty commit compare correct (missing 1 commit)')
173 #@pytest.mark.xfail(reason='GIT does not handle empty commit compare correct (missing 1 commit)')
176 def test_diff_side_by_side_from_0_commit_with_file_filter(self, app, backend, backend_stub):
174 def test_diff_side_by_side_from_0_commit_with_file_filter(self, app, backend, backend_stub):
175 if backend.alias == 'git':
176 pytest.skip('GIT does not handle empty commit compare correct (missing 1 commit)')
177
177 f_path = b'test_sidebyside_file.py'
178 f_path = b'test_sidebyside_file.py'
178 commit1_content = b'content-25d7e49c18b159446c\n'
179 commit1_content = b'content-25d7e49c18b159446c\n'
179 commit2_content = b'content-603d6c72c46d953420\n'
180 commit2_content = b'content-603d6c72c46d953420\n'
@@ -200,9 +201,7 b' class TestSideBySideDiff(object):'
200 response.mustcontain('Collapse 2 commits')
201 response.mustcontain('Collapse 2 commits')
201 response.mustcontain('1 file changed')
202 response.mustcontain('1 file changed')
202
203
203 response.mustcontain(
204 response.mustcontain(f'r{commit1.idx}:{commit1.short_id}...r{commit2.idx}:{commit2.short_id}')
204 'r%s:%s...r%s:%s' % (
205 commit1.idx, commit1.short_id, commit2.idx, commit2.short_id))
206
205
207 response.mustcontain(f_path)
206 response.mustcontain(f_path)
208
207
@@ -33,7 +33,7 b' from rhodecode.lib.vcs.conf import setti'
33 from rhodecode.model.db import Session, Repository
33 from rhodecode.model.db import Session, Repository
34
34
35 from rhodecode.tests import assert_session_flash
35 from rhodecode.tests import assert_session_flash
36 from rhodecode.tests.fixture import Fixture
36 from rhodecode.tests.fixtures.rc_fixture import Fixture
37 from rhodecode.tests.routes import route_path
37 from rhodecode.tests.routes import route_path
38
38
39
39
@@ -21,7 +21,7 b' import pytest'
21
21
22 from rhodecode.tests import TestController, assert_session_flash, HG_FORK, GIT_FORK
22 from rhodecode.tests import TestController, assert_session_flash, HG_FORK, GIT_FORK
23
23
24 from rhodecode.tests.fixture import Fixture
24 from rhodecode.tests.fixtures.rc_fixture import Fixture
25 from rhodecode.lib import helpers as h
25 from rhodecode.lib import helpers as h
26
26
27 from rhodecode.model.db import Repository
27 from rhodecode.model.db import Repository
@@ -21,7 +21,7 b' import pytest'
21
21
22 from rhodecode.model.db import Repository, UserRepoToPerm, Permission, User
22 from rhodecode.model.db import Repository, UserRepoToPerm, Permission, User
23
23
24 from rhodecode.tests.fixture import Fixture
24 from rhodecode.tests.fixtures.rc_fixture import Fixture
25 from rhodecode.tests.routes import route_path
25 from rhodecode.tests.routes import route_path
26
26
27 fixture = Fixture()
27 fixture = Fixture()
@@ -15,6 +15,9 b''
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 import logging
19 import os
20
18 import mock
21 import mock
19 import pytest
22 import pytest
20
23
@@ -41,7 +44,7 b' from rhodecode.tests import ('
41 TEST_USER_ADMIN_LOGIN,
44 TEST_USER_ADMIN_LOGIN,
42 TEST_USER_REGULAR_LOGIN,
45 TEST_USER_REGULAR_LOGIN,
43 )
46 )
44 from rhodecode.tests.fixture_mods.fixture_utils import PRTestUtility
47 from rhodecode.tests.fixtures.fixture_utils import PRTestUtility
45 from rhodecode.tests.routes import route_path
48 from rhodecode.tests.routes import route_path
46
49
47
50
@@ -1050,7 +1053,6 b' class TestPullrequestsView(object):'
1050 )
1053 )
1051 assert len(notifications.all()) == 2
1054 assert len(notifications.all()) == 2
1052
1055
1053 @pytest.mark.xfail(reason="unable to fix this test after python3 migration")
1054 def test_create_pull_request_stores_ancestor_commit_id(self, backend, csrf_token):
1056 def test_create_pull_request_stores_ancestor_commit_id(self, backend, csrf_token):
1055 commits = [
1057 commits = [
1056 {
1058 {
@@ -1125,20 +1127,38 b' class TestPullrequestsView(object):'
1125 response.mustcontain(no=["content_of_ancestor-child"])
1127 response.mustcontain(no=["content_of_ancestor-child"])
1126 response.mustcontain("content_of_change")
1128 response.mustcontain("content_of_change")
1127
1129
1128 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
1130 def test_merge_pull_request_enabled(self, pr_util, csrf_token, rcextensions_modification):
1129 # Clear any previous calls to rcextensions
1130 rhodecode.EXTENSIONS.calls.clear()
1131
1131
1132 pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
1132 pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
1133 pull_request_id = pull_request.pull_request_id
1133 pull_request_id = pull_request.pull_request_id
1134 repo_name = (pull_request.target_repo.scm_instance().name,)
1134 repo_name = pull_request.target_repo.scm_instance().name
1135
1135
1136 url = route_path(
1136 url = route_path(
1137 "pullrequest_merge",
1137 "pullrequest_merge",
1138 repo_name=str(repo_name[0]),
1138 repo_name=repo_name,
1139 pull_request_id=pull_request_id,
1139 pull_request_id=pull_request_id,
1140 )
1140 )
1141 response = self.app.post(url, params={"csrf_token": csrf_token}).follow()
1141
1142 rcstack_location = os.path.dirname(self.app._pyramid_registry.settings['__file__'])
1143 rc_ext_location = os.path.join(rcstack_location, 'rcextension-output.txt')
1144
1145
1146 mods = [
1147 ('_push_hook',
1148 f"""
1149 import os
1150 action = kwargs['action']
1151 commit_ids = kwargs['commit_ids']
1152 with open('{rc_ext_location}', 'w') as f:
1153 f.write('test-execution'+os.linesep)
1154 f.write(f'{{action}}'+os.linesep)
1155 f.write(f'{{commit_ids}}'+os.linesep)
1156 return HookResponse(0, 'HOOK_TEST')
1157 """)
1158 ]
1159 # Add the hook
1160 with rcextensions_modification(rcstack_location, mods, create_if_missing=True, force_create=True):
1161 response = self.app.post(url, params={"csrf_token": csrf_token}).follow()
1142
1162
1143 pull_request = PullRequest.get(pull_request_id)
1163 pull_request = PullRequest.get(pull_request_id)
1144
1164
@@ -1162,12 +1182,39 b' class TestPullrequestsView(object):'
1162 assert actions[-1].action == "user.push"
1182 assert actions[-1].action == "user.push"
1163 assert actions[-1].action_data["commit_ids"] == pr_commit_ids
1183 assert actions[-1].action_data["commit_ids"] == pr_commit_ids
1164
1184
1165 # Check post_push rcextension was really executed
1185 with open(rc_ext_location) as f:
1166 push_calls = rhodecode.EXTENSIONS.calls["_push_hook"]
1186 f_data = f.read()
1167 assert len(push_calls) == 1
1187 assert 'test-execution' in f_data
1168 unused_last_call_args, last_call_kwargs = push_calls[0]
1188 for commit_id in pr_commit_ids:
1169 assert last_call_kwargs["action"] == "push"
1189 assert f'{commit_id}' in f_data
1170 assert last_call_kwargs["commit_ids"] == pr_commit_ids
1190
1191 def test_merge_pull_request_forbidden_by_pre_push_hook(self, pr_util, csrf_token, rcextensions_modification, caplog):
1192 caplog.set_level(logging.WARNING, logger="rhodecode.model.pull_request")
1193
1194 pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
1195 pull_request_id = pull_request.pull_request_id
1196 repo_name = pull_request.target_repo.scm_instance().name
1197
1198 url = route_path(
1199 "pullrequest_merge",
1200 repo_name=repo_name,
1201 pull_request_id=pull_request_id,
1202 )
1203
1204 rcstack_location = os.path.dirname(self.app._pyramid_registry.settings['__file__'])
1205
1206 mods = [
1207 ('_pre_push_hook',
1208 f"""
1209 return HookResponse(1, 'HOOK_TEST_FORBIDDEN')
1210 """)
1211 ]
1212 # Add the hook
1213 with rcextensions_modification(rcstack_location, mods, create_if_missing=True, force_create=True):
1214 self.app.post(url, params={"csrf_token": csrf_token})
1215
1216 assert 'Merge failed, not updating the pull request.' in [r[2] for r in caplog.record_tuples]
1217
1171
1218
1172 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
1219 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
1173 pull_request = pr_util.create_pull_request(mergeable=False)
1220 pull_request = pr_util.create_pull_request(mergeable=False)
@@ -1523,7 +1570,6 b' class TestPullrequestsView(object):'
1523
1570
1524 assert pull_request.revisions == [commit_ids["change-rebased"]]
1571 assert pull_request.revisions == [commit_ids["change-rebased"]]
1525
1572
1526
1527 def test_remove_pull_request_branch(self, backend_git, csrf_token):
1573 def test_remove_pull_request_branch(self, backend_git, csrf_token):
1528 branch_name = "development"
1574 branch_name = "development"
1529 commits = [
1575 commits = [
@@ -26,7 +26,7 b' from rhodecode.model.db import Repositor'
26 from rhodecode.model.meta import Session
26 from rhodecode.model.meta import Session
27 from rhodecode.tests import (
27 from rhodecode.tests import (
28 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, assert_session_flash)
28 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, assert_session_flash)
29 from rhodecode.tests.fixture import Fixture
29 from rhodecode.tests.fixtures.rc_fixture import Fixture
30 from rhodecode.tests.routes import route_path
30 from rhodecode.tests.routes import route_path
31
31
32 fixture = Fixture()
32 fixture = Fixture()
@@ -24,7 +24,7 b' from rhodecode.model.db import Repositor'
24 from rhodecode.model.repo import RepoModel
24 from rhodecode.model.repo import RepoModel
25 from rhodecode.tests import (
25 from rhodecode.tests import (
26 HG_REPO, GIT_REPO, assert_session_flash, no_newline_id_generator)
26 HG_REPO, GIT_REPO, assert_session_flash, no_newline_id_generator)
27 from rhodecode.tests.fixture import Fixture
27 from rhodecode.tests.fixtures.rc_fixture import Fixture
28 from rhodecode.tests.utils import repo_on_filesystem
28 from rhodecode.tests.utils import repo_on_filesystem
29 from rhodecode.tests.routes import route_path
29 from rhodecode.tests.routes import route_path
30
30
@@ -31,7 +31,7 b' from rhodecode.model.meta import Session'
31 from rhodecode.model.repo import RepoModel
31 from rhodecode.model.repo import RepoModel
32 from rhodecode.model.scm import ScmModel
32 from rhodecode.model.scm import ScmModel
33 from rhodecode.tests import assert_session_flash
33 from rhodecode.tests import assert_session_flash
34 from rhodecode.tests.fixture import Fixture
34 from rhodecode.tests.fixtures.rc_fixture import Fixture
35 from rhodecode.tests.utils import AssertResponse, repo_on_filesystem
35 from rhodecode.tests.utils import AssertResponse, repo_on_filesystem
36 from rhodecode.tests.routes import route_path
36 from rhodecode.tests.routes import route_path
37
37
@@ -30,7 +30,7 b' from rhodecode.model.user import UserMod'
30 from rhodecode.tests import (
30 from rhodecode.tests import (
31 login_user_session, logout_user_session,
31 login_user_session, logout_user_session,
32 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
32 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
33 from rhodecode.tests.fixture import Fixture
33 from rhodecode.tests.fixtures.rc_fixture import Fixture
34 from rhodecode.tests.utils import AssertResponse
34 from rhodecode.tests.utils import AssertResponse
35 from rhodecode.tests.routes import route_path
35 from rhodecode.tests.routes import route_path
36
36
@@ -32,16 +32,13 b' class TestAdminRepoVcsSettings(object):'
32 @pytest.mark.parametrize('setting_name, setting_backends', [
32 @pytest.mark.parametrize('setting_name, setting_backends', [
33 ('hg_use_rebase_for_merging', ['hg']),
33 ('hg_use_rebase_for_merging', ['hg']),
34 ])
34 ])
35 def test_labs_settings_visible_if_enabled(
35 def test_labs_settings_visible_if_enabled(self, setting_name, setting_backends, backend):
36 self, setting_name, setting_backends, backend):
37 if backend.alias not in setting_backends:
36 if backend.alias not in setting_backends:
38 pytest.skip('Setting not available for backend {}'.format(backend))
37 pytest.skip('Setting not available for backend {}'.format(backend))
39
38
40 vcs_settings_url = route_path(
39 vcs_settings_url = route_path('edit_repo_vcs', repo_name=backend.repo.repo_name)
41 'edit_repo_vcs', repo_name=backend.repo.repo_name)
42
40
43 with mock.patch.dict(
41 with mock.patch.dict(rhodecode.CONFIG, {'labs_settings_active': 'true'}):
44 rhodecode.CONFIG, {'labs_settings_active': 'true'}):
45 response = self.app.get(vcs_settings_url)
42 response = self.app.get(vcs_settings_url)
46
43
47 assertr = response.assert_response()
44 assertr = response.assert_response()
@@ -20,7 +20,7 b' import os'
20 import sys
20 import sys
21 import logging
21 import logging
22
22
23 from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
23 from rhodecode.lib.hook_daemon.utils import prepare_callback_daemon
24 from rhodecode.lib.ext_json import sjson as json
24 from rhodecode.lib.ext_json import sjson as json
25 from rhodecode.lib.vcs.conf import settings as vcs_settings
25 from rhodecode.lib.vcs.conf import settings as vcs_settings
26 from rhodecode.lib.api_utils import call_service_api
26 from rhodecode.lib.api_utils import call_service_api
@@ -162,9 +162,7 b' class SshVcsServer(object):'
162 extras = {}
162 extras = {}
163 extras.update(tunnel_extras)
163 extras.update(tunnel_extras)
164
164
165 callback_daemon, extras = prepare_callback_daemon(
165 callback_daemon, extras = prepare_callback_daemon(extras, protocol=self.hooks_protocol)
166 extras, protocol=self.hooks_protocol,
167 host=vcs_settings.HOOKS_HOST)
168
166
169 with callback_daemon:
167 with callback_daemon:
170 try:
168 try:
@@ -33,19 +33,24 b' class GitServerCreator(object):'
33 'app:main': {
33 'app:main': {
34 'ssh.executable.git': git_path,
34 'ssh.executable.git': git_path,
35 'vcs.hooks.protocol.v2': 'celery',
35 'vcs.hooks.protocol.v2': 'celery',
36 'app.service_api.host': 'http://localhost',
37 'app.service_api.token': 'secret4',
38 'rhodecode.api.url': '/_admin/api',
36 }
39 }
37 }
40 }
38 repo_name = 'test_git'
41 repo_name = 'test_git'
39 repo_mode = 'receive-pack'
42 repo_mode = 'receive-pack'
40 user = plain_dummy_user()
43 user = plain_dummy_user()
41
44
42 def __init__(self):
45 def __init__(self, service_api_url, ini_file):
43 pass
46 self.service_api_url = service_api_url
47 self.ini_file = ini_file
44
48
45 def create(self, **kwargs):
49 def create(self, **kwargs):
50 self.config_data['app:main']['app.service_api.host'] = self.service_api_url
46 parameters = {
51 parameters = {
47 'store': self.root,
52 'store': self.root,
48 'ini_path': '',
53 'ini_path': self.ini_file,
49 'user': self.user,
54 'user': self.user,
50 'repo_name': self.repo_name,
55 'repo_name': self.repo_name,
51 'repo_mode': self.repo_mode,
56 'repo_mode': self.repo_mode,
@@ -60,12 +65,30 b' class GitServerCreator(object):'
60 return server
65 return server
61
66
62
67
63 @pytest.fixture()
68 @pytest.fixture(scope='module')
64 def git_server(app):
69 def git_server(request, module_app, rhodecode_factory, available_port_factory):
65 return GitServerCreator()
70 ini_file = module_app._pyramid_settings['__file__']
71 vcsserver_host = module_app._pyramid_settings['vcs.server']
72
73 store_dir = os.path.dirname(ini_file)
74
75 # start rhodecode for service API
76 rc = rhodecode_factory(
77 request,
78 store_dir=store_dir,
79 port=available_port_factory(),
80 overrides=(
81 {'handler_console': {'level': 'DEBUG'}},
82 {'app:main': {'vcs.server': vcsserver_host}},
83 {'app:main': {'repo_store.path': store_dir}}
84 ))
85
86 service_api_url = f'http://{rc.bind_addr}'
87
88 return GitServerCreator(service_api_url, ini_file)
66
89
67
90
68 class TestGitServer(object):
91 class TestGitServer:
69
92
70 def test_command(self, git_server):
93 def test_command(self, git_server):
71 server = git_server.create()
94 server = git_server.create()
@@ -102,14 +125,14 b' class TestGitServer(object):'
102 assert result is value
125 assert result is value
103
126
104 def test_run_returns_executes_command(self, git_server):
127 def test_run_returns_executes_command(self, git_server):
128 from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper
105 server = git_server.create()
129 server = git_server.create()
106 from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper
107
130
108 os.environ['SSH_CLIENT'] = '127.0.0.1'
131 os.environ['SSH_CLIENT'] = '127.0.0.1'
109 with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch:
132 with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch:
110 _patch.return_value = 0
133 _patch.return_value = 0
111 with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'):
134 with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'):
112 exit_code = server.run()
135 exit_code = server.run(tunnel_extras={'config': server.ini_path})
113
136
114 assert exit_code == (0, False)
137 assert exit_code == (0, False)
115
138
@@ -135,7 +158,7 b' class TestGitServer(object):'
135 'action': action,
158 'action': action,
136 'ip': '10.10.10.10',
159 'ip': '10.10.10.10',
137 'locked_by': [None, None],
160 'locked_by': [None, None],
138 'config': '',
161 'config': git_server.ini_file,
139 'repo_store': store,
162 'repo_store': store,
140 'server_url': None,
163 'server_url': None,
141 'hooks': ['push', 'pull'],
164 'hooks': ['push', 'pull'],
@@ -17,6 +17,7 b''
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
19 import os
20
20 import mock
21 import mock
21 import pytest
22 import pytest
22
23
@@ -32,22 +33,27 b' class MercurialServerCreator(object):'
32 'app:main': {
33 'app:main': {
33 'ssh.executable.hg': hg_path,
34 'ssh.executable.hg': hg_path,
34 'vcs.hooks.protocol.v2': 'celery',
35 'vcs.hooks.protocol.v2': 'celery',
36 'app.service_api.host': 'http://localhost',
37 'app.service_api.token': 'secret4',
38 'rhodecode.api.url': '/_admin/api',
35 }
39 }
36 }
40 }
37 repo_name = 'test_hg'
41 repo_name = 'test_hg'
38 user = plain_dummy_user()
42 user = plain_dummy_user()
39
43
40 def __init__(self):
44 def __init__(self, service_api_url, ini_file):
41 pass
45 self.service_api_url = service_api_url
46 self.ini_file = ini_file
42
47
43 def create(self, **kwargs):
48 def create(self, **kwargs):
49 self.config_data['app:main']['app.service_api.host'] = self.service_api_url
44 parameters = {
50 parameters = {
45 'store': self.root,
51 'store': self.root,
46 'ini_path': '',
52 'ini_path': self.ini_file,
47 'user': self.user,
53 'user': self.user,
48 'repo_name': self.repo_name,
54 'repo_name': self.repo_name,
49 'user_permissions': {
55 'user_permissions': {
50 'test_hg': 'repository.admin'
56 self.repo_name: 'repository.admin'
51 },
57 },
52 'settings': self.config_data['app:main'],
58 'settings': self.config_data['app:main'],
53 'env': plain_dummy_env()
59 'env': plain_dummy_env()
@@ -57,12 +63,30 b' class MercurialServerCreator(object):'
57 return server
63 return server
58
64
59
65
60 @pytest.fixture()
66 @pytest.fixture(scope='module')
61 def hg_server(app):
67 def hg_server(request, module_app, rhodecode_factory, available_port_factory):
62 return MercurialServerCreator()
68 ini_file = module_app._pyramid_settings['__file__']
69 vcsserver_host = module_app._pyramid_settings['vcs.server']
70
71 store_dir = os.path.dirname(ini_file)
72
73 # start rhodecode for service API
74 rc = rhodecode_factory(
75 request,
76 store_dir=store_dir,
77 port=available_port_factory(),
78 overrides=(
79 {'handler_console': {'level': 'DEBUG'}},
80 {'app:main': {'vcs.server': vcsserver_host}},
81 {'app:main': {'repo_store.path': store_dir}}
82 ))
83
84 service_api_url = f'http://{rc.bind_addr}'
85
86 return MercurialServerCreator(service_api_url, ini_file)
63
87
64
88
65 class TestMercurialServer(object):
89 class TestMercurialServer:
66
90
67 def test_command(self, hg_server, tmpdir):
91 def test_command(self, hg_server, tmpdir):
68 server = hg_server.create()
92 server = hg_server.create()
@@ -107,7 +131,7 b' class TestMercurialServer(object):'
107 with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch:
131 with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch:
108 _patch.return_value = 0
132 _patch.return_value = 0
109 with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'):
133 with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'):
110 exit_code = server.run()
134 exit_code = server.run(tunnel_extras={'config': server.ini_path})
111
135
112 assert exit_code == (0, False)
136 assert exit_code == (0, False)
113
137
@@ -15,7 +15,9 b''
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18 import os
19 import os
20
19 import mock
21 import mock
20 import pytest
22 import pytest
21
23
@@ -26,39 +28,62 b' from rhodecode.apps.ssh_support.tests.co'
26 class SubversionServerCreator(object):
28 class SubversionServerCreator(object):
27 root = '/tmp/repo/path/'
29 root = '/tmp/repo/path/'
28 svn_path = '/usr/local/bin/svnserve'
30 svn_path = '/usr/local/bin/svnserve'
31
29 config_data = {
32 config_data = {
30 'app:main': {
33 'app:main': {
31 'ssh.executable.svn': svn_path,
34 'ssh.executable.svn': svn_path,
32 'vcs.hooks.protocol.v2': 'celery',
35 'vcs.hooks.protocol.v2': 'celery',
36 'app.service_api.host': 'http://localhost',
37 'app.service_api.token': 'secret4',
38 'rhodecode.api.url': '/_admin/api',
33 }
39 }
34 }
40 }
35 repo_name = 'test-svn'
41 repo_name = 'test-svn'
36 user = plain_dummy_user()
42 user = plain_dummy_user()
37
43
38 def __init__(self):
44 def __init__(self, service_api_url, ini_file):
39 pass
45 self.service_api_url = service_api_url
46 self.ini_file = ini_file
40
47
41 def create(self, **kwargs):
48 def create(self, **kwargs):
49 self.config_data['app:main']['app.service_api.host'] = self.service_api_url
42 parameters = {
50 parameters = {
43 'store': self.root,
51 'store': self.root,
52 'ini_path': self.ini_file,
53 'user': self.user,
44 'repo_name': self.repo_name,
54 'repo_name': self.repo_name,
45 'ini_path': '',
46 'user': self.user,
47 'user_permissions': {
55 'user_permissions': {
48 self.repo_name: 'repository.admin'
56 self.repo_name: 'repository.admin'
49 },
57 },
50 'settings': self.config_data['app:main'],
58 'settings': self.config_data['app:main'],
51 'env': plain_dummy_env()
59 'env': plain_dummy_env()
52 }
60 }
53
54 parameters.update(kwargs)
61 parameters.update(kwargs)
55 server = SubversionServer(**parameters)
62 server = SubversionServer(**parameters)
56 return server
63 return server
57
64
58
65
59 @pytest.fixture()
66 @pytest.fixture(scope='module')
60 def svn_server(app):
67 def svn_server(request, module_app, rhodecode_factory, available_port_factory):
61 return SubversionServerCreator()
68 ini_file = module_app._pyramid_settings['__file__']
69 vcsserver_host = module_app._pyramid_settings['vcs.server']
70
71 store_dir = os.path.dirname(ini_file)
72
73 # start rhodecode for service API
74 rc = rhodecode_factory(
75 request,
76 store_dir=store_dir,
77 port=available_port_factory(),
78 overrides=(
79 {'handler_console': {'level': 'DEBUG'}},
80 {'app:main': {'vcs.server': vcsserver_host}},
81 {'app:main': {'repo_store.path': store_dir}}
82 ))
83
84 service_api_url = f'http://{rc.bind_addr}'
85
86 return SubversionServerCreator(service_api_url, ini_file)
62
87
63
88
64 class TestSubversionServer(object):
89 class TestSubversionServer(object):
@@ -168,8 +193,9 b' class TestSubversionServer(object):'
168 assert repo_name == expected_match
193 assert repo_name == expected_match
169
194
170 def test_run_returns_executes_command(self, svn_server):
195 def test_run_returns_executes_command(self, svn_server):
196 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
197
171 server = svn_server.create()
198 server = svn_server.create()
172 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
173 os.environ['SSH_CLIENT'] = '127.0.0.1'
199 os.environ['SSH_CLIENT'] = '127.0.0.1'
174 with mock.patch.object(
200 with mock.patch.object(
175 SubversionTunnelWrapper, 'get_first_client_response',
201 SubversionTunnelWrapper, 'get_first_client_response',
@@ -184,20 +210,18 b' class TestSubversionServer(object):'
184 SubversionTunnelWrapper, 'command',
210 SubversionTunnelWrapper, 'command',
185 return_value=['date']):
211 return_value=['date']):
186
212
187 exit_code = server.run()
213 exit_code = server.run(tunnel_extras={'config': server.ini_path})
188 # SVN has this differently configured, and we get in our mock env
214 # SVN has this differently configured, and we get in our mock env
189 # None as return code
215 # None as return code
190 assert exit_code == (None, False)
216 assert exit_code == (None, False)
191
217
192 def test_run_returns_executes_command_that_cannot_extract_repo_name(self, svn_server):
218 def test_run_returns_executes_command_that_cannot_extract_repo_name(self, svn_server):
219 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
220
193 server = svn_server.create()
221 server = svn_server.create()
194 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
222 with mock.patch.object(SubversionTunnelWrapper, 'command', return_value=['date']):
195 with mock.patch.object(
223 with mock.patch.object(SubversionTunnelWrapper, 'get_first_client_response',
196 SubversionTunnelWrapper, 'command',
197 return_value=['date']):
198 with mock.patch.object(
199 SubversionTunnelWrapper, 'get_first_client_response',
200 return_value=None):
224 return_value=None):
201 exit_code = server.run()
225 exit_code = server.run(tunnel_extras={'config': server.ini_path})
202
226
203 assert exit_code == (1, False)
227 assert exit_code == (1, False)
@@ -22,7 +22,7 b' from rhodecode.tests import ('
22 TestController, assert_session_flash, TEST_USER_ADMIN_LOGIN)
22 TestController, assert_session_flash, TEST_USER_ADMIN_LOGIN)
23 from rhodecode.model.db import UserGroup
23 from rhodecode.model.db import UserGroup
24 from rhodecode.model.meta import Session
24 from rhodecode.model.meta import Session
25 from rhodecode.tests.fixture import Fixture
25 from rhodecode.tests.fixtures.rc_fixture import Fixture
26 from rhodecode.tests.routes import route_path
26 from rhodecode.tests.routes import route_path
27
27
28 fixture = Fixture()
28 fixture = Fixture()
@@ -18,7 +18,7 b''
18 from rhodecode.model.user_group import UserGroupModel
18 from rhodecode.model.user_group import UserGroupModel
19 from rhodecode.tests import (
19 from rhodecode.tests import (
20 TestController, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
20 TestController, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
21 from rhodecode.tests.fixture import Fixture
21 from rhodecode.tests.fixtures.rc_fixture import Fixture
22 from rhodecode.tests.routes import route_path
22 from rhodecode.tests.routes import route_path
23
23
24 fixture = Fixture()
24 fixture = Fixture()
@@ -22,7 +22,7 b' from rhodecode.model.db import User'
22 from rhodecode.tests import (
22 from rhodecode.tests import (
23 TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
23 TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
24 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
24 TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
25 from rhodecode.tests.fixture import Fixture
25 from rhodecode.tests.fixtures.rc_fixture import Fixture
26 from rhodecode.tests.utils import AssertResponse
26 from rhodecode.tests.utils import AssertResponse
27 from rhodecode.tests.routes import route_path
27 from rhodecode.tests.routes import route_path
28
28
@@ -30,7 +30,7 b' from rhodecode.lib.vcs import connect_vc'
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32
32
33 def propagate_rhodecode_config(global_config, settings, config):
33 def propagate_rhodecode_config(global_config, settings, config, full=True):
34 # Store the settings to make them available to other modules.
34 # Store the settings to make them available to other modules.
35 settings_merged = global_config.copy()
35 settings_merged = global_config.copy()
36 settings_merged.update(settings)
36 settings_merged.update(settings)
@@ -40,7 +40,7 b' def propagate_rhodecode_config(global_co'
40 rhodecode.PYRAMID_SETTINGS = settings_merged
40 rhodecode.PYRAMID_SETTINGS = settings_merged
41 rhodecode.CONFIG = settings_merged
41 rhodecode.CONFIG = settings_merged
42
42
43 if 'default_user_id' not in rhodecode.CONFIG:
43 if full and 'default_user_id' not in rhodecode.CONFIG:
44 rhodecode.CONFIG['default_user_id'] = utils.get_default_user_id()
44 rhodecode.CONFIG['default_user_id'] = utils.get_default_user_id()
45 log.debug('set rhodecode.CONFIG data')
45 log.debug('set rhodecode.CONFIG data')
46
46
@@ -93,6 +93,7 b' def load_pyramid_environment(global_conf'
93 # first run, to store data...
93 # first run, to store data...
94 propagate_rhodecode_config(global_config, settings, {})
94 propagate_rhodecode_config(global_config, settings, {})
95
95
96
96 if vcs_server_enabled:
97 if vcs_server_enabled:
97 connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
98 connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
98 else:
99 else:
@@ -101,6 +101,9 b' def make_pyramid_app(global_config, **se'
101 patches.inspect_getargspec()
101 patches.inspect_getargspec()
102 patches.repoze_sendmail_lf_fix()
102 patches.repoze_sendmail_lf_fix()
103
103
104 # first init, so load_pyramid_enviroment, can access some critical data, like __file__
105 propagate_rhodecode_config(global_config, {}, {}, full=False)
106
104 load_pyramid_environment(global_config, settings)
107 load_pyramid_environment(global_config, settings)
105
108
106 # Static file view comes first
109 # Static file view comes first
@@ -17,7 +17,7 b''
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 rcextensions module, please edit `hooks.py` to over write hooks logic
20 rcextensions module, please edit `hooks.py` to over-write hooks logic
21 """
21 """
22
22
23 from .hooks import (
23 from .hooks import (
@@ -85,7 +85,7 b' def _pre_push_hook(*args, **kwargs):'
85
85
86 # check files names
86 # check files names
87 if forbidden_files:
87 if forbidden_files:
88 reason = 'File {} is forbidden to be pushed'.format(file_name)
88 reason = f'File {file_name} is forbidden to be pushed'
89 for forbidden_pattern in forbid_files:
89 for forbidden_pattern in forbid_files:
90 # here we can also filter for operation, e.g if check for only ADDED files
90 # here we can also filter for operation, e.g if check for only ADDED files
91 # if operation == 'A':
91 # if operation == 'A':
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -55,7 +54,7 b' def run(*args, **kwargs):'
55 return fields
54 return fields
56
55
57
56
58 class _Undefined(object):
57 class _Undefined:
59 pass
58 pass
60
59
61
60
@@ -67,7 +66,7 b' def get_field(extra_fields_data, key, de'
67
66
68 if key not in extra_fields_data:
67 if key not in extra_fields_data:
69 if isinstance(default, _Undefined):
68 if isinstance(default, _Undefined):
70 raise ValueError('key {} not present in extra_fields'.format(key))
69 raise ValueError(f'key {key} not present in extra_fields')
71 return default
70 return default
72
71
73 # NOTE(dan): from metadata we get field_label, field_value, field_desc, field_type
72 # NOTE(dan): from metadata we get field_label, field_value, field_desc, field_type
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -52,7 +51,7 b' def get_git_commits(repo, refs):'
52 cmd = [
51 cmd = [
53 'log',
52 'log',
54 '--pretty=format:{"commit_id": "%H", "author": "%aN <%aE>", "date": "%ad", "message": "%s"}',
53 '--pretty=format:{"commit_id": "%H", "author": "%aN <%aE>", "date": "%ad", "message": "%s"}',
55 '{}...{}'.format(old_rev, new_rev)
54 f'{old_rev}...{new_rev}'
56 ]
55 ]
57
56
58 stdout, stderr = repo.run_git_command(cmd, extra_env=git_env)
57 stdout, stderr = repo.run_git_command(cmd, extra_env=git_env)
@@ -80,12 +79,12 b' def run(*args, **kwargs):'
80
79
81 if vcs_type == 'git':
80 if vcs_type == 'git':
82 for rev_data in kwargs['commit_ids']:
81 for rev_data in kwargs['commit_ids']:
83 new_environ = dict((k, v) for k, v in rev_data['git_env'])
82 new_environ = {k: v for k, v in rev_data['git_env']}
84 commits = get_git_commits(vcs_repo, kwargs['commit_ids'])
83 commits = get_git_commits(vcs_repo, kwargs['commit_ids'])
85
84
86 if vcs_type == 'hg':
85 if vcs_type == 'hg':
87 for rev_data in kwargs['commit_ids']:
86 for rev_data in kwargs['commit_ids']:
88 new_environ = dict((k, v) for k, v in rev_data['hg_env'])
87 new_environ = {k: v for k, v in rev_data['hg_env']}
89 commits = get_hg_commits(vcs_repo, kwargs['commit_ids'])
88 commits = get_hg_commits(vcs_repo, kwargs['commit_ids'])
90
89
91 return commits
90 return commits
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -133,12 +132,12 b' def run(*args, **kwargs):'
133
132
134 if vcs_type == 'git':
133 if vcs_type == 'git':
135 for rev_data in kwargs['commit_ids']:
134 for rev_data in kwargs['commit_ids']:
136 new_environ = dict((k, v) for k, v in rev_data['git_env'])
135 new_environ = {k: v for k, v in rev_data['git_env']}
137 files = get_git_files(repo, vcs_repo, kwargs['commit_ids'])
136 files = get_git_files(repo, vcs_repo, kwargs['commit_ids'])
138
137
139 if vcs_type == 'hg':
138 if vcs_type == 'hg':
140 for rev_data in kwargs['commit_ids']:
139 for rev_data in kwargs['commit_ids']:
141 new_environ = dict((k, v) for k, v in rev_data['hg_env'])
140 new_environ = {k: v for k, v in rev_data['hg_env']}
142 files = get_hg_files(repo, vcs_repo, kwargs['commit_ids'])
141 files = get_hg_files(repo, vcs_repo, kwargs['commit_ids'])
143
142
144 if vcs_type == 'svn':
143 if vcs_type == 'svn':
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -28,7 +28,7 b' import urllib.error'
28 log = logging.getLogger('rhodecode.' + __name__)
28 log = logging.getLogger('rhodecode.' + __name__)
29
29
30
30
31 class HookResponse(object):
31 class HookResponse:
32 def __init__(self, status, output):
32 def __init__(self, status, output):
33 self.status = status
33 self.status = status
34 self.output = output
34 self.output = output
@@ -44,6 +44,11 b' class HookResponse(object):'
44 def __bool__(self):
44 def __bool__(self):
45 return self.status == 0
45 return self.status == 0
46
46
47 def to_json(self):
48 return {'status': self.status, 'output': self.output}
49
50 def __repr__(self):
51 return self.to_json().__repr__()
47
52
48 class DotDict(dict):
53 class DotDict(dict):
49
54
@@ -91,8 +96,8 b' class DotDict(dict):'
91 def __repr__(self):
96 def __repr__(self):
92 keys = list(self.keys())
97 keys = list(self.keys())
93 keys.sort()
98 keys.sort()
94 args = ', '.join(['%s=%r' % (key, self[key]) for key in keys])
99 args = ', '.join(['{}={!r}'.format(key, self[key]) for key in keys])
95 return '%s(%s)' % (self.__class__.__name__, args)
100 return '{}({})'.format(self.__class__.__name__, args)
96
101
97 @staticmethod
102 @staticmethod
98 def fromDict(d):
103 def fromDict(d):
@@ -110,7 +115,7 b' def serialize(x):'
110
115
111 def unserialize(x):
116 def unserialize(x):
112 if isinstance(x, dict):
117 if isinstance(x, dict):
113 return dict((k, unserialize(v)) for k, v in x.items())
118 return {k: unserialize(v) for k, v in x.items()}
114 elif isinstance(x, (list, tuple)):
119 elif isinstance(x, (list, tuple)):
115 return type(x)(unserialize(v) for v in x)
120 return type(x)(unserialize(v) for v in x)
116 else:
121 else:
@@ -161,7 +166,8 b' def str2bool(_str) -> bool:'
161 string into boolean
166 string into boolean
162
167
163 :param _str: string value to translate into boolean
168 :param _str: string value to translate into boolean
164 :returns: bool from given string
169 :rtype: boolean
170 :returns: boolean from given string
165 """
171 """
166 if _str is None:
172 if _str is None:
167 return False
173 return False
@@ -49,22 +49,22 b' link_config = ['
49 {
49 {
50 "name": "enterprise_docs",
50 "name": "enterprise_docs",
51 "target": "https://rhodecode.com/r1/enterprise/docs/",
51 "target": "https://rhodecode.com/r1/enterprise/docs/",
52 "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/",
52 "external_target": "https://docs.rhodecode.com/4.x/rce/index.html",
53 },
53 },
54 {
54 {
55 "name": "enterprise_log_file_locations",
55 "name": "enterprise_log_file_locations",
56 "target": "https://rhodecode.com/r1/enterprise/docs/admin-system-overview/",
56 "target": "https://rhodecode.com/r1/enterprise/docs/admin-system-overview/",
57 "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/admin/system-overview.html#log-files",
57 "external_target": "https://docs.rhodecode.com/4.x/rce/admin/system-overview.html#log-files",
58 },
58 },
59 {
59 {
60 "name": "enterprise_issue_tracker_settings",
60 "name": "enterprise_issue_tracker_settings",
61 "target": "https://rhodecode.com/r1/enterprise/docs/issue-trackers-overview/",
61 "target": "https://rhodecode.com/r1/enterprise/docs/issue-trackers-overview/",
62 "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/issue-trackers/issue-trackers.html",
62 "external_target": "https://docs.rhodecode.com/4.x/rce/issue-trackers/issue-trackers.html",
63 },
63 },
64 {
64 {
65 "name": "enterprise_svn_setup",
65 "name": "enterprise_svn_setup",
66 "target": "https://rhodecode.com/r1/enterprise/docs/svn-setup/",
66 "target": "https://rhodecode.com/r1/enterprise/docs/svn-setup/",
67 "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/admin/svn-http.html",
67 "external_target": "https://docs.rhodecode.com/4.x/rce/admin/svn-http.html",
68 },
68 },
69 {
69 {
70 "name": "enterprise_license_convert_from_old",
70 "name": "enterprise_license_convert_from_old",
@@ -19,6 +19,8 b''
19 import os
19 import os
20 import platform
20 import platform
21
21
22 from rhodecode.lib.type_utils import str2bool
23
22 DEFAULT_USER = 'default'
24 DEFAULT_USER = 'default'
23
25
24
26
@@ -48,28 +50,23 b' def initialize_database(config):'
48 engine = engine_from_config(config, 'sqlalchemy.db1.')
50 engine = engine_from_config(config, 'sqlalchemy.db1.')
49 init_model(engine, encryption_key=get_encryption_key(config))
51 init_model(engine, encryption_key=get_encryption_key(config))
50
52
53 def initialize_test_environment(settings):
54 skip_test_env = str2bool(os.environ.get('RC_NO_TEST_ENV'))
55 if skip_test_env:
56 return
51
57
52 def initialize_test_environment(settings, test_env=None):
58 repo_store_path = os.environ.get('RC_TEST_ENV_REPO_STORE') or settings['repo_store.path']
53 if test_env is None:
54 test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0))
55
59
56 from rhodecode.lib.utils import (
60 from rhodecode.lib.utils import (
57 create_test_directory, create_test_database, create_test_repositories,
61 create_test_directory, create_test_database, create_test_repositories,
58 create_test_index)
62 create_test_index)
59 from rhodecode.tests import TESTS_TMP_PATH
60 from rhodecode.lib.vcs.backends.hg import largefiles_store
61 from rhodecode.lib.vcs.backends.git import lfs_store
62
63
64 create_test_directory(repo_store_path)
65
66 create_test_database(repo_store_path, settings)
63 # test repos
67 # test repos
64 if test_env:
68 create_test_repositories(repo_store_path, settings)
65 create_test_directory(TESTS_TMP_PATH)
69 create_test_index(repo_store_path, settings)
66 # large object stores
67 create_test_directory(largefiles_store(TESTS_TMP_PATH))
68 create_test_directory(lfs_store(TESTS_TMP_PATH))
69
70 create_test_database(TESTS_TMP_PATH, settings)
71 create_test_repositories(TESTS_TMP_PATH, settings)
72 create_test_index(TESTS_TMP_PATH, settings)
73
70
74
71
75 def get_vcs_server_protocol(config):
72 def get_vcs_server_protocol(config):
@@ -20,8 +20,7 b''
20 Set of custom exceptions used in RhodeCode
20 Set of custom exceptions used in RhodeCode
21 """
21 """
22
22
23 from webob.exc import HTTPClientError
23 from pyramid.httpexceptions import HTTPBadGateway, HTTPClientError
24 from pyramid.httpexceptions import HTTPBadGateway
25
24
26
25
27 class LdapUsernameError(Exception):
26 class LdapUsernameError(Exception):
@@ -102,12 +101,7 b' class HTTPRequirementError(HTTPClientErr'
102 self.args = (message, )
101 self.args = (message, )
103
102
104
103
105 class ClientNotSupportedError(HTTPRequirementError):
104 class HTTPLockedRepo(HTTPClientError):
106 title = explanation = 'Client Not Supported'
107 reason = None
108
109
110 class HTTPLockedRC(HTTPClientError):
111 """
105 """
112 Special Exception For locked Repos in RhodeCode, the return code can
106 Special Exception For locked Repos in RhodeCode, the return code can
113 be overwritten by _code keyword argument passed into constructors
107 be overwritten by _code keyword argument passed into constructors
@@ -131,14 +125,13 b' class HTTPBranchProtected(HTTPClientErro'
131 Special Exception For Indicating that branch is protected in RhodeCode, the
125 Special Exception For Indicating that branch is protected in RhodeCode, the
132 return code can be overwritten by _code keyword argument passed into constructors
126 return code can be overwritten by _code keyword argument passed into constructors
133 """
127 """
134 code = 403
135 title = explanation = 'Branch Protected'
128 title = explanation = 'Branch Protected'
136 reason = None
129 reason = None
137
130
138 def __init__(self, message, *args, **kwargs):
131
139 self.title = self.explanation = message
132 class ClientNotSupported(HTTPRequirementError):
140 super().__init__(*args, **kwargs)
133 title = explanation = 'Client Not Supported'
141 self.args = (message, )
134 reason = None
142
135
143
136
144 class IMCCommitError(Exception):
137 class IMCCommitError(Exception):
@@ -1,4 +1,4 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -16,13 +16,14 b''
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import os
20 import time
21 import logging
19 import logging
20 import traceback
22
21
23 from rhodecode.lib.config_utils import get_app_config_lightweight
22 from rhodecode.model import meta
23 from rhodecode.lib import hooks_base
24 from rhodecode.lib.utils2 import AttributeDict
25 from rhodecode.lib.exceptions import HTTPLockedRepo, HTTPBranchProtected
24
26
25 from rhodecode.lib.svn_txn_utils import get_txn_id_from_store
26
27
27 log = logging.getLogger(__name__)
28 log = logging.getLogger(__name__)
28
29
@@ -42,53 +43,82 b' class BaseHooksCallbackDaemon:'
42 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
43 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
43
44
44
45
45 class HooksModuleCallbackDaemon(BaseHooksCallbackDaemon):
46 class Hooks(object):
47 """
48 Exposes the hooks module for calling them using the local HooksModuleCallbackDaemon
49 """
50 def __init__(self, request=None, log_prefix=''):
51 self.log_prefix = log_prefix
52 self.request = request
46
53
47 def __init__(self, module):
54 def repo_size(self, extras):
48 super().__init__()
55 log.debug("%sCalled repo_size of %s object", self.log_prefix, self)
49 self.hooks_module = module
56 return self._call_hook(hooks_base.repo_size, extras)
50
57
51 def __repr__(self):
58 def pre_pull(self, extras):
52 return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})'
59 log.debug("%sCalled pre_pull of %s object", self.log_prefix, self)
53
60 return self._call_hook(hooks_base.pre_pull, extras)
54
61
55 def prepare_callback_daemon(extras, protocol, host, txn_id=None):
62 def post_pull(self, extras):
63 log.debug("%sCalled post_pull of %s object", self.log_prefix, self)
64 return self._call_hook(hooks_base.post_pull, extras)
65
66 def pre_push(self, extras):
67 log.debug("%sCalled pre_push of %s object", self.log_prefix, self)
68 return self._call_hook(hooks_base.pre_push, extras)
56
69
57 match protocol:
70 def post_push(self, extras):
58 case 'http':
71 log.debug("%sCalled post_push of %s object", self.log_prefix, self)
59 from rhodecode.lib.hook_daemon.http_hooks_deamon import HttpHooksCallbackDaemon
72 return self._call_hook(hooks_base.post_push, extras)
60 port = 0
73
61 if txn_id:
74 def _call_hook(self, hook, extras):
62 # read txn-id to re-use the PORT for callback daemon
75 extras = AttributeDict(extras)
63 repo_path = os.path.join(extras['repo_store'], extras['repository'])
76 _server_url = extras['server_url']
64 txn_details = get_txn_id_from_store(repo_path, txn_id)
65 port = txn_details.get('port', 0)
66
77
67 callback_daemon = HttpHooksCallbackDaemon(
78 extras.request = self.request
68 txn_id=txn_id, host=host, port=port)
79 try:
69 case 'celery':
80 result = hook(extras)
70 from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon
81 if result is None:
71
82 raise Exception(f'Failed to obtain hook result from func: {hook}')
72 config = get_app_config_lightweight(extras['config'])
83 except HTTPBranchProtected as error:
73 task_queue = config.get('celery.broker_url')
84 # Those special cases don't need error reporting. It's a case of
74 task_backend = config.get('celery.result_backend')
85 # locked repo or protected branch
86 result = AttributeDict({
87 'status': error.code,
88 'output': error.explanation
89 })
90 except HTTPLockedRepo as error:
91 # Those special cases don't need error reporting. It's a case of
92 # locked repo or protected branch
93 result = AttributeDict({
94 'status': error.code,
95 'output': error.explanation
96 })
97 except Exception as error:
98 # locked needs different handling since we need to also
99 # handle PULL operations
100 log.exception('%sException when handling hook %s', self.log_prefix, hook)
101 exc_tb = traceback.format_exc()
102 error_args = error.args
103 return {
104 'status': 128,
105 'output': '',
106 'exception': type(error).__name__,
107 'exception_traceback': exc_tb,
108 'exception_args': error_args,
109 }
110 finally:
111 meta.Session.remove()
75
112
76 callback_daemon = CeleryHooksCallbackDaemon(task_queue, task_backend)
113 log.debug('%sGot hook call response %s', self.log_prefix, result)
77 case 'local':
114 return {
78 from rhodecode.lib.hook_daemon.hook_module import Hooks
115 'status': result.status,
79 callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__)
116 'output': result.output,
80 case _:
117 }
81 log.error('Unsupported callback daemon protocol "%s"', protocol)
82 raise Exception('Unsupported callback daemon protocol.')
83
118
84 extras['hooks_uri'] = getattr(callback_daemon, 'hooks_uri', '')
119 def __enter__(self):
85 extras['task_queue'] = getattr(callback_daemon, 'task_queue', '')
120 return self
86 extras['task_backend'] = getattr(callback_daemon, 'task_backend', '')
87 extras['hooks_protocol'] = protocol
88 extras['time'] = time.time()
89
121
90 # register txn_id
122 def __exit__(self, exc_type, exc_val, exc_tb):
91 extras['txn_id'] = txn_id
123 pass
92 log.debug('Prepared a callback daemon: %s',
124
93 callback_daemon.__class__.__name__)
94 return callback_daemon, extras
@@ -22,14 +22,16 b' from rhodecode.lib.hook_daemon.base impo'
22 class CeleryHooksCallbackDaemon(BaseHooksCallbackDaemon):
22 class CeleryHooksCallbackDaemon(BaseHooksCallbackDaemon):
23 """
23 """
24 Context manger for achieving a compatibility with celery backend
24 Context manger for achieving a compatibility with celery backend
25 It is calling a call to vcsserver, where it uses HooksCeleryClient to actually call a task from
26
27 f'rhodecode.lib.celerylib.tasks.{method}'
28
25 """
29 """
26
30
27 def __init__(self, task_queue, task_backend):
31 def __init__(self, broker_url, result_backend):
28 self.task_queue = task_queue
32 super().__init__()
29 self.task_backend = task_backend
33 self.broker_url = broker_url
34 self.result_backend = result_backend
30
35
31 def __repr__(self):
36 def __repr__(self):
32 return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})'
37 return f'CeleryHooksCallbackDaemon(broker_url={self.broker_url}, result_backend={self.result_backend})'
33
34 def __repr__(self):
35 return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})'
@@ -17,88 +17,18 b''
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import logging
20 import traceback
21
20
22 from rhodecode.model import meta
21 from rhodecode.lib.hook_daemon.base import BaseHooksCallbackDaemon
23
24 from rhodecode.lib import hooks_base
25 from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected
26 from rhodecode.lib.utils2 import AttributeDict
27
22
28 log = logging.getLogger(__name__)
23 log = logging.getLogger(__name__)
29
24
30
25
31 class Hooks(object):
26 class HooksModuleCallbackDaemon(BaseHooksCallbackDaemon):
32 """
33 Exposes the hooks for remote callbacks
34 """
35 def __init__(self, request=None, log_prefix=''):
36 self.log_prefix = log_prefix
37 self.request = request
38
39 def repo_size(self, extras):
40 log.debug("%sCalled repo_size of %s object", self.log_prefix, self)
41 return self._call_hook(hooks_base.repo_size, extras)
42
43 def pre_pull(self, extras):
44 log.debug("%sCalled pre_pull of %s object", self.log_prefix, self)
45 return self._call_hook(hooks_base.pre_pull, extras)
46
47 def post_pull(self, extras):
48 log.debug("%sCalled post_pull of %s object", self.log_prefix, self)
49 return self._call_hook(hooks_base.post_pull, extras)
50
51 def pre_push(self, extras):
52 log.debug("%sCalled pre_push of %s object", self.log_prefix, self)
53 return self._call_hook(hooks_base.pre_push, extras)
54
55 def post_push(self, extras):
56 log.debug("%sCalled post_push of %s object", self.log_prefix, self)
57 return self._call_hook(hooks_base.post_push, extras)
58
59 def _call_hook(self, hook, extras):
60 extras = AttributeDict(extras)
61 _server_url = extras['server_url']
62
63 extras.request = self.request
64
27
65 try:
28 def __init__(self, module):
66 result = hook(extras)
29 super().__init__()
67 if result is None:
30 self.hooks_module = module
68 raise Exception(f'Failed to obtain hook result from func: {hook}')
69 except HTTPBranchProtected as error:
70 # Those special cases don't need error reporting. It's a case of
71 # locked repo or protected branch
72 result = AttributeDict({
73 'status': error.code,
74 'output': error.explanation
75 })
76 except (HTTPLockedRC, Exception) as error:
77 # locked needs different handling since we need to also
78 # handle PULL operations
79 exc_tb = ''
80 if not isinstance(error, HTTPLockedRC):
81 exc_tb = traceback.format_exc()
82 log.exception('%sException when handling hook %s', self.log_prefix, hook)
83 error_args = error.args
84 return {
85 'status': 128,
86 'output': '',
87 'exception': type(error).__name__,
88 'exception_traceback': exc_tb,
89 'exception_args': error_args,
90 }
91 finally:
92 meta.Session.remove()
93
31
94 log.debug('%sGot hook call response %s', self.log_prefix, result)
32 def __repr__(self):
95 return {
33 return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})'
96 'status': result.status,
97 'output': result.output,
98 }
99
34
100 def __enter__(self):
101 return self
102
103 def __exit__(self, exc_type, exc_val, exc_tb):
104 pass
@@ -30,14 +30,14 b' from rhodecode.lib import helpers as h'
30 from rhodecode.lib import audit_logger
30 from rhodecode.lib import audit_logger
31 from rhodecode.lib.utils2 import safe_str, user_agent_normalizer
31 from rhodecode.lib.utils2 import safe_str, user_agent_normalizer
32 from rhodecode.lib.exceptions import (
32 from rhodecode.lib.exceptions import (
33 HTTPLockedRC, HTTPBranchProtected, UserCreationError, ClientNotSupportedError)
33 HTTPLockedRepo, HTTPBranchProtected, UserCreationError, ClientNotSupported)
34 from rhodecode.model.db import Repository, User
34 from rhodecode.model.db import Repository, User
35 from rhodecode.lib.statsd_client import StatsdClient
35 from rhodecode.lib.statsd_client import StatsdClient
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class HookResponse(object):
40 class HookResponse:
41 def __init__(self, status, output):
41 def __init__(self, status, output):
42 self.status = status
42 self.status = status
43 self.output = output
43 self.output = output
@@ -56,6 +56,8 b' class HookResponse(object):'
56 def to_json(self):
56 def to_json(self):
57 return {'status': self.status, 'output': self.output}
57 return {'status': self.status, 'output': self.output}
58
58
59 def __repr__(self):
60 return self.to_json().__repr__()
59
61
60 def is_shadow_repo(extras):
62 def is_shadow_repo(extras):
61 """
63 """
@@ -73,8 +75,69 b' def check_vcs_client(extras):'
73 except ModuleNotFoundError:
75 except ModuleNotFoundError:
74 is_vcs_client_whitelisted = lambda *x: True
76 is_vcs_client_whitelisted = lambda *x: True
75 backend = extras.get('scm')
77 backend = extras.get('scm')
76 if not is_vcs_client_whitelisted(extras.get('user_agent'), backend):
78 user_agent = extras.get('user_agent')
77 raise ClientNotSupportedError(f"Your {backend} client is forbidden")
79 if not is_vcs_client_whitelisted(user_agent, backend):
80 raise ClientNotSupported(f"Your {backend} client (version={user_agent}) is forbidden by security rules")
81
82
83 def check_locked_repo(extras, check_same_user=True):
84 user = User.get_by_username(extras.username)
85 output = ''
86 if extras.locked_by[0] and (not check_same_user or user.user_id != extras.locked_by[0]):
87
88 locked_by = User.get(extras.locked_by[0]).username
89 reason = extras.locked_by[2]
90 # this exception is interpreted in git/hg middlewares and based
91 # on that proper return code is server to client
92 _http_ret = HTTPLockedRepo(_locked_by_explanation(extras.repository, locked_by, reason))
93 if str(_http_ret.code).startswith('2'):
94 # 2xx Codes don't raise exceptions
95 output = _http_ret.title
96 else:
97 raise _http_ret
98
99 return output
100
101
102 def check_branch_protected(extras):
103 if extras.commit_ids and extras.check_branch_perms:
104 user = User.get_by_username(extras.username)
105 auth_user = user.AuthUser()
106 repo = Repository.get_by_repo_name(extras.repository)
107 if not repo:
108 raise ValueError(f'Repo for {extras.repository} not found')
109 affected_branches = []
110 if repo.repo_type == 'hg':
111 for entry in extras.commit_ids:
112 if entry['type'] == 'branch':
113 is_forced = bool(entry['multiple_heads'])
114 affected_branches.append([entry['name'], is_forced])
115 elif repo.repo_type == 'git':
116 for entry in extras.commit_ids:
117 if entry['type'] == 'heads':
118 is_forced = bool(entry['pruned_sha'])
119 affected_branches.append([entry['name'], is_forced])
120
121 for branch_name, is_forced in affected_branches:
122
123 rule, branch_perm = auth_user.get_rule_and_branch_permission(extras.repository, branch_name)
124 if not branch_perm:
125 # no branch permission found for this branch, just keep checking
126 continue
127
128 if branch_perm == 'branch.push_force':
129 continue
130 elif branch_perm == 'branch.push' and is_forced is False:
131 continue
132 elif branch_perm == 'branch.push' and is_forced is True:
133 halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}. ' \
134 f'FORCE PUSH FORBIDDEN.'
135 else:
136 halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}.'
137
138 if halt_message:
139 _http_ret = HTTPBranchProtected(halt_message)
140 raise _http_ret
78
141
79 def _get_scm_size(alias, root_path):
142 def _get_scm_size(alias, root_path):
80
143
@@ -109,116 +172,30 b' def repo_size(extras):'
109 repo = Repository.get_by_repo_name(extras.repository)
172 repo = Repository.get_by_repo_name(extras.repository)
110 vcs_part = f'.{repo.repo_type}'
173 vcs_part = f'.{repo.repo_type}'
111 size_vcs, size_root, size_total = _get_scm_size(vcs_part, repo.repo_full_path)
174 size_vcs, size_root, size_total = _get_scm_size(vcs_part, repo.repo_full_path)
112 msg = (f'RhodeCode: `{repo.repo_name}` size summary {vcs_part}:{size_vcs} repo:{size_root} total:{size_total}\n')
175 msg = f'RhodeCode: `{repo.repo_name}` size summary {vcs_part}:{size_vcs} repo:{size_root} total:{size_total}\n'
113 return HookResponse(0, msg)
176 return HookResponse(0, msg)
114
177
115
178
116 def pre_push(extras):
117 """
118 Hook executed before pushing code.
119
120 It bans pushing when the repository is locked.
121 """
122
123 check_vcs_client(extras)
124 user = User.get_by_username(extras.username)
125 output = ''
126 if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]):
127 locked_by = User.get(extras.locked_by[0]).username
128 reason = extras.locked_by[2]
129 # this exception is interpreted in git/hg middlewares and based
130 # on that proper return code is server to client
131 _http_ret = HTTPLockedRC(
132 _locked_by_explanation(extras.repository, locked_by, reason))
133 if str(_http_ret.code).startswith('2'):
134 # 2xx Codes don't raise exceptions
135 output = _http_ret.title
136 else:
137 raise _http_ret
138
139 hook_response = ''
140 if not is_shadow_repo(extras):
141
142 if extras.commit_ids and extras.check_branch_perms:
143 auth_user = user.AuthUser()
144 repo = Repository.get_by_repo_name(extras.repository)
145 if not repo:
146 raise ValueError(f'Repo for {extras.repository} not found')
147 affected_branches = []
148 if repo.repo_type == 'hg':
149 for entry in extras.commit_ids:
150 if entry['type'] == 'branch':
151 is_forced = bool(entry['multiple_heads'])
152 affected_branches.append([entry['name'], is_forced])
153 elif repo.repo_type == 'git':
154 for entry in extras.commit_ids:
155 if entry['type'] == 'heads':
156 is_forced = bool(entry['pruned_sha'])
157 affected_branches.append([entry['name'], is_forced])
158
159 for branch_name, is_forced in affected_branches:
160
161 rule, branch_perm = auth_user.get_rule_and_branch_permission(
162 extras.repository, branch_name)
163 if not branch_perm:
164 # no branch permission found for this branch, just keep checking
165 continue
166
167 if branch_perm == 'branch.push_force':
168 continue
169 elif branch_perm == 'branch.push' and is_forced is False:
170 continue
171 elif branch_perm == 'branch.push' and is_forced is True:
172 halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}. ' \
173 f'FORCE PUSH FORBIDDEN.'
174 else:
175 halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}.'
176
177 if halt_message:
178 _http_ret = HTTPBranchProtected(halt_message)
179 raise _http_ret
180
181 # Propagate to external components. This is done after checking the
182 # lock, for consistent behavior.
183 hook_response = pre_push_extension(
184 repo_store_path=Repository.base_path(), **extras)
185 events.trigger(events.RepoPrePushEvent(
186 repo_name=extras.repository, extras=extras))
187
188 return HookResponse(0, output) + hook_response
189
190
191 def pre_pull(extras):
179 def pre_pull(extras):
192 """
180 """
193 Hook executed before pulling the code.
181 Hook executed before pulling the code.
194
182
195 It bans pulling when the repository is locked.
183 It bans pulling when the repository is locked.
184 It bans pulling when incorrect client is used.
196 """
185 """
197
198 check_vcs_client(extras)
199 output = ''
186 output = ''
200 if extras.locked_by[0]:
187 check_vcs_client(extras)
201 locked_by = User.get(extras.locked_by[0]).username
188
202 reason = extras.locked_by[2]
189 # locking repo can, but not have to stop the operation it can also just produce output
203 # this exception is interpreted in git/hg middlewares and based
190 output += check_locked_repo(extras, check_same_user=False)
204 # on that proper return code is server to client
205 _http_ret = HTTPLockedRC(
206 _locked_by_explanation(extras.repository, locked_by, reason))
207 if str(_http_ret.code).startswith('2'):
208 # 2xx Codes don't raise exceptions
209 output = _http_ret.title
210 else:
211 raise _http_ret
212
191
213 # Propagate to external components. This is done after checking the
192 # Propagate to external components. This is done after checking the
214 # lock, for consistent behavior.
193 # lock, for consistent behavior.
215 hook_response = ''
194 hook_response = ''
216 if not is_shadow_repo(extras):
195 if not is_shadow_repo(extras):
217 extras.hook_type = extras.hook_type or 'pre_pull'
196 extras.hook_type = extras.hook_type or 'pre_pull'
218 hook_response = pre_pull_extension(
197 hook_response = pre_pull_extension(repo_store_path=Repository.base_path(), **extras)
219 repo_store_path=Repository.base_path(), **extras)
198 events.trigger(events.RepoPrePullEvent(repo_name=extras.repository, extras=extras))
220 events.trigger(events.RepoPrePullEvent(
221 repo_name=extras.repository, extras=extras))
222
199
223 return HookResponse(0, output) + hook_response
200 return HookResponse(0, output) + hook_response
224
201
@@ -239,6 +216,7 b' def post_pull(extras):'
239 statsd.incr('rhodecode_pull_total', tags=[
216 statsd.incr('rhodecode_pull_total', tags=[
240 f'user-agent:{user_agent_normalizer(extras.user_agent)}',
217 f'user-agent:{user_agent_normalizer(extras.user_agent)}',
241 ])
218 ])
219
242 output = ''
220 output = ''
243 # make lock is a tri state False, True, None. We only make lock on True
221 # make lock is a tri state False, True, None. We only make lock on True
244 if extras.make_lock is True and not is_shadow_repo(extras):
222 if extras.make_lock is True and not is_shadow_repo(extras):
@@ -246,18 +224,9 b' def post_pull(extras):'
246 Repository.lock(Repository.get_by_repo_name(extras.repository),
224 Repository.lock(Repository.get_by_repo_name(extras.repository),
247 user.user_id,
225 user.user_id,
248 lock_reason=Repository.LOCK_PULL)
226 lock_reason=Repository.LOCK_PULL)
249 msg = 'Made lock on repo `{}`'.format(extras.repository)
227 msg = f'Made lock on repo `{extras.repository}`'
250 output += msg
228 output += msg
251
229
252 if extras.locked_by[0]:
253 locked_by = User.get(extras.locked_by[0]).username
254 reason = extras.locked_by[2]
255 _http_ret = HTTPLockedRC(
256 _locked_by_explanation(extras.repository, locked_by, reason))
257 if str(_http_ret.code).startswith('2'):
258 # 2xx Codes don't raise exceptions
259 output += _http_ret.title
260
261 # Propagate to external components.
230 # Propagate to external components.
262 hook_response = ''
231 hook_response = ''
263 if not is_shadow_repo(extras):
232 if not is_shadow_repo(extras):
@@ -270,6 +239,33 b' def post_pull(extras):'
270 return HookResponse(0, output) + hook_response
239 return HookResponse(0, output) + hook_response
271
240
272
241
242 def pre_push(extras):
243 """
244 Hook executed before pushing code.
245
246 It bans pushing when the repository is locked.
247 It banks pushing when incorrect client is used.
248 It also checks for Branch protection
249 """
250 output = ''
251 check_vcs_client(extras)
252
253 # locking repo can, but not have to stop the operation it can also just produce output
254 output += check_locked_repo(extras)
255
256 hook_response = ''
257 if not is_shadow_repo(extras):
258
259 check_branch_protected(extras)
260
261 # Propagate to external components. This is done after checking the
262 # lock, for consistent behavior.
263 hook_response = pre_push_extension(repo_store_path=Repository.base_path(), **extras)
264 events.trigger(events.RepoPrePushEvent(repo_name=extras.repository, extras=extras))
265
266 return HookResponse(0, output) + hook_response
267
268
273 def post_push(extras):
269 def post_push(extras):
274 """Hook executed after user pushes to the repository."""
270 """Hook executed after user pushes to the repository."""
275 commit_ids = extras.commit_ids
271 commit_ids = extras.commit_ids
@@ -292,22 +288,13 b' def post_push(extras):'
292
288
293 # Propagate to external components.
289 # Propagate to external components.
294 output = ''
290 output = ''
291
295 # make lock is a tri state False, True, None. We only release lock on False
292 # make lock is a tri state False, True, None. We only release lock on False
296 if extras.make_lock is False and not is_shadow_repo(extras):
293 if extras.make_lock is False and not is_shadow_repo(extras):
297 Repository.unlock(Repository.get_by_repo_name(extras.repository))
294 Repository.unlock(Repository.get_by_repo_name(extras.repository))
298 msg = f'Released lock on repo `{extras.repository}`\n'
295 msg = f'Released lock on repo `{extras.repository}`\n'
299 output += msg
296 output += msg
300
297
301 if extras.locked_by[0]:
302 locked_by = User.get(extras.locked_by[0]).username
303 reason = extras.locked_by[2]
304 _http_ret = HTTPLockedRC(
305 _locked_by_explanation(extras.repository, locked_by, reason))
306 # TODO: johbo: if not?
307 if str(_http_ret.code).startswith('2'):
308 # 2xx Codes don't raise exceptions
309 output += _http_ret.title
310
311 if extras.new_refs:
298 if extras.new_refs:
312 tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format(
299 tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format(
313 safe_str(extras.server_url), safe_str(extras.repository))
300 safe_str(extras.server_url), safe_str(extras.repository))
@@ -322,11 +309,8 b' def post_push(extras):'
322
309
323 hook_response = ''
310 hook_response = ''
324 if not is_shadow_repo(extras):
311 if not is_shadow_repo(extras):
325 hook_response = post_push_extension(
312 hook_response = post_push_extension(repo_store_path=Repository.base_path(), **extras)
326 repo_store_path=Repository.base_path(),
313 events.trigger(events.RepoPushEvent(repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras))
327 **extras)
328 events.trigger(events.RepoPushEvent(
329 repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras))
330
314
331 output += 'RhodeCode: push completed\n'
315 output += 'RhodeCode: push completed\n'
332 return HookResponse(0, output) + hook_response
316 return HookResponse(0, output) + hook_response
@@ -380,12 +364,20 b' class ExtensionCallback(object):'
380 # with older rcextensions that require api_key present
364 # with older rcextensions that require api_key present
381 if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']:
365 if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']:
382 kwargs_to_pass['api_key'] = '_DEPRECATED_'
366 kwargs_to_pass['api_key'] = '_DEPRECATED_'
383 return callback(**kwargs_to_pass)
367 result = callback(**kwargs_to_pass)
368 log.debug('got rcextensions result: %s', result)
369 return result
384
370
385 def is_active(self):
371 def is_active(self):
386 return hasattr(rhodecode.EXTENSIONS, self._hook_name)
372 return hasattr(rhodecode.EXTENSIONS, self._hook_name)
387
373
388 def _get_callback(self):
374 def _get_callback(self):
375 if rhodecode.is_test:
376 log.debug('In test mode, reloading rcextensions...')
377 # NOTE: for test re-load rcextensions always so we can dynamically change them for testing purposes
378 from rhodecode.lib.utils import load_rcextensions
379 load_rcextensions(root_path=os.path.dirname(rhodecode.CONFIG['__file__']))
380 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
389 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
381 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
390
382
391
383
@@ -40,16 +40,6 b' GIT_PROTO_PAT = re.compile('
40 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
40 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
41
41
42
42
43 def default_lfs_store():
44 """
45 Default lfs store location, it's consistent with Mercurials large file
46 store which is in .cache/largefiles
47 """
48 from rhodecode.lib.vcs.backends.git import lfs_store
49 user_home = os.path.expanduser("~")
50 return lfs_store(user_home)
51
52
53 class SimpleGit(simplevcs.SimpleVCS):
43 class SimpleGit(simplevcs.SimpleVCS):
54
44
55 SCM = 'git'
45 SCM = 'git'
@@ -151,6 +141,6 b' class SimpleGit(simplevcs.SimpleVCS):'
151
141
152 extras['git_lfs_enabled'] = utils2.str2bool(
142 extras['git_lfs_enabled'] = utils2.str2bool(
153 config.get('vcs_git_lfs', 'enabled'))
143 config.get('vcs_git_lfs', 'enabled'))
154 extras['git_lfs_store_path'] = custom_store or default_lfs_store()
144 extras['git_lfs_store_path'] = custom_store
155 extras['git_lfs_http_scheme'] = scheme
145 extras['git_lfs_http_scheme'] = scheme
156 return extras
146 return extras
@@ -1,5 +1,3 b''
1
2
3 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -32,8 +30,7 b' from functools import wraps'
32 import time
30 import time
33 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
31 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
34
32
35 from pyramid.httpexceptions import (
33 from pyramid.httpexceptions import HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError
36 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
37 from zope.cachedescriptors.property import Lazy as LazyProperty
34 from zope.cachedescriptors.property import Lazy as LazyProperty
38
35
39 import rhodecode
36 import rhodecode
@@ -41,10 +38,9 b' from rhodecode.authentication.base impor'
41 from rhodecode.lib import rc_cache
38 from rhodecode.lib import rc_cache
42 from rhodecode.lib.svn_txn_utils import store_txn_id_data
39 from rhodecode.lib.svn_txn_utils import store_txn_id_data
43 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
40 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
44 from rhodecode.lib.base import (
41 from rhodecode.lib.base import BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context
45 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
42 from rhodecode.lib.exceptions import UserCreationError, NotAllowedToCreateUserError
46 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
43 from rhodecode.lib.hook_daemon.utils import prepare_callback_daemon
47 from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
48 from rhodecode.lib.middleware import appenlight
44 from rhodecode.lib.middleware import appenlight
49 from rhodecode.lib.middleware.utils import scm_app_http
45 from rhodecode.lib.middleware.utils import scm_app_http
50 from rhodecode.lib.str_utils import safe_bytes, safe_int
46 from rhodecode.lib.str_utils import safe_bytes, safe_int
@@ -78,17 +74,18 b' def initialize_generator(factory):'
78 try:
74 try:
79 init = next(gen)
75 init = next(gen)
80 except StopIteration:
76 except StopIteration:
81 raise ValueError('Generator must yield at least one element.')
77 raise ValueError("Generator must yield at least one element.")
82 if init != "__init__":
78 if init != "__init__":
83 raise ValueError('First yielded element must be "__init__".')
79 raise ValueError('First yielded element must be "__init__".')
84 return gen
80 return gen
81
85 return wrapper
82 return wrapper
86
83
87
84
88 class SimpleVCS(object):
85 class SimpleVCS(object):
89 """Common functionality for SCM HTTP handlers."""
86 """Common functionality for SCM HTTP handlers."""
90
87
91 SCM = 'unknown'
88 SCM = "unknown"
92
89
93 acl_repo_name = None
90 acl_repo_name = None
94 url_repo_name = None
91 url_repo_name = None
@@ -100,11 +97,11 b' class SimpleVCS(object):'
100 # we use this regex which will match only on URLs pointing to shadow
97 # we use this regex which will match only on URLs pointing to shadow
101 # repositories.
98 # repositories.
102 shadow_repo_re = re.compile(
99 shadow_repo_re = re.compile(
103 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
100 "(?P<groups>(?:{slug_pat}/)*)" # repo groups
104 '(?P<target>{slug_pat})/' # target repo
101 "(?P<target>{slug_pat})/" # target repo
105 'pull-request/(?P<pr_id>\\d+)/' # pull request
102 "pull-request/(?P<pr_id>\\d+)/" # pull request
106 'repository$' # shadow repo
103 "repository$".format(slug_pat=SLUG_RE.pattern) # shadow repo
107 .format(slug_pat=SLUG_RE.pattern))
104 )
108
105
109 def __init__(self, config, registry):
106 def __init__(self, config, registry):
110 self.registry = registry
107 self.registry = registry
@@ -113,15 +110,14 b' class SimpleVCS(object):'
113 self.repo_vcs_config = base.Config()
110 self.repo_vcs_config = base.Config()
114
111
115 rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False)
112 rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False)
116 realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH'
113 realm = rc_settings.get("rhodecode_realm") or "RhodeCode AUTH"
117
114
118 # authenticate this VCS request using authfunc
115 # authenticate this VCS request using authfunc
119 auth_ret_code_detection = \
116 auth_ret_code_detection = str2bool(self.config.get("auth_ret_code_detection", False))
120 str2bool(self.config.get('auth_ret_code_detection', False))
121 self.authenticate = BasicAuth(
117 self.authenticate = BasicAuth(
122 '', authenticate, registry, config.get('auth_ret_code'),
118 "", authenticate, registry, config.get("auth_ret_code"), auth_ret_code_detection, rc_realm=realm
123 auth_ret_code_detection, rc_realm=realm)
119 )
124 self.ip_addr = '0.0.0.0'
120 self.ip_addr = "0.0.0.0"
125
121
126 @LazyProperty
122 @LazyProperty
127 def global_vcs_config(self):
123 def global_vcs_config(self):
@@ -132,10 +128,10 b' class SimpleVCS(object):'
132
128
133 @property
129 @property
134 def base_path(self):
130 def base_path(self):
135 settings_path = self.config.get('repo_store.path')
131 settings_path = self.config.get("repo_store.path")
136
132
137 if not settings_path:
133 if not settings_path:
138 raise ValueError('FATAL: repo_store.path is empty')
134 raise ValueError("FATAL: repo_store.path is empty")
139 return settings_path
135 return settings_path
140
136
141 def set_repo_names(self, environ):
137 def set_repo_names(self, environ):
@@ -164,17 +160,16 b' class SimpleVCS(object):'
164 match_dict = match.groupdict()
160 match_dict = match.groupdict()
165
161
166 # Build acl repo name from regex match.
162 # Build acl repo name from regex match.
167 acl_repo_name = safe_str('{groups}{target}'.format(
163 acl_repo_name = safe_str(
168 groups=match_dict['groups'] or '',
164 "{groups}{target}".format(groups=match_dict["groups"] or "", target=match_dict["target"])
169 target=match_dict['target']))
165 )
170
166
171 # Retrieve pull request instance by ID from regex match.
167 # Retrieve pull request instance by ID from regex match.
172 pull_request = PullRequest.get(match_dict['pr_id'])
168 pull_request = PullRequest.get(match_dict["pr_id"])
173
169
174 # Only proceed if we got a pull request and if acl repo name from
170 # Only proceed if we got a pull request and if acl repo name from
175 # URL equals the target repo name of the pull request.
171 # URL equals the target repo name of the pull request.
176 if pull_request and (acl_repo_name == pull_request.target_repo.repo_name):
172 if pull_request and (acl_repo_name == pull_request.target_repo.repo_name):
177
178 # Get file system path to shadow repository.
173 # Get file system path to shadow repository.
179 workspace_id = PullRequestModel()._workspace_id(pull_request)
174 workspace_id = PullRequestModel()._workspace_id(pull_request)
180 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
175 vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)
@@ -184,21 +179,23 b' class SimpleVCS(object):'
184 self.acl_repo_name = acl_repo_name
179 self.acl_repo_name = acl_repo_name
185 self.is_shadow_repo = True
180 self.is_shadow_repo = True
186
181
187 log.debug('Setting all VCS repository names: %s', {
182 log.debug(
188 'acl_repo_name': self.acl_repo_name,
183 "Setting all VCS repository names: %s",
189 'url_repo_name': self.url_repo_name,
184 {
190 'vcs_repo_name': self.vcs_repo_name,
185 "acl_repo_name": self.acl_repo_name,
191 })
186 "url_repo_name": self.url_repo_name,
187 "vcs_repo_name": self.vcs_repo_name,
188 },
189 )
192
190
193 @property
191 @property
194 def scm_app(self):
192 def scm_app(self):
195 custom_implementation = self.config['vcs.scm_app_implementation']
193 custom_implementation = self.config["vcs.scm_app_implementation"]
196 if custom_implementation == 'http':
194 if custom_implementation == "http":
197 log.debug('Using HTTP implementation of scm app.')
195 log.debug("Using HTTP implementation of scm app.")
198 scm_app_impl = scm_app_http
196 scm_app_impl = scm_app_http
199 else:
197 else:
200 log.debug('Using custom implementation of scm_app: "{}"'.format(
198 log.debug('Using custom implementation of scm_app: "{}"'.format(custom_implementation))
201 custom_implementation))
202 scm_app_impl = importlib.import_module(custom_implementation)
199 scm_app_impl = importlib.import_module(custom_implementation)
203 return scm_app_impl
200 return scm_app_impl
204
201
@@ -208,17 +205,18 b' class SimpleVCS(object):'
208 with a repository_name for support of _<ID> non changeable urls
205 with a repository_name for support of _<ID> non changeable urls
209 """
206 """
210
207
211 data = repo_name.split('/')
208 data = repo_name.split("/")
212 if len(data) >= 2:
209 if len(data) >= 2:
213 from rhodecode.model.repo import RepoModel
210 from rhodecode.model.repo import RepoModel
211
214 by_id_match = RepoModel().get_repo_by_id(repo_name)
212 by_id_match = RepoModel().get_repo_by_id(repo_name)
215 if by_id_match:
213 if by_id_match:
216 data[1] = by_id_match.repo_name
214 data[1] = by_id_match.repo_name
217
215
218 # Because PEP-3333-WSGI uses bytes-tunneled-in-latin-1 as PATH_INFO
216 # Because PEP-3333-WSGI uses bytes-tunneled-in-latin-1 as PATH_INFO
219 # and we use this data
217 # and we use this data
220 maybe_new_path = '/'.join(data)
218 maybe_new_path = "/".join(data)
221 return safe_bytes(maybe_new_path).decode('latin1')
219 return safe_bytes(maybe_new_path).decode("latin1")
222
220
223 def _invalidate_cache(self, repo_name):
221 def _invalidate_cache(self, repo_name):
224 """
222 """
@@ -231,21 +229,18 b' class SimpleVCS(object):'
231 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
229 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
232 db_repo = Repository.get_by_repo_name(repo_name)
230 db_repo = Repository.get_by_repo_name(repo_name)
233 if not db_repo:
231 if not db_repo:
234 log.debug('Repository `%s` not found inside the database.',
232 log.debug("Repository `%s` not found inside the database.", repo_name)
235 repo_name)
236 return False
233 return False
237
234
238 if db_repo.repo_type != scm_type:
235 if db_repo.repo_type != scm_type:
239 log.warning(
236 log.warning(
240 'Repository `%s` have incorrect scm_type, expected %s got %s',
237 "Repository `%s` have incorrect scm_type, expected %s got %s", repo_name, db_repo.repo_type, scm_type
241 repo_name, db_repo.repo_type, scm_type)
238 )
242 return False
239 return False
243
240
244 config = db_repo._config
241 config = db_repo._config
245 config.set('extensions', 'largefiles', '')
242 config.set("extensions", "largefiles", "")
246 return is_valid_repo(
243 return is_valid_repo(repo_name, base_path, explicit_scm=scm_type, expect_scm=scm_type, config=config)
247 repo_name, base_path,
248 explicit_scm=scm_type, expect_scm=scm_type, config=config)
249
244
250 def valid_and_active_user(self, user):
245 def valid_and_active_user(self, user):
251 """
246 """
@@ -267,8 +262,9 b' class SimpleVCS(object):'
267 def is_shadow_repo_dir(self):
262 def is_shadow_repo_dir(self):
268 return os.path.isdir(self.vcs_repo_name)
263 return os.path.isdir(self.vcs_repo_name)
269
264
270 def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None,
265 def _check_permission(
271 plugin_id='', plugin_cache_active=False, cache_ttl=0):
266 self, action, user, auth_user, repo_name, ip_addr=None, plugin_id="", plugin_cache_active=False, cache_ttl=0
267 ):
272 """
268 """
273 Checks permissions using action (push/pull) user and repository
269 Checks permissions using action (push/pull) user and repository
274 name. If plugin_cache and ttl is set it will use the plugin which
270 name. If plugin_cache and ttl is set it will use the plugin which
@@ -280,71 +276,67 b' class SimpleVCS(object):'
280 :param repo_name: repository name
276 :param repo_name: repository name
281 """
277 """
282
278
283 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
279 log.debug("AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)", plugin_id, plugin_cache_active, cache_ttl)
284 plugin_id, plugin_cache_active, cache_ttl)
285
280
286 user_id = user.user_id
281 user_id = user.user_id
287 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
282 cache_namespace_uid = f"cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}"
288 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
283 region = rc_cache.get_or_create_region("cache_perms", cache_namespace_uid)
289
284
290 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
285 @region.conditional_cache_on_arguments(
291 expiration_time=cache_ttl,
286 namespace=cache_namespace_uid, expiration_time=cache_ttl, condition=plugin_cache_active
292 condition=plugin_cache_active)
287 )
293 def compute_perm_vcs(
288 def compute_perm_vcs(cache_name, plugin_id, action, user_id, repo_name, ip_addr):
294 cache_name, plugin_id, action, user_id, repo_name, ip_addr):
289 log.debug("auth: calculating permission access now for vcs operation: %s", action)
295
296 log.debug('auth: calculating permission access now for vcs operation: %s', action)
297 # check IP
290 # check IP
298 inherit = user.inherit_default_permissions
291 inherit = user.inherit_default_permissions
299 ip_allowed = AuthUser.check_ip_allowed(
292 ip_allowed = AuthUser.check_ip_allowed(user_id, ip_addr, inherit_from_default=inherit)
300 user_id, ip_addr, inherit_from_default=inherit)
301 if ip_allowed:
293 if ip_allowed:
302 log.info('Access for IP:%s allowed', ip_addr)
294 log.info("Access for IP:%s allowed", ip_addr)
303 else:
295 else:
304 return False
296 return False
305
297
306 if action == 'push':
298 if action == "push":
307 perms = ('repository.write', 'repository.admin')
299 perms = ("repository.write", "repository.admin")
308 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
300 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
309 return False
301 return False
310
302
311 else:
303 else:
312 # any other action need at least read permission
304 # any other action need at least read permission
313 perms = (
305 perms = ("repository.read", "repository.write", "repository.admin")
314 'repository.read', 'repository.write', 'repository.admin')
315 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
306 if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
316 return False
307 return False
317
308
318 return True
309 return True
319
310
320 start = time.time()
311 start = time.time()
321 log.debug('Running plugin `%s` permissions check', plugin_id)
312 log.debug("Running plugin `%s` permissions check", plugin_id)
322
313
323 # for environ based auth, password can be empty, but then the validation is
314 # for environ based auth, password can be empty, but then the validation is
324 # on the server that fills in the env data needed for authentication
315 # on the server that fills in the env data needed for authentication
325 perm_result = compute_perm_vcs(
316 perm_result = compute_perm_vcs("vcs_permissions", plugin_id, action, user.user_id, repo_name, ip_addr)
326 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr)
327
317
328 auth_time = time.time() - start
318 auth_time = time.time() - start
329 log.debug('Permissions for plugin `%s` completed in %.4fs, '
319 log.debug(
330 'expiration time of fetched cache %.1fs.',
320 "Permissions for plugin `%s` completed in %.4fs, " "expiration time of fetched cache %.1fs.",
331 plugin_id, auth_time, cache_ttl)
321 plugin_id,
322 auth_time,
323 cache_ttl,
324 )
332
325
333 return perm_result
326 return perm_result
334
327
335 def _get_http_scheme(self, environ):
328 def _get_http_scheme(self, environ):
336 try:
329 try:
337 return environ['wsgi.url_scheme']
330 return environ["wsgi.url_scheme"]
338 except Exception:
331 except Exception:
339 log.exception('Failed to read http scheme')
332 log.exception("Failed to read http scheme")
340 return 'http'
333 return "http"
341
334
342 def _get_default_cache_ttl(self):
335 def _get_default_cache_ttl(self):
343 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
336 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
344 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
337 plugin = loadplugin("egg:rhodecode-enterprise-ce#rhodecode")
345 plugin_settings = plugin.get_settings()
338 plugin_settings = plugin.get_settings()
346 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
339 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(plugin_settings) or (False, 0)
347 plugin_settings) or (False, 0)
348 return plugin_cache_active, cache_ttl
340 return plugin_cache_active, cache_ttl
349
341
350 def __call__(self, environ, start_response):
342 def __call__(self, environ, start_response):
@@ -359,17 +351,17 b' class SimpleVCS(object):'
359
351
360 def _handle_request(self, environ, start_response):
352 def _handle_request(self, environ, start_response):
361 if not self.url_repo_name:
353 if not self.url_repo_name:
362 log.warning('Repository name is empty: %s', self.url_repo_name)
354 log.warning("Repository name is empty: %s", self.url_repo_name)
363 # failed to get repo name, we fail now
355 # failed to get repo name, we fail now
364 return HTTPNotFound()(environ, start_response)
356 return HTTPNotFound()(environ, start_response)
365 log.debug('Extracted repo name is %s', self.url_repo_name)
357 log.debug("Extracted repo name is %s", self.url_repo_name)
366
358
367 ip_addr = get_ip_addr(environ)
359 ip_addr = get_ip_addr(environ)
368 user_agent = get_user_agent(environ)
360 user_agent = get_user_agent(environ)
369 username = None
361 username = None
370
362
371 # skip passing error to error controller
363 # skip passing error to error controller
372 environ['pylons.status_code_redirect'] = True
364 environ["pylons.status_code_redirect"] = True
373
365
374 # ======================================================================
366 # ======================================================================
375 # GET ACTION PULL or PUSH
367 # GET ACTION PULL or PUSH
@@ -380,17 +372,15 b' class SimpleVCS(object):'
380 # Check if this is a request to a shadow repository of a pull request.
372 # Check if this is a request to a shadow repository of a pull request.
381 # In this case only pull action is allowed.
373 # In this case only pull action is allowed.
382 # ======================================================================
374 # ======================================================================
383 if self.is_shadow_repo and action != 'pull':
375 if self.is_shadow_repo and action != "pull":
384 reason = 'Only pull action is allowed for shadow repositories.'
376 reason = "Only pull action is allowed for shadow repositories."
385 log.debug('User not allowed to proceed, %s', reason)
377 log.debug("User not allowed to proceed, %s", reason)
386 return HTTPNotAcceptable(reason)(environ, start_response)
378 return HTTPNotAcceptable(reason)(environ, start_response)
387
379
388 # Check if the shadow repo actually exists, in case someone refers
380 # Check if the shadow repo actually exists, in case someone refers
389 # to it, and it has been deleted because of successful merge.
381 # to it, and it has been deleted because of successful merge.
390 if self.is_shadow_repo and not self.is_shadow_repo_dir:
382 if self.is_shadow_repo and not self.is_shadow_repo_dir:
391 log.debug(
383 log.debug("Shadow repo detected, and shadow repo dir `%s` is missing", self.is_shadow_repo_dir)
392 'Shadow repo detected, and shadow repo dir `%s` is missing',
393 self.is_shadow_repo_dir)
394 return HTTPNotFound()(environ, start_response)
384 return HTTPNotFound()(environ, start_response)
395
385
396 # ======================================================================
386 # ======================================================================
@@ -398,7 +388,7 b' class SimpleVCS(object):'
398 # ======================================================================
388 # ======================================================================
399 detect_force_push = False
389 detect_force_push = False
400 check_branch_perms = False
390 check_branch_perms = False
401 if action in ['pull', 'push']:
391 if action in ["pull", "push"]:
402 user_obj = anonymous_user = User.get_default_user()
392 user_obj = anonymous_user = User.get_default_user()
403 auth_user = user_obj.AuthUser()
393 auth_user = user_obj.AuthUser()
404 username = anonymous_user.username
394 username = anonymous_user.username
@@ -406,8 +396,12 b' class SimpleVCS(object):'
406 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
396 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
407 # ONLY check permissions if the user is activated
397 # ONLY check permissions if the user is activated
408 anonymous_perm = self._check_permission(
398 anonymous_perm = self._check_permission(
409 action, anonymous_user, auth_user, self.acl_repo_name, ip_addr,
399 action,
410 plugin_id='anonymous_access',
400 anonymous_user,
401 auth_user,
402 self.acl_repo_name,
403 ip_addr,
404 plugin_id="anonymous_access",
411 plugin_cache_active=plugin_cache_active,
405 plugin_cache_active=plugin_cache_active,
412 cache_ttl=cache_ttl,
406 cache_ttl=cache_ttl,
413 )
407 )
@@ -416,12 +410,13 b' class SimpleVCS(object):'
416
410
417 if not anonymous_user.active or not anonymous_perm:
411 if not anonymous_user.active or not anonymous_perm:
418 if not anonymous_user.active:
412 if not anonymous_user.active:
419 log.debug('Anonymous access is disabled, running '
413 log.debug("Anonymous access is disabled, running " "authentication")
420 'authentication')
421
414
422 if not anonymous_perm:
415 if not anonymous_perm:
423 log.debug('Not enough credentials to access repo: `%s` '
416 log.debug(
424 'repository as anonymous user', self.acl_repo_name)
417 "Not enough credentials to access repo: `%s` " "repository as anonymous user",
418 self.acl_repo_name,
419 )
425
420
426 username = None
421 username = None
427 # ==============================================================
422 # ==============================================================
@@ -430,19 +425,18 b' class SimpleVCS(object):'
430 # ==============================================================
425 # ==============================================================
431
426
432 # try to auth based on environ, container auth methods
427 # try to auth based on environ, container auth methods
433 log.debug('Running PRE-AUTH for container|headers based authentication')
428 log.debug("Running PRE-AUTH for container|headers based authentication")
434
429
435 # headers auth, by just reading special headers and bypass the auth with user/passwd
430 # headers auth, by just reading special headers and bypass the auth with user/passwd
436 pre_auth = authenticate(
431 pre_auth = authenticate(
437 '', '', environ, VCS_TYPE, registry=self.registry,
432 "", "", environ, VCS_TYPE, registry=self.registry, acl_repo_name=self.acl_repo_name
438 acl_repo_name=self.acl_repo_name)
433 )
439
434
440 if pre_auth and pre_auth.get('username'):
435 if pre_auth and pre_auth.get("username"):
441 username = pre_auth['username']
436 username = pre_auth["username"]
442 log.debug('PRE-AUTH got `%s` as username', username)
437 log.debug("PRE-AUTH got `%s` as username", username)
443 if pre_auth:
438 if pre_auth:
444 log.debug('PRE-AUTH successful from %s',
439 log.debug("PRE-AUTH successful from %s", pre_auth.get("auth_data", {}).get("_plugin"))
445 pre_auth.get('auth_data', {}).get('_plugin'))
446
440
447 # If not authenticated by the container, running basic auth
441 # If not authenticated by the container, running basic auth
448 # before inject the calling repo_name for special scope checks
442 # before inject the calling repo_name for special scope checks
@@ -463,16 +457,16 b' class SimpleVCS(object):'
463 return HTTPNotAcceptable(reason)(environ, start_response)
457 return HTTPNotAcceptable(reason)(environ, start_response)
464
458
465 if isinstance(auth_result, dict):
459 if isinstance(auth_result, dict):
466 AUTH_TYPE.update(environ, 'basic')
460 AUTH_TYPE.update(environ, "basic")
467 REMOTE_USER.update(environ, auth_result['username'])
461 REMOTE_USER.update(environ, auth_result["username"])
468 username = auth_result['username']
462 username = auth_result["username"]
469 plugin = auth_result.get('auth_data', {}).get('_plugin')
463 plugin = auth_result.get("auth_data", {}).get("_plugin")
470 log.info(
464 log.info("MAIN-AUTH successful for user `%s` from %s plugin", username, plugin)
471 'MAIN-AUTH successful for user `%s` from %s plugin',
472 username, plugin)
473
465
474 plugin_cache_active, cache_ttl = auth_result.get(
466 plugin_cache_active, cache_ttl = auth_result.get("auth_data", {}).get("_ttl_cache") or (
475 'auth_data', {}).get('_ttl_cache') or (False, 0)
467 False,
468 0,
469 )
476 else:
470 else:
477 return auth_result.wsgi_application(environ, start_response)
471 return auth_result.wsgi_application(environ, start_response)
478
472
@@ -488,21 +482,24 b' class SimpleVCS(object):'
488 # check user attributes for password change flag
482 # check user attributes for password change flag
489 user_obj = user
483 user_obj = user
490 auth_user = user_obj.AuthUser()
484 auth_user = user_obj.AuthUser()
491 if user_obj and user_obj.username != User.DEFAULT_USER and \
485 if (
492 user_obj.user_data.get('force_password_change'):
486 user_obj
493 reason = 'password change required'
487 and user_obj.username != User.DEFAULT_USER
494 log.debug('User not allowed to authenticate, %s', reason)
488 and user_obj.user_data.get("force_password_change")
489 ):
490 reason = "password change required"
491 log.debug("User not allowed to authenticate, %s", reason)
495 return HTTPNotAcceptable(reason)(environ, start_response)
492 return HTTPNotAcceptable(reason)(environ, start_response)
496
493
497 # check permissions for this repository
494 # check permissions for this repository
498 perm = self._check_permission(
495 perm = self._check_permission(
499 action, user, auth_user, self.acl_repo_name, ip_addr,
496 action, user, auth_user, self.acl_repo_name, ip_addr, plugin, plugin_cache_active, cache_ttl
500 plugin, plugin_cache_active, cache_ttl)
497 )
501 if not perm:
498 if not perm:
502 return HTTPForbidden()(environ, start_response)
499 return HTTPForbidden()(environ, start_response)
503 environ['rc_auth_user_id'] = str(user_id)
500 environ["rc_auth_user_id"] = str(user_id)
504
501
505 if action == 'push':
502 if action == "push":
506 perms = auth_user.get_branch_permissions(self.acl_repo_name)
503 perms = auth_user.get_branch_permissions(self.acl_repo_name)
507 if perms:
504 if perms:
508 check_branch_perms = True
505 check_branch_perms = True
@@ -510,41 +507,48 b' class SimpleVCS(object):'
510
507
511 # extras are injected into UI object and later available
508 # extras are injected into UI object and later available
512 # in hooks executed by RhodeCode
509 # in hooks executed by RhodeCode
513 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
510 check_locking = _should_check_locking(environ.get("QUERY_STRING"))
514
511
515 extras = vcs_operation_context(
512 extras = vcs_operation_context(
516 environ, repo_name=self.acl_repo_name, username=username,
513 environ,
517 action=action, scm=self.SCM, check_locking=check_locking,
514 repo_name=self.acl_repo_name,
518 is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms,
515 username=username,
519 detect_force_push=detect_force_push
516 action=action,
517 scm=self.SCM,
518 check_locking=check_locking,
519 is_shadow_repo=self.is_shadow_repo,
520 check_branch_perms=check_branch_perms,
521 detect_force_push=detect_force_push,
520 )
522 )
521
523
522 # ======================================================================
524 # ======================================================================
523 # REQUEST HANDLING
525 # REQUEST HANDLING
524 # ======================================================================
526 # ======================================================================
525 repo_path = os.path.join(
527 repo_path = os.path.join(safe_str(self.base_path), safe_str(self.vcs_repo_name))
526 safe_str(self.base_path), safe_str(self.vcs_repo_name))
528 log.debug("Repository path is %s", repo_path)
527 log.debug('Repository path is %s', repo_path)
528
529
529 fix_PATH()
530 fix_PATH()
530
531
531 log.info(
532 log.info(
532 '%s action on %s repo "%s" by "%s" from %s %s',
533 '%s action on %s repo "%s" by "%s" from %s %s',
533 action, self.SCM, safe_str(self.url_repo_name),
534 action,
534 safe_str(username), ip_addr, user_agent)
535 self.SCM,
536 safe_str(self.url_repo_name),
537 safe_str(username),
538 ip_addr,
539 user_agent,
540 )
535
541
536 return self._generate_vcs_response(
542 return self._generate_vcs_response(environ, start_response, repo_path, extras, action)
537 environ, start_response, repo_path, extras, action)
538
543
539 def _get_txn_id(self, environ):
544 def _get_txn_id(self, environ):
540
545 for k in ["RAW_URI", "HTTP_DESTINATION"]:
541 for k in ['RAW_URI', 'HTTP_DESTINATION']:
542 url = environ.get(k)
546 url = environ.get(k)
543 if not url:
547 if not url:
544 continue
548 continue
545
549
546 # regex to search for svn-txn-id
550 # regex to search for svn-txn-id
547 pattern = r'/!svn/txr/([^/]+)/'
551 pattern = r"/!svn/txr/([^/]+)/"
548
552
549 # Search for the pattern in the URL
553 # Search for the pattern in the URL
550 match = re.search(pattern, url)
554 match = re.search(pattern, url)
@@ -555,8 +559,7 b' class SimpleVCS(object):'
555 return txn_id
559 return txn_id
556
560
557 @initialize_generator
561 @initialize_generator
558 def _generate_vcs_response(
562 def _generate_vcs_response(self, environ, start_response, repo_path, extras, action):
559 self, environ, start_response, repo_path, extras, action):
560 """
563 """
561 Returns a generator for the response content.
564 Returns a generator for the response content.
562
565
@@ -565,24 +568,20 b' class SimpleVCS(object):'
565 also handles the locking exceptions which will be triggered when
568 also handles the locking exceptions which will be triggered when
566 the first chunk is produced by the underlying WSGI application.
569 the first chunk is produced by the underlying WSGI application.
567 """
570 """
568 svn_txn_id = ''
571 svn_txn_id = ""
569 if action == 'push':
572 if action == "push":
570 svn_txn_id = self._get_txn_id(environ)
573 svn_txn_id = self._get_txn_id(environ)
571
574
572 callback_daemon, extras = self._prepare_callback_daemon(
575 callback_daemon, extras = self._prepare_callback_daemon(extras, environ, action, txn_id=svn_txn_id)
573 extras, environ, action, txn_id=svn_txn_id)
574
576
575 if svn_txn_id:
577 if svn_txn_id:
576
577 port = safe_int(extras['hooks_uri'].split(':')[-1])
578 txn_id_data = extras.copy()
578 txn_id_data = extras.copy()
579 txn_id_data.update({'port': port})
579 txn_id_data.update({"req_method": environ["REQUEST_METHOD"]})
580 txn_id_data.update({'req_method': environ['REQUEST_METHOD']})
581
580
582 full_repo_path = repo_path
581 full_repo_path = repo_path
583 store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data)
582 store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data)
584
583
585 log.debug('HOOKS extras is %s', extras)
584 log.debug("HOOKS extras is %s", extras)
586
585
587 http_scheme = self._get_http_scheme(environ)
586 http_scheme = self._get_http_scheme(environ)
588
587
@@ -609,7 +608,7 b' class SimpleVCS(object):'
609
608
610 try:
609 try:
611 # invalidate cache on push
610 # invalidate cache on push
612 if action == 'push':
611 if action == "push":
613 self._invalidate_cache(self.url_repo_name)
612 self._invalidate_cache(self.url_repo_name)
614 finally:
613 finally:
615 meta.Session.remove()
614 meta.Session.remove()
@@ -632,12 +631,12 b' class SimpleVCS(object):'
632 """Return the WSGI app that will finally handle the request."""
631 """Return the WSGI app that will finally handle the request."""
633 raise NotImplementedError()
632 raise NotImplementedError()
634
633
635 def _create_config(self, extras, repo_name, scheme='http'):
634 def _create_config(self, extras, repo_name, scheme="http"):
636 """Create a safe config representation."""
635 """Create a safe config representation."""
637 raise NotImplementedError()
636 raise NotImplementedError()
638
637
639 def _should_use_callback_daemon(self, extras, environ, action):
638 def _should_use_callback_daemon(self, extras, environ, action):
640 if extras.get('is_shadow_repo'):
639 if extras.get("is_shadow_repo"):
641 # we don't want to execute hooks, and callback daemon for shadow repos
640 # we don't want to execute hooks, and callback daemon for shadow repos
642 return False
641 return False
643 return True
642 return True
@@ -647,11 +646,9 b' class SimpleVCS(object):'
647
646
648 if not self._should_use_callback_daemon(extras, environ, action):
647 if not self._should_use_callback_daemon(extras, environ, action):
649 # disable callback daemon for actions that don't require it
648 # disable callback daemon for actions that don't require it
650 protocol = 'local'
649 protocol = "local"
651
650
652 return prepare_callback_daemon(
651 return prepare_callback_daemon(extras, protocol=protocol, txn_id=txn_id)
653 extras, protocol=protocol,
654 host=vcs_settings.HOOKS_HOST, txn_id=txn_id)
655
652
656
653
657 def _should_check_locking(query_string):
654 def _should_check_locking(query_string):
@@ -659,4 +656,4 b' def _should_check_locking(query_string):'
659 # server see all operation on commit; bookmarks, phases and
656 # server see all operation on commit; bookmarks, phases and
660 # obsolescence marker in different transaction, we don't want to check
657 # obsolescence marker in different transaction, we don't want to check
661 # locking on those
658 # locking on those
662 return query_string not in ['cmd=listkeys']
659 return query_string not in ["cmd=listkeys"]
@@ -21,6 +21,7 b' Utilities library for RhodeCode'
21 """
21 """
22
22
23 import datetime
23 import datetime
24 import importlib
24
25
25 import decorator
26 import decorator
26 import logging
27 import logging
@@ -42,8 +43,9 b' from webhelpers2.text import collapse, s'
42
43
43 from mako import exceptions
44 from mako import exceptions
44
45
46 import rhodecode
45 from rhodecode import ConfigGet
47 from rhodecode import ConfigGet
46 from rhodecode.lib.exceptions import HTTPBranchProtected, HTTPLockedRC
48 from rhodecode.lib.exceptions import HTTPBranchProtected, HTTPLockedRepo, ClientNotSupported
47 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
49 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
48 from rhodecode.lib.type_utils import AttributeDict
50 from rhodecode.lib.type_utils import AttributeDict
49 from rhodecode.lib.str_utils import safe_bytes, safe_str
51 from rhodecode.lib.str_utils import safe_bytes, safe_str
@@ -86,6 +88,7 b' def adopt_for_celery(func):'
86 @wraps(func)
88 @wraps(func)
87 def wrapper(extras):
89 def wrapper(extras):
88 extras = AttributeDict(extras)
90 extras = AttributeDict(extras)
91
89 try:
92 try:
90 # HooksResponse implements to_json method which must be used there.
93 # HooksResponse implements to_json method which must be used there.
91 return func(extras).to_json()
94 return func(extras).to_json()
@@ -100,7 +103,18 b' def adopt_for_celery(func):'
100 'exception_args': error_args,
103 'exception_args': error_args,
101 'exception_traceback': '',
104 'exception_traceback': '',
102 }
105 }
103 except HTTPLockedRC as error:
106 except ClientNotSupported as error:
107 # Those special cases don't need error reporting. It's a case of
108 # locked repo or protected branch
109 error_args = error.args
110 return {
111 'status': error.code,
112 'output': error.explanation,
113 'exception': type(error).__name__,
114 'exception_args': error_args,
115 'exception_traceback': '',
116 }
117 except HTTPLockedRepo as error:
104 # Those special cases don't need error reporting. It's a case of
118 # Those special cases don't need error reporting. It's a case of
105 # locked repo or protected branch
119 # locked repo or protected branch
106 error_args = error.args
120 error_args = error.args
@@ -117,7 +131,7 b' def adopt_for_celery(func):'
117 'output': '',
131 'output': '',
118 'exception': type(e).__name__,
132 'exception': type(e).__name__,
119 'exception_args': e.args,
133 'exception_args': e.args,
120 'exception_traceback': '',
134 'exception_traceback': traceback.format_exc(),
121 }
135 }
122 return wrapper
136 return wrapper
123
137
@@ -411,6 +425,10 b' def prepare_config_data(clear_session=Tr'
411 ('web', 'push_ssl', 'false'),
425 ('web', 'push_ssl', 'false'),
412 ]
426 ]
413 for setting in ui_settings:
427 for setting in ui_settings:
428 # skip certain deprecated keys that might be still in DB
429 if f"{setting.section}_{setting.key}" in ['extensions_hgsubversion']:
430 continue
431
414 # Todo: remove this section once transition to *.ini files will be completed
432 # Todo: remove this section once transition to *.ini files will be completed
415 if setting.section in ('largefiles', 'vcs_git_lfs'):
433 if setting.section in ('largefiles', 'vcs_git_lfs'):
416 if setting.key != 'enabled':
434 if setting.key != 'enabled':
@@ -686,22 +704,41 b' def repo2db_mapper(initial_repo_list, re'
686
704
687 return added, removed
705 return added, removed
688
706
707 def deep_reload_package(package_name):
708 """
709 Deeply reload a package by removing it and its submodules from sys.modules,
710 then re-importing it.
711 """
712 # Remove the package and its submodules from sys.modules
713 to_reload = [name for name in sys.modules if name == package_name or name.startswith(package_name + ".")]
714 for module_name in to_reload:
715 del sys.modules[module_name]
716 log.debug(f"Removed module from cache: {module_name}")
717
718 # Re-import the package
719 package = importlib.import_module(package_name)
720 log.debug(f"Re-imported package: {package_name}")
721
722 return package
689
723
690 def load_rcextensions(root_path):
724 def load_rcextensions(root_path):
691 import rhodecode
725 import rhodecode
692 from rhodecode.config import conf
726 from rhodecode.config import conf
693
727
694 path = os.path.join(root_path)
728 path = os.path.join(root_path)
695 sys.path.append(path)
729 deep_reload = path in sys.path
730 sys.path.insert(0, path)
696
731
697 try:
732 try:
698 rcextensions = __import__('rcextensions')
733 rcextensions = __import__('rcextensions', fromlist=[''])
699 except ImportError:
734 except ImportError:
700 if os.path.isdir(os.path.join(path, 'rcextensions')):
735 if os.path.isdir(os.path.join(path, 'rcextensions')):
701 log.warning('Unable to load rcextensions from %s', path)
736 log.warning('Unable to load rcextensions from %s', path)
702 rcextensions = None
737 rcextensions = None
703
738
704 if rcextensions:
739 if rcextensions:
740 if deep_reload:
741 rcextensions = deep_reload_package('rcextensions')
705 log.info('Loaded rcextensions from %s...', rcextensions)
742 log.info('Loaded rcextensions from %s...', rcextensions)
706 rhodecode.EXTENSIONS = rcextensions
743 rhodecode.EXTENSIONS = rcextensions
707
744
@@ -741,6 +778,7 b' def create_test_index(repo_location, con'
741 except ImportError:
778 except ImportError:
742 raise ImportError('Failed to import rc_testdata, '
779 raise ImportError('Failed to import rc_testdata, '
743 'please make sure this package is installed from requirements_test.txt')
780 'please make sure this package is installed from requirements_test.txt')
781
744 rc_testdata.extract_search_index(
782 rc_testdata.extract_search_index(
745 'vcs_search_index', os.path.dirname(config['search.location']))
783 'vcs_search_index', os.path.dirname(config['search.location']))
746
784
@@ -785,22 +823,15 b' def create_test_repositories(test_path, '
785 Creates test repositories in the temporary directory. Repositories are
823 Creates test repositories in the temporary directory. Repositories are
786 extracted from archives within the rc_testdata package.
824 extracted from archives within the rc_testdata package.
787 """
825 """
788 import rc_testdata
826 try:
827 import rc_testdata
828 except ImportError:
829 raise ImportError('Failed to import rc_testdata, '
830 'please make sure this package is installed from requirements_test.txt')
831
789 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
832 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
790
833
791 log.debug('making test vcs repositories')
834 log.debug('making test vcs repositories at %s', test_path)
792
793 idx_path = config['search.location']
794 data_path = config['cache_dir']
795
796 # clean index and data
797 if idx_path and os.path.exists(idx_path):
798 log.debug('remove %s', idx_path)
799 shutil.rmtree(idx_path)
800
801 if data_path and os.path.exists(data_path):
802 log.debug('remove %s', data_path)
803 shutil.rmtree(data_path)
804
835
805 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
836 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
806 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
837 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
@@ -140,7 +140,7 b' class CurlSession(object):'
140 try:
140 try:
141 curl.perform()
141 curl.perform()
142 except pycurl.error as exc:
142 except pycurl.error as exc:
143 log.error('Failed to call endpoint url: {} using pycurl'.format(url))
143 log.error('Failed to call endpoint url: %s using pycurl', url)
144 raise
144 raise
145
145
146 status_code = curl.getinfo(pycurl.HTTP_CODE)
146 status_code = curl.getinfo(pycurl.HTTP_CODE)
@@ -45,10 +45,3 b' def discover_git_version(raise_on_exc=Fa'
45 if raise_on_exc:
45 if raise_on_exc:
46 raise
46 raise
47 return ''
47 return ''
48
49
50 def lfs_store(base_location):
51 """
52 Return a lfs store relative to base_location
53 """
54 return os.path.join(base_location, '.cache', 'lfs_store')
@@ -45,10 +45,3 b' def discover_hg_version(raise_on_exc=Fal'
45 if raise_on_exc:
45 if raise_on_exc:
46 raise
46 raise
47 return ''
47 return ''
48
49
50 def largefiles_store(base_location):
51 """
52 Return a largefile store relative to base_location
53 """
54 return os.path.join(base_location, '.cache', 'largefiles')
@@ -216,7 +216,7 b' class RemoteRepo(object):'
216 self._cache_region, self._cache_namespace = \
216 self._cache_region, self._cache_namespace = \
217 remote_maker.init_cache_region(cache_repo_id)
217 remote_maker.init_cache_region(cache_repo_id)
218
218
219 with_wire = with_wire or {}
219 with_wire = with_wire or {"cache": False}
220
220
221 repo_state_uid = with_wire.get('repo_state_uid') or 'state'
221 repo_state_uid = with_wire.get('repo_state_uid') or 'state'
222
222
@@ -373,6 +373,7 b' class CommentsModel(BaseModel):'
373
373
374 Session().add(comment)
374 Session().add(comment)
375 Session().flush()
375 Session().flush()
376
376 kwargs = {
377 kwargs = {
377 'user': user,
378 'user': user,
378 'renderer_type': renderer,
379 'renderer_type': renderer,
@@ -387,8 +388,7 b' class CommentsModel(BaseModel):'
387 }
388 }
388
389
389 if commit_obj:
390 if commit_obj:
390 recipients = ChangesetComment.get_users(
391 recipients = ChangesetComment.get_users(revision=commit_obj.raw_id)
391 revision=commit_obj.raw_id)
392 # add commit author if it's in RhodeCode system
392 # add commit author if it's in RhodeCode system
393 cs_author = User.get_from_cs_author(commit_obj.author)
393 cs_author = User.get_from_cs_author(commit_obj.author)
394 if not cs_author:
394 if not cs_author:
@@ -397,16 +397,13 b' class CommentsModel(BaseModel):'
397 recipients += [cs_author]
397 recipients += [cs_author]
398
398
399 commit_comment_url = self.get_url(comment, request=request)
399 commit_comment_url = self.get_url(comment, request=request)
400 commit_comment_reply_url = self.get_url(
400 commit_comment_reply_url = self.get_url(comment, request=request, anchor=f'comment-{comment.comment_id}/?/ReplyToComment')
401 comment, request=request,
402 anchor=f'comment-{comment.comment_id}/?/ReplyToComment')
403
401
404 target_repo_url = h.link_to(
402 target_repo_url = h.link_to(
405 repo.repo_name,
403 repo.repo_name,
406 h.route_url('repo_summary', repo_name=repo.repo_name))
404 h.route_url('repo_summary', repo_name=repo.repo_name))
407
405
408 commit_url = h.route_url('repo_commit', repo_name=repo.repo_name,
406 commit_url = h.route_url('repo_commit', repo_name=repo.repo_name, commit_id=commit_id)
409 commit_id=commit_id)
410
407
411 # commit specifics
408 # commit specifics
412 kwargs.update({
409 kwargs.update({
@@ -489,7 +486,6 b' class CommentsModel(BaseModel):'
489
486
490 if not is_draft:
487 if not is_draft:
491 comment_data = comment.get_api_data()
488 comment_data = comment.get_api_data()
492
493 self._log_audit_action(
489 self._log_audit_action(
494 action, {'data': comment_data}, auth_user, comment)
490 action, {'data': comment_data}, auth_user, comment)
495
491
@@ -38,7 +38,7 b' from rhodecode.translation import lazy_u'
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 from rhodecode.lib import audit_logger
39 from rhodecode.lib import audit_logger
40 from collections import OrderedDict
40 from collections import OrderedDict
41 from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
41 from rhodecode.lib.hook_daemon.utils import prepare_callback_daemon
42 from rhodecode.lib.ext_json import sjson as json
42 from rhodecode.lib.ext_json import sjson as json
43 from rhodecode.lib.markup_renderer import (
43 from rhodecode.lib.markup_renderer import (
44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
@@ -980,9 +980,7 b' class PullRequestModel(BaseModel):'
980 target_ref = self._refresh_reference(
980 target_ref = self._refresh_reference(
981 pull_request.target_ref_parts, target_vcs)
981 pull_request.target_ref_parts, target_vcs)
982
982
983 callback_daemon, extras = prepare_callback_daemon(
983 callback_daemon, extras = prepare_callback_daemon(extras, protocol=vcs_settings.HOOKS_PROTOCOL)
984 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
985 host=vcs_settings.HOOKS_HOST)
986
984
987 with callback_daemon:
985 with callback_daemon:
988 # TODO: johbo: Implement a clean way to run a config_override
986 # TODO: johbo: Implement a clean way to run a config_override
@@ -862,27 +862,3 b' class VcsSettingsModel(object):'
862 raise ValueError(
862 raise ValueError(
863 f'The given data does not contain {data_key} key')
863 f'The given data does not contain {data_key} key')
864 return data_keys
864 return data_keys
865
866 def create_largeobjects_dirs_if_needed(self, repo_store_path):
867 """
868 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
869 does a repository scan if enabled in the settings.
870 """
871
872 from rhodecode.lib.vcs.backends.hg import largefiles_store
873 from rhodecode.lib.vcs.backends.git import lfs_store
874
875 paths = [
876 largefiles_store(repo_store_path),
877 lfs_store(repo_store_path)]
878
879 for path in paths:
880 if os.path.isdir(path):
881 continue
882 if os.path.isfile(path):
883 continue
884 # not a file nor dir, we try to create it
885 try:
886 os.makedirs(path)
887 except Exception:
888 log.warning('Failed to create largefiles dir:%s', path)
@@ -1,5 +1,4 b''
1
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -38,7 +37,7 b' from rhodecode.lib.hash_utils import sha'
38 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
39
38
40 __all__ = [
39 __all__ = [
41 'get_new_dir', 'TestController',
40 'get_new_dir', 'TestController', 'console_printer',
42 'clear_cache_regions',
41 'clear_cache_regions',
43 'assert_session_flash', 'login_user', 'no_newline_id_generator',
42 'assert_session_flash', 'login_user', 'no_newline_id_generator',
44 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
43 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
@@ -244,3 +243,11 b' def no_newline_id_generator(test_name):'
244
243
245 return test_name or 'test-with-empty-name'
244 return test_name or 'test-with-empty-name'
246
245
246 def console_printer(*msg):
247 print_func = print
248 try:
249 from rich import print as print_func
250 except ImportError:
251 pass
252
253 print_func(*msg)
@@ -1,5 +1,4 b''
1
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -90,7 +89,7 b' class RhodeCodeAuthPlugin(RhodeCodeExter'
90 'firstname': firstname,
89 'firstname': firstname,
91 'lastname': lastname,
90 'lastname': lastname,
92 'groups': [],
91 'groups': [],
93 'email': '%s@rhodecode.com' % username,
92 'email': f'{username}@rhodecode.com',
94 'admin': admin,
93 'admin': admin,
95 'active': active,
94 'active': active,
96 "active_from_extern": None,
95 "active_from_extern": None,
@@ -20,14 +20,14 b''
20 import pytest
20 import pytest
21 import requests
21 import requests
22 from rhodecode.config import routing_links
22 from rhodecode.config import routing_links
23
23 from rhodecode.tests import console_printer
24
24
25 def check_connection():
25 def check_connection():
26 try:
26 try:
27 response = requests.get('https://rhodecode.com')
27 response = requests.get('https://rhodecode.com')
28 return response.status_code == 200
28 return response.status_code == 200
29 except Exception as e:
29 except Exception as e:
30 print(e)
30 console_printer(e)
31
31
32 return False
32 return False
33
33
@@ -1,4 +1,4 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -16,23 +16,10 b''
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 import pytest # noqa
20 py.test config for test suite for making push/pull operations.
21
22 .. important::
23
24 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
25 to redirect things to stderr instead of stdout.
26 """
27
28 import pytest
29 import logging
20 import logging
30
21 import collections
31 from rhodecode.authentication import AuthenticationPluginRegistry
22 import rhodecode
32 from rhodecode.model.db import Permission, User
33 from rhodecode.model.meta import Session
34 from rhodecode.model.settings import SettingsModel
35 from rhodecode.model.user import UserModel
36
23
37
24
38 log = logging.getLogger(__name__)
25 log = logging.getLogger(__name__)
@@ -40,99 +27,3 b' log = logging.getLogger(__name__)'
40 # Docker image running httpbin...
27 # Docker image running httpbin...
41 HTTPBIN_DOMAIN = 'http://httpbin'
28 HTTPBIN_DOMAIN = 'http://httpbin'
42 HTTPBIN_POST = HTTPBIN_DOMAIN + '/post'
29 HTTPBIN_POST = HTTPBIN_DOMAIN + '/post'
43
44
45 @pytest.fixture()
46 def enable_auth_plugins(request, baseapp, csrf_token):
47 """
48 Return a factory object that when called, allows to control which
49 authentication plugins are enabled.
50 """
51
52 class AuthPluginManager(object):
53
54 def cleanup(self):
55 self._enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode'])
56
57 def enable(self, plugins_list, override=None):
58 return self._enable_plugins(plugins_list, override)
59
60 def _enable_plugins(self, plugins_list, override=None):
61 override = override or {}
62 params = {
63 'auth_plugins': ','.join(plugins_list),
64 }
65
66 # helper translate some names to others, to fix settings code
67 name_map = {
68 'token': 'authtoken'
69 }
70 log.debug('enable_auth_plugins: enabling following auth-plugins: %s', plugins_list)
71
72 for module in plugins_list:
73 plugin_name = module.partition('#')[-1]
74 if plugin_name in name_map:
75 plugin_name = name_map[plugin_name]
76 enabled_plugin = f'auth_{plugin_name}_enabled'
77 cache_ttl = f'auth_{plugin_name}_cache_ttl'
78
79 # default params that are needed for each plugin,
80 # `enabled` and `cache_ttl`
81 params.update({
82 enabled_plugin: True,
83 cache_ttl: 0
84 })
85 if override.get:
86 params.update(override.get(module, {}))
87
88 validated_params = params
89
90 for k, v in validated_params.items():
91 setting = SettingsModel().create_or_update_setting(k, v)
92 Session().add(setting)
93 Session().commit()
94
95 AuthenticationPluginRegistry.invalidate_auth_plugins_cache(hard=True)
96
97 enabled_plugins = SettingsModel().get_auth_plugins()
98 assert plugins_list == enabled_plugins
99
100 enabler = AuthPluginManager()
101 request.addfinalizer(enabler.cleanup)
102
103 return enabler
104
105
106 @pytest.fixture()
107 def test_user_factory(request, baseapp):
108
109 def user_factory(username='test_user', password='qweqwe', first_name='John', last_name='Testing', **kwargs):
110 usr = UserModel().create_or_update(
111 username=username,
112 password=password,
113 email=f'{username}@rhodecode.org',
114 firstname=first_name, lastname=last_name)
115 Session().commit()
116
117 for k, v in kwargs.items():
118 setattr(usr, k, v)
119 Session().add(usr)
120
121 new_usr = User.get_by_username(username)
122 new_usr_id = new_usr.user_id
123 assert new_usr == usr
124
125 @request.addfinalizer
126 def cleanup():
127 if User.get(new_usr_id) is None:
128 return
129
130 perm = Permission.query().all()
131 for p in perm:
132 UserModel().revoke_perm(usr, p)
133
134 UserModel().delete(new_usr_id)
135 Session().commit()
136 return usr
137
138 return user_factory
@@ -1,4 +1,4 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -98,16 +98,16 b' def pytest_addoption(parser):'
98 'pyramid_config',
98 'pyramid_config',
99 "Set up a Pyramid environment with the specified config file.")
99 "Set up a Pyramid environment with the specified config file.")
100
100
101 parser.addini('rhodecode_config', 'rhodecode config ini for tests')
102 parser.addini('celery_config', 'celery config ini for tests')
103 parser.addini('vcsserver_config', 'vcsserver config ini for tests')
104
101 vcsgroup = parser.getgroup('vcs')
105 vcsgroup = parser.getgroup('vcs')
106
102 vcsgroup.addoption(
107 vcsgroup.addoption(
103 '--without-vcsserver', dest='with_vcsserver', action='store_false',
108 '--without-vcsserver', dest='with_vcsserver', action='store_false',
104 help="Do not start the VCSServer in a background process.")
109 help="Do not start the VCSServer in a background process.")
105 vcsgroup.addoption(
110
106 '--with-vcsserver-http', dest='vcsserver_config_http',
107 help="Start the HTTP VCSServer with the specified config file.")
108 vcsgroup.addoption(
109 '--vcsserver-protocol', dest='vcsserver_protocol',
110 help="Start the VCSServer with HTTP protocol support.")
111 vcsgroup.addoption(
111 vcsgroup.addoption(
112 '--vcsserver-config-override', action='store', type=_parse_json,
112 '--vcsserver-config-override', action='store', type=_parse_json,
113 default=None, dest='vcsserver_config_override', help=(
113 default=None, dest='vcsserver_config_override', help=(
@@ -122,12 +122,6 b' def pytest_addoption(parser):'
122 "Allows to set the port of the vcsserver. Useful when testing "
122 "Allows to set the port of the vcsserver. Useful when testing "
123 "against an already running server and random ports cause "
123 "against an already running server and random ports cause "
124 "trouble."))
124 "trouble."))
125 parser.addini(
126 'vcsserver_config_http',
127 "Start the HTTP VCSServer with the specified config file.")
128 parser.addini(
129 'vcsserver_protocol',
130 "Start the VCSServer with HTTP protocol support.")
131
125
132
126
133 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
127 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -17,7 +16,7 b''
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 from subprocess import Popen, PIPE
19 import subprocess
21 import os
20 import os
22 import sys
21 import sys
23 import tempfile
22 import tempfile
@@ -26,87 +25,71 b' import pytest'
26 from sqlalchemy.engine import url
25 from sqlalchemy.engine import url
27
26
28 from rhodecode.lib.str_utils import safe_str, safe_bytes
27 from rhodecode.lib.str_utils import safe_str, safe_bytes
29 from rhodecode.tests.fixture import TestINI
28 from rhodecode.tests.fixtures.rc_fixture import TestINI
30
29
31
30
32 def _get_dbs_from_metafunc(metafunc):
31 def _get_dbs_from_metafunc(metafunc):
33 dbs_mark = metafunc.definition.get_closest_marker('dbs')
32 dbs_mark = metafunc.definition.get_closest_marker("dbs")
34
33
35 if dbs_mark:
34 if dbs_mark:
36 # Supported backends by this test function, created from pytest.mark.dbs
35 # Supported backends by this test function, created from pytest.mark.dbs
37 backends = dbs_mark.args
36 backends = dbs_mark.args
38 else:
37 else:
39 backends = metafunc.config.getoption('--dbs')
38 backends = metafunc.config.getoption("--dbs")
40 return backends
39 return backends
41
40
42
41
43 def pytest_generate_tests(metafunc):
42 def pytest_generate_tests(metafunc):
44 # Support test generation based on --dbs parameter
43 # Support test generation based on --dbs parameter
45 if 'db_backend' in metafunc.fixturenames:
44 if "db_backend" in metafunc.fixturenames:
46 requested_backends = set(metafunc.config.getoption('--dbs'))
45 requested_backends = set(metafunc.config.getoption("--dbs"))
47 backends = _get_dbs_from_metafunc(metafunc)
46 backends = _get_dbs_from_metafunc(metafunc)
48 backends = requested_backends.intersection(backends)
47 backends = requested_backends.intersection(backends)
49 # TODO: johbo: Disabling a backend did not work out with
48 # TODO: johbo: Disabling a backend did not work out with
50 # parametrization, find better way to achieve this.
49 # parametrization, find better way to achieve this.
51 if not backends:
50 if not backends:
52 metafunc.function._skip = True
51 metafunc.function._skip = True
53 metafunc.parametrize('db_backend_name', backends)
52 metafunc.parametrize("db_backend_name", backends)
54
53
55
54
56 def pytest_collection_modifyitems(session, config, items):
55 def pytest_collection_modifyitems(session, config, items):
57 remaining = [
56 remaining = [i for i in items if not getattr(i.obj, "_skip", False)]
58 i for i in items if not getattr(i.obj, '_skip', False)]
59 items[:] = remaining
57 items[:] = remaining
60
58
61
59
62 @pytest.fixture()
60 @pytest.fixture()
63 def db_backend(
61 def db_backend(request, db_backend_name, ini_config, tmpdir_factory):
64 request, db_backend_name, ini_config, tmpdir_factory):
65 basetemp = tmpdir_factory.getbasetemp().strpath
62 basetemp = tmpdir_factory.getbasetemp().strpath
66 klass = _get_backend(db_backend_name)
63 klass = _get_backend(db_backend_name)
67
64
68 option_name = '--{}-connection-string'.format(db_backend_name)
65 option_name = "--{}-connection-string".format(db_backend_name)
69 connection_string = request.config.getoption(option_name) or None
66 connection_string = request.config.getoption(option_name) or None
70
67
71 return klass(
68 return klass(config_file=ini_config, basetemp=basetemp, connection_string=connection_string)
72 config_file=ini_config, basetemp=basetemp,
73 connection_string=connection_string)
74
69
75
70
76 def _get_backend(backend_type):
71 def _get_backend(backend_type):
77 return {
72 return {"sqlite": SQLiteDBBackend, "postgres": PostgresDBBackend, "mysql": MySQLDBBackend, "": EmptyDBBackend}[
78 'sqlite': SQLiteDBBackend,
73 backend_type
79 'postgres': PostgresDBBackend,
74 ]
80 'mysql': MySQLDBBackend,
81 '': EmptyDBBackend
82 }[backend_type]
83
75
84
76
85 class DBBackend(object):
77 class DBBackend(object):
86 _store = os.path.dirname(os.path.abspath(__file__))
78 _store = os.path.dirname(os.path.abspath(__file__))
87 _type = None
79 _type = None
88 _base_ini_config = [{'app:main': {'vcs.start_server': 'false',
80 _base_ini_config = [{"app:main": {"vcs.start_server": "false", "startup.import_repos": "false"}}]
89 'startup.import_repos': 'false'}}]
81 _db_url = [{"app:main": {"sqlalchemy.db1.url": ""}}]
90 _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}]
82 _base_db_name = "rhodecode_test_db_backend"
91 _base_db_name = 'rhodecode_test_db_backend'
83 std_env = {"RC_TEST": "0"}
92 std_env = {'RC_TEST': '0'}
93
84
94 def __init__(
85 def __init__(self, config_file, db_name=None, basetemp=None, connection_string=None):
95 self, config_file, db_name=None, basetemp=None,
96 connection_string=None):
97
98 from rhodecode.lib.vcs.backends.hg import largefiles_store
99 from rhodecode.lib.vcs.backends.git import lfs_store
100
101 self.fixture_store = os.path.join(self._store, self._type)
86 self.fixture_store = os.path.join(self._store, self._type)
102 self.db_name = db_name or self._base_db_name
87 self.db_name = db_name or self._base_db_name
103 self._base_ini_file = config_file
88 self._base_ini_file = config_file
104 self.stderr = ''
89 self.stderr = ""
105 self.stdout = ''
90 self.stdout = ""
106 self._basetemp = basetemp or tempfile.gettempdir()
91 self._basetemp = basetemp or tempfile.gettempdir()
107 self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
92 self._repos_location = os.path.join(self._basetemp, "rc_test_repos")
108 self._repos_hg_largefiles_store = largefiles_store(self._basetemp)
109 self._repos_git_lfs_store = lfs_store(self._basetemp)
110 self.connection_string = connection_string
93 self.connection_string = connection_string
111
94
112 @property
95 @property
@@ -118,8 +101,7 b' class DBBackend(object):'
118 if not new_connection_string:
101 if not new_connection_string:
119 new_connection_string = self.get_default_connection_string()
102 new_connection_string = self.get_default_connection_string()
120 else:
103 else:
121 new_connection_string = new_connection_string.format(
104 new_connection_string = new_connection_string.format(db_name=self.db_name)
122 db_name=self.db_name)
123 url_parts = url.make_url(new_connection_string)
105 url_parts = url.make_url(new_connection_string)
124 self._connection_string = new_connection_string
106 self._connection_string = new_connection_string
125 self.user = url_parts.username
107 self.user = url_parts.username
@@ -127,73 +109,67 b' class DBBackend(object):'
127 self.host = url_parts.host
109 self.host = url_parts.host
128
110
129 def get_default_connection_string(self):
111 def get_default_connection_string(self):
130 raise NotImplementedError('default connection_string is required.')
112 raise NotImplementedError("default connection_string is required.")
131
113
132 def execute(self, cmd, env=None, *args):
114 def execute(self, cmd, env=None, *args):
133 """
115 """
134 Runs command on the system with given ``args``.
116 Runs command on the system with given ``args``.
135 """
117 """
136
118
137 command = cmd + ' ' + ' '.join(args)
119 command = cmd + " " + " ".join(args)
138 sys.stdout.write(f'CMD: {command}')
120 sys.stdout.write(f"CMD: {command}")
139
121
140 # Tell Python to use UTF-8 encoding out stdout
122 # Tell Python to use UTF-8 encoding out stdout
141 _env = os.environ.copy()
123 _env = os.environ.copy()
142 _env['PYTHONIOENCODING'] = 'UTF-8'
124 _env["PYTHONIOENCODING"] = "UTF-8"
143 _env.update(self.std_env)
125 _env.update(self.std_env)
144 if env:
126 if env:
145 _env.update(env)
127 _env.update(env)
146
128
147 self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env)
129 self.p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=_env)
148 self.stdout, self.stderr = self.p.communicate()
130 self.stdout, self.stderr = self.p.communicate()
149 stdout_str = safe_str(self.stdout)
131 stdout_str = safe_str(self.stdout)
150 sys.stdout.write(f'COMMAND:{command}\n')
132 sys.stdout.write(f"COMMAND:{command}\n")
151 sys.stdout.write(stdout_str)
133 sys.stdout.write(stdout_str)
152 return self.stdout, self.stderr
134 return self.stdout, self.stderr
153
135
154 def assert_returncode_success(self):
136 def assert_returncode_success(self):
155 from rich import print as pprint
137 from rich import print as pprint
138
156 if not self.p.returncode == 0:
139 if not self.p.returncode == 0:
157 pprint(safe_str(self.stderr))
140 pprint(safe_str(self.stderr))
158 raise AssertionError(f'non 0 retcode:{self.p.returncode}')
141 raise AssertionError(f"non 0 retcode:{self.p.returncode}")
159
142
160 def assert_correct_output(self, stdout, version):
143 def assert_correct_output(self, stdout, version):
161 assert b'UPGRADE FOR STEP %b COMPLETED' % safe_bytes(version) in stdout
144 assert b"UPGRADE FOR STEP %b COMPLETED" % safe_bytes(version) in stdout
162
145
163 def setup_rhodecode_db(self, ini_params=None, env=None):
146 def setup_rhodecode_db(self, ini_params=None, env=None):
164 if not ini_params:
147 if not ini_params:
165 ini_params = self._base_ini_config
148 ini_params = self._base_ini_config
166
149
167 ini_params.extend(self._db_url)
150 ini_params.extend(self._db_url)
168 with TestINI(self._base_ini_file, ini_params,
151 with TestINI(self._base_ini_file, ini_params, self._type, destroy=True) as _ini_file:
169 self._type, destroy=True) as _ini_file:
170
171 if not os.path.isdir(self._repos_location):
152 if not os.path.isdir(self._repos_location):
172 os.makedirs(self._repos_location)
153 os.makedirs(self._repos_location)
173 if not os.path.isdir(self._repos_hg_largefiles_store):
174 os.makedirs(self._repos_hg_largefiles_store)
175 if not os.path.isdir(self._repos_git_lfs_store):
176 os.makedirs(self._repos_git_lfs_store)
177
154
178 return self.execute(
155 return self.execute(
179 "rc-setup-app {0} --user=marcink "
156 "rc-setup-app {0} --user=marcink "
180 "--email=marcin@rhodeocode.com --password={1} "
157 "--email=marcin@rhodeocode.com --password={1} "
181 "--repos={2} --force-yes".format(
158 "--repos={2} --force-yes".format(_ini_file, "qweqwe", self._repos_location),
182 _ini_file, 'qweqwe', self._repos_location), env=env)
159 env=env,
160 )
183
161
184 def upgrade_database(self, ini_params=None):
162 def upgrade_database(self, ini_params=None):
185 if not ini_params:
163 if not ini_params:
186 ini_params = self._base_ini_config
164 ini_params = self._base_ini_config
187 ini_params.extend(self._db_url)
165 ini_params.extend(self._db_url)
188
166
189 test_ini = TestINI(
167 test_ini = TestINI(self._base_ini_file, ini_params, self._type, destroy=True)
190 self._base_ini_file, ini_params, self._type, destroy=True)
191 with test_ini as ini_file:
168 with test_ini as ini_file:
192 if not os.path.isdir(self._repos_location):
169 if not os.path.isdir(self._repos_location):
193 os.makedirs(self._repos_location)
170 os.makedirs(self._repos_location)
194
171
195 return self.execute(
172 return self.execute("rc-upgrade-db {0} --force-yes".format(ini_file))
196 "rc-upgrade-db {0} --force-yes".format(ini_file))
197
173
198 def setup_db(self):
174 def setup_db(self):
199 raise NotImplementedError
175 raise NotImplementedError
@@ -206,7 +182,7 b' class DBBackend(object):'
206
182
207
183
208 class EmptyDBBackend(DBBackend):
184 class EmptyDBBackend(DBBackend):
209 _type = ''
185 _type = ""
210
186
211 def setup_db(self):
187 def setup_db(self):
212 pass
188 pass
@@ -222,21 +198,20 b' class EmptyDBBackend(DBBackend):'
222
198
223
199
224 class SQLiteDBBackend(DBBackend):
200 class SQLiteDBBackend(DBBackend):
225 _type = 'sqlite'
201 _type = "sqlite"
226
202
227 def get_default_connection_string(self):
203 def get_default_connection_string(self):
228 return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)
204 return "sqlite:///{}/{}.sqlite".format(self._basetemp, self.db_name)
229
205
230 def setup_db(self):
206 def setup_db(self):
231 # dump schema for tests
207 # dump schema for tests
232 # cp -v $TEST_DB_NAME
208 # cp -v $TEST_DB_NAME
233 self._db_url = [{'app:main': {
209 self._db_url = [{"app:main": {"sqlalchemy.db1.url": self.connection_string}}]
234 'sqlalchemy.db1.url': self.connection_string}}]
235
210
236 def import_dump(self, dumpname):
211 def import_dump(self, dumpname):
237 dump = os.path.join(self.fixture_store, dumpname)
212 dump = os.path.join(self.fixture_store, dumpname)
238 target = os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self))
213 target = os.path.join(self._basetemp, "{0.db_name}.sqlite".format(self))
239 return self.execute(f'cp -v {dump} {target}')
214 return self.execute(f"cp -v {dump} {target}")
240
215
241 def teardown_db(self):
216 def teardown_db(self):
242 target_db = os.path.join(self._basetemp, self.db_name)
217 target_db = os.path.join(self._basetemp, self.db_name)
@@ -244,39 +219,39 b' class SQLiteDBBackend(DBBackend):'
244
219
245
220
246 class MySQLDBBackend(DBBackend):
221 class MySQLDBBackend(DBBackend):
247 _type = 'mysql'
222 _type = "mysql"
248
223
249 def get_default_connection_string(self):
224 def get_default_connection_string(self):
250 return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)
225 return "mysql://root:qweqwe@127.0.0.1/{}".format(self.db_name)
251
226
252 def setup_db(self):
227 def setup_db(self):
253 # dump schema for tests
228 # dump schema for tests
254 # mysqldump -uroot -pqweqwe $TEST_DB_NAME
229 # mysqldump -uroot -pqweqwe $TEST_DB_NAME
255 self._db_url = [{'app:main': {
230 self._db_url = [{"app:main": {"sqlalchemy.db1.url": self.connection_string}}]
256 'sqlalchemy.db1.url': self.connection_string}}]
231 return self.execute(
257 return self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
232 "mysql -v -u{} -p{} -e 'create database '{}';'".format(self.user, self.password, self.db_name)
258 self.user, self.password, self.db_name))
233 )
259
234
260 def import_dump(self, dumpname):
235 def import_dump(self, dumpname):
261 dump = os.path.join(self.fixture_store, dumpname)
236 dump = os.path.join(self.fixture_store, dumpname)
262 return self.execute("mysql -u{} -p{} {} < {}".format(
237 return self.execute("mysql -u{} -p{} {} < {}".format(self.user, self.password, self.db_name, dump))
263 self.user, self.password, self.db_name, dump))
264
238
265 def teardown_db(self):
239 def teardown_db(self):
266 return self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format(
240 return self.execute(
267 self.user, self.password, self.db_name))
241 "mysql -v -u{} -p{} -e 'drop database '{}';'".format(self.user, self.password, self.db_name)
242 )
268
243
269
244
270 class PostgresDBBackend(DBBackend):
245 class PostgresDBBackend(DBBackend):
271 _type = 'postgres'
246 _type = "postgres"
272
247
273 def get_default_connection_string(self):
248 def get_default_connection_string(self):
274 return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)
249 return "postgresql://postgres:qweqwe@localhost/{}".format(self.db_name)
275
250
276 def setup_db(self):
251 def setup_db(self):
277 # dump schema for tests
252 # dump schema for tests
278 # pg_dump -U postgres -h localhost $TEST_DB_NAME
253 # pg_dump -U postgres -h localhost $TEST_DB_NAME
279 self._db_url = [{'app:main': {'sqlalchemy.db1.url': self.connection_string}}]
254 self._db_url = [{"app:main": {"sqlalchemy.db1.url": self.connection_string}}]
280 cmd = f"PGPASSWORD={self.password} psql -U {self.user} -h localhost -c 'create database '{self.db_name}';'"
255 cmd = f"PGPASSWORD={self.password} psql -U {self.user} -h localhost -c 'create database '{self.db_name}';'"
281 return self.execute(cmd)
256 return self.execute(cmd)
282
257
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -21,33 +20,42 b' import pytest'
21
20
22
21
23 @pytest.mark.dbs("postgres")
22 @pytest.mark.dbs("postgres")
24 @pytest.mark.parametrize("dumpname", [
23 @pytest.mark.parametrize(
25 '1.4.4.sql',
24 "dumpname",
26 '1.5.0.sql',
25 [
27 '1.6.0.sql',
26 "1.4.4.sql",
28 '1.6.0_no_repo_name_index.sql',
27 "1.5.0.sql",
29 ])
28 "1.6.0.sql",
29 "1.6.0_no_repo_name_index.sql",
30 ],
31 )
30 def test_migrate_postgres_db(db_backend, dumpname):
32 def test_migrate_postgres_db(db_backend, dumpname):
31 _run_migration_test(db_backend, dumpname)
33 _run_migration_test(db_backend, dumpname)
32
34
33
35
34 @pytest.mark.dbs("sqlite")
36 @pytest.mark.dbs("sqlite")
35 @pytest.mark.parametrize("dumpname", [
37 @pytest.mark.parametrize(
36 'rhodecode.1.4.4.sqlite',
38 "dumpname",
37 'rhodecode.1.4.4_with_groups.sqlite',
39 [
38 'rhodecode.1.4.4_with_ldap_active.sqlite',
40 "rhodecode.1.4.4.sqlite",
39 ])
41 "rhodecode.1.4.4_with_groups.sqlite",
42 "rhodecode.1.4.4_with_ldap_active.sqlite",
43 ],
44 )
40 def test_migrate_sqlite_db(db_backend, dumpname):
45 def test_migrate_sqlite_db(db_backend, dumpname):
41 _run_migration_test(db_backend, dumpname)
46 _run_migration_test(db_backend, dumpname)
42
47
43
48
44 @pytest.mark.dbs("mysql")
49 @pytest.mark.dbs("mysql")
45 @pytest.mark.parametrize("dumpname", [
50 @pytest.mark.parametrize(
46 '1.4.4.sql',
51 "dumpname",
47 '1.5.0.sql',
52 [
48 '1.6.0.sql',
53 "1.4.4.sql",
49 '1.6.0_no_repo_name_index.sql',
54 "1.5.0.sql",
50 ])
55 "1.6.0.sql",
56 "1.6.0_no_repo_name_index.sql",
57 ],
58 )
51 def test_migrate_mysql_db(db_backend, dumpname):
59 def test_migrate_mysql_db(db_backend, dumpname):
52 _run_migration_test(db_backend, dumpname)
60 _run_migration_test(db_backend, dumpname)
53
61
@@ -60,5 +68,5 b' def _run_migration_test(db_backend, dump'
60 db_backend.import_dump(dumpname)
68 db_backend.import_dump(dumpname)
61 stdout, stderr = db_backend.upgrade_database()
69 stdout, stderr = db_backend.upgrade_database()
62
70
63 db_backend.assert_correct_output(stdout+stderr, version='16')
71 db_backend.assert_correct_output(stdout + stderr, version="16")
64 db_backend.assert_returncode_success()
72 db_backend.assert_returncode_success()
1 NO CONTENT: file renamed from rhodecode/tests/fixture_mods/__init__.py to rhodecode/tests/fixtures/__init__.py
NO CONTENT: file renamed from rhodecode/tests/fixture_mods/__init__.py to rhodecode/tests/fixtures/__init__.py
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/diff_with_diff_data.diff to rhodecode/tests/fixtures/diff_fixtures/diff_with_diff_data.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/diff_with_diff_data.diff to rhodecode/tests/fixtures/diff_fixtures/diff_with_diff_data.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_binary_and_normal.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_and_normal.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_binary_and_normal.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_and_normal.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_binary_special_files.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_special_files.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_binary_special_files.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_special_files.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_binary_special_files_2.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_special_files_2.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_binary_special_files_2.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_special_files_2.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_chmod.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_chmod.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_chmod.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_chmod.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_js_chars.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_js_chars.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_js_chars.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_js_chars.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_mod_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_mod_single_binary_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_mod_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_mod_single_binary_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_rename_file.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_rename_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_rename_file.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_rename_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_rename_file_with_spaces.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_rename_file_with_spaces.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_rename_file_with_spaces.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_rename_file_with_spaces.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/git_node_history_response.json to rhodecode/tests/fixtures/diff_fixtures/git_node_history_response.json
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_node_history_response.json to rhodecode/tests/fixtures/diff_fixtures/git_node_history_response.json
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_add_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_add_single_binary_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_add_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_add_single_binary_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_binary_and_normal.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_binary_and_normal.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_binary_and_normal.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_binary_and_normal.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_chmod.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_chmod.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_chmod.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_chmod.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_chmod_and_mod_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_chmod_and_mod_single_binary_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_chmod_and_mod_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_chmod_and_mod_single_binary_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_and_chmod_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_and_chmod_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_and_chmod_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_and_chmod_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_and_modify_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_and_modify_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_and_modify_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_and_modify_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_chmod_and_edit_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_chmod_and_edit_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_chmod_and_edit_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_chmod_and_edit_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_file_with_spaces.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_file_with_spaces.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_file_with_spaces.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_file_with_spaces.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_del_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_del_single_binary_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_del_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_del_single_binary_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_double_file_change_double_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_double_file_change_double_newline.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_double_file_change_double_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_double_file_change_double_newline.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_double_file_change_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_double_file_change_newline.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_double_file_change_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_double_file_change_newline.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_four_file_change_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_four_file_change_newline.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_four_file_change_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_four_file_change_newline.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_mixed_filename_encodings.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mixed_filename_encodings.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_mixed_filename_encodings.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mixed_filename_encodings.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_mod_file_and_rename.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_file_and_rename.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_mod_file_and_rename.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_file_and_rename.diff
1 NO CONTENT: file copied from rhodecode/tests/fixtures/git_diff_mod_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_single_binary_file.diff
NO CONTENT: file copied from rhodecode/tests/fixtures/git_diff_mod_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_single_binary_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_mod_single_file_and_rename_and_chmod.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_single_file_and_rename_and_chmod.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_mod_single_file_and_rename_and_chmod.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_single_file_and_rename_and_chmod.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_no_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_no_newline.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_no_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_no_newline.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_rename_and_chmod_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_and_chmod_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_rename_and_chmod_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_and_chmod_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_rename_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_rename_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_rename_file_with_spaces.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_file_with_spaces.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_rename_file_with_spaces.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_file_with_spaces.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_single_file_change_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_single_file_change_newline.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_single_file_change_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_single_file_change_newline.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_node_history_response.json to rhodecode/tests/fixtures/diff_fixtures/hg_node_history_response.json
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_node_history_response.json to rhodecode/tests/fixtures/diff_fixtures/hg_node_history_response.json
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/journal_dump.csv to rhodecode/tests/fixtures/diff_fixtures/journal_dump.csv
NO CONTENT: file renamed from rhodecode/tests/fixtures/journal_dump.csv to rhodecode/tests/fixtures/diff_fixtures/journal_dump.csv
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/large_diff.diff to rhodecode/tests/fixtures/diff_fixtures/large_diff.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/large_diff.diff to rhodecode/tests/fixtures/diff_fixtures/large_diff.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/svn_diff_binary_add_file.diff to rhodecode/tests/fixtures/diff_fixtures/svn_diff_binary_add_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/svn_diff_binary_add_file.diff to rhodecode/tests/fixtures/diff_fixtures/svn_diff_binary_add_file.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/svn_diff_multiple_changes.diff to rhodecode/tests/fixtures/diff_fixtures/svn_diff_multiple_changes.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/svn_diff_multiple_changes.diff to rhodecode/tests/fixtures/diff_fixtures/svn_diff_multiple_changes.diff
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/svn_node_history_branches.json to rhodecode/tests/fixtures/diff_fixtures/svn_node_history_branches.json
NO CONTENT: file renamed from rhodecode/tests/fixtures/svn_node_history_branches.json to rhodecode/tests/fixtures/diff_fixtures/svn_node_history_branches.json
1 NO CONTENT: file renamed from rhodecode/tests/fixtures/svn_node_history_response.json to rhodecode/tests/fixtures/diff_fixtures/svn_node_history_response.json
NO CONTENT: file renamed from rhodecode/tests/fixtures/svn_node_history_response.json to rhodecode/tests/fixtures/diff_fixtures/svn_node_history_response.json
@@ -1,5 +1,4 b''
1
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -20,61 +19,128 b''
20 import pytest
19 import pytest
21
20
22 from rhodecode.lib.config_utils import get_app_config
21 from rhodecode.lib.config_utils import get_app_config
23 from rhodecode.tests.fixture import TestINI
22 from rhodecode.tests.fixtures.rc_fixture import TestINI
24 from rhodecode.tests import TESTS_TMP_PATH
23 from rhodecode.tests import TESTS_TMP_PATH
25 from rhodecode.tests.server_utils import RcVCSServer
24 from rhodecode.tests.server_utils import RcVCSServer
25 from rhodecode.tests.server_utils import RcWebServer
26 from rhodecode.tests.server_utils import CeleryServer
26
27
27
28
28 @pytest.fixture(scope='session')
29 @pytest.fixture(scope="session")
29 def vcsserver(request, vcsserver_port, vcsserver_factory):
30 def vcsserver_factory():
30 """
31 Session scope VCSServer.
32
33 Tests which need the VCSServer have to rely on this fixture in order
34 to ensure it will be running.
35
36 For specific needs, the fixture vcsserver_factory can be used. It allows to
37 adjust the configuration file for the test run.
38
39 Command line args:
40
41 --without-vcsserver: Allows to switch this fixture off. You have to
42 manually start the server.
43
44 --vcsserver-port: Will expect the VCSServer to listen on this port.
45 """
46
47 if not request.config.getoption('with_vcsserver'):
48 return None
49
50 return vcsserver_factory(
51 request, vcsserver_port=vcsserver_port)
52
53
54 @pytest.fixture(scope='session')
55 def vcsserver_factory(tmpdir_factory):
56 """
31 """
57 Use this if you need a running vcsserver with a special configuration.
32 Use this if you need a running vcsserver with a special configuration.
58 """
33 """
59
34
60 def factory(request, overrides=(), vcsserver_port=None,
35 def factory(request, store_dir, overrides=(), config_file=None, port=None, log_file=None, workers="3", env=None, info_prefix=""):
61 log_file=None, workers='3'):
36 env = env or {"RC_NO_TEST_ENV": "1"}
62
37 vcsserver_port = port
63 if vcsserver_port is None:
38 if port is None:
64 vcsserver_port = get_available_port()
39 vcsserver_port = get_available_port()
65
40
66 overrides = list(overrides)
41 overrides = list(overrides)
67 overrides.append({'server:main': {'port': vcsserver_port}})
42 overrides.append({"server:main": {"port": vcsserver_port}})
43
44 if getattr(request, 'param', None):
45 config_overrides = [request.param]
46 overrides.extend(config_overrides)
47
48 option_name = "vcsserver_config"
49 override_option_name = None
50 if not config_file:
51 config_file = get_config(
52 request.config,
53 option_name=option_name,
54 override_option_name=override_option_name,
55 overrides=overrides,
56 basetemp=store_dir,
57 prefix=f"{info_prefix}test_vcsserver_ini_",
58 )
59 server = RcVCSServer(config_file, log_file, workers, env=env, info_prefix=info_prefix)
60 server.start()
61
62 @request.addfinalizer
63 def cleanup():
64 server.shutdown()
65
66 server.wait_until_ready()
67 return server
68
69 return factory
70
71
72 @pytest.fixture(scope="session")
73 def rhodecode_factory():
74 def factory(request, store_dir, overrides=(), config_file=None, port=None, log_file=None, workers="3", env=None, info_prefix=""):
75 env = env or {"RC_NO_TEST_ENV": "1"}
76 rhodecode_port = port
77 if port is None:
78 rhodecode_port = get_available_port()
79
80 overrides = list(overrides)
81 overrides.append({"server:main": {"port": rhodecode_port}})
82 overrides.append({"app:main": {"use_celery": "true"}})
83 overrides.append({"app:main": {"celery.task_always_eager": "false"}})
84
85 if getattr(request, 'param', None):
86 config_overrides = [request.param]
87 overrides.extend(config_overrides)
88
68
89
69 option_name = 'vcsserver_config_http'
90 option_name = "rhodecode_config"
70 override_option_name = 'vcsserver_config_override'
91 override_option_name = None
71 config_file = get_config(
92 if not config_file:
72 request.config, option_name=option_name,
93 config_file = get_config(
73 override_option_name=override_option_name, overrides=overrides,
94 request.config,
74 basetemp=tmpdir_factory.getbasetemp().strpath,
95 option_name=option_name,
75 prefix='test_vcs_')
96 override_option_name=override_option_name,
97 overrides=overrides,
98 basetemp=store_dir,
99 prefix=f"{info_prefix}test_rhodecode_ini",
100 )
101
102 server = RcWebServer(config_file, log_file, workers, env, info_prefix=info_prefix)
103 server.start()
104
105 @request.addfinalizer
106 def cleanup():
107 server.shutdown()
108
109 server.wait_until_ready()
110 return server
111
112 return factory
113
76
114
77 server = RcVCSServer(config_file, log_file, workers)
115 @pytest.fixture(scope="session")
116 def celery_factory():
117 def factory(request, store_dir, overrides=(), config_file=None, port=None, log_file=None, workers="3", env=None, info_prefix=""):
118 env = env or {"RC_NO_TEST_ENV": "1"}
119 rhodecode_port = port
120
121 overrides = list(overrides)
122 overrides.append({"app:main": {"use_celery": "true"}})
123 overrides.append({"app:main": {"celery.task_always_eager": "false"}})
124 config_overrides = None
125
126 if getattr(request, 'param', None):
127 config_overrides = [request.param]
128 overrides.extend(config_overrides)
129
130 option_name = "celery_config"
131 override_option_name = None
132
133 if not config_file:
134 config_file = get_config(
135 request.config,
136 option_name=option_name,
137 override_option_name=override_option_name,
138 overrides=overrides,
139 basetemp=store_dir,
140 prefix=f"{info_prefix}test_celery_ini_",
141 )
142
143 server = CeleryServer(config_file, log_file, workers, env, info_prefix=info_prefix)
78 server.start()
144 server.start()
79
145
80 @request.addfinalizer
146 @request.addfinalizer
@@ -88,52 +154,68 b' def vcsserver_factory(tmpdir_factory):'
88
154
89
155
90 def _use_log_level(config):
156 def _use_log_level(config):
91 level = config.getoption('test_loglevel') or 'critical'
157 level = config.getoption("test_loglevel") or "critical"
92 return level.upper()
158 return level.upper()
93
159
94
160
95 @pytest.fixture(scope='session')
161 def _ini_config_factory(request, base_dir, rcserver_port, vcsserver_port):
96 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
162 option_name = "pyramid_config"
97 option_name = 'pyramid_config'
98 log_level = _use_log_level(request.config)
163 log_level = _use_log_level(request.config)
99
164
100 overrides = [
165 overrides = [
101 {'server:main': {'port': rcserver_port}},
166 {"server:main": {"port": rcserver_port}},
102 {'app:main': {
167 {
103 'cache_dir': '%(here)s/rc-tests/rc_data',
168 "app:main": {
104 'vcs.server': f'localhost:{vcsserver_port}',
169 #'cache_dir': '%(here)s/rc-tests/rc_data',
105 # johbo: We will always start the VCSServer on our own based on the
170 "vcs.server": f"localhost:{vcsserver_port}",
106 # fixtures of the test cases. For the test run it must always be
171 # johbo: We will always start the VCSServer on our own based on the
107 # off in the INI file.
172 # fixtures of the test cases. For the test run it must always be
108 'vcs.start_server': 'false',
173 # off in the INI file.
109
174 "vcs.start_server": "false",
110 'vcs.server.protocol': 'http',
175 "vcs.server.protocol": "http",
111 'vcs.scm_app_implementation': 'http',
176 "vcs.scm_app_implementation": "http",
112 'vcs.svn.proxy.enabled': 'true',
177 "vcs.svn.proxy.enabled": "true",
113 'vcs.hooks.protocol.v2': 'celery',
178 "vcs.hooks.protocol.v2": "celery",
114 'vcs.hooks.host': '*',
179 "vcs.hooks.host": "*",
115 'repo_store.path': TESTS_TMP_PATH,
180 "repo_store.path": TESTS_TMP_PATH,
116 'app.service_api.token': 'service_secret_token',
181 "app.service_api.token": "service_secret_token",
117 }},
182 }
118
183 },
119 {'handler_console': {
184 {
120 'class': 'StreamHandler',
185 "handler_console": {
121 'args': '(sys.stderr,)',
186 "class": "StreamHandler",
122 'level': log_level,
187 "args": "(sys.stderr,)",
123 }},
188 "level": log_level,
124
189 }
190 },
125 ]
191 ]
126
192
127 filename = get_config(
193 filename = get_config(
128 request.config, option_name=option_name,
194 request.config,
129 override_option_name='{}_override'.format(option_name),
195 option_name=option_name,
196 override_option_name=f"{option_name}_override",
130 overrides=overrides,
197 overrides=overrides,
131 basetemp=tmpdir_factory.getbasetemp().strpath,
198 basetemp=base_dir,
132 prefix='test_rce_')
199 prefix="test_rce_",
200 )
133 return filename
201 return filename
134
202
135
203
136 @pytest.fixture(scope='session')
204 @pytest.fixture(scope="session")
205 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
206 base_dir = tmpdir_factory.getbasetemp().strpath
207 return _ini_config_factory(request, base_dir, rcserver_port, vcsserver_port)
208
209
210 @pytest.fixture(scope="session")
211 def ini_config_factory(request, tmpdir_factory, rcserver_port, vcsserver_port):
212 def _factory(ini_config_basedir, overrides=()):
213 return _ini_config_factory(request, ini_config_basedir, rcserver_port, vcsserver_port)
214
215 return _factory
216
217
218 @pytest.fixture(scope="session")
137 def ini_settings(ini_config):
219 def ini_settings(ini_config):
138 ini_path = ini_config
220 ini_path = ini_config
139 return get_app_config(ini_path)
221 return get_app_config(ini_path)
@@ -141,26 +223,25 b' def ini_settings(ini_config):'
141
223
142 def get_available_port(min_port=40000, max_port=55555):
224 def get_available_port(min_port=40000, max_port=55555):
143 from rhodecode.lib.utils2 import get_available_port as _get_port
225 from rhodecode.lib.utils2 import get_available_port as _get_port
226
144 return _get_port(min_port, max_port)
227 return _get_port(min_port, max_port)
145
228
146
229
147 @pytest.fixture(scope='session')
230 @pytest.fixture(scope="session")
148 def rcserver_port(request):
231 def rcserver_port(request):
149 port = get_available_port()
232 port = get_available_port()
150 print(f'Using rhodecode port {port}')
151 return port
233 return port
152
234
153
235
154 @pytest.fixture(scope='session')
236 @pytest.fixture(scope="session")
155 def vcsserver_port(request):
237 def vcsserver_port(request):
156 port = request.config.getoption('--vcsserver-port')
238 port = request.config.getoption("--vcsserver-port")
157 if port is None:
239 if port is None:
158 port = get_available_port()
240 port = get_available_port()
159 print(f'Using vcsserver port {port}')
160 return port
241 return port
161
242
162
243
163 @pytest.fixture(scope='session')
244 @pytest.fixture(scope="session")
164 def available_port_factory() -> get_available_port:
245 def available_port_factory() -> get_available_port:
165 """
246 """
166 Returns a callable which returns free port numbers.
247 Returns a callable which returns free port numbers.
@@ -178,7 +259,7 b' def available_port(available_port_factor'
178 return available_port_factory()
259 return available_port_factory()
179
260
180
261
181 @pytest.fixture(scope='session')
262 @pytest.fixture(scope="session")
182 def testini_factory(tmpdir_factory, ini_config):
263 def testini_factory(tmpdir_factory, ini_config):
183 """
264 """
184 Factory to create an INI file based on TestINI.
265 Factory to create an INI file based on TestINI.
@@ -190,37 +271,38 b' def testini_factory(tmpdir_factory, ini_'
190
271
191
272
192 class TestIniFactory(object):
273 class TestIniFactory(object):
193
274 def __init__(self, ini_store_dir, template_ini):
194 def __init__(self, basetemp, template_ini):
275 self._ini_store_dir = ini_store_dir
195 self._basetemp = basetemp
196 self._template_ini = template_ini
276 self._template_ini = template_ini
197
277
198 def __call__(self, ini_params, new_file_prefix='test'):
278 def __call__(self, ini_params, new_file_prefix="test"):
199 ini_file = TestINI(
279 ini_file = TestINI(
200 self._template_ini, ini_params=ini_params,
280 self._template_ini, ini_params=ini_params, new_file_prefix=new_file_prefix, dir=self._ini_store_dir
201 new_file_prefix=new_file_prefix, dir=self._basetemp)
281 )
202 result = ini_file.create()
282 result = ini_file.create()
203 return result
283 return result
204
284
205
285
206 def get_config(
286 def get_config(config, option_name, override_option_name, overrides=None, basetemp=None, prefix="test"):
207 config, option_name, override_option_name, overrides=None,
208 basetemp=None, prefix='test'):
209 """
287 """
210 Find a configuration file and apply overrides for the given `prefix`.
288 Find a configuration file and apply overrides for the given `prefix`.
211 """
289 """
212 config_file = (
290 try:
213 config.getoption(option_name) or config.getini(option_name))
291 config_file = config.getoption(option_name)
292 except ValueError:
293 config_file = None
294
214 if not config_file:
295 if not config_file:
215 pytest.exit(
296 config_file = config.getini(option_name)
216 "Configuration error, could not extract {}.".format(option_name))
297
298 if not config_file:
299 pytest.exit(f"Configuration error, could not extract {option_name}.")
217
300
218 overrides = overrides or []
301 overrides = overrides or []
219 config_override = config.getoption(override_option_name)
302 if override_option_name:
220 if config_override:
303 config_override = config.getoption(override_option_name)
221 overrides.append(config_override)
304 if config_override:
222 temp_ini_file = TestINI(
305 overrides.append(config_override)
223 config_file, ini_params=overrides, new_file_prefix=prefix,
306 temp_ini_file = TestINI(config_file, ini_params=overrides, new_file_prefix=prefix, dir=basetemp)
224 dir=basetemp)
225
307
226 return temp_ini_file.create()
308 return temp_ini_file.create()
This diff has been collapsed as it changes many lines, (775 lines changed) Show them Hide them
@@ -1,5 +1,4 b''
1
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -30,6 +29,7 b' import uuid'
30 import dateutil.tz
29 import dateutil.tz
31 import logging
30 import logging
32 import functools
31 import functools
32 import textwrap
33
33
34 import mock
34 import mock
35 import pyramid.testing
35 import pyramid.testing
@@ -43,8 +43,17 b' import rhodecode.lib'
43 from rhodecode.model.changeset_status import ChangesetStatusModel
43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.comment import CommentsModel
44 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.db import (
45 from rhodecode.model.db import (
46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
46 PullRequest,
47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
47 PullRequestReviewers,
48 Repository,
49 RhodeCodeSetting,
50 ChangesetStatus,
51 RepoGroup,
52 UserGroup,
53 RepoRhodeCodeUi,
54 RepoRhodeCodeSetting,
55 RhodeCodeUi,
56 )
48 from rhodecode.model.meta import Session
57 from rhodecode.model.meta import Session
49 from rhodecode.model.pull_request import PullRequestModel
58 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.repo import RepoModel
59 from rhodecode.model.repo import RepoModel
@@ -60,12 +69,20 b' from rhodecode.lib.str_utils import safe'
60 from rhodecode.lib.hash_utils import sha1_safe
69 from rhodecode.lib.hash_utils import sha1_safe
61 from rhodecode.lib.vcs.backends import get_backend
70 from rhodecode.lib.vcs.backends import get_backend
62 from rhodecode.lib.vcs.nodes import FileNode
71 from rhodecode.lib.vcs.nodes import FileNode
72 from rhodecode.lib.base import bootstrap_config
63 from rhodecode.tests import (
73 from rhodecode.tests import (
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
74 login_user_session,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
75 get_new_dir,
66 TEST_USER_REGULAR_PASS)
76 utils,
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
77 TESTS_TMP_PATH,
68 from rhodecode.tests.fixture import Fixture
78 TEST_USER_ADMIN_LOGIN,
79 TEST_USER_REGULAR_LOGIN,
80 TEST_USER_REGULAR2_LOGIN,
81 TEST_USER_REGULAR_PASS,
82 console_printer,
83 )
84 from rhodecode.tests.utils import set_anonymous_access
85 from rhodecode.tests.fixtures.rc_fixture import Fixture
69 from rhodecode.config import utils as config_utils
86 from rhodecode.config import utils as config_utils
70
87
71 log = logging.getLogger(__name__)
88 log = logging.getLogger(__name__)
@@ -76,36 +93,7 b' def cmp(a, b):'
76 return (a > b) - (a < b)
93 return (a > b) - (a < b)
77
94
78
95
79 @pytest.fixture(scope='session', autouse=True)
96 @pytest.fixture(scope="session")
80 def activate_example_rcextensions(request):
81 """
82 Patch in an example rcextensions module which verifies passed in kwargs.
83 """
84 from rhodecode.config import rcextensions
85
86 old_extensions = rhodecode.EXTENSIONS
87 rhodecode.EXTENSIONS = rcextensions
88 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
89
90 @request.addfinalizer
91 def cleanup():
92 rhodecode.EXTENSIONS = old_extensions
93
94
95 @pytest.fixture()
96 def capture_rcextensions():
97 """
98 Returns the recorded calls to entry points in rcextensions.
99 """
100 calls = rhodecode.EXTENSIONS.calls
101 calls.clear()
102 # Note: At this moment, it is still the empty dict, but that will
103 # be filled during the test run and since it is a reference this
104 # is enough to make it work.
105 return calls
106
107
108 @pytest.fixture(scope='session')
109 def http_environ_session():
97 def http_environ_session():
110 """
98 """
111 Allow to use "http_environ" in session scope.
99 Allow to use "http_environ" in session scope.
@@ -117,7 +105,31 b' def plain_http_host_stub():'
117 """
105 """
118 Value of HTTP_HOST in the test run.
106 Value of HTTP_HOST in the test run.
119 """
107 """
120 return 'example.com:80'
108 return "example.com:80"
109
110
111 def plain_config_stub(request, request_stub):
112 """
113 Set up pyramid.testing and return the Configurator.
114 """
115
116 config = bootstrap_config(request=request_stub)
117
118 @request.addfinalizer
119 def cleanup():
120 pyramid.testing.tearDown()
121
122 return config
123
124
125 def plain_request_stub():
126 """
127 Stub request object.
128 """
129 from rhodecode.lib.base import bootstrap_request
130
131 _request = bootstrap_request(scheme="https")
132 return _request
121
133
122
134
123 @pytest.fixture()
135 @pytest.fixture()
@@ -132,7 +144,7 b' def plain_http_host_only_stub():'
132 """
144 """
133 Value of HTTP_HOST in the test run.
145 Value of HTTP_HOST in the test run.
134 """
146 """
135 return plain_http_host_stub().split(':')[0]
147 return plain_http_host_stub().split(":")[0]
136
148
137
149
138 @pytest.fixture()
150 @pytest.fixture()
@@ -147,33 +159,21 b' def plain_http_environ():'
147 """
159 """
148 HTTP extra environ keys.
160 HTTP extra environ keys.
149
161
150 User by the test application and as well for setting up the pylons
162 Used by the test application and as well for setting up the pylons
151 environment. In the case of the fixture "app" it should be possible
163 environment. In the case of the fixture "app" it should be possible
152 to override this for a specific test case.
164 to override this for a specific test case.
153 """
165 """
154 return {
166 return {
155 'SERVER_NAME': plain_http_host_only_stub(),
167 "SERVER_NAME": plain_http_host_only_stub(),
156 'SERVER_PORT': plain_http_host_stub().split(':')[1],
168 "SERVER_PORT": plain_http_host_stub().split(":")[1],
157 'HTTP_HOST': plain_http_host_stub(),
169 "HTTP_HOST": plain_http_host_stub(),
158 'HTTP_USER_AGENT': 'rc-test-agent',
170 "HTTP_USER_AGENT": "rc-test-agent",
159 'REQUEST_METHOD': 'GET'
171 "REQUEST_METHOD": "GET",
160 }
172 }
161
173
162
174
163 @pytest.fixture()
175 @pytest.fixture(scope="session")
164 def http_environ():
176 def baseapp(request, ini_config, http_environ_session, available_port_factory, vcsserver_factory, celery_factory):
165 """
166 HTTP extra environ keys.
167
168 User by the test application and as well for setting up the pylons
169 environment. In the case of the fixture "app" it should be possible
170 to override this for a specific test case.
171 """
172 return plain_http_environ()
173
174
175 @pytest.fixture(scope='session')
176 def baseapp(ini_config, vcsserver, http_environ_session):
177 from rhodecode.lib.config_utils import get_app_config
177 from rhodecode.lib.config_utils import get_app_config
178 from rhodecode.config.middleware import make_pyramid_app
178 from rhodecode.config.middleware import make_pyramid_app
179
179
@@ -181,22 +181,41 b' def baseapp(ini_config, vcsserver, http_'
181 pyramid.paster.setup_logging(ini_config)
181 pyramid.paster.setup_logging(ini_config)
182
182
183 settings = get_app_config(ini_config)
183 settings = get_app_config(ini_config)
184 app = make_pyramid_app({'__file__': ini_config}, **settings)
184 store_dir = os.path.dirname(ini_config)
185
186 # start vcsserver
187 _vcsserver_port = available_port_factory()
188 vcsserver_instance = vcsserver_factory(
189 request,
190 store_dir=store_dir,
191 port=_vcsserver_port,
192 info_prefix="base-app-"
193 )
194
195 settings["vcs.server"] = vcsserver_instance.bind_addr
185
196
186 return app
197 # we skip setting store_dir for baseapp, it's internally set via testing rhodecode.ini
198 # settings['repo_store.path'] = str(store_dir)
199 console_printer(f' :warning: [green]pytest-setup[/green] Starting base pyramid-app: {ini_config}')
200 pyramid_baseapp = make_pyramid_app({"__file__": ini_config}, **settings)
201
202 # start celery
203 celery_factory(
204 request,
205 store_dir=store_dir,
206 port=None,
207 info_prefix="base-app-",
208 overrides=(
209 {'handler_console': {'level': 'DEBUG'}},
210 {'app:main': {'vcs.server': vcsserver_instance.bind_addr}},
211 {'app:main': {'repo_store.path': store_dir}}
212 )
213 )
214
215 return pyramid_baseapp
187
216
188
217
189 @pytest.fixture(scope='function')
218 @pytest.fixture(scope="session")
190 def app(request, config_stub, baseapp, http_environ):
191 app = CustomTestApp(
192 baseapp,
193 extra_environ=http_environ)
194 if request.cls:
195 request.cls.app = app
196 return app
197
198
199 @pytest.fixture(scope='session')
200 def app_settings(baseapp, ini_config):
219 def app_settings(baseapp, ini_config):
201 """
220 """
202 Settings dictionary used to create the app.
221 Settings dictionary used to create the app.
@@ -207,19 +226,19 b' def app_settings(baseapp, ini_config):'
207 return baseapp.config.get_settings()
226 return baseapp.config.get_settings()
208
227
209
228
210 @pytest.fixture(scope='session')
229 @pytest.fixture(scope="session")
211 def db_connection(ini_settings):
230 def db_connection(ini_settings):
212 # Initialize the database connection.
231 # Initialize the database connection.
213 config_utils.initialize_database(ini_settings)
232 config_utils.initialize_database(ini_settings)
214
233
215
234
216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
235 LoginData = collections.namedtuple("LoginData", ("csrf_token", "user"))
217
236
218
237
219 def _autologin_user(app, *args):
238 def _autologin_user(app, *args):
220 session = login_user_session(app, *args)
239 session = login_user_session(app, *args)
221 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
240 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
222 return LoginData(csrf_token, session['rhodecode_user'])
241 return LoginData(csrf_token, session["rhodecode_user"])
223
242
224
243
225 @pytest.fixture()
244 @pytest.fixture()
@@ -235,18 +254,17 b' def autologin_regular_user(app):'
235 """
254 """
236 Utility fixture which makes sure that the regular user is logged in
255 Utility fixture which makes sure that the regular user is logged in
237 """
256 """
238 return _autologin_user(
257 return _autologin_user(app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
239 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
240
258
241
259
242 @pytest.fixture(scope='function')
260 @pytest.fixture(scope="function")
243 def csrf_token(request, autologin_user):
261 def csrf_token(request, autologin_user):
244 return autologin_user.csrf_token
262 return autologin_user.csrf_token
245
263
246
264
247 @pytest.fixture(scope='function')
265 @pytest.fixture(scope="function")
248 def xhr_header(request):
266 def xhr_header(request):
249 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
267 return {"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"}
250
268
251
269
252 @pytest.fixture()
270 @pytest.fixture()
@@ -257,18 +275,18 b' def real_crypto_backend(monkeypatch):'
257 During the test run the crypto backend is replaced with a faster
275 During the test run the crypto backend is replaced with a faster
258 implementation based on the MD5 algorithm.
276 implementation based on the MD5 algorithm.
259 """
277 """
260 monkeypatch.setattr(rhodecode, 'is_test', False)
278 monkeypatch.setattr(rhodecode, "is_test", False)
261
279
262
280
263 @pytest.fixture(scope='class')
281 @pytest.fixture(scope="class")
264 def index_location(request, baseapp):
282 def index_location(request, baseapp):
265 index_location = baseapp.config.get_settings()['search.location']
283 index_location = baseapp.config.get_settings()["search.location"]
266 if request.cls:
284 if request.cls:
267 request.cls.index_location = index_location
285 request.cls.index_location = index_location
268 return index_location
286 return index_location
269
287
270
288
271 @pytest.fixture(scope='session', autouse=True)
289 @pytest.fixture(scope="session", autouse=True)
272 def tests_tmp_path(request):
290 def tests_tmp_path(request):
273 """
291 """
274 Create temporary directory to be used during the test session.
292 Create temporary directory to be used during the test session.
@@ -276,7 +294,8 b' def tests_tmp_path(request):'
276 if not os.path.exists(TESTS_TMP_PATH):
294 if not os.path.exists(TESTS_TMP_PATH):
277 os.makedirs(TESTS_TMP_PATH)
295 os.makedirs(TESTS_TMP_PATH)
278
296
279 if not request.config.getoption('--keep-tmp-path'):
297 if not request.config.getoption("--keep-tmp-path"):
298
280 @request.addfinalizer
299 @request.addfinalizer
281 def remove_tmp_path():
300 def remove_tmp_path():
282 shutil.rmtree(TESTS_TMP_PATH)
301 shutil.rmtree(TESTS_TMP_PATH)
@@ -291,7 +310,7 b' def test_repo_group(request):'
291 usage automatically
310 usage automatically
292 """
311 """
293 fixture = Fixture()
312 fixture = Fixture()
294 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
313 repogroupid = "test_repo_group_%s" % str(time.time()).replace(".", "")
295 repo_group = fixture.create_repo_group(repogroupid)
314 repo_group = fixture.create_repo_group(repogroupid)
296
315
297 def _cleanup():
316 def _cleanup():
@@ -308,7 +327,7 b' def test_user_group(request):'
308 usage automatically
327 usage automatically
309 """
328 """
310 fixture = Fixture()
329 fixture = Fixture()
311 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
330 usergroupid = "test_user_group_%s" % str(time.time()).replace(".", "")
312 user_group = fixture.create_user_group(usergroupid)
331 user_group = fixture.create_user_group(usergroupid)
313
332
314 def _cleanup():
333 def _cleanup():
@@ -318,7 +337,7 b' def test_user_group(request):'
318 return user_group
337 return user_group
319
338
320
339
321 @pytest.fixture(scope='session')
340 @pytest.fixture(scope="session")
322 def test_repo(request):
341 def test_repo(request):
323 container = TestRepoContainer()
342 container = TestRepoContainer()
324 request.addfinalizer(container._cleanup)
343 request.addfinalizer(container._cleanup)
@@ -340,9 +359,9 b' class TestRepoContainer(object):'
340 """
359 """
341
360
342 dump_extractors = {
361 dump_extractors = {
343 'git': utils.extract_git_repo_from_dump,
362 "git": utils.extract_git_repo_from_dump,
344 'hg': utils.extract_hg_repo_from_dump,
363 "hg": utils.extract_hg_repo_from_dump,
345 'svn': utils.extract_svn_repo_from_dump,
364 "svn": utils.extract_svn_repo_from_dump,
346 }
365 }
347
366
348 def __init__(self):
367 def __init__(self):
@@ -358,7 +377,7 b' class TestRepoContainer(object):'
358 return Repository.get(self._repos[key])
377 return Repository.get(self._repos[key])
359
378
360 def _create_repo(self, dump_name, backend_alias, config):
379 def _create_repo(self, dump_name, backend_alias, config):
361 repo_name = f'{backend_alias}-{dump_name}'
380 repo_name = f"{backend_alias}-{dump_name}"
362 backend = get_backend(backend_alias)
381 backend = get_backend(backend_alias)
363 dump_extractor = self.dump_extractors[backend_alias]
382 dump_extractor = self.dump_extractors[backend_alias]
364 repo_path = dump_extractor(dump_name, repo_name)
383 repo_path = dump_extractor(dump_name, repo_name)
@@ -375,19 +394,17 b' class TestRepoContainer(object):'
375 self._fixture.destroy_repo(repo_name)
394 self._fixture.destroy_repo(repo_name)
376
395
377
396
378 def backend_base(request, backend_alias, baseapp, test_repo):
397 def backend_base(request, backend_alias, test_repo):
379 if backend_alias not in request.config.getoption('--backends'):
398 if backend_alias not in request.config.getoption("--backends"):
380 pytest.skip("Backend %s not selected." % (backend_alias, ))
399 pytest.skip(f"Backend {backend_alias} not selected.")
381
400
382 utils.check_xfail_backends(request.node, backend_alias)
401 utils.check_xfail_backends(request.node, backend_alias)
383 utils.check_skip_backends(request.node, backend_alias)
402 utils.check_skip_backends(request.node, backend_alias)
384
403
385 repo_name = 'vcs_test_%s' % (backend_alias, )
404 repo_name = "vcs_test_%s" % (backend_alias,)
386 backend = Backend(
405 backend = Backend(
387 alias=backend_alias,
406 alias=backend_alias, repo_name=repo_name, test_name=request.node.name, test_repo_container=test_repo
388 repo_name=repo_name,
407 )
389 test_name=request.node.name,
390 test_repo_container=test_repo)
391 request.addfinalizer(backend.cleanup)
408 request.addfinalizer(backend.cleanup)
392 return backend
409 return backend
393
410
@@ -404,22 +421,22 b' def backend(request, backend_alias, base'
404 for specific backends. This is intended as a utility for incremental
421 for specific backends. This is intended as a utility for incremental
405 development of a new backend implementation.
422 development of a new backend implementation.
406 """
423 """
407 return backend_base(request, backend_alias, baseapp, test_repo)
424 return backend_base(request, backend_alias, test_repo)
408
425
409
426
410 @pytest.fixture()
427 @pytest.fixture()
411 def backend_git(request, baseapp, test_repo):
428 def backend_git(request, baseapp, test_repo):
412 return backend_base(request, 'git', baseapp, test_repo)
429 return backend_base(request, "git", test_repo)
413
430
414
431
415 @pytest.fixture()
432 @pytest.fixture()
416 def backend_hg(request, baseapp, test_repo):
433 def backend_hg(request, baseapp, test_repo):
417 return backend_base(request, 'hg', baseapp, test_repo)
434 return backend_base(request, "hg", test_repo)
418
435
419
436
420 @pytest.fixture()
437 @pytest.fixture()
421 def backend_svn(request, baseapp, test_repo):
438 def backend_svn(request, baseapp, test_repo):
422 return backend_base(request, 'svn', baseapp, test_repo)
439 return backend_base(request, "svn", test_repo)
423
440
424
441
425 @pytest.fixture()
442 @pytest.fixture()
@@ -467,9 +484,9 b' class Backend(object):'
467 session.
484 session.
468 """
485 """
469
486
470 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
487 invalid_repo_name = re.compile(r"[^0-9a-zA-Z]+")
471 _master_repo = None
488 _master_repo = None
472 _master_repo_path = ''
489 _master_repo_path = ""
473 _commit_ids = {}
490 _commit_ids = {}
474
491
475 def __init__(self, alias, repo_name, test_name, test_repo_container):
492 def __init__(self, alias, repo_name, test_name, test_repo_container):
@@ -500,6 +517,7 b' class Backend(object):'
500 last repo which has been created with `create_repo`.
517 last repo which has been created with `create_repo`.
501 """
518 """
502 from rhodecode.model.db import Repository
519 from rhodecode.model.db import Repository
520
503 return Repository.get_by_repo_name(self.repo_name)
521 return Repository.get_by_repo_name(self.repo_name)
504
522
505 @property
523 @property
@@ -517,9 +535,7 b' class Backend(object):'
517 which can serve as the base to create a new commit on top of it.
535 which can serve as the base to create a new commit on top of it.
518 """
536 """
519 vcsrepo = self.repo.scm_instance()
537 vcsrepo = self.repo.scm_instance()
520 head_id = (
538 head_id = vcsrepo.DEFAULT_BRANCH_NAME or vcsrepo.commit_ids[-1]
521 vcsrepo.DEFAULT_BRANCH_NAME or
522 vcsrepo.commit_ids[-1])
523 return head_id
539 return head_id
524
540
525 @property
541 @property
@@ -543,9 +559,7 b' class Backend(object):'
543
559
544 return self._commit_ids
560 return self._commit_ids
545
561
546 def create_repo(
562 def create_repo(self, commits=None, number_of_commits=0, heads=None, name_suffix="", bare=False, **kwargs):
547 self, commits=None, number_of_commits=0, heads=None,
548 name_suffix='', bare=False, **kwargs):
549 """
563 """
550 Create a repository and record it for later cleanup.
564 Create a repository and record it for later cleanup.
551
565
@@ -559,13 +573,10 b' class Backend(object):'
559 :param bare: set a repo as bare (no checkout)
573 :param bare: set a repo as bare (no checkout)
560 """
574 """
561 self.repo_name = self._next_repo_name() + name_suffix
575 self.repo_name = self._next_repo_name() + name_suffix
562 repo = self._fixture.create_repo(
576 repo = self._fixture.create_repo(self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
563 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
564 self._cleanup_repos.append(repo.repo_name)
577 self._cleanup_repos.append(repo.repo_name)
565
578
566 commits = commits or [
579 commits = commits or [{"message": f"Commit {x} of {self.repo_name}"} for x in range(number_of_commits)]
567 {'message': f'Commit {x} of {self.repo_name}'}
568 for x in range(number_of_commits)]
569 vcs_repo = repo.scm_instance()
580 vcs_repo = repo.scm_instance()
570 vcs_repo.count()
581 vcs_repo.count()
571 self._add_commits_to_repo(vcs_repo, commits)
582 self._add_commits_to_repo(vcs_repo, commits)
@@ -579,7 +590,7 b' class Backend(object):'
579 Make sure that repo contains all commits mentioned in `heads`
590 Make sure that repo contains all commits mentioned in `heads`
580 """
591 """
581 vcsrepo = repo.scm_instance()
592 vcsrepo = repo.scm_instance()
582 vcsrepo.config.clear_section('hooks')
593 vcsrepo.config.clear_section("hooks")
583 commit_ids = [self._commit_ids[h] for h in heads]
594 commit_ids = [self._commit_ids[h] for h in heads]
584 if do_fetch:
595 if do_fetch:
585 vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids)
596 vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids)
@@ -592,21 +603,22 b' class Backend(object):'
592 self._cleanup_repos.append(self.repo_name)
603 self._cleanup_repos.append(self.repo_name)
593 return repo
604 return repo
594
605
595 def new_repo_name(self, suffix=''):
606 def new_repo_name(self, suffix=""):
596 self.repo_name = self._next_repo_name() + suffix
607 self.repo_name = self._next_repo_name() + suffix
597 self._cleanup_repos.append(self.repo_name)
608 self._cleanup_repos.append(self.repo_name)
598 return self.repo_name
609 return self.repo_name
599
610
600 def _next_repo_name(self):
611 def _next_repo_name(self):
601 return "%s_%s" % (
612 return "%s_%s" % (self.invalid_repo_name.sub("_", self._test_name), len(self._cleanup_repos))
602 self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos))
603
613
604 def ensure_file(self, filename, content=b'Test content\n'):
614 def ensure_file(self, filename, content=b"Test content\n"):
605 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
615 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
606 commits = [
616 commits = [
607 {'added': [
617 {
608 FileNode(filename, content=content),
618 "added": [
609 ]},
619 FileNode(filename, content=content),
620 ]
621 },
610 ]
622 ]
611 self._add_commits_to_repo(self.repo.scm_instance(), commits)
623 self._add_commits_to_repo(self.repo.scm_instance(), commits)
612
624
@@ -627,11 +639,11 b' class Backend(object):'
627 self._commit_ids = commit_ids
639 self._commit_ids = commit_ids
628
640
629 # Creating refs for Git to allow fetching them from remote repository
641 # Creating refs for Git to allow fetching them from remote repository
630 if self.alias == 'git':
642 if self.alias == "git":
631 refs = {}
643 refs = {}
632 for message in self._commit_ids:
644 for message in self._commit_ids:
633 cleanup_message = message.replace(' ', '')
645 cleanup_message = message.replace(" ", "")
634 ref_name = f'refs/test-refs/{cleanup_message}'
646 ref_name = f"refs/test-refs/{cleanup_message}"
635 refs[ref_name] = self._commit_ids[message]
647 refs[ref_name] = self._commit_ids[message]
636 self._create_refs(repo, refs)
648 self._create_refs(repo, refs)
637
649
@@ -645,7 +657,7 b' class VcsBackend(object):'
645 Represents the test configuration for one supported vcs backend.
657 Represents the test configuration for one supported vcs backend.
646 """
658 """
647
659
648 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
660 invalid_repo_name = re.compile(r"[^0-9a-zA-Z]+")
649
661
650 def __init__(self, alias, repo_path, test_name, test_repo_container):
662 def __init__(self, alias, repo_path, test_name, test_repo_container):
651 self.alias = alias
663 self.alias = alias
@@ -658,7 +670,7 b' class VcsBackend(object):'
658 return self._test_repo_container(key, self.alias).scm_instance()
670 return self._test_repo_container(key, self.alias).scm_instance()
659
671
660 def __repr__(self):
672 def __repr__(self):
661 return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})'
673 return f"{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})"
662
674
663 @property
675 @property
664 def repo(self):
676 def repo(self):
@@ -676,8 +688,7 b' class VcsBackend(object):'
676 """
688 """
677 return get_backend(self.alias)
689 return get_backend(self.alias)
678
690
679 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
691 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None, bare=False):
680 bare=False):
681 repo_name = self._next_repo_name()
692 repo_name = self._next_repo_name()
682 self._repo_path = get_new_dir(repo_name)
693 self._repo_path = get_new_dir(repo_name)
683 repo_class = get_backend(self.alias)
694 repo_class = get_backend(self.alias)
@@ -687,9 +698,7 b' class VcsBackend(object):'
687 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
698 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
688 self._cleanup_repos.append(repo)
699 self._cleanup_repos.append(repo)
689
700
690 commits = commits or [
701 commits = commits or [{"message": "Commit %s of %s" % (x, repo_name)} for x in range(number_of_commits)]
691 {'message': 'Commit %s of %s' % (x, repo_name)}
692 for x in range(number_of_commits)]
693 _add_commits_to_repo(repo, commits)
702 _add_commits_to_repo(repo, commits)
694 return repo
703 return repo
695
704
@@ -706,38 +715,30 b' class VcsBackend(object):'
706 return self._repo_path
715 return self._repo_path
707
716
708 def _next_repo_name(self):
717 def _next_repo_name(self):
718 return "{}_{}".format(self.invalid_repo_name.sub("_", self._test_name), len(self._cleanup_repos))
709
719
710 return "{}_{}".format(
720 def add_file(self, repo, filename, content="Test content\n"):
711 self.invalid_repo_name.sub('_', self._test_name),
712 len(self._cleanup_repos)
713 )
714
715 def add_file(self, repo, filename, content='Test content\n'):
716 imc = repo.in_memory_commit
721 imc = repo.in_memory_commit
717 imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
722 imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
718 imc.commit(
723 imc.commit(message="Automatic commit from vcsbackend fixture", author="Automatic <automatic@rhodecode.com>")
719 message='Automatic commit from vcsbackend fixture',
720 author='Automatic <automatic@rhodecode.com>')
721
724
722 def ensure_file(self, filename, content='Test content\n'):
725 def ensure_file(self, filename, content="Test content\n"):
723 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
726 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
724 self.add_file(self.repo, filename, content)
727 self.add_file(self.repo, filename, content)
725
728
726
729
727 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
730 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
728 if backend_alias not in request.config.getoption('--backends'):
731 if backend_alias not in request.config.getoption("--backends"):
729 pytest.skip("Backend %s not selected." % (backend_alias, ))
732 pytest.skip("Backend %s not selected." % (backend_alias,))
730
733
731 utils.check_xfail_backends(request.node, backend_alias)
734 utils.check_xfail_backends(request.node, backend_alias)
732 utils.check_skip_backends(request.node, backend_alias)
735 utils.check_skip_backends(request.node, backend_alias)
733
736
734 repo_name = f'vcs_test_{backend_alias}'
737 repo_name = f"vcs_test_{backend_alias}"
735 repo_path = os.path.join(tests_tmp_path, repo_name)
738 repo_path = os.path.join(tests_tmp_path, repo_name)
736 backend = VcsBackend(
739 backend = VcsBackend(
737 alias=backend_alias,
740 alias=backend_alias, repo_path=repo_path, test_name=request.node.name, test_repo_container=test_repo
738 repo_path=repo_path,
741 )
739 test_name=request.node.name,
740 test_repo_container=test_repo)
741 request.addfinalizer(backend.cleanup)
742 request.addfinalizer(backend.cleanup)
742 return backend
743 return backend
743
744
@@ -758,17 +759,17 b' def vcsbackend(request, backend_alias, t'
758
759
759 @pytest.fixture()
760 @pytest.fixture()
760 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
761 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
761 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
762 return vcsbackend_base(request, "git", tests_tmp_path, baseapp, test_repo)
762
763
763
764
764 @pytest.fixture()
765 @pytest.fixture()
765 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
766 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
766 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
767 return vcsbackend_base(request, "hg", tests_tmp_path, baseapp, test_repo)
767
768
768
769
769 @pytest.fixture()
770 @pytest.fixture()
770 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
771 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
771 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
772 return vcsbackend_base(request, "svn", tests_tmp_path, baseapp, test_repo)
772
773
773
774
774 @pytest.fixture()
775 @pytest.fixture()
@@ -789,29 +790,28 b' def _add_commits_to_repo(vcs_repo, commi'
789 imc = vcs_repo.in_memory_commit
790 imc = vcs_repo.in_memory_commit
790
791
791 for idx, commit in enumerate(commits):
792 for idx, commit in enumerate(commits):
792 message = str(commit.get('message', f'Commit {idx}'))
793 message = str(commit.get("message", f"Commit {idx}"))
793
794
794 for node in commit.get('added', []):
795 for node in commit.get("added", []):
795 imc.add(FileNode(safe_bytes(node.path), content=node.content))
796 imc.add(FileNode(safe_bytes(node.path), content=node.content))
796 for node in commit.get('changed', []):
797 for node in commit.get("changed", []):
797 imc.change(FileNode(safe_bytes(node.path), content=node.content))
798 imc.change(FileNode(safe_bytes(node.path), content=node.content))
798 for node in commit.get('removed', []):
799 for node in commit.get("removed", []):
799 imc.remove(FileNode(safe_bytes(node.path)))
800 imc.remove(FileNode(safe_bytes(node.path)))
800
801
801 parents = [
802 parents = [vcs_repo.get_commit(commit_id=commit_ids[p]) for p in commit.get("parents", [])]
802 vcs_repo.get_commit(commit_id=commit_ids[p])
803 for p in commit.get('parents', [])]
804
803
805 operations = ('added', 'changed', 'removed')
804 operations = ("added", "changed", "removed")
806 if not any((commit.get(o) for o in operations)):
805 if not any((commit.get(o) for o in operations)):
807 imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message)))
806 imc.add(FileNode(b"file_%b" % safe_bytes(str(idx)), content=safe_bytes(message)))
808
807
809 commit = imc.commit(
808 commit = imc.commit(
810 message=message,
809 message=message,
811 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
810 author=str(commit.get("author", "Automatic <automatic@rhodecode.com>")),
812 date=commit.get('date'),
811 date=commit.get("date"),
813 branch=commit.get('branch'),
812 branch=commit.get("branch"),
814 parents=parents)
813 parents=parents,
814 )
815
815
816 commit_ids[commit.message] = commit.raw_id
816 commit_ids[commit.message] = commit.raw_id
817
817
@@ -842,14 +842,14 b' class RepoServer(object):'
842 self._cleanup_servers = []
842 self._cleanup_servers = []
843
843
844 def serve(self, vcsrepo):
844 def serve(self, vcsrepo):
845 if vcsrepo.alias != 'svn':
845 if vcsrepo.alias != "svn":
846 raise TypeError("Backend %s not supported" % vcsrepo.alias)
846 raise TypeError("Backend %s not supported" % vcsrepo.alias)
847
847
848 proc = subprocess.Popen(
848 proc = subprocess.Popen(
849 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
849 ["svnserve", "-d", "--foreground", "--listen-host", "localhost", "--root", vcsrepo.path]
850 '--root', vcsrepo.path])
850 )
851 self._cleanup_servers.append(proc)
851 self._cleanup_servers.append(proc)
852 self.url = 'svn://localhost'
852 self.url = "svn://localhost"
853
853
854 def cleanup(self):
854 def cleanup(self):
855 for proc in self._cleanup_servers:
855 for proc in self._cleanup_servers:
@@ -874,7 +874,6 b' def pr_util(backend, request, config_stu'
874
874
875
875
876 class PRTestUtility(object):
876 class PRTestUtility(object):
877
878 pull_request = None
877 pull_request = None
879 pull_request_id = None
878 pull_request_id = None
880 mergeable_patcher = None
879 mergeable_patcher = None
@@ -886,48 +885,55 b' class PRTestUtility(object):'
886 self.backend = backend
885 self.backend = backend
887
886
888 def create_pull_request(
887 def create_pull_request(
889 self, commits=None, target_head=None, source_head=None,
888 self,
890 revisions=None, approved=False, author=None, mergeable=False,
889 commits=None,
891 enable_notifications=True, name_suffix='', reviewers=None, observers=None,
890 target_head=None,
892 title="Test", description="Description"):
891 source_head=None,
892 revisions=None,
893 approved=False,
894 author=None,
895 mergeable=False,
896 enable_notifications=True,
897 name_suffix="",
898 reviewers=None,
899 observers=None,
900 title="Test",
901 description="Description",
902 ):
893 self.set_mergeable(mergeable)
903 self.set_mergeable(mergeable)
894 if not enable_notifications:
904 if not enable_notifications:
895 # mock notification side effect
905 # mock notification side effect
896 self.notification_patcher = mock.patch(
906 self.notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create")
897 'rhodecode.model.notification.NotificationModel.create')
898 self.notification_patcher.start()
907 self.notification_patcher.start()
899
908
900 if not self.pull_request:
909 if not self.pull_request:
901 if not commits:
910 if not commits:
902 commits = [
911 commits = [
903 {'message': 'c1'},
912 {"message": "c1"},
904 {'message': 'c2'},
913 {"message": "c2"},
905 {'message': 'c3'},
914 {"message": "c3"},
906 ]
915 ]
907 target_head = 'c1'
916 target_head = "c1"
908 source_head = 'c2'
917 source_head = "c2"
909 revisions = ['c2']
918 revisions = ["c2"]
910
919
911 self.commit_ids = self.backend.create_master_repo(commits)
920 self.commit_ids = self.backend.create_master_repo(commits)
912 self.target_repository = self.backend.create_repo(
921 self.target_repository = self.backend.create_repo(heads=[target_head], name_suffix=name_suffix)
913 heads=[target_head], name_suffix=name_suffix)
922 self.source_repository = self.backend.create_repo(heads=[source_head], name_suffix=name_suffix)
914 self.source_repository = self.backend.create_repo(
923 self.author = author or UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
915 heads=[source_head], name_suffix=name_suffix)
916 self.author = author or UserModel().get_by_username(
917 TEST_USER_ADMIN_LOGIN)
918
924
919 model = PullRequestModel()
925 model = PullRequestModel()
920 self.create_parameters = {
926 self.create_parameters = {
921 'created_by': self.author,
927 "created_by": self.author,
922 'source_repo': self.source_repository.repo_name,
928 "source_repo": self.source_repository.repo_name,
923 'source_ref': self._default_branch_reference(source_head),
929 "source_ref": self._default_branch_reference(source_head),
924 'target_repo': self.target_repository.repo_name,
930 "target_repo": self.target_repository.repo_name,
925 'target_ref': self._default_branch_reference(target_head),
931 "target_ref": self._default_branch_reference(target_head),
926 'revisions': [self.commit_ids[r] for r in revisions],
932 "revisions": [self.commit_ids[r] for r in revisions],
927 'reviewers': reviewers or self._get_reviewers(),
933 "reviewers": reviewers or self._get_reviewers(),
928 'observers': observers or self._get_observers(),
934 "observers": observers or self._get_observers(),
929 'title': title,
935 "title": title,
930 'description': description,
936 "description": description,
931 }
937 }
932 self.pull_request = model.create(**self.create_parameters)
938 self.pull_request = model.create(**self.create_parameters)
933 assert model.get_versions(self.pull_request) == []
939 assert model.get_versions(self.pull_request) == []
@@ -943,9 +949,7 b' class PRTestUtility(object):'
943 return self.pull_request
949 return self.pull_request
944
950
945 def approve(self):
951 def approve(self):
946 self.create_status_votes(
952 self.create_status_votes(ChangesetStatus.STATUS_APPROVED, *self.pull_request.reviewers)
947 ChangesetStatus.STATUS_APPROVED,
948 *self.pull_request.reviewers)
949
953
950 def close(self):
954 def close(self):
951 PullRequestModel().close_pull_request(self.pull_request, self.author)
955 PullRequestModel().close_pull_request(self.pull_request, self.author)
@@ -953,28 +957,26 b' class PRTestUtility(object):'
953 def _default_branch_reference(self, commit_message, branch: str = None) -> str:
957 def _default_branch_reference(self, commit_message, branch: str = None) -> str:
954 default_branch = branch or self.backend.default_branch_name
958 default_branch = branch or self.backend.default_branch_name
955 message = self.commit_ids[commit_message]
959 message = self.commit_ids[commit_message]
956 reference = f'branch:{default_branch}:{message}'
960 reference = f"branch:{default_branch}:{message}"
957
961
958 return reference
962 return reference
959
963
960 def _get_reviewers(self):
964 def _get_reviewers(self):
961 role = PullRequestReviewers.ROLE_REVIEWER
965 role = PullRequestReviewers.ROLE_REVIEWER
962 return [
966 return [
963 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
967 (TEST_USER_REGULAR_LOGIN, ["default1"], False, role, []),
964 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
968 (TEST_USER_REGULAR2_LOGIN, ["default2"], False, role, []),
965 ]
969 ]
966
970
967 def _get_observers(self):
971 def _get_observers(self):
968 return [
972 return []
969
970 ]
971
973
972 def update_source_repository(self, head=None, do_fetch=False):
974 def update_source_repository(self, head=None, do_fetch=False):
973 heads = [head or 'c3']
975 heads = [head or "c3"]
974 self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch)
976 self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch)
975
977
976 def update_target_repository(self, head=None, do_fetch=False):
978 def update_target_repository(self, head=None, do_fetch=False):
977 heads = [head or 'c3']
979 heads = [head or "c3"]
978 self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch)
980 self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch)
979
981
980 def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
982 def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
@@ -1004,7 +1006,7 b' class PRTestUtility(object):'
1004 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1006 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1005 # remove the if once that's sorted out.
1007 # remove the if once that's sorted out.
1006 if self.backend.alias == "git":
1008 if self.backend.alias == "git":
1007 kwargs = {'branch_name': self.backend.default_branch_name}
1009 kwargs = {"branch_name": self.backend.default_branch_name}
1008 else:
1010 else:
1009 kwargs = {}
1011 kwargs = {}
1010 source_vcs.strip(removed_commit_id, **kwargs)
1012 source_vcs.strip(removed_commit_id, **kwargs)
@@ -1015,10 +1017,8 b' class PRTestUtility(object):'
1015
1017
1016 def create_comment(self, linked_to=None):
1018 def create_comment(self, linked_to=None):
1017 comment = CommentsModel().create(
1019 comment = CommentsModel().create(
1018 text="Test comment",
1020 text="Test comment", repo=self.target_repository.repo_name, user=self.author, pull_request=self.pull_request
1019 repo=self.target_repository.repo_name,
1021 )
1020 user=self.author,
1021 pull_request=self.pull_request)
1022 assert comment.pull_request_version_id is None
1022 assert comment.pull_request_version_id is None
1023
1023
1024 if linked_to:
1024 if linked_to:
@@ -1026,15 +1026,15 b' class PRTestUtility(object):'
1026
1026
1027 return comment
1027 return comment
1028
1028
1029 def create_inline_comment(
1029 def create_inline_comment(self, linked_to=None, line_no="n1", file_path="file_1"):
1030 self, linked_to=None, line_no='n1', file_path='file_1'):
1031 comment = CommentsModel().create(
1030 comment = CommentsModel().create(
1032 text="Test comment",
1031 text="Test comment",
1033 repo=self.target_repository.repo_name,
1032 repo=self.target_repository.repo_name,
1034 user=self.author,
1033 user=self.author,
1035 line_no=line_no,
1034 line_no=line_no,
1036 f_path=file_path,
1035 f_path=file_path,
1037 pull_request=self.pull_request)
1036 pull_request=self.pull_request,
1037 )
1038 assert comment.pull_request_version_id is None
1038 assert comment.pull_request_version_id is None
1039
1039
1040 if linked_to:
1040 if linked_to:
@@ -1044,25 +1044,20 b' class PRTestUtility(object):'
1044
1044
1045 def create_version_of_pull_request(self):
1045 def create_version_of_pull_request(self):
1046 pull_request = self.create_pull_request()
1046 pull_request = self.create_pull_request()
1047 version = PullRequestModel()._create_version_from_snapshot(
1047 version = PullRequestModel()._create_version_from_snapshot(pull_request)
1048 pull_request)
1049 return version
1048 return version
1050
1049
1051 def create_status_votes(self, status, *reviewers):
1050 def create_status_votes(self, status, *reviewers):
1052 for reviewer in reviewers:
1051 for reviewer in reviewers:
1053 ChangesetStatusModel().set_status(
1052 ChangesetStatusModel().set_status(
1054 repo=self.pull_request.target_repo,
1053 repo=self.pull_request.target_repo, status=status, user=reviewer.user_id, pull_request=self.pull_request
1055 status=status,
1054 )
1056 user=reviewer.user_id,
1057 pull_request=self.pull_request)
1058
1055
1059 def set_mergeable(self, value):
1056 def set_mergeable(self, value):
1060 if not self.mergeable_patcher:
1057 if not self.mergeable_patcher:
1061 self.mergeable_patcher = mock.patch.object(
1058 self.mergeable_patcher = mock.patch.object(VcsSettingsModel, "get_general_settings")
1062 VcsSettingsModel, 'get_general_settings')
1063 self.mergeable_mock = self.mergeable_patcher.start()
1059 self.mergeable_mock = self.mergeable_patcher.start()
1064 self.mergeable_mock.return_value = {
1060 self.mergeable_mock.return_value = {"rhodecode_pr_merge_enabled": value}
1065 'rhodecode_pr_merge_enabled': value}
1066
1061
1067 def cleanup(self):
1062 def cleanup(self):
1068 # In case the source repository is already cleaned up, the pull
1063 # In case the source repository is already cleaned up, the pull
@@ -1109,7 +1104,6 b' def user_util(request, db_connection):'
1109
1104
1110 # TODO: johbo: Split this up into utilities per domain or something similar
1105 # TODO: johbo: Split this up into utilities per domain or something similar
1111 class UserUtility(object):
1106 class UserUtility(object):
1112
1113 def __init__(self, test_name="test"):
1107 def __init__(self, test_name="test"):
1114 self._test_name = self._sanitize_name(test_name)
1108 self._test_name = self._sanitize_name(test_name)
1115 self.fixture = Fixture()
1109 self.fixture = Fixture()
@@ -1126,37 +1120,29 b' class UserUtility(object):'
1126 self.user_permissions = []
1120 self.user_permissions = []
1127
1121
1128 def _sanitize_name(self, name):
1122 def _sanitize_name(self, name):
1129 for char in ['[', ']']:
1123 for char in ["[", "]"]:
1130 name = name.replace(char, '_')
1124 name = name.replace(char, "_")
1131 return name
1125 return name
1132
1126
1133 def create_repo_group(
1127 def create_repo_group(self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1134 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1128 group_name = "{prefix}_repogroup_{count}".format(prefix=self._test_name, count=len(self.repo_group_ids))
1135 group_name = "{prefix}_repogroup_{count}".format(
1129 repo_group = self.fixture.create_repo_group(group_name, cur_user=owner)
1136 prefix=self._test_name,
1137 count=len(self.repo_group_ids))
1138 repo_group = self.fixture.create_repo_group(
1139 group_name, cur_user=owner)
1140 if auto_cleanup:
1130 if auto_cleanup:
1141 self.repo_group_ids.append(repo_group.group_id)
1131 self.repo_group_ids.append(repo_group.group_id)
1142 return repo_group
1132 return repo_group
1143
1133
1144 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1134 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, auto_cleanup=True, repo_type="hg", bare=False):
1145 auto_cleanup=True, repo_type='hg', bare=False):
1135 repo_name = "{prefix}_repository_{count}".format(prefix=self._test_name, count=len(self.repos_ids))
1146 repo_name = "{prefix}_repository_{count}".format(
1147 prefix=self._test_name,
1148 count=len(self.repos_ids))
1149
1136
1150 repository = self.fixture.create_repo(
1137 repository = self.fixture.create_repo(
1151 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1138 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare
1139 )
1152 if auto_cleanup:
1140 if auto_cleanup:
1153 self.repos_ids.append(repository.repo_id)
1141 self.repos_ids.append(repository.repo_id)
1154 return repository
1142 return repository
1155
1143
1156 def create_user(self, auto_cleanup=True, **kwargs):
1144 def create_user(self, auto_cleanup=True, **kwargs):
1157 user_name = "{prefix}_user_{count}".format(
1145 user_name = "{prefix}_user_{count}".format(prefix=self._test_name, count=len(self.user_ids))
1158 prefix=self._test_name,
1159 count=len(self.user_ids))
1160 user = self.fixture.create_user(user_name, **kwargs)
1146 user = self.fixture.create_user(user_name, **kwargs)
1161 if auto_cleanup:
1147 if auto_cleanup:
1162 self.user_ids.append(user.user_id)
1148 self.user_ids.append(user.user_id)
@@ -1171,13 +1157,9 b' class UserUtility(object):'
1171 user_group = self.create_user_group(members=[user])
1157 user_group = self.create_user_group(members=[user])
1172 return user, user_group
1158 return user, user_group
1173
1159
1174 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1160 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None, auto_cleanup=True, **kwargs):
1175 auto_cleanup=True, **kwargs):
1161 group_name = "{prefix}_usergroup_{count}".format(prefix=self._test_name, count=len(self.user_group_ids))
1176 group_name = "{prefix}_usergroup_{count}".format(
1162 user_group = self.fixture.create_user_group(group_name, cur_user=owner, **kwargs)
1177 prefix=self._test_name,
1178 count=len(self.user_group_ids))
1179 user_group = self.fixture.create_user_group(
1180 group_name, cur_user=owner, **kwargs)
1181
1163
1182 if auto_cleanup:
1164 if auto_cleanup:
1183 self.user_group_ids.append(user_group.users_group_id)
1165 self.user_group_ids.append(user_group.users_group_id)
@@ -1190,52 +1172,34 b' class UserUtility(object):'
1190 self.inherit_default_user_permissions(user_name, False)
1172 self.inherit_default_user_permissions(user_name, False)
1191 self.user_permissions.append((user_name, permission_name))
1173 self.user_permissions.append((user_name, permission_name))
1192
1174
1193 def grant_user_permission_to_repo_group(
1175 def grant_user_permission_to_repo_group(self, repo_group, user, permission_name):
1194 self, repo_group, user, permission_name):
1176 permission = RepoGroupModel().grant_user_permission(repo_group, user, permission_name)
1195 permission = RepoGroupModel().grant_user_permission(
1177 self.user_repo_group_permission_ids.append((repo_group.group_id, user.user_id))
1196 repo_group, user, permission_name)
1197 self.user_repo_group_permission_ids.append(
1198 (repo_group.group_id, user.user_id))
1199 return permission
1178 return permission
1200
1179
1201 def grant_user_group_permission_to_repo_group(
1180 def grant_user_group_permission_to_repo_group(self, repo_group, user_group, permission_name):
1202 self, repo_group, user_group, permission_name):
1181 permission = RepoGroupModel().grant_user_group_permission(repo_group, user_group, permission_name)
1203 permission = RepoGroupModel().grant_user_group_permission(
1182 self.user_group_repo_group_permission_ids.append((repo_group.group_id, user_group.users_group_id))
1204 repo_group, user_group, permission_name)
1205 self.user_group_repo_group_permission_ids.append(
1206 (repo_group.group_id, user_group.users_group_id))
1207 return permission
1183 return permission
1208
1184
1209 def grant_user_permission_to_repo(
1185 def grant_user_permission_to_repo(self, repo, user, permission_name):
1210 self, repo, user, permission_name):
1186 permission = RepoModel().grant_user_permission(repo, user, permission_name)
1211 permission = RepoModel().grant_user_permission(
1187 self.user_repo_permission_ids.append((repo.repo_id, user.user_id))
1212 repo, user, permission_name)
1213 self.user_repo_permission_ids.append(
1214 (repo.repo_id, user.user_id))
1215 return permission
1188 return permission
1216
1189
1217 def grant_user_group_permission_to_repo(
1190 def grant_user_group_permission_to_repo(self, repo, user_group, permission_name):
1218 self, repo, user_group, permission_name):
1191 permission = RepoModel().grant_user_group_permission(repo, user_group, permission_name)
1219 permission = RepoModel().grant_user_group_permission(
1192 self.user_group_repo_permission_ids.append((repo.repo_id, user_group.users_group_id))
1220 repo, user_group, permission_name)
1221 self.user_group_repo_permission_ids.append(
1222 (repo.repo_id, user_group.users_group_id))
1223 return permission
1193 return permission
1224
1194
1225 def grant_user_permission_to_user_group(
1195 def grant_user_permission_to_user_group(self, target_user_group, user, permission_name):
1226 self, target_user_group, user, permission_name):
1196 permission = UserGroupModel().grant_user_permission(target_user_group, user, permission_name)
1227 permission = UserGroupModel().grant_user_permission(
1197 self.user_user_group_permission_ids.append((target_user_group.users_group_id, user.user_id))
1228 target_user_group, user, permission_name)
1229 self.user_user_group_permission_ids.append(
1230 (target_user_group.users_group_id, user.user_id))
1231 return permission
1198 return permission
1232
1199
1233 def grant_user_group_permission_to_user_group(
1200 def grant_user_group_permission_to_user_group(self, target_user_group, user_group, permission_name):
1234 self, target_user_group, user_group, permission_name):
1201 permission = UserGroupModel().grant_user_group_permission(target_user_group, user_group, permission_name)
1235 permission = UserGroupModel().grant_user_group_permission(
1202 self.user_group_user_group_permission_ids.append((target_user_group.users_group_id, user_group.users_group_id))
1236 target_user_group, user_group, permission_name)
1237 self.user_group_user_group_permission_ids.append(
1238 (target_user_group.users_group_id, user_group.users_group_id))
1239 return permission
1203 return permission
1240
1204
1241 def revoke_user_permission(self, user_name, permission_name):
1205 def revoke_user_permission(self, user_name, permission_name):
@@ -1285,14 +1249,11 b' class UserUtility(object):'
1285 """
1249 """
1286 first_group = RepoGroup.get(first_group_id)
1250 first_group = RepoGroup.get(first_group_id)
1287 second_group = RepoGroup.get(second_group_id)
1251 second_group = RepoGroup.get(second_group_id)
1288 first_group_parts = (
1252 first_group_parts = len(first_group.group_name.split("/")) if first_group else 0
1289 len(first_group.group_name.split('/')) if first_group else 0)
1253 second_group_parts = len(second_group.group_name.split("/")) if second_group else 0
1290 second_group_parts = (
1291 len(second_group.group_name.split('/')) if second_group else 0)
1292 return cmp(second_group_parts, first_group_parts)
1254 return cmp(second_group_parts, first_group_parts)
1293
1255
1294 sorted_repo_group_ids = sorted(
1256 sorted_repo_group_ids = sorted(self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
1295 self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
1296 for repo_group_id in sorted_repo_group_ids:
1257 for repo_group_id in sorted_repo_group_ids:
1297 self.fixture.destroy_repo_group(repo_group_id)
1258 self.fixture.destroy_repo_group(repo_group_id)
1298
1259
@@ -1308,16 +1269,11 b' class UserUtility(object):'
1308 """
1269 """
1309 first_group = UserGroup.get(first_group_id)
1270 first_group = UserGroup.get(first_group_id)
1310 second_group = UserGroup.get(second_group_id)
1271 second_group = UserGroup.get(second_group_id)
1311 first_group_parts = (
1272 first_group_parts = len(first_group.users_group_name.split("/")) if first_group else 0
1312 len(first_group.users_group_name.split('/'))
1273 second_group_parts = len(second_group.users_group_name.split("/")) if second_group else 0
1313 if first_group else 0)
1314 second_group_parts = (
1315 len(second_group.users_group_name.split('/'))
1316 if second_group else 0)
1317 return cmp(second_group_parts, first_group_parts)
1274 return cmp(second_group_parts, first_group_parts)
1318
1275
1319 sorted_user_group_ids = sorted(
1276 sorted_user_group_ids = sorted(self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
1320 self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
1321 for user_group_id in sorted_user_group_ids:
1277 for user_group_id in sorted_user_group_ids:
1322 self.fixture.destroy_user_group(user_group_id)
1278 self.fixture.destroy_user_group(user_group_id)
1323
1279
@@ -1326,22 +1282,19 b' class UserUtility(object):'
1326 self.fixture.destroy_user(user_id)
1282 self.fixture.destroy_user(user_id)
1327
1283
1328
1284
1329 @pytest.fixture(scope='session')
1285 @pytest.fixture(scope="session")
1330 def testrun():
1286 def testrun():
1331 return {
1287 return {
1332 'uuid': uuid.uuid4(),
1288 "uuid": uuid.uuid4(),
1333 'start': datetime.datetime.utcnow().isoformat(),
1289 "start": datetime.datetime.utcnow().isoformat(),
1334 'timestamp': int(time.time()),
1290 "timestamp": int(time.time()),
1335 }
1291 }
1336
1292
1337
1293
1338 class AppenlightClient(object):
1294 class AppenlightClient(object):
1339
1295 url_template = "{url}?protocol_version=0.5"
1340 url_template = '{url}?protocol_version=0.5'
1341
1296
1342 def __init__(
1297 def __init__(self, url, api_key, add_server=True, add_timestamp=True, namespace=None, request=None, testrun=None):
1343 self, url, api_key, add_server=True, add_timestamp=True,
1344 namespace=None, request=None, testrun=None):
1345 self.url = self.url_template.format(url=url)
1298 self.url = self.url_template.format(url=url)
1346 self.api_key = api_key
1299 self.api_key = api_key
1347 self.add_server = add_server
1300 self.add_server = add_server
@@ -1362,40 +1315,41 b' class AppenlightClient(object):'
1362
1315
1363 def collect(self, data):
1316 def collect(self, data):
1364 if self.add_server:
1317 if self.add_server:
1365 data.setdefault('server', self.server)
1318 data.setdefault("server", self.server)
1366 if self.add_timestamp:
1319 if self.add_timestamp:
1367 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1320 data.setdefault("date", datetime.datetime.utcnow().isoformat())
1368 if self.namespace:
1321 if self.namespace:
1369 data.setdefault('namespace', self.namespace)
1322 data.setdefault("namespace", self.namespace)
1370 if self.request:
1323 if self.request:
1371 data.setdefault('request', self.request)
1324 data.setdefault("request", self.request)
1372 self.stats.append(data)
1325 self.stats.append(data)
1373
1326
1374 def send_stats(self):
1327 def send_stats(self):
1375 tags = [
1328 tags = [
1376 ('testrun', self.request),
1329 ("testrun", self.request),
1377 ('testrun.start', self.testrun['start']),
1330 ("testrun.start", self.testrun["start"]),
1378 ('testrun.timestamp', self.testrun['timestamp']),
1331 ("testrun.timestamp", self.testrun["timestamp"]),
1379 ('test', self.namespace),
1332 ("test", self.namespace),
1380 ]
1333 ]
1381 for key, value in self.tags_before.items():
1334 for key, value in self.tags_before.items():
1382 tags.append((key + '.before', value))
1335 tags.append((key + ".before", value))
1383 try:
1336 try:
1384 delta = self.tags_after[key] - value
1337 delta = self.tags_after[key] - value
1385 tags.append((key + '.delta', delta))
1338 tags.append((key + ".delta", delta))
1386 except Exception:
1339 except Exception:
1387 pass
1340 pass
1388 for key, value in self.tags_after.items():
1341 for key, value in self.tags_after.items():
1389 tags.append((key + '.after', value))
1342 tags.append((key + ".after", value))
1390 self.collect({
1343 self.collect(
1391 'message': "Collected tags",
1344 {
1392 'tags': tags,
1345 "message": "Collected tags",
1393 })
1346 "tags": tags,
1347 }
1348 )
1394
1349
1395 response = requests.post(
1350 response = requests.post(
1396 self.url,
1351 self.url,
1397 headers={
1352 headers={"X-appenlight-api-key": self.api_key},
1398 'X-appenlight-api-key': self.api_key},
1399 json=self.stats,
1353 json=self.stats,
1400 )
1354 )
1401
1355
@@ -1403,7 +1357,7 b' class AppenlightClient(object):'
1403 pprint.pprint(self.stats)
1357 pprint.pprint(self.stats)
1404 print(response.headers)
1358 print(response.headers)
1405 print(response.text)
1359 print(response.text)
1406 raise Exception('Sending to appenlight failed')
1360 raise Exception("Sending to appenlight failed")
1407
1361
1408
1362
1409 @pytest.fixture()
1363 @pytest.fixture()
@@ -1454,9 +1408,8 b' class SettingsUtility(object):'
1454 self.repo_rhodecode_ui_ids = []
1408 self.repo_rhodecode_ui_ids = []
1455 self.repo_rhodecode_setting_ids = []
1409 self.repo_rhodecode_setting_ids = []
1456
1410
1457 def create_repo_rhodecode_ui(
1411 def create_repo_rhodecode_ui(self, repo, section, value, key=None, active=True, cleanup=True):
1458 self, repo, section, value, key=None, active=True, cleanup=True):
1412 key = key or sha1_safe(f"{section}{value}{repo.repo_id}")
1459 key = key or sha1_safe(f'{section}{value}{repo.repo_id}')
1460
1413
1461 setting = RepoRhodeCodeUi()
1414 setting = RepoRhodeCodeUi()
1462 setting.repository_id = repo.repo_id
1415 setting.repository_id = repo.repo_id
@@ -1471,9 +1424,8 b' class SettingsUtility(object):'
1471 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1424 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1472 return setting
1425 return setting
1473
1426
1474 def create_rhodecode_ui(
1427 def create_rhodecode_ui(self, section, value, key=None, active=True, cleanup=True):
1475 self, section, value, key=None, active=True, cleanup=True):
1428 key = key or sha1_safe(f"{section}{value}")
1476 key = key or sha1_safe(f'{section}{value}')
1477
1429
1478 setting = RhodeCodeUi()
1430 setting = RhodeCodeUi()
1479 setting.ui_section = section
1431 setting.ui_section = section
@@ -1487,10 +1439,8 b' class SettingsUtility(object):'
1487 self.rhodecode_ui_ids.append(setting.ui_id)
1439 self.rhodecode_ui_ids.append(setting.ui_id)
1488 return setting
1440 return setting
1489
1441
1490 def create_repo_rhodecode_setting(
1442 def create_repo_rhodecode_setting(self, repo, name, value, type_, cleanup=True):
1491 self, repo, name, value, type_, cleanup=True):
1443 setting = RepoRhodeCodeSetting(repo.repo_id, key=name, val=value, type=type_)
1492 setting = RepoRhodeCodeSetting(
1493 repo.repo_id, key=name, val=value, type=type_)
1494 Session().add(setting)
1444 Session().add(setting)
1495 Session().commit()
1445 Session().commit()
1496
1446
@@ -1530,13 +1480,12 b' class SettingsUtility(object):'
1530
1480
1531 @pytest.fixture()
1481 @pytest.fixture()
1532 def no_notifications(request):
1482 def no_notifications(request):
1533 notification_patcher = mock.patch(
1483 notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create")
1534 'rhodecode.model.notification.NotificationModel.create')
1535 notification_patcher.start()
1484 notification_patcher.start()
1536 request.addfinalizer(notification_patcher.stop)
1485 request.addfinalizer(notification_patcher.stop)
1537
1486
1538
1487
1539 @pytest.fixture(scope='session')
1488 @pytest.fixture(scope="session")
1540 def repeat(request):
1489 def repeat(request):
1541 """
1490 """
1542 The number of repetitions is based on this fixture.
1491 The number of repetitions is based on this fixture.
@@ -1544,7 +1493,7 b' def repeat(request):'
1544 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1493 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1545 tests are not too slow in our default test suite.
1494 tests are not too slow in our default test suite.
1546 """
1495 """
1547 return request.config.getoption('--repeat')
1496 return request.config.getoption("--repeat")
1548
1497
1549
1498
1550 @pytest.fixture()
1499 @pytest.fixture()
@@ -1562,42 +1511,17 b' def context_stub():'
1562
1511
1563
1512
1564 @pytest.fixture()
1513 @pytest.fixture()
1565 def request_stub():
1566 """
1567 Stub request object.
1568 """
1569 from rhodecode.lib.base import bootstrap_request
1570 request = bootstrap_request(scheme='https')
1571 return request
1572
1573
1574 @pytest.fixture()
1575 def config_stub(request, request_stub):
1576 """
1577 Set up pyramid.testing and return the Configurator.
1578 """
1579 from rhodecode.lib.base import bootstrap_config
1580 config = bootstrap_config(request=request_stub)
1581
1582 @request.addfinalizer
1583 def cleanup():
1584 pyramid.testing.tearDown()
1585
1586 return config
1587
1588
1589 @pytest.fixture()
1590 def StubIntegrationType():
1514 def StubIntegrationType():
1591 class _StubIntegrationType(IntegrationTypeBase):
1515 class _StubIntegrationType(IntegrationTypeBase):
1592 """ Test integration type class """
1516 """Test integration type class"""
1593
1517
1594 key = 'test'
1518 key = "test"
1595 display_name = 'Test integration type'
1519 display_name = "Test integration type"
1596 description = 'A test integration type for testing'
1520 description = "A test integration type for testing"
1597
1521
1598 @classmethod
1522 @classmethod
1599 def icon(cls):
1523 def icon(cls):
1600 return 'test_icon_html_image'
1524 return "test_icon_html_image"
1601
1525
1602 def __init__(self, settings):
1526 def __init__(self, settings):
1603 super(_StubIntegrationType, self).__init__(settings)
1527 super(_StubIntegrationType, self).__init__(settings)
@@ -1611,15 +1535,15 b' def StubIntegrationType():'
1611 test_string_field = colander.SchemaNode(
1535 test_string_field = colander.SchemaNode(
1612 colander.String(),
1536 colander.String(),
1613 missing=colander.required,
1537 missing=colander.required,
1614 title='test string field',
1538 title="test string field",
1615 )
1539 )
1616 test_int_field = colander.SchemaNode(
1540 test_int_field = colander.SchemaNode(
1617 colander.Int(),
1541 colander.Int(),
1618 title='some integer setting',
1542 title="some integer setting",
1619 )
1543 )
1544
1620 return SettingsSchema()
1545 return SettingsSchema()
1621
1546
1622
1623 integration_type_registry.register_integration_type(_StubIntegrationType)
1547 integration_type_registry.register_integration_type(_StubIntegrationType)
1624 return _StubIntegrationType
1548 return _StubIntegrationType
1625
1549
@@ -1627,18 +1551,22 b' def StubIntegrationType():'
1627 @pytest.fixture()
1551 @pytest.fixture()
1628 def stub_integration_settings():
1552 def stub_integration_settings():
1629 return {
1553 return {
1630 'test_string_field': 'some data',
1554 "test_string_field": "some data",
1631 'test_int_field': 100,
1555 "test_int_field": 100,
1632 }
1556 }
1633
1557
1634
1558
1635 @pytest.fixture()
1559 @pytest.fixture()
1636 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1560 def repo_integration_stub(request, repo_stub, StubIntegrationType, stub_integration_settings):
1637 stub_integration_settings):
1638 integration = IntegrationModel().create(
1561 integration = IntegrationModel().create(
1639 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1562 StubIntegrationType,
1640 name='test repo integration',
1563 settings=stub_integration_settings,
1641 repo=repo_stub, repo_group=None, child_repos_only=None)
1564 enabled=True,
1565 name="test repo integration",
1566 repo=repo_stub,
1567 repo_group=None,
1568 child_repos_only=None,
1569 )
1642
1570
1643 @request.addfinalizer
1571 @request.addfinalizer
1644 def cleanup():
1572 def cleanup():
@@ -1648,12 +1576,16 b' def repo_integration_stub(request, repo_'
1648
1576
1649
1577
1650 @pytest.fixture()
1578 @pytest.fixture()
1651 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1579 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, stub_integration_settings):
1652 stub_integration_settings):
1653 integration = IntegrationModel().create(
1580 integration = IntegrationModel().create(
1654 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1581 StubIntegrationType,
1655 name='test repogroup integration',
1582 settings=stub_integration_settings,
1656 repo=None, repo_group=test_repo_group, child_repos_only=True)
1583 enabled=True,
1584 name="test repogroup integration",
1585 repo=None,
1586 repo_group=test_repo_group,
1587 child_repos_only=True,
1588 )
1657
1589
1658 @request.addfinalizer
1590 @request.addfinalizer
1659 def cleanup():
1591 def cleanup():
@@ -1663,12 +1595,16 b' def repogroup_integration_stub(request, '
1663
1595
1664
1596
1665 @pytest.fixture()
1597 @pytest.fixture()
1666 def repogroup_recursive_integration_stub(request, test_repo_group,
1598 def repogroup_recursive_integration_stub(request, test_repo_group, StubIntegrationType, stub_integration_settings):
1667 StubIntegrationType, stub_integration_settings):
1668 integration = IntegrationModel().create(
1599 integration = IntegrationModel().create(
1669 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1600 StubIntegrationType,
1670 name='test recursive repogroup integration',
1601 settings=stub_integration_settings,
1671 repo=None, repo_group=test_repo_group, child_repos_only=False)
1602 enabled=True,
1603 name="test recursive repogroup integration",
1604 repo=None,
1605 repo_group=test_repo_group,
1606 child_repos_only=False,
1607 )
1672
1608
1673 @request.addfinalizer
1609 @request.addfinalizer
1674 def cleanup():
1610 def cleanup():
@@ -1678,12 +1614,16 b' def repogroup_recursive_integration_stub'
1678
1614
1679
1615
1680 @pytest.fixture()
1616 @pytest.fixture()
1681 def global_integration_stub(request, StubIntegrationType,
1617 def global_integration_stub(request, StubIntegrationType, stub_integration_settings):
1682 stub_integration_settings):
1683 integration = IntegrationModel().create(
1618 integration = IntegrationModel().create(
1684 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1619 StubIntegrationType,
1685 name='test global integration',
1620 settings=stub_integration_settings,
1686 repo=None, repo_group=None, child_repos_only=None)
1621 enabled=True,
1622 name="test global integration",
1623 repo=None,
1624 repo_group=None,
1625 child_repos_only=None,
1626 )
1687
1627
1688 @request.addfinalizer
1628 @request.addfinalizer
1689 def cleanup():
1629 def cleanup():
@@ -1693,12 +1633,16 b' def global_integration_stub(request, Stu'
1693
1633
1694
1634
1695 @pytest.fixture()
1635 @pytest.fixture()
1696 def root_repos_integration_stub(request, StubIntegrationType,
1636 def root_repos_integration_stub(request, StubIntegrationType, stub_integration_settings):
1697 stub_integration_settings):
1698 integration = IntegrationModel().create(
1637 integration = IntegrationModel().create(
1699 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1638 StubIntegrationType,
1700 name='test global integration',
1639 settings=stub_integration_settings,
1701 repo=None, repo_group=None, child_repos_only=True)
1640 enabled=True,
1641 name="test global integration",
1642 repo=None,
1643 repo_group=None,
1644 child_repos_only=True,
1645 )
1702
1646
1703 @request.addfinalizer
1647 @request.addfinalizer
1704 def cleanup():
1648 def cleanup():
@@ -1710,8 +1654,8 b' def root_repos_integration_stub(request,'
1710 @pytest.fixture()
1654 @pytest.fixture()
1711 def local_dt_to_utc():
1655 def local_dt_to_utc():
1712 def _factory(dt):
1656 def _factory(dt):
1713 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1657 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
1714 dateutil.tz.tzutc()).replace(tzinfo=None)
1658
1715 return _factory
1659 return _factory
1716
1660
1717
1661
@@ -1724,7 +1668,7 b' def disable_anonymous_user(request, base'
1724 set_anonymous_access(True)
1668 set_anonymous_access(True)
1725
1669
1726
1670
1727 @pytest.fixture(scope='module')
1671 @pytest.fixture(scope="module")
1728 def rc_fixture(request):
1672 def rc_fixture(request):
1729 return Fixture()
1673 return Fixture()
1730
1674
@@ -1734,9 +1678,9 b' def repo_groups(request):'
1734 fixture = Fixture()
1678 fixture = Fixture()
1735
1679
1736 session = Session()
1680 session = Session()
1737 zombie_group = fixture.create_repo_group('zombie')
1681 zombie_group = fixture.create_repo_group("zombie")
1738 parent_group = fixture.create_repo_group('parent')
1682 parent_group = fixture.create_repo_group("parent")
1739 child_group = fixture.create_repo_group('parent/child')
1683 child_group = fixture.create_repo_group("parent/child")
1740 groups_in_db = session.query(RepoGroup).all()
1684 groups_in_db = session.query(RepoGroup).all()
1741 assert len(groups_in_db) == 3
1685 assert len(groups_in_db) == 3
1742 assert child_group.group_parent_id == parent_group.group_id
1686 assert child_group.group_parent_id == parent_group.group_id
@@ -1748,3 +1692,4 b' def repo_groups(request):'
1748 fixture.destroy_repo_group(parent_group)
1692 fixture.destroy_repo_group(parent_group)
1749
1693
1750 return zombie_group, parent_group, child_group
1694 return zombie_group, parent_group, child_group
1695
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -37,17 +36,16 b' from rhodecode.model.user_group import U'
37 from rhodecode.model.gist import GistModel
36 from rhodecode.model.gist import GistModel
38 from rhodecode.model.auth_token import AuthTokenModel
37 from rhodecode.model.auth_token import AuthTokenModel
39 from rhodecode.model.scm import ScmModel
38 from rhodecode.model.scm import ScmModel
40 from rhodecode.authentication.plugins.auth_rhodecode import \
39 from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin
41 RhodeCodeAuthPlugin
42
40
43 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
41 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
44
42
45 dn = os.path.dirname
43 dn = os.path.dirname
46 FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures')
44 FIXTURES = os.path.join(dn(os.path.abspath(__file__)), "diff_fixtures")
47
45
48
46
49 def error_function(*args, **kwargs):
47 def error_function(*args, **kwargs):
50 raise Exception('Total Crash !')
48 raise Exception("Total Crash !")
51
49
52
50
53 class TestINI(object):
51 class TestINI(object):
@@ -59,8 +57,7 b' class TestINI(object):'
59 print('paster server %s' % new_test_ini)
57 print('paster server %s' % new_test_ini)
60 """
58 """
61
59
62 def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT',
60 def __init__(self, ini_file_path, ini_params, new_file_prefix="DEFAULT", destroy=True, dir=None):
63 destroy=True, dir=None):
64 self.ini_file_path = ini_file_path
61 self.ini_file_path = ini_file_path
65 self.ini_params = ini_params
62 self.ini_params = ini_params
66 self.new_path = None
63 self.new_path = None
@@ -85,9 +82,8 b' class TestINI(object):'
85 parser[section][key] = str(val)
82 parser[section][key] = str(val)
86
83
87 with tempfile.NamedTemporaryFile(
84 with tempfile.NamedTemporaryFile(
88 mode='w',
85 mode="w", prefix=self.new_path_prefix, suffix=".ini", dir=self._dir, delete=False
89 prefix=self.new_path_prefix, suffix='.ini', dir=self._dir,
86 ) as new_ini_file:
90 delete=False) as new_ini_file:
91 parser.write(new_ini_file)
87 parser.write(new_ini_file)
92 self.new_path = new_ini_file.name
88 self.new_path = new_ini_file.name
93
89
@@ -99,7 +95,6 b' class TestINI(object):'
99
95
100
96
101 class Fixture(object):
97 class Fixture(object):
102
103 def anon_access(self, status):
98 def anon_access(self, status):
104 """
99 """
105 Context process for disabling anonymous access. use like:
100 Context process for disabling anonymous access. use like:
@@ -139,22 +134,19 b' class Fixture(object):'
139
134
140 class context(object):
135 class context(object):
141 def _get_plugin(self):
136 def _get_plugin(self):
142 plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
137 plugin_id = "egg:rhodecode-enterprise-ce#{}".format(RhodeCodeAuthPlugin.uid)
143 plugin = RhodeCodeAuthPlugin(plugin_id)
138 plugin = RhodeCodeAuthPlugin(plugin_id)
144 return plugin
139 return plugin
145
140
146 def __enter__(self):
141 def __enter__(self):
147
148 plugin = self._get_plugin()
142 plugin = self._get_plugin()
149 plugin.create_or_update_setting('auth_restriction', auth_restriction)
143 plugin.create_or_update_setting("auth_restriction", auth_restriction)
150 Session().commit()
144 Session().commit()
151 SettingsModel().invalidate_settings_cache(hard=True)
145 SettingsModel().invalidate_settings_cache(hard=True)
152
146
153 def __exit__(self, exc_type, exc_val, exc_tb):
147 def __exit__(self, exc_type, exc_val, exc_tb):
154
155 plugin = self._get_plugin()
148 plugin = self._get_plugin()
156 plugin.create_or_update_setting(
149 plugin.create_or_update_setting("auth_restriction", RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE)
157 'auth_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE)
158 Session().commit()
150 Session().commit()
159 SettingsModel().invalidate_settings_cache(hard=True)
151 SettingsModel().invalidate_settings_cache(hard=True)
160
152
@@ -173,62 +165,61 b' class Fixture(object):'
173
165
174 class context(object):
166 class context(object):
175 def _get_plugin(self):
167 def _get_plugin(self):
176 plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
168 plugin_id = "egg:rhodecode-enterprise-ce#{}".format(RhodeCodeAuthPlugin.uid)
177 plugin = RhodeCodeAuthPlugin(plugin_id)
169 plugin = RhodeCodeAuthPlugin(plugin_id)
178 return plugin
170 return plugin
179
171
180 def __enter__(self):
172 def __enter__(self):
181 plugin = self._get_plugin()
173 plugin = self._get_plugin()
182 plugin.create_or_update_setting('scope_restriction', scope_restriction)
174 plugin.create_or_update_setting("scope_restriction", scope_restriction)
183 Session().commit()
175 Session().commit()
184 SettingsModel().invalidate_settings_cache(hard=True)
176 SettingsModel().invalidate_settings_cache(hard=True)
185
177
186 def __exit__(self, exc_type, exc_val, exc_tb):
178 def __exit__(self, exc_type, exc_val, exc_tb):
187 plugin = self._get_plugin()
179 plugin = self._get_plugin()
188 plugin.create_or_update_setting(
180 plugin.create_or_update_setting("scope_restriction", RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL)
189 'scope_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL)
190 Session().commit()
181 Session().commit()
191 SettingsModel().invalidate_settings_cache(hard=True)
182 SettingsModel().invalidate_settings_cache(hard=True)
192
183
193 return context()
184 return context()
194
185
195 def _get_repo_create_params(self, **custom):
186 def _get_repo_create_params(self, **custom):
196 repo_type = custom.get('repo_type') or 'hg'
187 repo_type = custom.get("repo_type") or "hg"
197
188
198 default_landing_ref, landing_ref_lbl = ScmModel.backend_landing_ref(repo_type)
189 default_landing_ref, landing_ref_lbl = ScmModel.backend_landing_ref(repo_type)
199
190
200 defs = {
191 defs = {
201 'repo_name': None,
192 "repo_name": None,
202 'repo_type': repo_type,
193 "repo_type": repo_type,
203 'clone_uri': '',
194 "clone_uri": "",
204 'push_uri': '',
195 "push_uri": "",
205 'repo_group': '-1',
196 "repo_group": "-1",
206 'repo_description': 'DESC',
197 "repo_description": "DESC",
207 'repo_private': False,
198 "repo_private": False,
208 'repo_landing_commit_ref': default_landing_ref,
199 "repo_landing_commit_ref": default_landing_ref,
209 'repo_copy_permissions': False,
200 "repo_copy_permissions": False,
210 'repo_state': Repository.STATE_CREATED,
201 "repo_state": Repository.STATE_CREATED,
211 }
202 }
212 defs.update(custom)
203 defs.update(custom)
213 if 'repo_name_full' not in custom:
204 if "repo_name_full" not in custom:
214 defs.update({'repo_name_full': defs['repo_name']})
205 defs.update({"repo_name_full": defs["repo_name"]})
215
206
216 # fix the repo name if passed as repo_name_full
207 # fix the repo name if passed as repo_name_full
217 if defs['repo_name']:
208 if defs["repo_name"]:
218 defs['repo_name'] = defs['repo_name'].split('/')[-1]
209 defs["repo_name"] = defs["repo_name"].split("/")[-1]
219
210
220 return defs
211 return defs
221
212
222 def _get_group_create_params(self, **custom):
213 def _get_group_create_params(self, **custom):
223 defs = {
214 defs = {
224 'group_name': None,
215 "group_name": None,
225 'group_description': 'DESC',
216 "group_description": "DESC",
226 'perm_updates': [],
217 "perm_updates": [],
227 'perm_additions': [],
218 "perm_additions": [],
228 'perm_deletions': [],
219 "perm_deletions": [],
229 'group_parent_id': -1,
220 "group_parent_id": -1,
230 'enable_locking': False,
221 "enable_locking": False,
231 'recursive': False,
222 "recursive": False,
232 }
223 }
233 defs.update(custom)
224 defs.update(custom)
234
225
@@ -236,16 +227,16 b' class Fixture(object):'
236
227
237 def _get_user_create_params(self, name, **custom):
228 def _get_user_create_params(self, name, **custom):
238 defs = {
229 defs = {
239 'username': name,
230 "username": name,
240 'password': 'qweqwe',
231 "password": "qweqwe",
241 'email': '%s+test@rhodecode.org' % name,
232 "email": "%s+test@rhodecode.org" % name,
242 'firstname': 'TestUser',
233 "firstname": "TestUser",
243 'lastname': 'Test',
234 "lastname": "Test",
244 'description': 'test description',
235 "description": "test description",
245 'active': True,
236 "active": True,
246 'admin': False,
237 "admin": False,
247 'extern_type': 'rhodecode',
238 "extern_type": "rhodecode",
248 'extern_name': None,
239 "extern_name": None,
249 }
240 }
250 defs.update(custom)
241 defs.update(custom)
251
242
@@ -253,30 +244,30 b' class Fixture(object):'
253
244
254 def _get_user_group_create_params(self, name, **custom):
245 def _get_user_group_create_params(self, name, **custom):
255 defs = {
246 defs = {
256 'users_group_name': name,
247 "users_group_name": name,
257 'user_group_description': 'DESC',
248 "user_group_description": "DESC",
258 'users_group_active': True,
249 "users_group_active": True,
259 'user_group_data': {},
250 "user_group_data": {},
260 }
251 }
261 defs.update(custom)
252 defs.update(custom)
262
253
263 return defs
254 return defs
264
255
265 def create_repo(self, name, **kwargs):
256 def create_repo(self, name, **kwargs):
266 repo_group = kwargs.get('repo_group')
257 repo_group = kwargs.get("repo_group")
267 if isinstance(repo_group, RepoGroup):
258 if isinstance(repo_group, RepoGroup):
268 kwargs['repo_group'] = repo_group.group_id
259 kwargs["repo_group"] = repo_group.group_id
269 name = name.split(Repository.NAME_SEP)[-1]
260 name = name.split(Repository.NAME_SEP)[-1]
270 name = Repository.NAME_SEP.join((repo_group.group_name, name))
261 name = Repository.NAME_SEP.join((repo_group.group_name, name))
271
262
272 if 'skip_if_exists' in kwargs:
263 if "skip_if_exists" in kwargs:
273 del kwargs['skip_if_exists']
264 del kwargs["skip_if_exists"]
274 r = Repository.get_by_repo_name(name)
265 r = Repository.get_by_repo_name(name)
275 if r:
266 if r:
276 return r
267 return r
277
268
278 form_data = self._get_repo_create_params(repo_name=name, **kwargs)
269 form_data = self._get_repo_create_params(repo_name=name, **kwargs)
279 cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
270 cur_user = kwargs.get("cur_user", TEST_USER_ADMIN_LOGIN)
280 RepoModel().create(form_data, cur_user)
271 RepoModel().create(form_data, cur_user)
281 Session().commit()
272 Session().commit()
282 repo = Repository.get_by_repo_name(name)
273 repo = Repository.get_by_repo_name(name)
@@ -287,17 +278,15 b' class Fixture(object):'
287 repo_to_fork = Repository.get_by_repo_name(repo_to_fork)
278 repo_to_fork = Repository.get_by_repo_name(repo_to_fork)
288
279
289 form_data = self._get_repo_create_params(
280 form_data = self._get_repo_create_params(
290 repo_name=fork_name,
281 repo_name=fork_name, fork_parent_id=repo_to_fork.repo_id, repo_type=repo_to_fork.repo_type, **kwargs
291 fork_parent_id=repo_to_fork.repo_id,
282 )
292 repo_type=repo_to_fork.repo_type,
293 **kwargs)
294
283
295 # TODO: fix it !!
284 # TODO: fix it !!
296 form_data['description'] = form_data['repo_description']
285 form_data["description"] = form_data["repo_description"]
297 form_data['private'] = form_data['repo_private']
286 form_data["private"] = form_data["repo_private"]
298 form_data['landing_rev'] = form_data['repo_landing_commit_ref']
287 form_data["landing_rev"] = form_data["repo_landing_commit_ref"]
299
288
300 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
289 owner = kwargs.get("cur_user", TEST_USER_ADMIN_LOGIN)
301 RepoModel().create_fork(form_data, cur_user=owner)
290 RepoModel().create_fork(form_data, cur_user=owner)
302 Session().commit()
291 Session().commit()
303 r = Repository.get_by_repo_name(fork_name)
292 r = Repository.get_by_repo_name(fork_name)
@@ -305,7 +294,7 b' class Fixture(object):'
305 return r
294 return r
306
295
307 def destroy_repo(self, repo_name, **kwargs):
296 def destroy_repo(self, repo_name, **kwargs):
308 RepoModel().delete(repo_name, pull_requests='delete', artifacts='delete', **kwargs)
297 RepoModel().delete(repo_name, pull_requests="delete", artifacts="delete", **kwargs)
309 Session().commit()
298 Session().commit()
310
299
311 def destroy_repo_on_filesystem(self, repo_name):
300 def destroy_repo_on_filesystem(self, repo_name):
@@ -314,17 +303,16 b' class Fixture(object):'
314 shutil.rmtree(rm_path)
303 shutil.rmtree(rm_path)
315
304
316 def create_repo_group(self, name, **kwargs):
305 def create_repo_group(self, name, **kwargs):
317 if 'skip_if_exists' in kwargs:
306 if "skip_if_exists" in kwargs:
318 del kwargs['skip_if_exists']
307 del kwargs["skip_if_exists"]
319 gr = RepoGroup.get_by_group_name(group_name=name)
308 gr = RepoGroup.get_by_group_name(group_name=name)
320 if gr:
309 if gr:
321 return gr
310 return gr
322 form_data = self._get_group_create_params(group_name=name, **kwargs)
311 form_data = self._get_group_create_params(group_name=name, **kwargs)
323 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
312 owner = kwargs.get("cur_user", TEST_USER_ADMIN_LOGIN)
324 gr = RepoGroupModel().create(
313 gr = RepoGroupModel().create(
325 group_name=form_data['group_name'],
314 group_name=form_data["group_name"], group_description=form_data["group_name"], owner=owner
326 group_description=form_data['group_name'],
315 )
327 owner=owner)
328 Session().commit()
316 Session().commit()
329 gr = RepoGroup.get_by_group_name(gr.group_name)
317 gr = RepoGroup.get_by_group_name(gr.group_name)
330 return gr
318 return gr
@@ -334,8 +322,8 b' class Fixture(object):'
334 Session().commit()
322 Session().commit()
335
323
336 def create_user(self, name, **kwargs):
324 def create_user(self, name, **kwargs):
337 if 'skip_if_exists' in kwargs:
325 if "skip_if_exists" in kwargs:
338 del kwargs['skip_if_exists']
326 del kwargs["skip_if_exists"]
339 user = User.get_by_username(name)
327 user = User.get_by_username(name)
340 if user:
328 if user:
341 return user
329 return user
@@ -343,8 +331,7 b' class Fixture(object):'
343 user = UserModel().create(form_data)
331 user = UserModel().create(form_data)
344
332
345 # create token for user
333 # create token for user
346 AuthTokenModel().create(
334 AuthTokenModel().create(user=user, description="TEST_USER_TOKEN")
347 user=user, description=u'TEST_USER_TOKEN')
348
335
349 Session().commit()
336 Session().commit()
350 user = User.get_by_username(user.username)
337 user = User.get_by_username(user.username)
@@ -368,22 +355,24 b' class Fixture(object):'
368 Session().commit()
355 Session().commit()
369
356
370 def create_user_group(self, name, **kwargs):
357 def create_user_group(self, name, **kwargs):
371 if 'skip_if_exists' in kwargs:
358 if "skip_if_exists" in kwargs:
372 del kwargs['skip_if_exists']
359 del kwargs["skip_if_exists"]
373 gr = UserGroup.get_by_group_name(group_name=name)
360 gr = UserGroup.get_by_group_name(group_name=name)
374 if gr:
361 if gr:
375 return gr
362 return gr
376 # map active flag to the real attribute. For API consistency of fixtures
363 # map active flag to the real attribute. For API consistency of fixtures
377 if 'active' in kwargs:
364 if "active" in kwargs:
378 kwargs['users_group_active'] = kwargs['active']
365 kwargs["users_group_active"] = kwargs["active"]
379 del kwargs['active']
366 del kwargs["active"]
380 form_data = self._get_user_group_create_params(name, **kwargs)
367 form_data = self._get_user_group_create_params(name, **kwargs)
381 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
368 owner = kwargs.get("cur_user", TEST_USER_ADMIN_LOGIN)
382 user_group = UserGroupModel().create(
369 user_group = UserGroupModel().create(
383 name=form_data['users_group_name'],
370 name=form_data["users_group_name"],
384 description=form_data['user_group_description'],
371 description=form_data["user_group_description"],
385 owner=owner, active=form_data['users_group_active'],
372 owner=owner,
386 group_data=form_data['user_group_data'])
373 active=form_data["users_group_active"],
374 group_data=form_data["user_group_data"],
375 )
387 Session().commit()
376 Session().commit()
388 user_group = UserGroup.get_by_group_name(user_group.users_group_name)
377 user_group = UserGroup.get_by_group_name(user_group.users_group_name)
389 return user_group
378 return user_group
@@ -394,18 +383,23 b' class Fixture(object):'
394
383
395 def create_gist(self, **kwargs):
384 def create_gist(self, **kwargs):
396 form_data = {
385 form_data = {
397 'description': 'new-gist',
386 "description": "new-gist",
398 'owner': TEST_USER_ADMIN_LOGIN,
387 "owner": TEST_USER_ADMIN_LOGIN,
399 'gist_type': GistModel.cls.GIST_PUBLIC,
388 "gist_type": GistModel.cls.GIST_PUBLIC,
400 'lifetime': -1,
389 "lifetime": -1,
401 'acl_level': Gist.ACL_LEVEL_PUBLIC,
390 "acl_level": Gist.ACL_LEVEL_PUBLIC,
402 'gist_mapping': {b'filename1.txt': {'content': b'hello world'},}
391 "gist_mapping": {
392 b"filename1.txt": {"content": b"hello world"},
393 },
403 }
394 }
404 form_data.update(kwargs)
395 form_data.update(kwargs)
405 gist = GistModel().create(
396 gist = GistModel().create(
406 description=form_data['description'], owner=form_data['owner'],
397 description=form_data["description"],
407 gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'],
398 owner=form_data["owner"],
408 lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level']
399 gist_mapping=form_data["gist_mapping"],
400 gist_type=form_data["gist_type"],
401 lifetime=form_data["lifetime"],
402 gist_acl_level=form_data["acl_level"],
409 )
403 )
410 Session().commit()
404 Session().commit()
411 return gist
405 return gist
@@ -420,7 +414,7 b' class Fixture(object):'
420 Session().commit()
414 Session().commit()
421
415
422 def load_resource(self, resource_name, strip=False):
416 def load_resource(self, resource_name, strip=False):
423 with open(os.path.join(FIXTURES, resource_name), 'rb') as f:
417 with open(os.path.join(FIXTURES, resource_name), "rb") as f:
424 source = f.read()
418 source = f.read()
425 if strip:
419 if strip:
426 source = source.strip()
420 source = source.strip()
@@ -21,7 +21,7 b''
21 import pytest
21 import pytest
22
22
23 from rhodecode.tests import TestController
23 from rhodecode.tests import TestController
24 from rhodecode.tests.fixture import Fixture
24 from rhodecode.tests.fixtures.rc_fixture import Fixture
25 from rhodecode.tests.routes import route_path
25 from rhodecode.tests.routes import route_path
26
26
27
27
@@ -20,7 +20,7 b' import time'
20 import pytest
20 import pytest
21
21
22 from rhodecode import events
22 from rhodecode import events
23 from rhodecode.tests.fixture import Fixture
23 from rhodecode.tests.fixtures.rc_fixture import Fixture
24 from rhodecode.model.db import Session, Integration
24 from rhodecode.model.db import Session, Integration
25 from rhodecode.model.integration import IntegrationModel
25 from rhodecode.model.integration import IntegrationModel
26
26
@@ -123,10 +123,14 b' def test_get_config(user_util, baseapp, '
123 ('web', 'allow_push', '*'),
123 ('web', 'allow_push', '*'),
124 ('web', 'allow_archive', 'gz zip bz2'),
124 ('web', 'allow_archive', 'gz zip bz2'),
125 ('web', 'baseurl', '/'),
125 ('web', 'baseurl', '/'),
126
127 # largefiles data...
126 ('vcs_git_lfs', 'store_location', hg_config_org.get('vcs_git_lfs', 'store_location')),
128 ('vcs_git_lfs', 'store_location', hg_config_org.get('vcs_git_lfs', 'store_location')),
129 ('largefiles', 'usercache', hg_config_org.get('largefiles', 'usercache')),
130
127 ('vcs_svn_branch', '9aac1a38c3b8a0cdc4ae0f960a5f83332bc4fa5e', '/branches/*'),
131 ('vcs_svn_branch', '9aac1a38c3b8a0cdc4ae0f960a5f83332bc4fa5e', '/branches/*'),
128 ('vcs_svn_branch', 'c7e6a611c87da06529fd0dd733308481d67c71a8', '/trunk'),
132 ('vcs_svn_branch', 'c7e6a611c87da06529fd0dd733308481d67c71a8', '/trunk'),
129 ('largefiles', 'usercache', hg_config_org.get('largefiles', 'usercache')),
133
130 ('hooks', 'preoutgoing.pre_pull', 'python:vcsserver.hooks.pre_pull'),
134 ('hooks', 'preoutgoing.pre_pull', 'python:vcsserver.hooks.pre_pull'),
131 ('hooks', 'prechangegroup.pre_push', 'python:vcsserver.hooks.pre_push'),
135 ('hooks', 'prechangegroup.pre_push', 'python:vcsserver.hooks.pre_push'),
132 ('hooks', 'outgoing.pull_logger', 'python:vcsserver.hooks.log_pull_action'),
136 ('hooks', 'outgoing.pull_logger', 'python:vcsserver.hooks.log_pull_action'),
@@ -22,7 +22,8 b' import pytest'
22
22
23 from rhodecode.lib.str_utils import base64_to_str
23 from rhodecode.lib.str_utils import base64_to_str
24 from rhodecode.lib.utils2 import AttributeDict
24 from rhodecode.lib.utils2 import AttributeDict
25 from rhodecode.tests.utils import CustomTestApp
25 from rhodecode.tests.fixtures.fixture_pyramid import ini_config
26 from rhodecode.tests.utils import CustomTestApp, AuthPluginManager
26
27
27 from rhodecode.lib.caching_query import FromCache
28 from rhodecode.lib.caching_query import FromCache
28 from rhodecode.lib.middleware import simplevcs
29 from rhodecode.lib.middleware import simplevcs
@@ -34,6 +35,57 b' from rhodecode.tests import ('
34 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
35 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
35 from rhodecode.tests.lib.middleware import mock_scm_app
36 from rhodecode.tests.lib.middleware import mock_scm_app
36
37
38 from rhodecode.model.db import Permission, User
39 from rhodecode.model.meta import Session
40 from rhodecode.model.user import UserModel
41
42
43 @pytest.fixture()
44 def enable_auth_plugins(request, app):
45 """
46 Return a factory object that when called, allows to control which
47 authentication plugins are enabled.
48 """
49
50 enabler = AuthPluginManager()
51 request.addfinalizer(enabler.cleanup)
52
53 return enabler
54
55
56 @pytest.fixture()
57 def test_user_factory(request, baseapp):
58
59 def user_factory(username='test_user', password='qweqwe', first_name='John', last_name='Testing', **kwargs):
60 usr = UserModel().create_or_update(
61 username=username,
62 password=password,
63 email=f'{username}@rhodecode.org',
64 firstname=first_name, lastname=last_name)
65 Session().commit()
66
67 for k, v in kwargs.items():
68 setattr(usr, k, v)
69 Session().add(usr)
70
71 new_usr = User.get_by_username(username)
72 new_usr_id = new_usr.user_id
73 assert new_usr == usr
74
75 @request.addfinalizer
76 def cleanup():
77 if User.get(new_usr_id) is None:
78 return
79
80 perm = Permission.query().all()
81 for p in perm:
82 UserModel().revoke_perm(usr, p)
83
84 UserModel().delete(new_usr_id)
85 Session().commit()
86 return usr
87
88 return user_factory
37
89
38 class StubVCSController(simplevcs.SimpleVCS):
90 class StubVCSController(simplevcs.SimpleVCS):
39
91
@@ -107,8 +159,7 b' def _remove_default_user_from_query_cach'
107 Session().expire(user)
159 Session().expire(user)
108
160
109
161
110 def test_handles_exceptions_during_permissions_checks(
162 def test_handles_exceptions_during_permissions_checks(vcscontroller, disable_anonymous_user, enable_auth_plugins, test_user_factory):
111 vcscontroller, disable_anonymous_user, enable_auth_plugins, test_user_factory):
112
163
113 test_password = 'qweqwe'
164 test_password = 'qweqwe'
114 test_user = test_user_factory(password=test_password, extern_type='headers', extern_name='headers')
165 test_user = test_user_factory(password=test_password, extern_type='headers', extern_name='headers')
@@ -373,29 +424,30 b' class TestShadowRepoExposure(object):'
373 controller.vcs_repo_name)
424 controller.vcs_repo_name)
374
425
375
426
376 @pytest.mark.usefixtures('baseapp')
377 class TestGenerateVcsResponse(object):
427 class TestGenerateVcsResponse(object):
378
428
379 def test_ensures_that_start_response_is_called_early_enough(self):
429 def test_ensures_that_start_response_is_called_early_enough(self, baseapp):
380 self.call_controller_with_response_body(iter(['a', 'b']))
430 app_ini_config = baseapp.config.registry.settings['__file__']
431 self.call_controller_with_response_body(app_ini_config, iter(['a', 'b']))
381 assert self.start_response.called
432 assert self.start_response.called
382
433
383 def test_invalidates_cache_after_body_is_consumed(self):
434 def test_invalidates_cache_after_body_is_consumed(self, baseapp):
384 result = self.call_controller_with_response_body(iter(['a', 'b']))
435 app_ini_config = baseapp.config.registry.settings['__file__']
436 result = self.call_controller_with_response_body(app_ini_config, iter(['a', 'b']))
385 assert not self.was_cache_invalidated()
437 assert not self.was_cache_invalidated()
386 # Consume the result
438 # Consume the result
387 list(result)
439 list(result)
388 assert self.was_cache_invalidated()
440 assert self.was_cache_invalidated()
389
441
390 def test_raises_unknown_exceptions(self):
442 def test_raises_unknown_exceptions(self, baseapp):
391 result = self.call_controller_with_response_body(
443 app_ini_config = baseapp.config.registry.settings['__file__']
392 self.raise_result_iter(vcs_kind='unknown'))
444 result = self.call_controller_with_response_body(app_ini_config, self.raise_result_iter(vcs_kind='unknown'))
393 with pytest.raises(Exception):
445 with pytest.raises(Exception):
394 list(result)
446 list(result)
395
447
396 def call_controller_with_response_body(self, response_body):
448 def call_controller_with_response_body(self, ini_config, response_body):
449
397 settings = {
450 settings = {
398 'base_path': 'fake_base_path',
399 'vcs.hooks.protocol.v2': 'celery',
451 'vcs.hooks.protocol.v2': 'celery',
400 'vcs.hooks.direct_calls': False,
452 'vcs.hooks.direct_calls': False,
401 }
453 }
@@ -407,7 +459,7 b' class TestGenerateVcsResponse(object):'
407 result = controller._generate_vcs_response(
459 result = controller._generate_vcs_response(
408 environ={}, start_response=self.start_response,
460 environ={}, start_response=self.start_response,
409 repo_path='fake_repo_path',
461 repo_path='fake_repo_path',
410 extras={}, action='push')
462 extras={'config': ini_config}, action='push')
411 self.controller = controller
463 self.controller = controller
412 return result
464 return result
413
465
@@ -19,6 +19,7 b''
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22 import tempfile
22
23
23 from rhodecode.tests.utils import CustomTestApp
24 from rhodecode.tests.utils import CustomTestApp
24 from rhodecode.lib.middleware.utils import scm_app_http, scm_app
25 from rhodecode.lib.middleware.utils import scm_app_http, scm_app
@@ -41,10 +42,13 b' def vcsserver_http_echo_app(request, vcs'
41 """
42 """
42 A running VCSServer with the EchoApp activated via HTTP.
43 A running VCSServer with the EchoApp activated via HTTP.
43 """
44 """
44 vcsserver = vcsserver_factory(
45 store_dir = tempfile.gettempdir()
46
47 vcsserver_instance = vcsserver_factory(
45 request=request,
48 request=request,
49 store_dir=store_dir,
46 overrides=[{'app:main': {'dev.use_echo_app': 'true'}}])
50 overrides=[{'app:main': {'dev.use_echo_app': 'true'}}])
47 return vcsserver
51 return vcsserver_instance
48
52
49
53
50 @pytest.fixture(scope='session')
54 @pytest.fixture(scope='session')
@@ -1,5 +1,4 b''
1
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -30,7 +30,7 b' from rhodecode.lib.diffs import ('
30
30
31 from rhodecode.lib.utils2 import AttributeDict
31 from rhodecode.lib.utils2 import AttributeDict
32 from rhodecode.lib.vcs.backends.git import GitCommit
32 from rhodecode.lib.vcs.backends.git import GitCommit
33 from rhodecode.tests.fixture import Fixture
33 from rhodecode.tests.fixtures.rc_fixture import Fixture
34 from rhodecode.tests import no_newline_id_generator
34 from rhodecode.tests import no_newline_id_generator
35 from rhodecode.lib.vcs.backends.git.repository import GitDiff
35 from rhodecode.lib.vcs.backends.git.repository import GitDiff
36 from rhodecode.lib.vcs.backends.hg.repository import MercurialDiff
36 from rhodecode.lib.vcs.backends.hg.repository import MercurialDiff
@@ -1,5 +1,4 b''
1
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -18,305 +17,71 b''
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
18
20 import logging
19 import logging
21 import io
22
20
23 import mock
21 import mock
24 import msgpack
25 import pytest
22 import pytest
26 import tempfile
23 import tempfile
27
24
28 from rhodecode.lib.hook_daemon import http_hooks_deamon
29 from rhodecode.lib.hook_daemon import celery_hooks_deamon
25 from rhodecode.lib.hook_daemon import celery_hooks_deamon
30 from rhodecode.lib.hook_daemon import hook_module
26 from rhodecode.lib.hook_daemon import utils as hooks_utils
31 from rhodecode.lib.hook_daemon import base as hook_base
27 from rhodecode.lib.hook_daemon import base as hook_base
32 from rhodecode.lib.str_utils import safe_bytes
28
33 from rhodecode.tests.utils import assert_message_in_log
29 from rhodecode.tests.utils import assert_message_in_log
34 from rhodecode.lib.ext_json import json
35
36 test_proto = http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO
37
30
38
31
39 class TestHooks(object):
32 class TestHooks(object):
40 def test_hooks_can_be_used_as_a_context_processor(self):
33 def test_hooks_can_be_used_as_a_context_processor(self):
41 hooks = hook_module.Hooks()
34 hooks = hook_base.Hooks()
42 with hooks as return_value:
35 with hooks as return_value:
43 pass
36 pass
44 assert hooks == return_value
37 assert hooks == return_value
45
38
46
47 class TestHooksHttpHandler(object):
48 def test_read_request_parses_method_name_and_arguments(self):
49 data = {
50 'method': 'test',
51 'extras': {
52 'param1': 1,
53 'param2': 'a'
54 }
55 }
56 request = self._generate_post_request(data)
57 hooks_patcher = mock.patch.object(
58 hook_module.Hooks, data['method'], create=True, return_value=1)
59
60 with hooks_patcher as hooks_mock:
61 handler = http_hooks_deamon.HooksHttpHandler
62 handler.DEFAULT_HOOKS_PROTO = test_proto
63 handler.wbufsize = 10240
64 MockServer(handler, request)
65
66 hooks_mock.assert_called_once_with(data['extras'])
67
68 def test_hooks_serialized_result_is_returned(self):
69 request = self._generate_post_request({})
70 rpc_method = 'test'
71 hook_result = {
72 'first': 'one',
73 'second': 2
74 }
75 extras = {}
76
77 # patching our _read to return test method and proto used
78 read_patcher = mock.patch.object(
79 http_hooks_deamon.HooksHttpHandler, '_read_request',
80 return_value=(test_proto, rpc_method, extras))
81
82 # patch Hooks instance to return hook_result data on 'test' call
83 hooks_patcher = mock.patch.object(
84 hook_module.Hooks, rpc_method, create=True,
85 return_value=hook_result)
86
87 with read_patcher, hooks_patcher:
88 handler = http_hooks_deamon.HooksHttpHandler
89 handler.DEFAULT_HOOKS_PROTO = test_proto
90 handler.wbufsize = 10240
91 server = MockServer(handler, request)
92
93 expected_result = http_hooks_deamon.HooksHttpHandler.serialize_data(hook_result)
94
95 server.request.output_stream.seek(0)
96 assert server.request.output_stream.readlines()[-1] == expected_result
97
98 def test_exception_is_returned_in_response(self):
99 request = self._generate_post_request({})
100 rpc_method = 'test'
101
102 read_patcher = mock.patch.object(
103 http_hooks_deamon.HooksHttpHandler, '_read_request',
104 return_value=(test_proto, rpc_method, {}))
105
106 hooks_patcher = mock.patch.object(
107 hook_module.Hooks, rpc_method, create=True,
108 side_effect=Exception('Test exception'))
109
110 with read_patcher, hooks_patcher:
111 handler = http_hooks_deamon.HooksHttpHandler
112 handler.DEFAULT_HOOKS_PROTO = test_proto
113 handler.wbufsize = 10240
114 server = MockServer(handler, request)
115
116 server.request.output_stream.seek(0)
117 data = server.request.output_stream.readlines()
118 msgpack_data = b''.join(data[5:])
119 org_exc = http_hooks_deamon.HooksHttpHandler.deserialize_data(msgpack_data)
120 expected_result = {
121 'exception': 'Exception',
122 'exception_traceback': org_exc['exception_traceback'],
123 'exception_args': ['Test exception']
124 }
125 assert org_exc == expected_result
126
127 def test_log_message_writes_to_debug_log(self, caplog):
128 ip_port = ('0.0.0.0', 8888)
129 handler = http_hooks_deamon.HooksHttpHandler(MockRequest('POST /'), ip_port, mock.Mock())
130 fake_date = '1/Nov/2015 00:00:00'
131 date_patcher = mock.patch.object(
132 handler, 'log_date_time_string', return_value=fake_date)
133
134 with date_patcher, caplog.at_level(logging.DEBUG):
135 handler.log_message('Some message %d, %s', 123, 'string')
136
137 expected_message = f"HOOKS: client={ip_port} - - [{fake_date}] Some message 123, string"
138
139 assert_message_in_log(
140 caplog.records, expected_message,
141 levelno=logging.DEBUG, module='http_hooks_deamon')
142
143 def _generate_post_request(self, data, proto=test_proto):
144 if proto == http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO:
145 payload = msgpack.packb(data)
146 else:
147 payload = json.dumps(data)
148
149 return b'POST / HTTP/1.0\nContent-Length: %d\n\n%b' % (
150 len(payload), payload)
151
152
153 class ThreadedHookCallbackDaemon(object):
154 def test_constructor_calls_prepare(self):
155 prepare_daemon_patcher = mock.patch.object(
156 http_hooks_deamon.ThreadedHookCallbackDaemon, '_prepare')
157 with prepare_daemon_patcher as prepare_daemon_mock:
158 http_hooks_deamon.ThreadedHookCallbackDaemon()
159 prepare_daemon_mock.assert_called_once_with()
160
161 def test_run_is_called_on_context_start(self):
162 patchers = mock.patch.multiple(
163 http_hooks_deamon.ThreadedHookCallbackDaemon,
164 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
165
166 with patchers as mocks:
167 daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
168 with daemon as daemon_context:
169 pass
170 mocks['_run'].assert_called_once_with()
171 assert daemon_context == daemon
172
173 def test_stop_is_called_on_context_exit(self):
174 patchers = mock.patch.multiple(
175 http_hooks_deamon.ThreadedHookCallbackDaemon,
176 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
177
178 with patchers as mocks:
179 daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
180 with daemon as daemon_context:
181 assert mocks['_stop'].call_count == 0
182
183 mocks['_stop'].assert_called_once_with()
184 assert daemon_context == daemon
185
186
187 class TestHttpHooksCallbackDaemon(object):
188 def test_hooks_callback_generates_new_port(self, caplog):
189 with caplog.at_level(logging.DEBUG):
190 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
191 assert daemon._daemon.server_address == ('127.0.0.1', 8881)
192
193 with caplog.at_level(logging.DEBUG):
194 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host=None, port=None)
195 assert daemon._daemon.server_address[1] in range(0, 66000)
196 assert daemon._daemon.server_address[0] != '127.0.0.1'
197
198 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
199 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
200 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
201 assert daemon._daemon == tcp_server
202
203 _, port = tcp_server.server_address
204
205 msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
206 f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
207 assert_message_in_log(
208 caplog.records, msg, levelno=logging.DEBUG, module='http_hooks_deamon')
209
210 def test_prepare_inits_hooks_uri_and_logs_it(
211 self, tcp_server, caplog):
212 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
213 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
214
215 _, port = tcp_server.server_address
216 expected_uri = '{}:{}'.format('127.0.0.1', port)
217 assert daemon.hooks_uri == expected_uri
218
219 msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
220 f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
221
222 assert_message_in_log(
223 caplog.records, msg,
224 levelno=logging.DEBUG, module='http_hooks_deamon')
225
226 def test_run_creates_a_thread(self, tcp_server):
227 thread = mock.Mock()
228
229 with self._tcp_patcher(tcp_server):
230 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
231
232 with self._thread_patcher(thread) as thread_mock:
233 daemon._run()
234
235 thread_mock.assert_called_once_with(
236 target=tcp_server.serve_forever,
237 kwargs={'poll_interval': daemon.POLL_INTERVAL})
238 assert thread.daemon is True
239 thread.start.assert_called_once_with()
240
241 def test_run_logs(self, tcp_server, caplog):
242
243 with self._tcp_patcher(tcp_server):
244 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
245
246 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
247 daemon._run()
248
249 assert_message_in_log(
250 caplog.records,
251 'Running thread-based loop of callback daemon in background',
252 levelno=logging.DEBUG, module='http_hooks_deamon')
253
254 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
255 thread = mock.Mock()
256
257 with self._tcp_patcher(tcp_server):
258 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
259
260 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
261 with daemon:
262 assert daemon._daemon == tcp_server
263 assert daemon._callback_thread == thread
264
265 assert daemon._daemon is None
266 assert daemon._callback_thread is None
267 tcp_server.shutdown.assert_called_with()
268 thread.join.assert_called_once_with()
269
270 assert_message_in_log(
271 caplog.records, 'Waiting for background thread to finish.',
272 levelno=logging.DEBUG, module='http_hooks_deamon')
273
274 def _tcp_patcher(self, tcp_server):
275 return mock.patch.object(
276 http_hooks_deamon, 'TCPServer', return_value=tcp_server)
277
278 def _thread_patcher(self, thread):
279 return mock.patch.object(
280 http_hooks_deamon.threading, 'Thread', return_value=thread)
281
282
283 class TestPrepareHooksDaemon(object):
39 class TestPrepareHooksDaemon(object):
284
40
285 @pytest.mark.parametrize('protocol', ('celery',))
41 @pytest.mark.parametrize('protocol', ('celery',))
286 def test_returns_celery_hooks_callback_daemon_when_celery_protocol_specified(
42 def test_returns_celery_hooks_callback_daemon_when_celery_protocol_specified(self, protocol):
287 self, protocol):
288 with tempfile.NamedTemporaryFile(mode='w') as temp_file:
43 with tempfile.NamedTemporaryFile(mode='w') as temp_file:
289 temp_file.write("[app:main]\ncelery.broker_url = redis://redis/0\n"
44 temp_file.write(
290 "celery.result_backend = redis://redis/0")
45 "[app:main]\n"
46 "celery.broker_url = redis://redis/0\n"
47 "celery.result_backend = redis://redis/0\n"
48 )
291 temp_file.flush()
49 temp_file.flush()
292 expected_extras = {'config': temp_file.name}
50 expected_extras = {'config': temp_file.name}
293 callback, extras = hook_base.prepare_callback_daemon(
51 callback, extras = hooks_utils.prepare_callback_daemon(expected_extras, protocol=protocol)
294 expected_extras, protocol=protocol, host='')
295 assert isinstance(callback, celery_hooks_deamon.CeleryHooksCallbackDaemon)
52 assert isinstance(callback, celery_hooks_deamon.CeleryHooksCallbackDaemon)
296
53
297 @pytest.mark.parametrize('protocol, expected_class', (
54 @pytest.mark.parametrize('protocol, expected_class', (
298 ('http', http_hooks_deamon.HttpHooksCallbackDaemon),
55 ('celery', celery_hooks_deamon.CeleryHooksCallbackDaemon),
299 ))
56 ))
300 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
57 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(self, protocol, expected_class):
301 self, protocol, expected_class):
58
302 expected_extras = {
59 with tempfile.NamedTemporaryFile(mode='w') as temp_file:
303 'extra1': 'value1',
60 temp_file.write(
304 'txn_id': 'txnid2',
61 "[app:main]\n"
305 'hooks_protocol': protocol.lower(),
62 "celery.broker_url = redis://redis:6379/0\n"
306 'task_backend': '',
63 "celery.result_backend = redis://redis:6379/0\n"
307 'task_queue': '',
64 )
308 'repo_store': '/var/opt/rhodecode_repo_store',
65 temp_file.flush()
309 'repository': 'rhodecode',
66
310 }
67 expected_extras = {
311 from rhodecode import CONFIG
68 'extra1': 'value1',
312 CONFIG['vcs.svn.redis_conn'] = 'redis://redis:6379/0'
69 'txn_id': 'txnid2',
313 callback, extras = hook_base.prepare_callback_daemon(
70 'hooks_protocol': protocol.lower(),
314 expected_extras.copy(), protocol=protocol, host='127.0.0.1',
71 'hooks_config': {
315 txn_id='txnid2')
72 'broker_url': 'redis://redis:6379/0',
316 assert isinstance(callback, expected_class)
73 'result_backend': 'redis://redis:6379/0',
317 extras.pop('hooks_uri')
74 },
318 expected_extras['time'] = extras['time']
75 'repo_store': '/var/opt/rhodecode_repo_store',
319 assert extras == expected_extras
76 'repository': 'rhodecode',
77 'config': temp_file.name
78 }
79 from rhodecode import CONFIG
80 CONFIG['vcs.svn.redis_conn'] = 'redis://redis:6379/0'
81 callback, extras = hooks_utils.prepare_callback_daemon(expected_extras.copy(), protocol=protocol,txn_id='txnid2')
82 assert isinstance(callback, expected_class)
83 expected_extras['time'] = extras['time']
84 assert extras == expected_extras
320
85
321 @pytest.mark.parametrize('protocol', (
86 @pytest.mark.parametrize('protocol', (
322 'invalid',
87 'invalid',
@@ -330,35 +95,4 b' class TestPrepareHooksDaemon(object):'
330 'hooks_protocol': protocol.lower()
95 'hooks_protocol': protocol.lower()
331 }
96 }
332 with pytest.raises(Exception):
97 with pytest.raises(Exception):
333 callback, extras = hook_base.prepare_callback_daemon(
98 callback, extras = hooks_utils.prepare_callback_daemon(expected_extras.copy(), protocol=protocol)
334 expected_extras.copy(),
335 protocol=protocol, host='127.0.0.1')
336
337
338 class MockRequest(object):
339
340 def __init__(self, request):
341 self.request = request
342 self.input_stream = io.BytesIO(safe_bytes(self.request))
343 self.output_stream = io.BytesIO() # make it un-closable for testing invesitagion
344 self.output_stream.close = lambda: None
345
346 def makefile(self, mode, *args, **kwargs):
347 return self.output_stream if mode == 'wb' else self.input_stream
348
349
350 class MockServer(object):
351
352 def __init__(self, handler_cls, request):
353 ip_port = ('0.0.0.0', 8888)
354 self.request = MockRequest(request)
355 self.server_address = ip_port
356 self.handler = handler_cls(self.request, ip_port, self)
357
358
359 @pytest.fixture()
360 def tcp_server():
361 server = mock.Mock()
362 server.server_address = ('127.0.0.1', 8881)
363 server.wbufsize = 1024
364 return server
@@ -33,7 +33,7 b' from rhodecode.model import meta'
33 from rhodecode.model.repo import RepoModel
33 from rhodecode.model.repo import RepoModel
34 from rhodecode.model.repo_group import RepoGroupModel
34 from rhodecode.model.repo_group import RepoGroupModel
35 from rhodecode.model.settings import UiSetting, SettingsModel
35 from rhodecode.model.settings import UiSetting, SettingsModel
36 from rhodecode.tests.fixture import Fixture
36 from rhodecode.tests.fixtures.rc_fixture import Fixture
37 from rhodecode_tools.lib.hash_utils import md5_safe
37 from rhodecode_tools.lib.hash_utils import md5_safe
38 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
39
39
@@ -403,12 +403,9 b' class TestPrepareConfigData(object):'
403
403
404 self._assert_repo_name_passed(model_mock, repo_name)
404 self._assert_repo_name_passed(model_mock, repo_name)
405
405
406 expected_result = [
406 assert ('section1', 'option1', 'value1') in result
407 ('section1', 'option1', 'value1'),
407 assert ('section2', 'option2', 'value2') in result
408 ('section2', 'option2', 'value2'),
408 assert ('section3', 'option3', 'value3') not in result
409 ]
410 # We have extra config items returned, so we're ignoring two last items
411 assert result[:2] == expected_result
412
409
413 def _assert_repo_name_passed(self, model_mock, repo_name):
410 def _assert_repo_name_passed(self, model_mock, repo_name):
414 assert model_mock.call_count == 1
411 assert model_mock.call_count == 1
@@ -25,7 +25,7 b' It works by replaying a group of commits'
25
25
26 import argparse
26 import argparse
27 import collections
27 import collections
28 import ConfigParser
28 import configparser
29 import functools
29 import functools
30 import itertools
30 import itertools
31 import os
31 import os
@@ -294,7 +294,7 b' class HgMixin(object):'
294 def add_remote(self, repo, remote_url, remote_name='upstream'):
294 def add_remote(self, repo, remote_url, remote_name='upstream'):
295 self.remove_remote(repo, remote_name)
295 self.remove_remote(repo, remote_name)
296 os.chdir(repo)
296 os.chdir(repo)
297 hgrc = ConfigParser.RawConfigParser()
297 hgrc = configparser.RawConfigParser()
298 hgrc.read('.hg/hgrc')
298 hgrc.read('.hg/hgrc')
299 hgrc.set('paths', remote_name, remote_url)
299 hgrc.set('paths', remote_name, remote_url)
300 with open('.hg/hgrc', 'w') as f:
300 with open('.hg/hgrc', 'w') as f:
@@ -303,7 +303,7 b' class HgMixin(object):'
303 @keep_cwd
303 @keep_cwd
304 def remove_remote(self, repo, remote_name='upstream'):
304 def remove_remote(self, repo, remote_name='upstream'):
305 os.chdir(repo)
305 os.chdir(repo)
306 hgrc = ConfigParser.RawConfigParser()
306 hgrc = configparser.RawConfigParser()
307 hgrc.read('.hg/hgrc')
307 hgrc.read('.hg/hgrc')
308 hgrc.remove_option('paths', remote_name)
308 hgrc.remove_option('paths', remote_name)
309 with open('.hg/hgrc', 'w') as f:
309 with open('.hg/hgrc', 'w') as f:
@@ -59,16 +59,6 b' def parse_options():'
59 parser.add_argument(
59 parser.add_argument(
60 '--interval', '-i', type=float, default=5,
60 '--interval', '-i', type=float, default=5,
61 help="Interval in secods.")
61 help="Interval in secods.")
62 parser.add_argument(
63 '--appenlight', '--ae', action='store_true')
64 parser.add_argument(
65 '--appenlight-url', '--ae-url',
66 default='https://ae.rhodecode.com/api/logs',
67 help='URL of the Appenlight API endpoint, defaults to "%(default)s".')
68 parser.add_argument(
69 '--appenlight-api-key', '--ae-key',
70 help='API key to use when sending data to appenlight. This has to be '
71 'set if Appenlight is enabled.')
72 return parser.parse_args()
62 return parser.parse_args()
73
63
74
64
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -1,5 +1,3 b''
1
2
3 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -22,7 +22,7 b' from rhodecode.model.meta import Session'
22 from rhodecode.model.repo_group import RepoGroupModel
22 from rhodecode.model.repo_group import RepoGroupModel
23 from rhodecode.model.repo import RepoModel
23 from rhodecode.model.repo import RepoModel
24 from rhodecode.model.user import UserModel
24 from rhodecode.model.user import UserModel
25 from rhodecode.tests.fixture import Fixture
25 from rhodecode.tests.fixtures.rc_fixture import Fixture
26
26
27
27
28 fixture = Fixture()
28 fixture = Fixture()
@@ -19,7 +19,7 b''
19
19
20 import pytest
20 import pytest
21
21
22 from rhodecode.tests.fixture import Fixture
22 from rhodecode.tests.fixtures.rc_fixture import Fixture
23
23
24 from rhodecode.model.db import User, Notification, UserNotification
24 from rhodecode.model.db import User, Notification, UserNotification
25 from rhodecode.model.meta import Session
25 from rhodecode.model.meta import Session
@@ -29,7 +29,7 b' from rhodecode.model.repo import RepoMod'
29 from rhodecode.model.repo_group import RepoGroupModel
29 from rhodecode.model.repo_group import RepoGroupModel
30 from rhodecode.model.user import UserModel
30 from rhodecode.model.user import UserModel
31 from rhodecode.model.user_group import UserGroupModel
31 from rhodecode.model.user_group import UserGroupModel
32 from rhodecode.tests.fixture import Fixture
32 from rhodecode.tests.fixtures.rc_fixture import Fixture
33
33
34
34
35 fixture = Fixture()
35 fixture = Fixture()
This diff has been collapsed as it changes many lines, (785 lines changed) Show them Hide them
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -16,6 +15,7 b''
16 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 import os
19
19
20 import mock
20 import mock
21 import pytest
21 import pytest
@@ -23,8 +23,7 b' import textwrap'
23
23
24 import rhodecode
24 import rhodecode
25 from rhodecode.lib.vcs.backends import get_backend
25 from rhodecode.lib.vcs.backends import get_backend
26 from rhodecode.lib.vcs.backends.base import (
26 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason, Reference
27 MergeResponse, MergeFailureReason, Reference)
28 from rhodecode.lib.vcs.exceptions import RepositoryError
27 from rhodecode.lib.vcs.exceptions import RepositoryError
29 from rhodecode.lib.vcs.nodes import FileNode
28 from rhodecode.lib.vcs.nodes import FileNode
30 from rhodecode.model.comment import CommentsModel
29 from rhodecode.model.comment import CommentsModel
@@ -39,54 +38,42 b' pytestmark = ['
39 ]
38 ]
40
39
41
40
42 @pytest.mark.usefixtures('config_stub')
41 @pytest.mark.usefixtures("config_stub")
43 class TestPullRequestModel(object):
42 class TestPullRequestModel(object):
44
45 @pytest.fixture()
43 @pytest.fixture()
46 def pull_request(self, request, backend, pr_util):
44 def pull_request(self, request, backend, pr_util):
47 """
45 """
48 A pull request combined with multiples patches.
46 A pull request combined with multiples patches.
49 """
47 """
50 BackendClass = get_backend(backend.alias)
48 BackendClass = get_backend(backend.alias)
51 merge_resp = MergeResponse(
49 merge_resp = MergeResponse(False, False, None, MergeFailureReason.UNKNOWN, metadata={"exception": "MockError"})
52 False, False, None, MergeFailureReason.UNKNOWN,
50 self.merge_patcher = mock.patch.object(BackendClass, "merge", return_value=merge_resp)
53 metadata={'exception': 'MockError'})
51 self.workspace_remove_patcher = mock.patch.object(BackendClass, "cleanup_merge_workspace")
54 self.merge_patcher = mock.patch.object(
55 BackendClass, 'merge', return_value=merge_resp)
56 self.workspace_remove_patcher = mock.patch.object(
57 BackendClass, 'cleanup_merge_workspace')
58
52
59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
53 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 self.merge_mock = self.merge_patcher.start()
54 self.merge_mock = self.merge_patcher.start()
61 self.comment_patcher = mock.patch(
55 self.comment_patcher = mock.patch("rhodecode.model.changeset_status.ChangesetStatusModel.set_status")
62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 self.comment_patcher.start()
56 self.comment_patcher.start()
64 self.notification_patcher = mock.patch(
57 self.notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create")
65 'rhodecode.model.notification.NotificationModel.create')
66 self.notification_patcher.start()
58 self.notification_patcher.start()
67 self.helper_patcher = mock.patch(
59 self.helper_patcher = mock.patch("rhodecode.lib.helpers.route_path")
68 'rhodecode.lib.helpers.route_path')
69 self.helper_patcher.start()
60 self.helper_patcher.start()
70
61
71 self.hook_patcher = mock.patch.object(PullRequestModel,
62 self.hook_patcher = mock.patch.object(PullRequestModel, "trigger_pull_request_hook")
72 'trigger_pull_request_hook')
73 self.hook_mock = self.hook_patcher.start()
63 self.hook_mock = self.hook_patcher.start()
74
64
75 self.invalidation_patcher = mock.patch(
65 self.invalidation_patcher = mock.patch("rhodecode.model.pull_request.ScmModel.mark_for_invalidation")
76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 self.invalidation_mock = self.invalidation_patcher.start()
66 self.invalidation_mock = self.invalidation_patcher.start()
78
67
79 self.pull_request = pr_util.create_pull_request(
68 self.pull_request = pr_util.create_pull_request(mergeable=True, name_suffix="Δ…Δ‡")
80 mergeable=True, name_suffix=u'Δ…Δ‡')
81 self.source_commit = self.pull_request.source_ref_parts.commit_id
69 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 self.target_commit = self.pull_request.target_ref_parts.commit_id
70 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
71 self.workspace_id = f"pr-{self.pull_request.pull_request_id}"
84 self.repo_id = self.pull_request.target_repo.repo_id
72 self.repo_id = self.pull_request.target_repo.repo_id
85
73
86 @request.addfinalizer
74 @request.addfinalizer
87 def cleanup_pull_request():
75 def cleanup_pull_request():
88 calls = [mock.call(
76 calls = [mock.call(self.pull_request, self.pull_request.author, "create")]
89 self.pull_request, self.pull_request.author, 'create')]
90 self.hook_mock.assert_has_calls(calls)
77 self.hook_mock.assert_has_calls(calls)
91
78
92 self.workspace_remove_patcher.stop()
79 self.workspace_remove_patcher.stop()
@@ -114,29 +101,30 b' class TestPullRequestModel(object):'
114 assert len(prs) == 1
101 assert len(prs) == 1
115
102
116 def test_count_awaiting_review(self, pull_request):
103 def test_count_awaiting_review(self, pull_request):
117 pr_count = PullRequestModel().count_awaiting_review(
104 pr_count = PullRequestModel().count_awaiting_review(pull_request.target_repo)
118 pull_request.target_repo)
119 assert pr_count == 1
105 assert pr_count == 1
120
106
121 def test_get_awaiting_my_review(self, pull_request):
107 def test_get_awaiting_my_review(self, pull_request):
122 PullRequestModel().update_reviewers(
108 PullRequestModel().update_reviewers(
123 pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
109 pull_request, [(pull_request.author, ["author"], False, "reviewer", [])], pull_request.author
124 pull_request.author)
110 )
125 Session().commit()
111 Session().commit()
126
112
127 prs = PullRequestModel().get_awaiting_my_review(
113 prs = PullRequestModel().get_awaiting_my_review(
128 pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
114 pull_request.target_repo.repo_name, user_id=pull_request.author.user_id
115 )
129 assert isinstance(prs, list)
116 assert isinstance(prs, list)
130 assert len(prs) == 1
117 assert len(prs) == 1
131
118
132 def test_count_awaiting_my_review(self, pull_request):
119 def test_count_awaiting_my_review(self, pull_request):
133 PullRequestModel().update_reviewers(
120 PullRequestModel().update_reviewers(
134 pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
121 pull_request, [(pull_request.author, ["author"], False, "reviewer", [])], pull_request.author
135 pull_request.author)
122 )
136 Session().commit()
123 Session().commit()
137
124
138 pr_count = PullRequestModel().count_awaiting_my_review(
125 pr_count = PullRequestModel().count_awaiting_my_review(
139 pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
126 pull_request.target_repo.repo_name, user_id=pull_request.author.user_id
127 )
140 assert pr_count == 1
128 assert pr_count == 1
141
129
142 def test_delete_calls_cleanup_merge(self, pull_request):
130 def test_delete_calls_cleanup_merge(self, pull_request):
@@ -144,24 +132,19 b' class TestPullRequestModel(object):'
144 PullRequestModel().delete(pull_request, pull_request.author)
132 PullRequestModel().delete(pull_request, pull_request.author)
145 Session().commit()
133 Session().commit()
146
134
147 self.workspace_remove_mock.assert_called_once_with(
135 self.workspace_remove_mock.assert_called_once_with(repo_id, self.workspace_id)
148 repo_id, self.workspace_id)
149
136
150 def test_close_calls_cleanup_and_hook(self, pull_request):
137 def test_close_calls_cleanup_and_hook(self, pull_request):
151 PullRequestModel().close_pull_request(
138 PullRequestModel().close_pull_request(pull_request, pull_request.author)
152 pull_request, pull_request.author)
153 Session().commit()
139 Session().commit()
154
140
155 repo_id = pull_request.target_repo.repo_id
141 repo_id = pull_request.target_repo.repo_id
156
142
157 self.workspace_remove_mock.assert_called_once_with(
143 self.workspace_remove_mock.assert_called_once_with(repo_id, self.workspace_id)
158 repo_id, self.workspace_id)
144 self.hook_mock.assert_called_with(self.pull_request, self.pull_request.author, "close")
159 self.hook_mock.assert_called_with(
160 self.pull_request, self.pull_request.author, 'close')
161
145
162 def test_merge_status(self, pull_request):
146 def test_merge_status(self, pull_request):
163 self.merge_mock.return_value = MergeResponse(
147 self.merge_mock.return_value = MergeResponse(True, False, None, MergeFailureReason.NONE)
164 True, False, None, MergeFailureReason.NONE)
165
148
166 assert pull_request._last_merge_source_rev is None
149 assert pull_request._last_merge_source_rev is None
167 assert pull_request._last_merge_target_rev is None
150 assert pull_request._last_merge_target_rev is None
@@ -169,13 +152,17 b' class TestPullRequestModel(object):'
169
152
170 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
153 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
171 assert status is True
154 assert status is True
172 assert msg == 'This pull request can be automatically merged.'
155 assert msg == "This pull request can be automatically merged."
173 self.merge_mock.assert_called_with(
156 self.merge_mock.assert_called_with(
174 self.repo_id, self.workspace_id,
157 self.repo_id,
158 self.workspace_id,
175 pull_request.target_ref_parts,
159 pull_request.target_ref_parts,
176 pull_request.source_repo.scm_instance(),
160 pull_request.source_repo.scm_instance(),
177 pull_request.source_ref_parts, dry_run=True,
161 pull_request.source_ref_parts,
178 use_rebase=False, close_branch=False)
162 dry_run=True,
163 use_rebase=False,
164 close_branch=False,
165 )
179
166
180 assert pull_request._last_merge_source_rev == self.source_commit
167 assert pull_request._last_merge_source_rev == self.source_commit
181 assert pull_request._last_merge_target_rev == self.target_commit
168 assert pull_request._last_merge_target_rev == self.target_commit
@@ -184,13 +171,13 b' class TestPullRequestModel(object):'
184 self.merge_mock.reset_mock()
171 self.merge_mock.reset_mock()
185 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
172 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
186 assert status is True
173 assert status is True
187 assert msg == 'This pull request can be automatically merged.'
174 assert msg == "This pull request can be automatically merged."
188 assert self.merge_mock.called is False
175 assert self.merge_mock.called is False
189
176
190 def test_merge_status_known_failure(self, pull_request):
177 def test_merge_status_known_failure(self, pull_request):
191 self.merge_mock.return_value = MergeResponse(
178 self.merge_mock.return_value = MergeResponse(
192 False, False, None, MergeFailureReason.MERGE_FAILED,
179 False, False, None, MergeFailureReason.MERGE_FAILED, metadata={"unresolved_files": "file1"}
193 metadata={'unresolved_files': 'file1'})
180 )
194
181
195 assert pull_request._last_merge_source_rev is None
182 assert pull_request._last_merge_source_rev is None
196 assert pull_request._last_merge_target_rev is None
183 assert pull_request._last_merge_target_rev is None
@@ -198,13 +185,17 b' class TestPullRequestModel(object):'
198
185
199 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
186 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
200 assert status is False
187 assert status is False
201 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
188 assert msg == "This pull request cannot be merged because of merge conflicts. file1"
202 self.merge_mock.assert_called_with(
189 self.merge_mock.assert_called_with(
203 self.repo_id, self.workspace_id,
190 self.repo_id,
191 self.workspace_id,
204 pull_request.target_ref_parts,
192 pull_request.target_ref_parts,
205 pull_request.source_repo.scm_instance(),
193 pull_request.source_repo.scm_instance(),
206 pull_request.source_ref_parts, dry_run=True,
194 pull_request.source_ref_parts,
207 use_rebase=False, close_branch=False)
195 dry_run=True,
196 use_rebase=False,
197 close_branch=False,
198 )
208
199
209 assert pull_request._last_merge_source_rev == self.source_commit
200 assert pull_request._last_merge_source_rev == self.source_commit
210 assert pull_request._last_merge_target_rev == self.target_commit
201 assert pull_request._last_merge_target_rev == self.target_commit
@@ -213,13 +204,13 b' class TestPullRequestModel(object):'
213 self.merge_mock.reset_mock()
204 self.merge_mock.reset_mock()
214 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
205 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
215 assert status is False
206 assert status is False
216 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
207 assert msg == "This pull request cannot be merged because of merge conflicts. file1"
217 assert self.merge_mock.called is False
208 assert self.merge_mock.called is False
218
209
219 def test_merge_status_unknown_failure(self, pull_request):
210 def test_merge_status_unknown_failure(self, pull_request):
220 self.merge_mock.return_value = MergeResponse(
211 self.merge_mock.return_value = MergeResponse(
221 False, False, None, MergeFailureReason.UNKNOWN,
212 False, False, None, MergeFailureReason.UNKNOWN, metadata={"exception": "MockError"}
222 metadata={'exception': 'MockError'})
213 )
223
214
224 assert pull_request._last_merge_source_rev is None
215 assert pull_request._last_merge_source_rev is None
225 assert pull_request._last_merge_target_rev is None
216 assert pull_request._last_merge_target_rev is None
@@ -227,15 +218,17 b' class TestPullRequestModel(object):'
227
218
228 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
219 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
229 assert status is False
220 assert status is False
230 assert msg == (
221 assert msg == "This pull request cannot be merged because of an unhandled exception. MockError"
231 'This pull request cannot be merged because of an unhandled exception. '
232 'MockError')
233 self.merge_mock.assert_called_with(
222 self.merge_mock.assert_called_with(
234 self.repo_id, self.workspace_id,
223 self.repo_id,
224 self.workspace_id,
235 pull_request.target_ref_parts,
225 pull_request.target_ref_parts,
236 pull_request.source_repo.scm_instance(),
226 pull_request.source_repo.scm_instance(),
237 pull_request.source_ref_parts, dry_run=True,
227 pull_request.source_ref_parts,
238 use_rebase=False, close_branch=False)
228 dry_run=True,
229 use_rebase=False,
230 close_branch=False,
231 )
239
232
240 assert pull_request._last_merge_source_rev is None
233 assert pull_request._last_merge_source_rev is None
241 assert pull_request._last_merge_target_rev is None
234 assert pull_request._last_merge_target_rev is None
@@ -244,155 +237,136 b' class TestPullRequestModel(object):'
244 self.merge_mock.reset_mock()
237 self.merge_mock.reset_mock()
245 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
238 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
246 assert status is False
239 assert status is False
247 assert msg == (
240 assert msg == "This pull request cannot be merged because of an unhandled exception. MockError"
248 'This pull request cannot be merged because of an unhandled exception. '
249 'MockError')
250 assert self.merge_mock.called is True
241 assert self.merge_mock.called is True
251
242
252 def test_merge_status_when_target_is_locked(self, pull_request):
243 def test_merge_status_when_target_is_locked(self, pull_request):
253 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
244 pull_request.target_repo.locked = [1, "12345.50", "lock_web"]
254 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
245 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
255 assert status is False
246 assert status is False
256 assert msg == (
247 assert msg == "This pull request cannot be merged because the target repository is locked by user:1."
257 'This pull request cannot be merged because the target repository '
258 'is locked by user:1.')
259
248
260 def test_merge_status_requirements_check_target(self, pull_request):
249 def test_merge_status_requirements_check_target(self, pull_request):
261
262 def has_largefiles(self, repo):
250 def has_largefiles(self, repo):
263 return repo == pull_request.source_repo
251 return repo == pull_request.source_repo
264
252
265 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
253 patcher = mock.patch.object(PullRequestModel, "_has_largefiles", has_largefiles)
266 with patcher:
254 with patcher:
267 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
255 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
268
256
269 assert status is False
257 assert status is False
270 assert msg == 'Target repository large files support is disabled.'
258 assert msg == "Target repository large files support is disabled."
271
259
272 def test_merge_status_requirements_check_source(self, pull_request):
260 def test_merge_status_requirements_check_source(self, pull_request):
273
274 def has_largefiles(self, repo):
261 def has_largefiles(self, repo):
275 return repo == pull_request.target_repo
262 return repo == pull_request.target_repo
276
263
277 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
264 patcher = mock.patch.object(PullRequestModel, "_has_largefiles", has_largefiles)
278 with patcher:
265 with patcher:
279 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
266 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
280
267
281 assert status is False
268 assert status is False
282 assert msg == 'Source repository large files support is disabled.'
269 assert msg == "Source repository large files support is disabled."
283
270
284 def test_merge(self, pull_request, merge_extras):
271 def test_merge(self, pull_request, merge_extras):
285 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
272 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
286 merge_ref = Reference(
273 merge_ref = Reference("type", "name", "6126b7bfcc82ad2d3deaee22af926b082ce54cc6")
287 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
274 self.merge_mock.return_value = MergeResponse(True, True, merge_ref, MergeFailureReason.NONE)
288 self.merge_mock.return_value = MergeResponse(
289 True, True, merge_ref, MergeFailureReason.NONE)
290
275
291 merge_extras['repository'] = pull_request.target_repo.repo_name
276 merge_extras["repository"] = pull_request.target_repo.repo_name
292 PullRequestModel().merge_repo(
277 PullRequestModel().merge_repo(pull_request, pull_request.author, extras=merge_extras)
293 pull_request, pull_request.author, extras=merge_extras)
294 Session().commit()
278 Session().commit()
295
279
296 message = (
280 message = "Merge pull request !{pr_id} from {source_repo} {source_ref_name}" "\n\n {pr_title}".format(
297 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
281 pr_id=pull_request.pull_request_id,
298 u'\n\n {pr_title}'.format(
282 source_repo=safe_str(pull_request.source_repo.scm_instance().name),
299 pr_id=pull_request.pull_request_id,
283 source_ref_name=pull_request.source_ref_parts.name,
300 source_repo=safe_str(
284 pr_title=safe_str(pull_request.title),
301 pull_request.source_repo.scm_instance().name),
302 source_ref_name=pull_request.source_ref_parts.name,
303 pr_title=safe_str(pull_request.title)
304 )
305 )
285 )
306 self.merge_mock.assert_called_with(
286 self.merge_mock.assert_called_with(
307 self.repo_id, self.workspace_id,
287 self.repo_id,
288 self.workspace_id,
308 pull_request.target_ref_parts,
289 pull_request.target_ref_parts,
309 pull_request.source_repo.scm_instance(),
290 pull_request.source_repo.scm_instance(),
310 pull_request.source_ref_parts,
291 pull_request.source_ref_parts,
311 user_name=user.short_contact, user_email=user.email, message=message,
292 user_name=user.short_contact,
312 use_rebase=False, close_branch=False
293 user_email=user.email,
294 message=message,
295 use_rebase=False,
296 close_branch=False,
313 )
297 )
314 self.invalidation_mock.assert_called_once_with(
298 self.invalidation_mock.assert_called_once_with(pull_request.target_repo.repo_name)
315 pull_request.target_repo.repo_name)
316
299
317 self.hook_mock.assert_called_with(
300 self.hook_mock.assert_called_with(self.pull_request, self.pull_request.author, "merge")
318 self.pull_request, self.pull_request.author, 'merge')
319
301
320 pull_request = PullRequest.get(pull_request.pull_request_id)
302 pull_request = PullRequest.get(pull_request.pull_request_id)
321 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
303 assert pull_request.merge_rev == "6126b7bfcc82ad2d3deaee22af926b082ce54cc6"
322
304
323 def test_merge_with_status_lock(self, pull_request, merge_extras):
305 def test_merge_with_status_lock(self, pull_request, merge_extras):
324 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
306 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
325 merge_ref = Reference(
307 merge_ref = Reference("type", "name", "6126b7bfcc82ad2d3deaee22af926b082ce54cc6")
326 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
308 self.merge_mock.return_value = MergeResponse(True, True, merge_ref, MergeFailureReason.NONE)
327 self.merge_mock.return_value = MergeResponse(
328 True, True, merge_ref, MergeFailureReason.NONE)
329
309
330 merge_extras['repository'] = pull_request.target_repo.repo_name
310 merge_extras["repository"] = pull_request.target_repo.repo_name
331
311
332 with pull_request.set_state(PullRequest.STATE_UPDATING):
312 with pull_request.set_state(PullRequest.STATE_UPDATING):
333 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
313 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
334 PullRequestModel().merge_repo(
314 PullRequestModel().merge_repo(pull_request, pull_request.author, extras=merge_extras)
335 pull_request, pull_request.author, extras=merge_extras)
336 Session().commit()
315 Session().commit()
337
316
338 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
317 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
339
318
340 message = (
319 message = "Merge pull request !{pr_id} from {source_repo} {source_ref_name}" "\n\n {pr_title}".format(
341 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
320 pr_id=pull_request.pull_request_id,
342 u'\n\n {pr_title}'.format(
321 source_repo=safe_str(pull_request.source_repo.scm_instance().name),
343 pr_id=pull_request.pull_request_id,
322 source_ref_name=pull_request.source_ref_parts.name,
344 source_repo=safe_str(
323 pr_title=safe_str(pull_request.title),
345 pull_request.source_repo.scm_instance().name),
346 source_ref_name=pull_request.source_ref_parts.name,
347 pr_title=safe_str(pull_request.title)
348 )
349 )
324 )
350 self.merge_mock.assert_called_with(
325 self.merge_mock.assert_called_with(
351 self.repo_id, self.workspace_id,
326 self.repo_id,
327 self.workspace_id,
352 pull_request.target_ref_parts,
328 pull_request.target_ref_parts,
353 pull_request.source_repo.scm_instance(),
329 pull_request.source_repo.scm_instance(),
354 pull_request.source_ref_parts,
330 pull_request.source_ref_parts,
355 user_name=user.short_contact, user_email=user.email, message=message,
331 user_name=user.short_contact,
356 use_rebase=False, close_branch=False
332 user_email=user.email,
333 message=message,
334 use_rebase=False,
335 close_branch=False,
357 )
336 )
358 self.invalidation_mock.assert_called_once_with(
337 self.invalidation_mock.assert_called_once_with(pull_request.target_repo.repo_name)
359 pull_request.target_repo.repo_name)
360
338
361 self.hook_mock.assert_called_with(
339 self.hook_mock.assert_called_with(self.pull_request, self.pull_request.author, "merge")
362 self.pull_request, self.pull_request.author, 'merge')
363
340
364 pull_request = PullRequest.get(pull_request.pull_request_id)
341 pull_request = PullRequest.get(pull_request.pull_request_id)
365 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
342 assert pull_request.merge_rev == "6126b7bfcc82ad2d3deaee22af926b082ce54cc6"
366
343
367 def test_merge_failed(self, pull_request, merge_extras):
344 def test_merge_failed(self, pull_request, merge_extras):
368 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
345 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
369 merge_ref = Reference(
346 merge_ref = Reference("type", "name", "6126b7bfcc82ad2d3deaee22af926b082ce54cc6")
370 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
347 self.merge_mock.return_value = MergeResponse(False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
371 self.merge_mock.return_value = MergeResponse(
372 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
373
348
374 merge_extras['repository'] = pull_request.target_repo.repo_name
349 merge_extras["repository"] = pull_request.target_repo.repo_name
375 PullRequestModel().merge_repo(
350 PullRequestModel().merge_repo(pull_request, pull_request.author, extras=merge_extras)
376 pull_request, pull_request.author, extras=merge_extras)
377 Session().commit()
351 Session().commit()
378
352
379 message = (
353 message = "Merge pull request !{pr_id} from {source_repo} {source_ref_name}" "\n\n {pr_title}".format(
380 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
354 pr_id=pull_request.pull_request_id,
381 u'\n\n {pr_title}'.format(
355 source_repo=safe_str(pull_request.source_repo.scm_instance().name),
382 pr_id=pull_request.pull_request_id,
356 source_ref_name=pull_request.source_ref_parts.name,
383 source_repo=safe_str(
357 pr_title=safe_str(pull_request.title),
384 pull_request.source_repo.scm_instance().name),
385 source_ref_name=pull_request.source_ref_parts.name,
386 pr_title=safe_str(pull_request.title)
387 )
388 )
358 )
389 self.merge_mock.assert_called_with(
359 self.merge_mock.assert_called_with(
390 self.repo_id, self.workspace_id,
360 self.repo_id,
361 self.workspace_id,
391 pull_request.target_ref_parts,
362 pull_request.target_ref_parts,
392 pull_request.source_repo.scm_instance(),
363 pull_request.source_repo.scm_instance(),
393 pull_request.source_ref_parts,
364 pull_request.source_ref_parts,
394 user_name=user.short_contact, user_email=user.email, message=message,
365 user_name=user.short_contact,
395 use_rebase=False, close_branch=False
366 user_email=user.email,
367 message=message,
368 use_rebase=False,
369 close_branch=False,
396 )
370 )
397
371
398 pull_request = PullRequest.get(pull_request.pull_request_id)
372 pull_request = PullRequest.get(pull_request.pull_request_id)
@@ -410,7 +384,7 b' class TestPullRequestModel(object):'
410 assert commit_ids == pull_request.revisions
384 assert commit_ids == pull_request.revisions
411
385
412 # Merge revision is not in the revisions list
386 # Merge revision is not in the revisions list
413 pull_request.merge_rev = 'f000' * 10
387 pull_request.merge_rev = "f000" * 10
414 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
388 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
415 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
389 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
416
390
@@ -419,147 +393,126 b' class TestPullRequestModel(object):'
419 source_ref_id = pull_request.source_ref_parts.commit_id
393 source_ref_id = pull_request.source_ref_parts.commit_id
420 target_ref_id = pull_request.target_ref_parts.commit_id
394 target_ref_id = pull_request.target_ref_parts.commit_id
421 diff = PullRequestModel()._get_diff_from_pr_or_version(
395 diff = PullRequestModel()._get_diff_from_pr_or_version(
422 source_repo, source_ref_id, target_ref_id,
396 source_repo, source_ref_id, target_ref_id, hide_whitespace_changes=False, diff_context=6
423 hide_whitespace_changes=False, diff_context=6)
397 )
424 assert b'file_1' in diff.raw.tobytes()
398 assert b"file_1" in diff.raw.tobytes()
425
399
426 def test_generate_title_returns_unicode(self):
400 def test_generate_title_returns_unicode(self):
427 title = PullRequestModel().generate_pullrequest_title(
401 title = PullRequestModel().generate_pullrequest_title(
428 source='source-dummy',
402 source="source-dummy",
429 source_ref='source-ref-dummy',
403 source_ref="source-ref-dummy",
430 target='target-dummy',
404 target="target-dummy",
431 )
405 )
432 assert type(title) == str
406 assert type(title) == str
433
407
434 @pytest.mark.parametrize('title, has_wip', [
408 @pytest.mark.parametrize(
435 ('hello', False),
409 "title, has_wip",
436 ('hello wip', False),
410 [
437 ('hello wip: xxx', False),
411 ("hello", False),
438 ('[wip] hello', True),
412 ("hello wip", False),
439 ('[wip] hello', True),
413 ("hello wip: xxx", False),
440 ('wip: hello', True),
414 ("[wip] hello", True),
441 ('wip hello', True),
415 ("[wip] hello", True),
442
416 ("wip: hello", True),
443 ])
417 ("wip hello", True),
418 ],
419 )
444 def test_wip_title_marker(self, pull_request, title, has_wip):
420 def test_wip_title_marker(self, pull_request, title, has_wip):
445 pull_request.title = title
421 pull_request.title = title
446 assert pull_request.work_in_progress == has_wip
422 assert pull_request.work_in_progress == has_wip
447
423
448
424
449 @pytest.mark.usefixtures('config_stub')
425 @pytest.mark.usefixtures("config_stub")
450 class TestIntegrationMerge(object):
426 class TestIntegrationMerge(object):
451 @pytest.mark.parametrize('extra_config', (
452 {'vcs.hooks.protocol.v2': 'celery', 'vcs.hooks.direct_calls': False},
453 ))
454 def test_merge_triggers_push_hooks(
455 self, pr_util, user_admin, capture_rcextensions, merge_extras,
456 extra_config):
457
458 pull_request = pr_util.create_pull_request(
459 approved=True, mergeable=True)
460 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
461 merge_extras['repository'] = pull_request.target_repo.repo_name
462 Session().commit()
463
464 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
465 merge_state = PullRequestModel().merge_repo(
466 pull_request, user_admin, extras=merge_extras)
467 Session().commit()
468
469 assert merge_state.executed
470 assert '_pre_push_hook' in capture_rcextensions
471 assert '_push_hook' in capture_rcextensions
472
427
473 def test_merge_can_be_rejected_by_pre_push_hook(
428 def test_merge_fails_if_target_is_locked(self, pr_util, user_regular, merge_extras):
474 self, pr_util, user_admin, capture_rcextensions, merge_extras):
429 pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
475 pull_request = pr_util.create_pull_request(
430 locked_by = [user_regular.user_id + 1, 12345.50, "lock_web"]
476 approved=True, mergeable=True)
477 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
478 merge_extras['repository'] = pull_request.target_repo.repo_name
479 Session().commit()
480
481 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
482 pre_pull.side_effect = RepositoryError("Disallow push!")
483 merge_status = PullRequestModel().merge_repo(
484 pull_request, user_admin, extras=merge_extras)
485 Session().commit()
486
487 assert not merge_status.executed
488 assert 'pre_push' not in capture_rcextensions
489 assert 'post_push' not in capture_rcextensions
490
491 def test_merge_fails_if_target_is_locked(
492 self, pr_util, user_regular, merge_extras):
493 pull_request = pr_util.create_pull_request(
494 approved=True, mergeable=True)
495 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
496 pull_request.target_repo.locked = locked_by
431 pull_request.target_repo.locked = locked_by
497 # TODO: johbo: Check if this can work based on the database, currently
432 # TODO: johbo: Check if this can work based on the database, currently
498 # all data is pre-computed, that's why just updating the DB is not
433 # all data is pre-computed, that's why just updating the DB is not
499 # enough.
434 # enough.
500 merge_extras['locked_by'] = locked_by
435 merge_extras["locked_by"] = locked_by
501 merge_extras['repository'] = pull_request.target_repo.repo_name
436 merge_extras["repository"] = pull_request.target_repo.repo_name
502 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
437 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
503 Session().commit()
438 Session().commit()
504 merge_status = PullRequestModel().merge_repo(
439 merge_status = PullRequestModel().merge_repo(pull_request, user_regular, extras=merge_extras)
505 pull_request, user_regular, extras=merge_extras)
506 Session().commit()
440 Session().commit()
507
441
508 assert not merge_status.executed
442 assert not merge_status.executed
509
443
510
444
511 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
445 @pytest.mark.parametrize(
512 (False, 1, 0),
446 "use_outdated, inlines_count, outdated_count",
513 (True, 0, 1),
447 [
514 ])
448 (False, 1, 0),
515 def test_outdated_comments(
449 (True, 0, 1),
516 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
450 ],
451 )
452 def test_outdated_comments(pr_util, use_outdated, inlines_count, outdated_count, config_stub):
517 pull_request = pr_util.create_pull_request()
453 pull_request = pr_util.create_pull_request()
518 pr_util.create_inline_comment(file_path='not_in_updated_diff')
454 pr_util.create_inline_comment(file_path="not_in_updated_diff")
519
455
520 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
456 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
521 pr_util.add_one_commit()
457 pr_util.add_one_commit()
522 assert_inline_comments(
458 assert_inline_comments(pull_request, visible=inlines_count, outdated=outdated_count)
523 pull_request, visible=inlines_count, outdated=outdated_count)
524 outdated_comment_mock.assert_called_with(pull_request)
459 outdated_comment_mock.assert_called_with(pull_request)
525
460
526
461
527 @pytest.mark.parametrize('mr_type, expected_msg', [
462 @pytest.mark.parametrize(
528 (MergeFailureReason.NONE,
463 "mr_type, expected_msg",
529 'This pull request can be automatically merged.'),
464 [
530 (MergeFailureReason.UNKNOWN,
465 (MergeFailureReason.NONE, "This pull request can be automatically merged."),
531 'This pull request cannot be merged because of an unhandled exception. CRASH'),
466 (MergeFailureReason.UNKNOWN, "This pull request cannot be merged because of an unhandled exception. CRASH"),
532 (MergeFailureReason.MERGE_FAILED,
467 (
533 'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
468 MergeFailureReason.MERGE_FAILED,
534 (MergeFailureReason.PUSH_FAILED,
469 "This pull request cannot be merged because of merge conflicts. CONFLICT_FILE",
535 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
470 ),
536 (MergeFailureReason.TARGET_IS_NOT_HEAD,
471 (
537 'This pull request cannot be merged because the target `ref_name` is not a head.'),
472 MergeFailureReason.PUSH_FAILED,
538 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
473 "This pull request could not be merged because push to target:`some-repo@merge_commit` failed.",
539 'This pull request cannot be merged because the source contains more branches than the target.'),
474 ),
540 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
475 (
541 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
476 MergeFailureReason.TARGET_IS_NOT_HEAD,
542 (MergeFailureReason.TARGET_IS_LOCKED,
477 "This pull request cannot be merged because the target `ref_name` is not a head.",
543 'This pull request cannot be merged because the target repository is locked by user:123.'),
478 ),
544 (MergeFailureReason.MISSING_TARGET_REF,
479 (
545 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
480 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
546 (MergeFailureReason.MISSING_SOURCE_REF,
481 "This pull request cannot be merged because the source contains more branches than the target.",
547 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
482 ),
548 (MergeFailureReason.SUBREPO_MERGE_FAILED,
483 (
549 'This pull request cannot be merged because of conflicts related to sub repositories.'),
484 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
550
485 "This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.",
551 ])
486 ),
487 (
488 MergeFailureReason.TARGET_IS_LOCKED,
489 "This pull request cannot be merged because the target repository is locked by user:123.",
490 ),
491 (
492 MergeFailureReason.MISSING_TARGET_REF,
493 "This pull request cannot be merged because the target reference `ref_name` is missing.",
494 ),
495 (
496 MergeFailureReason.MISSING_SOURCE_REF,
497 "This pull request cannot be merged because the source reference `ref_name` is missing.",
498 ),
499 (
500 MergeFailureReason.SUBREPO_MERGE_FAILED,
501 "This pull request cannot be merged because of conflicts related to sub repositories.",
502 ),
503 ],
504 )
552 def test_merge_response_message(mr_type, expected_msg):
505 def test_merge_response_message(mr_type, expected_msg):
553 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
506 merge_ref = Reference("type", "ref_name", "6126b7bfcc82ad2d3deaee22af926b082ce54cc6")
554 metadata = {
507 metadata = {
555 'unresolved_files': 'CONFLICT_FILE',
508 "unresolved_files": "CONFLICT_FILE",
556 'exception': "CRASH",
509 "exception": "CRASH",
557 'target': 'some-repo',
510 "target": "some-repo",
558 'merge_commit': 'merge_commit',
511 "merge_commit": "merge_commit",
559 'target_ref': merge_ref,
512 "target_ref": merge_ref,
560 'source_ref': merge_ref,
513 "source_ref": merge_ref,
561 'heads': ','.join(['a', 'b', 'c']),
514 "heads": ",".join(["a", "b", "c"]),
562 'locked_by': 'user:123'
515 "locked_by": "user:123",
563 }
516 }
564
517
565 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
518 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
@@ -573,30 +526,28 b' def merge_extras(request, user_regular):'
573 """
526 """
574
527
575 extras = {
528 extras = {
576 'ip': '127.0.0.1',
529 "ip": "127.0.0.1",
577 'username': user_regular.username,
530 "username": user_regular.username,
578 'user_id': user_regular.user_id,
531 "user_id": user_regular.user_id,
579 'action': 'push',
532 "action": "push",
580 'repository': 'fake_target_repo_name',
533 "repository": "fake_target_repo_name",
581 'scm': 'git',
534 "scm": "git",
582 'config': request.config.getini('pyramid_config'),
535 "config": request.config.getini("pyramid_config"),
583 'repo_store': '',
536 "repo_store": "",
584 'make_lock': None,
537 "make_lock": None,
585 'locked_by': [None, None, None],
538 "locked_by": [None, None, None],
586 'server_url': 'http://test.example.com:5000',
539 "server_url": "http://test.example.com:5000",
587 'hooks': ['push', 'pull'],
540 "hooks": ["push", "pull"],
588 'is_shadow_repo': False,
541 "is_shadow_repo": False,
589 }
542 }
590 return extras
543 return extras
591
544
592
545
593 @pytest.mark.usefixtures('config_stub')
546 @pytest.mark.usefixtures("config_stub")
594 class TestUpdateCommentHandling(object):
547 class TestUpdateCommentHandling(object):
595
548 @pytest.fixture(autouse=True, scope="class")
596 @pytest.fixture(autouse=True, scope='class')
597 def enable_outdated_comments(self, request, baseapp):
549 def enable_outdated_comments(self, request, baseapp):
598 config_patch = mock.patch.dict(
550 config_patch = mock.patch.dict("rhodecode.CONFIG", {"rhodecode_use_outdated_comments": True})
599 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
600 config_patch.start()
551 config_patch.start()
601
552
602 @request.addfinalizer
553 @request.addfinalizer
@@ -605,206 +556,194 b' class TestUpdateCommentHandling(object):'
605
556
606 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
557 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
607 commits = [
558 commits = [
608 {'message': 'a'},
559 {"message": "a"},
609 {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]},
560 {"message": "b", "added": [FileNode(b"file_b", b"test_content\n")]},
610 {'message': 'c', 'added': [FileNode(b'file_c', b'test_content\n')]},
561 {"message": "c", "added": [FileNode(b"file_c", b"test_content\n")]},
611 ]
562 ]
612 pull_request = pr_util.create_pull_request(
563 pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"])
613 commits=commits, target_head='a', source_head='b', revisions=['b'])
564 pr_util.create_inline_comment(file_path="file_b")
614 pr_util.create_inline_comment(file_path='file_b')
565 pr_util.add_one_commit(head="c")
615 pr_util.add_one_commit(head='c')
616
566
617 assert_inline_comments(pull_request, visible=1, outdated=0)
567 assert_inline_comments(pull_request, visible=1, outdated=0)
618
568
619 def test_comment_stays_unflagged_on_change_above(self, pr_util):
569 def test_comment_stays_unflagged_on_change_above(self, pr_util):
620 original_content = b''.join((b'line %d\n' % x for x in range(1, 11)))
570 original_content = b"".join((b"line %d\n" % x for x in range(1, 11)))
621 updated_content = b'new_line_at_top\n' + original_content
571 updated_content = b"new_line_at_top\n" + original_content
622 commits = [
572 commits = [
623 {'message': 'a'},
573 {"message": "a"},
624 {'message': 'b', 'added': [FileNode(b'file_b', original_content)]},
574 {"message": "b", "added": [FileNode(b"file_b", original_content)]},
625 {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]},
575 {"message": "c", "changed": [FileNode(b"file_b", updated_content)]},
626 ]
576 ]
627 pull_request = pr_util.create_pull_request(
577 pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"])
628 commits=commits, target_head='a', source_head='b', revisions=['b'])
629
578
630 with outdated_comments_patcher():
579 with outdated_comments_patcher():
631 comment = pr_util.create_inline_comment(
580 comment = pr_util.create_inline_comment(line_no="n8", file_path="file_b")
632 line_no=u'n8', file_path='file_b')
581 pr_util.add_one_commit(head="c")
633 pr_util.add_one_commit(head='c')
634
582
635 assert_inline_comments(pull_request, visible=1, outdated=0)
583 assert_inline_comments(pull_request, visible=1, outdated=0)
636 assert comment.line_no == u'n9'
584 assert comment.line_no == "n9"
637
585
638 def test_comment_stays_unflagged_on_change_below(self, pr_util):
586 def test_comment_stays_unflagged_on_change_below(self, pr_util):
639 original_content = b''.join([b'line %d\n' % x for x in range(10)])
587 original_content = b"".join([b"line %d\n" % x for x in range(10)])
640 updated_content = original_content + b'new_line_at_end\n'
588 updated_content = original_content + b"new_line_at_end\n"
641 commits = [
589 commits = [
642 {'message': 'a'},
590 {"message": "a"},
643 {'message': 'b', 'added': [FileNode(b'file_b', original_content)]},
591 {"message": "b", "added": [FileNode(b"file_b", original_content)]},
644 {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]},
592 {"message": "c", "changed": [FileNode(b"file_b", updated_content)]},
645 ]
593 ]
646 pull_request = pr_util.create_pull_request(
594 pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"])
647 commits=commits, target_head='a', source_head='b', revisions=['b'])
595 pr_util.create_inline_comment(file_path="file_b")
648 pr_util.create_inline_comment(file_path='file_b')
596 pr_util.add_one_commit(head="c")
649 pr_util.add_one_commit(head='c')
650
597
651 assert_inline_comments(pull_request, visible=1, outdated=0)
598 assert_inline_comments(pull_request, visible=1, outdated=0)
652
599
653 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
600 @pytest.mark.parametrize("line_no", ["n4", "o4", "n10", "o9"])
654 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
601 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
655 base_lines = [b'line %d\n' % x for x in range(1, 13)]
602 base_lines = [b"line %d\n" % x for x in range(1, 13)]
656 change_lines = list(base_lines)
603 change_lines = list(base_lines)
657 change_lines.insert(6, b'line 6a added\n')
604 change_lines.insert(6, b"line 6a added\n")
658
605
659 # Changes on the last line of sight
606 # Changes on the last line of sight
660 update_lines = list(change_lines)
607 update_lines = list(change_lines)
661 update_lines[0] = b'line 1 changed\n'
608 update_lines[0] = b"line 1 changed\n"
662 update_lines[-1] = b'line 12 changed\n'
609 update_lines[-1] = b"line 12 changed\n"
663
610
664 def file_b(lines):
611 def file_b(lines):
665 return FileNode(b'file_b', b''.join(lines))
612 return FileNode(b"file_b", b"".join(lines))
666
613
667 commits = [
614 commits = [
668 {'message': 'a', 'added': [file_b(base_lines)]},
615 {"message": "a", "added": [file_b(base_lines)]},
669 {'message': 'b', 'changed': [file_b(change_lines)]},
616 {"message": "b", "changed": [file_b(change_lines)]},
670 {'message': 'c', 'changed': [file_b(update_lines)]},
617 {"message": "c", "changed": [file_b(update_lines)]},
671 ]
618 ]
672
619
673 pull_request = pr_util.create_pull_request(
620 pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"])
674 commits=commits, target_head='a', source_head='b', revisions=['b'])
621 pr_util.create_inline_comment(line_no=line_no, file_path="file_b")
675 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
676
622
677 with outdated_comments_patcher():
623 with outdated_comments_patcher():
678 pr_util.add_one_commit(head='c')
624 pr_util.add_one_commit(head="c")
679 assert_inline_comments(pull_request, visible=0, outdated=1)
625 assert_inline_comments(pull_request, visible=0, outdated=1)
680
626
681 @pytest.mark.parametrize("change, content", [
627 @pytest.mark.parametrize(
682 ('changed', b'changed\n'),
628 "change, content",
683 ('removed', b''),
629 [
684 ], ids=['changed', b'removed'])
630 ("changed", b"changed\n"),
631 ("removed", b""),
632 ],
633 ids=["changed", b"removed"],
634 )
685 def test_comment_flagged_on_change(self, pr_util, change, content):
635 def test_comment_flagged_on_change(self, pr_util, change, content):
686 commits = [
636 commits = [
687 {'message': 'a'},
637 {"message": "a"},
688 {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]},
638 {"message": "b", "added": [FileNode(b"file_b", b"test_content\n")]},
689 {'message': 'c', change: [FileNode(b'file_b', content)]},
639 {"message": "c", change: [FileNode(b"file_b", content)]},
690 ]
640 ]
691 pull_request = pr_util.create_pull_request(
641 pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"])
692 commits=commits, target_head='a', source_head='b', revisions=['b'])
642 pr_util.create_inline_comment(file_path="file_b")
693 pr_util.create_inline_comment(file_path='file_b')
694
643
695 with outdated_comments_patcher():
644 with outdated_comments_patcher():
696 pr_util.add_one_commit(head='c')
645 pr_util.add_one_commit(head="c")
697 assert_inline_comments(pull_request, visible=0, outdated=1)
646 assert_inline_comments(pull_request, visible=0, outdated=1)
698
647
699
648
700 @pytest.mark.usefixtures('config_stub')
649 @pytest.mark.usefixtures("config_stub")
701 class TestUpdateChangedFiles(object):
650 class TestUpdateChangedFiles(object):
702
703 def test_no_changes_on_unchanged_diff(self, pr_util):
651 def test_no_changes_on_unchanged_diff(self, pr_util):
704 commits = [
652 commits = [
705 {'message': 'a'},
653 {"message": "a"},
706 {'message': 'b',
654 {"message": "b", "added": [FileNode(b"file_b", b"test_content b\n")]},
707 'added': [FileNode(b'file_b', b'test_content b\n')]},
655 {"message": "c", "added": [FileNode(b"file_c", b"test_content c\n")]},
708 {'message': 'c',
709 'added': [FileNode(b'file_c', b'test_content c\n')]},
710 ]
656 ]
711 # open a PR from a to b, adding file_b
657 # open a PR from a to b, adding file_b
712 pull_request = pr_util.create_pull_request(
658 pull_request = pr_util.create_pull_request(
713 commits=commits, target_head='a', source_head='b', revisions=['b'],
659 commits=commits, target_head="a", source_head="b", revisions=["b"], name_suffix="per-file-review"
714 name_suffix='per-file-review')
660 )
715
661
716 # modify PR adding new file file_c
662 # modify PR adding new file file_c
717 pr_util.add_one_commit(head='c')
663 pr_util.add_one_commit(head="c")
718
664
719 assert_pr_file_changes(
665 assert_pr_file_changes(pull_request, added=["file_c"], modified=[], removed=[])
720 pull_request,
721 added=['file_c'],
722 modified=[],
723 removed=[])
724
666
725 def test_modify_and_undo_modification_diff(self, pr_util):
667 def test_modify_and_undo_modification_diff(self, pr_util):
726 commits = [
668 commits = [
727 {'message': 'a'},
669 {"message": "a"},
728 {'message': 'b',
670 {"message": "b", "added": [FileNode(b"file_b", b"test_content b\n")]},
729 'added': [FileNode(b'file_b', b'test_content b\n')]},
671 {"message": "c", "changed": [FileNode(b"file_b", b"test_content b modified\n")]},
730 {'message': 'c',
672 {"message": "d", "changed": [FileNode(b"file_b", b"test_content b\n")]},
731 'changed': [FileNode(b'file_b', b'test_content b modified\n')]},
732 {'message': 'd',
733 'changed': [FileNode(b'file_b', b'test_content b\n')]},
734 ]
673 ]
735 # open a PR from a to b, adding file_b
674 # open a PR from a to b, adding file_b
736 pull_request = pr_util.create_pull_request(
675 pull_request = pr_util.create_pull_request(
737 commits=commits, target_head='a', source_head='b', revisions=['b'],
676 commits=commits, target_head="a", source_head="b", revisions=["b"], name_suffix="per-file-review"
738 name_suffix='per-file-review')
677 )
739
678
740 # modify PR modifying file file_b
679 # modify PR modifying file file_b
741 pr_util.add_one_commit(head='c')
680 pr_util.add_one_commit(head="c")
742
681
743 assert_pr_file_changes(
682 assert_pr_file_changes(pull_request, added=[], modified=["file_b"], removed=[])
744 pull_request,
745 added=[],
746 modified=['file_b'],
747 removed=[])
748
683
749 # move the head again to d, which rollbacks change,
684 # move the head again to d, which rollbacks change,
750 # meaning we should indicate no changes
685 # meaning we should indicate no changes
751 pr_util.add_one_commit(head='d')
686 pr_util.add_one_commit(head="d")
752
687
753 assert_pr_file_changes(
688 assert_pr_file_changes(pull_request, added=[], modified=[], removed=[])
754 pull_request,
755 added=[],
756 modified=[],
757 removed=[])
758
689
759 def test_updated_all_files_in_pr(self, pr_util):
690 def test_updated_all_files_in_pr(self, pr_util):
760 commits = [
691 commits = [
761 {'message': 'a'},
692 {"message": "a"},
762 {'message': 'b', 'added': [
693 {
763 FileNode(b'file_a', b'test_content a\n'),
694 "message": "b",
764 FileNode(b'file_b', b'test_content b\n'),
695 "added": [
765 FileNode(b'file_c', b'test_content c\n')]},
696 FileNode(b"file_a", b"test_content a\n"),
766 {'message': 'c', 'changed': [
697 FileNode(b"file_b", b"test_content b\n"),
767 FileNode(b'file_a', b'test_content a changed\n'),
698 FileNode(b"file_c", b"test_content c\n"),
768 FileNode(b'file_b', b'test_content b changed\n'),
699 ],
769 FileNode(b'file_c', b'test_content c changed\n')]},
700 },
701 {
702 "message": "c",
703 "changed": [
704 FileNode(b"file_a", b"test_content a changed\n"),
705 FileNode(b"file_b", b"test_content b changed\n"),
706 FileNode(b"file_c", b"test_content c changed\n"),
707 ],
708 },
770 ]
709 ]
771 # open a PR from a to b, changing 3 files
710 # open a PR from a to b, changing 3 files
772 pull_request = pr_util.create_pull_request(
711 pull_request = pr_util.create_pull_request(
773 commits=commits, target_head='a', source_head='b', revisions=['b'],
712 commits=commits, target_head="a", source_head="b", revisions=["b"], name_suffix="per-file-review"
774 name_suffix='per-file-review')
713 )
775
776 pr_util.add_one_commit(head='c')
777
714
778 assert_pr_file_changes(
715 pr_util.add_one_commit(head="c")
779 pull_request,
716
780 added=[],
717 assert_pr_file_changes(pull_request, added=[], modified=["file_a", "file_b", "file_c"], removed=[])
781 modified=['file_a', 'file_b', 'file_c'],
782 removed=[])
783
718
784 def test_updated_and_removed_all_files_in_pr(self, pr_util):
719 def test_updated_and_removed_all_files_in_pr(self, pr_util):
785 commits = [
720 commits = [
786 {'message': 'a'},
721 {"message": "a"},
787 {'message': 'b', 'added': [
722 {
788 FileNode(b'file_a', b'test_content a\n'),
723 "message": "b",
789 FileNode(b'file_b', b'test_content b\n'),
724 "added": [
790 FileNode(b'file_c', b'test_content c\n')]},
725 FileNode(b"file_a", b"test_content a\n"),
791 {'message': 'c', 'removed': [
726 FileNode(b"file_b", b"test_content b\n"),
792 FileNode(b'file_a', b'test_content a changed\n'),
727 FileNode(b"file_c", b"test_content c\n"),
793 FileNode(b'file_b', b'test_content b changed\n'),
728 ],
794 FileNode(b'file_c', b'test_content c changed\n')]},
729 },
730 {
731 "message": "c",
732 "removed": [
733 FileNode(b"file_a", b"test_content a changed\n"),
734 FileNode(b"file_b", b"test_content b changed\n"),
735 FileNode(b"file_c", b"test_content c changed\n"),
736 ],
737 },
795 ]
738 ]
796 # open a PR from a to b, removing 3 files
739 # open a PR from a to b, removing 3 files
797 pull_request = pr_util.create_pull_request(
740 pull_request = pr_util.create_pull_request(
798 commits=commits, target_head='a', source_head='b', revisions=['b'],
741 commits=commits, target_head="a", source_head="b", revisions=["b"], name_suffix="per-file-review"
799 name_suffix='per-file-review')
742 )
800
801 pr_util.add_one_commit(head='c')
802
743
803 assert_pr_file_changes(
744 pr_util.add_one_commit(head="c")
804 pull_request,
745
805 added=[],
746 assert_pr_file_changes(pull_request, added=[], modified=[], removed=["file_a", "file_b", "file_c"])
806 modified=[],
807 removed=['file_a', 'file_b', 'file_c'])
808
747
809
748
810 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
749 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
@@ -866,8 +805,7 b' def test_update_adds_a_comment_to_the_pu'
866
805
867 .. |under_review| replace:: *"Under Review"*"""
806 .. |under_review| replace:: *"Under Review"*"""
868 ).format(commit_id[:12])
807 ).format(commit_id[:12])
869 pull_request_comments = sorted(
808 pull_request_comments = sorted(pull_request.comments, key=lambda c: c.modified_at)
870 pull_request.comments, key=lambda c: c.modified_at)
871 update_comment = pull_request_comments[-1]
809 update_comment = pull_request_comments[-1]
872 assert update_comment.text == expected_message
810 assert update_comment.text == expected_message
873
811
@@ -890,8 +828,8 b' def test_create_version_from_snapshot_up'
890 version = PullRequestModel()._create_version_from_snapshot(pull_request)
828 version = PullRequestModel()._create_version_from_snapshot(pull_request)
891
829
892 # Check attributes
830 # Check attributes
893 assert version.title == pr_util.create_parameters['title']
831 assert version.title == pr_util.create_parameters["title"]
894 assert version.description == pr_util.create_parameters['description']
832 assert version.description == pr_util.create_parameters["description"]
895 assert version.status == PullRequest.STATUS_CLOSED
833 assert version.status == PullRequest.STATUS_CLOSED
896
834
897 # versions get updated created_on
835 # versions get updated created_on
@@ -899,11 +837,11 b' def test_create_version_from_snapshot_up'
899
837
900 assert version.updated_on == updated_on
838 assert version.updated_on == updated_on
901 assert version.user_id == pull_request.user_id
839 assert version.user_id == pull_request.user_id
902 assert version.revisions == pr_util.create_parameters['revisions']
840 assert version.revisions == pr_util.create_parameters["revisions"]
903 assert version.source_repo == pr_util.source_repository
841 assert version.source_repo == pr_util.source_repository
904 assert version.source_ref == pr_util.create_parameters['source_ref']
842 assert version.source_ref == pr_util.create_parameters["source_ref"]
905 assert version.target_repo == pr_util.target_repository
843 assert version.target_repo == pr_util.target_repository
906 assert version.target_ref == pr_util.create_parameters['target_ref']
844 assert version.target_ref == pr_util.create_parameters["target_ref"]
907 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
845 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
908 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
846 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
909 assert version.last_merge_status == pull_request.last_merge_status
847 assert version.last_merge_status == pull_request.last_merge_status
@@ -921,15 +859,9 b' def test_link_comments_to_version_only_u'
921 Session().commit()
859 Session().commit()
922
860
923 # Expect that only the new comment is linked to version2
861 # Expect that only the new comment is linked to version2
924 assert (
862 assert comment_unlinked.pull_request_version_id == version2.pull_request_version_id
925 comment_unlinked.pull_request_version_id ==
863 assert comment_linked.pull_request_version_id == version1.pull_request_version_id
926 version2.pull_request_version_id)
864 assert comment_unlinked.pull_request_version_id != comment_linked.pull_request_version_id
927 assert (
928 comment_linked.pull_request_version_id ==
929 version1.pull_request_version_id)
930 assert (
931 comment_unlinked.pull_request_version_id !=
932 comment_linked.pull_request_version_id)
933
865
934
866
935 def test_calculate_commits():
867 def test_calculate_commits():
@@ -945,35 +877,26 b' def test_calculate_commits():'
945 def assert_inline_comments(pull_request, visible=None, outdated=None):
877 def assert_inline_comments(pull_request, visible=None, outdated=None):
946 if visible is not None:
878 if visible is not None:
947 inline_comments = CommentsModel().get_inline_comments(
879 inline_comments = CommentsModel().get_inline_comments(
948 pull_request.target_repo.repo_id, pull_request=pull_request)
880 pull_request.target_repo.repo_id, pull_request=pull_request
949 inline_cnt = len(CommentsModel().get_inline_comments_as_list(
881 )
950 inline_comments))
882 inline_cnt = len(CommentsModel().get_inline_comments_as_list(inline_comments))
951 assert inline_cnt == visible
883 assert inline_cnt == visible
952 if outdated is not None:
884 if outdated is not None:
953 outdated_comments = CommentsModel().get_outdated_comments(
885 outdated_comments = CommentsModel().get_outdated_comments(pull_request.target_repo.repo_id, pull_request)
954 pull_request.target_repo.repo_id, pull_request)
955 assert len(outdated_comments) == outdated
886 assert len(outdated_comments) == outdated
956
887
957
888
958 def assert_pr_file_changes(
889 def assert_pr_file_changes(pull_request, added=None, modified=None, removed=None):
959 pull_request, added=None, modified=None, removed=None):
960 pr_versions = PullRequestModel().get_versions(pull_request)
890 pr_versions = PullRequestModel().get_versions(pull_request)
961 # always use first version, ie original PR to calculate changes
891 # always use first version, ie original PR to calculate changes
962 pull_request_version = pr_versions[0]
892 pull_request_version = pr_versions[0]
963 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
893 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(pull_request, pull_request_version)
964 pull_request, pull_request_version)
894 file_changes = PullRequestModel()._calculate_file_changes(old_diff_data, new_diff_data)
965 file_changes = PullRequestModel()._calculate_file_changes(
966 old_diff_data, new_diff_data)
967
895
968 assert added == file_changes.added, \
896 assert added == file_changes.added, "expected added:%s vs value:%s" % (added, file_changes.added)
969 'expected added:%s vs value:%s' % (added, file_changes.added)
897 assert modified == file_changes.modified, "expected modified:%s vs value:%s" % (modified, file_changes.modified)
970 assert modified == file_changes.modified, \
898 assert removed == file_changes.removed, "expected removed:%s vs value:%s" % (removed, file_changes.removed)
971 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
972 assert removed == file_changes.removed, \
973 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
974
899
975
900
976 def outdated_comments_patcher(use_outdated=True):
901 def outdated_comments_patcher(use_outdated=True):
977 return mock.patch.object(
902 return mock.patch.object(CommentsModel, "use_outdated_comments", return_value=use_outdated)
978 CommentsModel, 'use_outdated_comments',
979 return_value=use_outdated)
@@ -23,7 +23,7 b' from sqlalchemy.exc import IntegrityErro'
23 import pytest
23 import pytest
24
24
25 from rhodecode.tests import TESTS_TMP_PATH
25 from rhodecode.tests import TESTS_TMP_PATH
26 from rhodecode.tests.fixture import Fixture
26 from rhodecode.tests.fixtures.rc_fixture import Fixture
27
27
28 from rhodecode.model.repo_group import RepoGroupModel
28 from rhodecode.model.repo_group import RepoGroupModel
29 from rhodecode.model.repo import RepoModel
29 from rhodecode.model.repo import RepoModel
@@ -28,7 +28,7 b' from rhodecode.model.user_group import U'
28 from rhodecode.tests.models.common import (
28 from rhodecode.tests.models.common import (
29 _create_project_tree, check_tree_perms, _get_perms, _check_expected_count,
29 _create_project_tree, check_tree_perms, _get_perms, _check_expected_count,
30 expected_count, _destroy_project_tree)
30 expected_count, _destroy_project_tree)
31 from rhodecode.tests.fixture import Fixture
31 from rhodecode.tests.fixtures.rc_fixture import Fixture
32
32
33
33
34 fixture = Fixture()
34 fixture = Fixture()
@@ -22,7 +22,7 b' import pytest'
22
22
23 from rhodecode.model.db import User
23 from rhodecode.model.db import User
24 from rhodecode.tests import TEST_USER_REGULAR_LOGIN
24 from rhodecode.tests import TEST_USER_REGULAR_LOGIN
25 from rhodecode.tests.fixture import Fixture
25 from rhodecode.tests.fixtures.rc_fixture import Fixture
26 from rhodecode.model.user_group import UserGroupModel
26 from rhodecode.model.user_group import UserGroupModel
27 from rhodecode.model.meta import Session
27 from rhodecode.model.meta import Session
28
28
@@ -27,7 +27,7 b' from rhodecode.model.user import UserMod'
27 from rhodecode.model.user_group import UserGroupModel
27 from rhodecode.model.user_group import UserGroupModel
28 from rhodecode.model.repo import RepoModel
28 from rhodecode.model.repo import RepoModel
29 from rhodecode.model.repo_group import RepoGroupModel
29 from rhodecode.model.repo_group import RepoGroupModel
30 from rhodecode.tests.fixture import Fixture
30 from rhodecode.tests.fixtures.rc_fixture import Fixture
31 from rhodecode.lib.str_utils import safe_str
31 from rhodecode.lib.str_utils import safe_str
32
32
33
33
@@ -32,11 +32,11 b' from rhodecode.model.meta import Session'
32 from rhodecode.model.repo_group import RepoGroupModel
32 from rhodecode.model.repo_group import RepoGroupModel
33 from rhodecode.model.db import ChangesetStatus, Repository
33 from rhodecode.model.db import ChangesetStatus, Repository
34 from rhodecode.model.changeset_status import ChangesetStatusModel
34 from rhodecode.model.changeset_status import ChangesetStatusModel
35 from rhodecode.tests.fixture import Fixture
35 from rhodecode.tests.fixtures.rc_fixture import Fixture
36
36
37 fixture = Fixture()
37 fixture = Fixture()
38
38
39 pytestmark = pytest.mark.usefixtures('baseapp')
39 pytestmark = pytest.mark.usefixtures("baseapp")
40
40
41
41
42 @pytest.fixture()
42 @pytest.fixture()
@@ -111,7 +111,7 b' app.base_url = http://rhodecode.local'
111 app.service_api.host = http://rhodecode.local:10020
111 app.service_api.host = http://rhodecode.local:10020
112
112
113 ; Secret for Service API authentication.
113 ; Secret for Service API authentication.
114 app.service_api.token =
114 app.service_api.token = secret4
115
115
116 ; Unique application ID. Should be a random unique string for security.
116 ; Unique application ID. Should be a random unique string for security.
117 app_instance_uuid = rc-production
117 app_instance_uuid = rc-production
@@ -351,7 +351,7 b' archive_cache.objectstore.retry_attempts'
351 ; and served from the cache during subsequent requests for the same archive of
351 ; and served from the cache during subsequent requests for the same archive of
352 ; the repository. This path is important to be shared across filesystems and with
352 ; the repository. This path is important to be shared across filesystems and with
353 ; RhodeCode and vcsserver
353 ; RhodeCode and vcsserver
354 archive_cache.filesystem.store_dir = %(here)s/rc-tests/archive_cache
354 archive_cache.filesystem.store_dir = %(here)s/.rc-test-data/archive_cache
355
355
356 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
356 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
357 archive_cache.filesystem.cache_size_gb = 2
357 archive_cache.filesystem.cache_size_gb = 2
@@ -406,7 +406,7 b' celery.task_store_eager_result = true'
406
406
407 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
407 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
408 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
408 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
409 cache_dir = %(here)s/rc-test-data
409 cache_dir = %(here)s/.rc-test-data
410
410
411 ; *********************************************
411 ; *********************************************
412 ; `sql_cache_short` cache for heavy SQL queries
412 ; `sql_cache_short` cache for heavy SQL queries
@@ -435,7 +435,7 b' rc_cache.cache_repo_longterm.max_size = '
435 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
435 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
436 rc_cache.cache_general.expiration_time = 43200
436 rc_cache.cache_general.expiration_time = 43200
437 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
437 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
438 rc_cache.cache_general.arguments.filename = %(here)s/rc-tests/cache-backend/cache_general_db
438 rc_cache.cache_general.arguments.filename = %(here)s/.rc-test-data/cache-backend/cache_general_db
439
439
440 ; alternative `cache_general` redis backend with distributed lock
440 ; alternative `cache_general` redis backend with distributed lock
441 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
441 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
@@ -454,6 +454,10 b' rc_cache.cache_general.arguments.filenam'
454 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
454 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
455 #rc_cache.cache_general.arguments.lock_auto_renewal = true
455 #rc_cache.cache_general.arguments.lock_auto_renewal = true
456
456
457 ; prefix for redis keys used for this cache backend, the final key is constructed using {custom-prefix}{key}
458 #rc_cache.cache_general.arguments.key_prefix = custom-prefix-
459
460
457 ; *************************************************
461 ; *************************************************
458 ; `cache_perms` cache for permission tree, auth TTL
462 ; `cache_perms` cache for permission tree, auth TTL
459 ; for simplicity use rc.file_namespace backend,
463 ; for simplicity use rc.file_namespace backend,
@@ -462,7 +466,7 b' rc_cache.cache_general.arguments.filenam'
462 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
466 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
463 rc_cache.cache_perms.expiration_time = 0
467 rc_cache.cache_perms.expiration_time = 0
464 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
468 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
465 rc_cache.cache_perms.arguments.filename = %(here)s/rc-tests/cache-backend/cache_perms_db
469 rc_cache.cache_perms.arguments.filename = %(here)s/.rc-test-data/cache-backend/cache_perms_db
466
470
467 ; alternative `cache_perms` redis backend with distributed lock
471 ; alternative `cache_perms` redis backend with distributed lock
468 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
472 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
@@ -481,6 +485,10 b' rc_cache.cache_perms.arguments.filename '
481 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
485 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
482 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
486 #rc_cache.cache_perms.arguments.lock_auto_renewal = true
483
487
488 ; prefix for redis keys used for this cache backend, the final key is constructed using {custom-prefix}{key}
489 #rc_cache.cache_perms.arguments.key_prefix = custom-prefix-
490
491
484 ; ***************************************************
492 ; ***************************************************
485 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
493 ; `cache_repo` cache for file tree, Readme, RSS FEEDS
486 ; for simplicity use rc.file_namespace backend,
494 ; for simplicity use rc.file_namespace backend,
@@ -489,7 +497,7 b' rc_cache.cache_perms.arguments.filename '
489 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
497 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
490 rc_cache.cache_repo.expiration_time = 2592000
498 rc_cache.cache_repo.expiration_time = 2592000
491 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
499 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
492 rc_cache.cache_repo.arguments.filename = %(here)s/rc-tests/cache-backend/cache_repo_db
500 rc_cache.cache_repo.arguments.filename = %(here)s/.rc-test-data/cache-backend/cache_repo_db
493
501
494 ; alternative `cache_repo` redis backend with distributed lock
502 ; alternative `cache_repo` redis backend with distributed lock
495 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
503 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
@@ -508,6 +516,10 b' rc_cache.cache_repo.arguments.filename ='
508 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
516 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
509 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
517 #rc_cache.cache_repo.arguments.lock_auto_renewal = true
510
518
519 ; prefix for redis keys used for this cache backend, the final key is constructed using {custom-prefix}{key}
520 #rc_cache.cache_repo.arguments.key_prefix = custom-prefix-
521
522
511 ; ##############
523 ; ##############
512 ; BEAKER SESSION
524 ; BEAKER SESSION
513 ; ##############
525 ; ##############
@@ -516,7 +528,7 b' rc_cache.cache_repo.arguments.filename ='
516 ; types are file, ext:redis, ext:database, ext:memcached
528 ; types are file, ext:redis, ext:database, ext:memcached
517 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
529 ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session
518 beaker.session.type = file
530 beaker.session.type = file
519 beaker.session.data_dir = %(here)s/rc-tests/data/sessions
531 beaker.session.data_dir = %(here)s/.rc-test-data/data/sessions
520
532
521 ; Redis based sessions
533 ; Redis based sessions
522 #beaker.session.type = ext:redis
534 #beaker.session.type = ext:redis
@@ -532,7 +544,7 b' beaker.session.data_dir = %(here)s/rc-te'
532
544
533 beaker.session.key = rhodecode
545 beaker.session.key = rhodecode
534 beaker.session.secret = test-rc-uytcxaz
546 beaker.session.secret = test-rc-uytcxaz
535 beaker.session.lock_dir = %(here)s/rc-tests/data/sessions/lock
547 beaker.session.lock_dir = %(here)s/.rc-test-data/data/sessions/lock
536
548
537 ; Secure encrypted cookie. Requires AES and AES python libraries
549 ; Secure encrypted cookie. Requires AES and AES python libraries
538 ; you must disable beaker.session.secret to use this
550 ; you must disable beaker.session.secret to use this
@@ -564,7 +576,7 b' beaker.session.secure = false'
564 ; WHOOSH Backend, doesn't require additional services to run
576 ; WHOOSH Backend, doesn't require additional services to run
565 ; it works good with few dozen repos
577 ; it works good with few dozen repos
566 search.module = rhodecode.lib.index.whoosh
578 search.module = rhodecode.lib.index.whoosh
567 search.location = %(here)s/rc-tests/data/index
579 search.location = %(here)s/.rc-test-data/data/index
568
580
569 ; ####################
581 ; ####################
570 ; CHANNELSTREAM CONFIG
582 ; CHANNELSTREAM CONFIG
@@ -584,7 +596,7 b' channelstream.server = channelstream:980'
584 ; see Nginx/Apache configuration examples in our docs
596 ; see Nginx/Apache configuration examples in our docs
585 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
597 channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream
586 channelstream.secret = ENV_GENERATED
598 channelstream.secret = ENV_GENERATED
587 channelstream.history.location = %(here)s/rc-tests/channelstream_history
599 channelstream.history.location = %(here)s/.rc-test-data/channelstream_history
588
600
589 ; Internal application path that Javascript uses to connect into.
601 ; Internal application path that Javascript uses to connect into.
590 ; If you use proxy-prefix the prefix should be added before /_channelstream
602 ; If you use proxy-prefix the prefix should be added before /_channelstream
@@ -601,7 +613,7 b' channelstream.proxy_path = /_channelstre'
601 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
613 ; pymysql is an alternative driver for MySQL, use in case of problems with default one
602 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
614 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
603
615
604 sqlalchemy.db1.url = sqlite:///%(here)s/rc-tests/rhodecode_test.db?timeout=30
616 sqlalchemy.db1.url = sqlite:///%(here)s/.rc-test-data/rhodecode_test.db?timeout=30
605
617
606 ; see sqlalchemy docs for other advanced settings
618 ; see sqlalchemy docs for other advanced settings
607 ; print the sql statements to output
619 ; print the sql statements to output
@@ -737,7 +749,7 b' ssh.generate_authorized_keyfile = true'
737 ; Path to the authorized_keys file where the generate entries are placed.
749 ; Path to the authorized_keys file where the generate entries are placed.
738 ; It is possible to have multiple key files specified in `sshd_config` e.g.
750 ; It is possible to have multiple key files specified in `sshd_config` e.g.
739 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
751 ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode
740 ssh.authorized_keys_file_path = %(here)s/rc-tests/authorized_keys_rhodecode
752 ssh.authorized_keys_file_path = %(here)s/.rc-test-data/authorized_keys_rhodecode
741
753
742 ; Command to execute the SSH wrapper. The binary is available in the
754 ; Command to execute the SSH wrapper. The binary is available in the
743 ; RhodeCode installation directory.
755 ; RhodeCode installation directory.
@@ -24,13 +24,13 b' import tempfile'
24 import pytest
24 import pytest
25 import subprocess
25 import subprocess
26 import logging
26 import logging
27 from urllib.request import urlopen
27 import requests
28 from urllib.error import URLError
29 import configparser
28 import configparser
30
29
31
30
32 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
31 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
33 from rhodecode.tests.utils import is_url_reachable
32 from rhodecode.tests.utils import is_url_reachable
33 from rhodecode.tests import console_printer
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
@@ -49,7 +49,7 b' def get_host_url(pyramid_config):'
49
49
50 def assert_no_running_instance(url):
50 def assert_no_running_instance(url):
51 if is_url_reachable(url):
51 if is_url_reachable(url):
52 print(f"Hint: Usually this means another instance of server "
52 console_printer(f"Hint: Usually this means another instance of server "
53 f"is running in the background at {url}.")
53 f"is running in the background at {url}.")
54 pytest.fail(f"Port is not free at {url}, cannot start server at")
54 pytest.fail(f"Port is not free at {url}, cannot start server at")
55
55
@@ -58,8 +58,9 b' class ServerBase(object):'
58 _args = []
58 _args = []
59 log_file_name = 'NOT_DEFINED.log'
59 log_file_name = 'NOT_DEFINED.log'
60 status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'
60 status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'
61 console_marker = " :warning: [green]pytest-setup[/green] "
61
62
62 def __init__(self, config_file, log_file):
63 def __init__(self, config_file, log_file, env):
63 self.config_file = config_file
64 self.config_file = config_file
64 config = configparser.ConfigParser()
65 config = configparser.ConfigParser()
65 config.read(config_file)
66 config.read(config_file)
@@ -69,10 +70,10 b' class ServerBase(object):'
69 self._args = []
70 self._args = []
70 self.log_file = log_file or os.path.join(
71 self.log_file = log_file or os.path.join(
71 tempfile.gettempdir(), self.log_file_name)
72 tempfile.gettempdir(), self.log_file_name)
73 self.env = env
72 self.process = None
74 self.process = None
73 self.server_out = None
75 self.server_out = None
74 log.info("Using the {} configuration:{}".format(
76 log.info(f"Using the {self.__class__.__name__} configuration:{config_file}")
75 self.__class__.__name__, config_file))
76
77
77 if not os.path.isfile(config_file):
78 if not os.path.isfile(config_file):
78 raise RuntimeError(f'Failed to get config at {config_file}')
79 raise RuntimeError(f'Failed to get config at {config_file}')
@@ -110,18 +111,17 b' class ServerBase(object):'
110
111
111 while time.time() - start < timeout:
112 while time.time() - start < timeout:
112 try:
113 try:
113 urlopen(status_url)
114 requests.get(status_url)
114 break
115 break
115 except URLError:
116 except requests.exceptions.ConnectionError:
116 time.sleep(0.2)
117 time.sleep(0.2)
117 else:
118 else:
118 pytest.fail(
119 pytest.fail(
119 "Starting the {} failed or took more than {} "
120 f"Starting the {self.__class__.__name__} failed or took more than {timeout} seconds."
120 "seconds. cmd: `{}`".format(
121 f"cmd: `{self.command}`"
121 self.__class__.__name__, timeout, self.command))
122 )
122
123
123 log.info('Server of {} ready at url {}'.format(
124 log.info(f'Server of {self.__class__.__name__} ready at url {status_url}')
124 self.__class__.__name__, status_url))
125
125
126 def shutdown(self):
126 def shutdown(self):
127 self.process.kill()
127 self.process.kill()
@@ -130,7 +130,7 b' class ServerBase(object):'
130
130
131 def get_log_file_with_port(self):
131 def get_log_file_with_port(self):
132 log_file = list(self.log_file.partition('.log'))
132 log_file = list(self.log_file.partition('.log'))
133 log_file.insert(1, get_port(self.config_file))
133 log_file.insert(1, f'-{get_port(self.config_file)}')
134 log_file = ''.join(log_file)
134 log_file = ''.join(log_file)
135 return log_file
135 return log_file
136
136
@@ -140,11 +140,12 b' class RcVCSServer(ServerBase):'
140 Represents a running VCSServer instance.
140 Represents a running VCSServer instance.
141 """
141 """
142
142
143 log_file_name = 'rc-vcsserver.log'
143 log_file_name = 'rhodecode-vcsserver.log'
144 status_url_tmpl = 'http://{host}:{port}/status'
144 status_url_tmpl = 'http://{host}:{port}/status'
145
145
146 def __init__(self, config_file, log_file=None, workers='3'):
146 def __init__(self, config_file, log_file=None, workers='3', env=None, info_prefix=''):
147 super(RcVCSServer, self).__init__(config_file, log_file)
147 super(RcVCSServer, self).__init__(config_file, log_file, env)
148 self.info_prefix = info_prefix
148 self._args = [
149 self._args = [
149 'gunicorn',
150 'gunicorn',
150 '--bind', self.bind_addr,
151 '--bind', self.bind_addr,
@@ -164,9 +165,10 b' class RcVCSServer(ServerBase):'
164 host_url = self.host_url()
165 host_url = self.host_url()
165 assert_no_running_instance(host_url)
166 assert_no_running_instance(host_url)
166
167
167 print(f'rhodecode-vcsserver starting at: {host_url}')
168 console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-vcsserver starting at: {host_url}')
168 print(f'rhodecode-vcsserver command: {self.command}')
169 console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-vcsserver command: {self.command}')
169 print(f'rhodecode-vcsserver logfile: {self.log_file}')
170 console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-vcsserver logfile: {self.log_file}')
171 console_printer()
170
172
171 self.process = subprocess.Popen(
173 self.process = subprocess.Popen(
172 self._args, bufsize=0, env=env,
174 self._args, bufsize=0, env=env,
@@ -178,11 +180,12 b' class RcWebServer(ServerBase):'
178 Represents a running RCE web server used as a test fixture.
180 Represents a running RCE web server used as a test fixture.
179 """
181 """
180
182
181 log_file_name = 'rc-web.log'
183 log_file_name = 'rhodecode-ce.log'
182 status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'
184 status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'
183
185
184 def __init__(self, config_file, log_file=None, workers='2'):
186 def __init__(self, config_file, log_file=None, workers='2', env=None, info_prefix=''):
185 super(RcWebServer, self).__init__(config_file, log_file)
187 super(RcWebServer, self).__init__(config_file, log_file, env)
188 self.info_prefix = info_prefix
186 self._args = [
189 self._args = [
187 'gunicorn',
190 'gunicorn',
188 '--bind', self.bind_addr,
191 '--bind', self.bind_addr,
@@ -195,7 +198,8 b' class RcWebServer(ServerBase):'
195
198
196 def start(self):
199 def start(self):
197 env = os.environ.copy()
200 env = os.environ.copy()
198 env['RC_NO_TMP_PATH'] = '1'
201 if self.env:
202 env.update(self.env)
199
203
200 self.log_file = self.get_log_file_with_port()
204 self.log_file = self.get_log_file_with_port()
201 self.server_out = open(self.log_file, 'w')
205 self.server_out = open(self.log_file, 'w')
@@ -203,9 +207,10 b' class RcWebServer(ServerBase):'
203 host_url = self.host_url()
207 host_url = self.host_url()
204 assert_no_running_instance(host_url)
208 assert_no_running_instance(host_url)
205
209
206 print(f'rhodecode-web starting at: {host_url}')
210 console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-ce starting at: {host_url}')
207 print(f'rhodecode-web command: {self.command}')
211 console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-ce command: {self.command}')
208 print(f'rhodecode-web logfile: {self.log_file}')
212 console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-ce logfile: {self.log_file}')
213 console_printer()
209
214
210 self.process = subprocess.Popen(
215 self.process = subprocess.Popen(
211 self._args, bufsize=0, env=env,
216 self._args, bufsize=0, env=env,
@@ -229,3 +234,44 b' class RcWebServer(ServerBase):'
229 }
234 }
230 params.update(**kwargs)
235 params.update(**kwargs)
231 return params['user'], params['passwd']
236 return params['user'], params['passwd']
237
238 class CeleryServer(ServerBase):
239 log_file_name = 'rhodecode-celery.log'
240 status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'
241
242 def __init__(self, config_file, log_file=None, workers='2', env=None, info_prefix=''):
243 super(CeleryServer, self).__init__(config_file, log_file, env)
244 self.info_prefix = info_prefix
245 self._args = \
246 ['celery',
247 '--no-color',
248 '--app=rhodecode.lib.celerylib.loader',
249 'worker',
250 '--autoscale=4,2',
251 '--max-tasks-per-child=30',
252 '--task-events',
253 '--loglevel=DEBUG',
254 '--ini=' + self.config_file]
255
256 def start(self):
257 env = os.environ.copy()
258 env['RC_NO_TEST_ENV'] = '1'
259
260 self.log_file = self.get_log_file_with_port()
261 self.server_out = open(self.log_file, 'w')
262
263 host_url = "Celery" #self.host_url()
264 #assert_no_running_instance(host_url)
265
266 console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-celery starting at: {host_url}')
267 console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-celery command: {self.command}')
268 console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-celery logfile: {self.log_file}')
269 console_printer()
270
271 self.process = subprocess.Popen(
272 self._args, bufsize=0, env=env,
273 stdout=self.server_out, stderr=self.server_out)
274
275
276 def wait_until_ready(self, timeout=30):
277 time.sleep(2)
@@ -36,24 +36,29 b' from webtest.app import TestResponse, Te'
36
36
37 import pytest
37 import pytest
38
38
39 try:
40 import rc_testdata
41 except ImportError:
42 raise ImportError('Failed to import rc_testdata, '
43 'please make sure this package is installed from requirements_test.txt')
44
45 from rhodecode.model.db import User, Repository
39 from rhodecode.model.db import User, Repository
46 from rhodecode.model.meta import Session
40 from rhodecode.model.meta import Session
47 from rhodecode.model.scm import ScmModel
41 from rhodecode.model.scm import ScmModel
48 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
42 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
49 from rhodecode.lib.vcs.backends.base import EmptyCommit
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
50 from rhodecode.tests import login_user_session
44 from rhodecode.tests import login_user_session, console_printer
45 from rhodecode.authentication import AuthenticationPluginRegistry
46 from rhodecode.model.settings import SettingsModel
51
47
52 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
53
49
54
50
55 def print_to_func(value, print_to=sys.stderr):
51 def console_printer_utils(msg):
56 print(value, file=print_to)
52 console_printer(f" :white_check_mark: [green]test-utils[/green] {msg}")
53
54
55 def get_rc_testdata():
56 try:
57 import rc_testdata
58 except ImportError:
59 raise ImportError('Failed to import rc_testdata, '
60 'please make sure this package is installed from requirements_test.txt')
61 return rc_testdata
57
62
58
63
59 class CustomTestResponse(TestResponse):
64 class CustomTestResponse(TestResponse):
@@ -73,7 +78,6 b' class CustomTestResponse(TestResponse):'
73 assert string in res
78 assert string in res
74 """
79 """
75 print_body = kw.pop('print_body', False)
80 print_body = kw.pop('print_body', False)
76 print_to = kw.pop('print_to', sys.stderr)
77
81
78 if 'no' in kw:
82 if 'no' in kw:
79 no = kw['no']
83 no = kw['no']
@@ -89,18 +93,18 b' class CustomTestResponse(TestResponse):'
89
93
90 for s in strings:
94 for s in strings:
91 if s not in self:
95 if s not in self:
92 print_to_func(f"Actual response (no {s!r}):", print_to=print_to)
96 console_printer_utils(f"Actual response (no {s!r}):")
93 print_to_func(f"body output saved as `{f}`", print_to=print_to)
97 console_printer_utils(f"body output saved as `{f}`")
94 if print_body:
98 if print_body:
95 print_to_func(str(self), print_to=print_to)
99 console_printer_utils(str(self))
96 raise IndexError(f"Body does not contain string {s!r}, body output saved as {f}")
100 raise IndexError(f"Body does not contain string {s!r}, body output saved as {f}")
97
101
98 for no_s in no:
102 for no_s in no:
99 if no_s in self:
103 if no_s in self:
100 print_to_func(f"Actual response (has {no_s!r})", print_to=print_to)
104 console_printer_utils(f"Actual response (has {no_s!r})")
101 print_to_func(f"body output saved as `{f}`", print_to=print_to)
105 console_printer_utils(f"body output saved as `{f}`")
102 if print_body:
106 if print_body:
103 print_to_func(str(self), print_to=print_to)
107 console_printer_utils(str(self))
104 raise IndexError(f"Body contains bad string {no_s!r}, body output saved as {f}")
108 raise IndexError(f"Body contains bad string {no_s!r}, body output saved as {f}")
105
109
106 def assert_response(self):
110 def assert_response(self):
@@ -209,6 +213,7 b' def extract_git_repo_from_dump(dump_name'
209 """Create git repo `repo_name` from dump `dump_name`."""
213 """Create git repo `repo_name` from dump `dump_name`."""
210 repos_path = ScmModel().repos_path
214 repos_path = ScmModel().repos_path
211 target_path = os.path.join(repos_path, repo_name)
215 target_path = os.path.join(repos_path, repo_name)
216 rc_testdata = get_rc_testdata()
212 rc_testdata.extract_git_dump(dump_name, target_path)
217 rc_testdata.extract_git_dump(dump_name, target_path)
213 return target_path
218 return target_path
214
219
@@ -217,6 +222,7 b' def extract_hg_repo_from_dump(dump_name,'
217 """Create hg repo `repo_name` from dump `dump_name`."""
222 """Create hg repo `repo_name` from dump `dump_name`."""
218 repos_path = ScmModel().repos_path
223 repos_path = ScmModel().repos_path
219 target_path = os.path.join(repos_path, repo_name)
224 target_path = os.path.join(repos_path, repo_name)
225 rc_testdata = get_rc_testdata()
220 rc_testdata.extract_hg_dump(dump_name, target_path)
226 rc_testdata.extract_hg_dump(dump_name, target_path)
221 return target_path
227 return target_path
222
228
@@ -245,6 +251,7 b' def _load_svn_dump_into_repo(dump_name, '
245 Currently the dumps are in rc_testdata. They might later on be
251 Currently the dumps are in rc_testdata. They might later on be
246 integrated with the main repository once they stabilize more.
252 integrated with the main repository once they stabilize more.
247 """
253 """
254 rc_testdata = get_rc_testdata()
248 dump = rc_testdata.load_svn_dump(dump_name)
255 dump = rc_testdata.load_svn_dump(dump_name)
249 load_dump = subprocess.Popen(
256 load_dump = subprocess.Popen(
250 ['svnadmin', 'load', repo_path],
257 ['svnadmin', 'load', repo_path],
@@ -254,9 +261,7 b' def _load_svn_dump_into_repo(dump_name, '
254 if load_dump.returncode != 0:
261 if load_dump.returncode != 0:
255 log.error("Output of load_dump command: %s", out)
262 log.error("Output of load_dump command: %s", out)
256 log.error("Error output of load_dump command: %s", err)
263 log.error("Error output of load_dump command: %s", err)
257 raise Exception(
264 raise Exception(f'Failed to load dump "{dump_name}" into repository at path "{repo_path}".')
258 'Failed to load dump "%s" into repository at path "%s".'
259 % (dump_name, repo_path))
260
265
261
266
262 class AssertResponse(object):
267 class AssertResponse(object):
@@ -492,3 +497,54 b' def permission_update_data_generator(csr'
492 ('perm_del_member_type_{}'.format(obj_id), obj_type),
497 ('perm_del_member_type_{}'.format(obj_id), obj_type),
493 ])
498 ])
494 return form_data
499 return form_data
500
501
502
503 class AuthPluginManager:
504
505 def cleanup(self):
506 self._enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode'])
507
508 def enable(self, plugins_list, override=None):
509 return self._enable_plugins(plugins_list, override)
510
511 @classmethod
512 def _enable_plugins(cls, plugins_list, override: object = None):
513 override = override or {}
514 params = {
515 'auth_plugins': ','.join(plugins_list),
516 }
517
518 # helper translate some names to others, to fix settings code
519 name_map = {
520 'token': 'authtoken'
521 }
522 log.debug('enable_auth_plugins: enabling following auth-plugins: %s', plugins_list)
523
524 for module in plugins_list:
525 plugin_name = module.partition('#')[-1]
526 if plugin_name in name_map:
527 plugin_name = name_map[plugin_name]
528 enabled_plugin = f'auth_{plugin_name}_enabled'
529 cache_ttl = f'auth_{plugin_name}_cache_ttl'
530
531 # default params that are needed for each plugin,
532 # `enabled` and `cache_ttl`
533 params.update({
534 enabled_plugin: True,
535 cache_ttl: 0
536 })
537 if override.get:
538 params.update(override.get(module, {}))
539
540 validated_params = params
541
542 for k, v in validated_params.items():
543 setting = SettingsModel().create_or_update_setting(k, v)
544 Session().add(setting)
545 Session().commit()
546
547 AuthenticationPluginRegistry.invalidate_auth_plugins_cache(hard=True)
548
549 enabled_plugins = SettingsModel().get_auth_plugins()
550 assert plugins_list == enabled_plugins
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -32,8 +31,7 b' from rhodecode.tests.utils import check_'
32
31
33
32
34 @pytest.fixture()
33 @pytest.fixture()
35 def vcs_repository_support(
34 def vcs_repository_support(request, backend_alias, baseapp, _vcs_repo_container):
36 request, backend_alias, baseapp, _vcs_repo_container):
37 """
35 """
38 Provide a test repository for the test run.
36 Provide a test repository for the test run.
39
37
@@ -63,7 +61,7 b' def vcs_repository_support('
63 return backend_alias, repo
61 return backend_alias, repo
64
62
65
63
66 @pytest.fixture(scope='class')
64 @pytest.fixture(scope="class")
67 def _vcs_repo_container(request):
65 def _vcs_repo_container(request):
68 """
66 """
69 Internal fixture intended to help support class based scoping on demand.
67 Internal fixture intended to help support class based scoping on demand.
@@ -73,13 +71,12 b' def _vcs_repo_container(request):'
73
71
74 def _create_vcs_repo_container(request):
72 def _create_vcs_repo_container(request):
75 repo_container = VcsRepoContainer()
73 repo_container = VcsRepoContainer()
76 if not request.config.getoption('--keep-tmp-path'):
74 if not request.config.getoption("--keep-tmp-path"):
77 request.addfinalizer(repo_container.cleanup)
75 request.addfinalizer(repo_container.cleanup)
78 return repo_container
76 return repo_container
79
77
80
78
81 class VcsRepoContainer(object):
79 class VcsRepoContainer(object):
82
83 def __init__(self):
80 def __init__(self):
84 self._cleanup_paths = []
81 self._cleanup_paths = []
85 self._repos = {}
82 self._repos = {}
@@ -98,14 +95,14 b' class VcsRepoContainer(object):'
98
95
99
96
100 def _should_create_repo_per_test(cls):
97 def _should_create_repo_per_test(cls):
101 return getattr(cls, 'recreate_repo_per_test', False)
98 return getattr(cls, "recreate_repo_per_test", False)
102
99
103
100
104 def _create_empty_repository(cls, backend_alias=None):
101 def _create_empty_repository(cls, backend_alias=None):
105 Backend = get_backend(backend_alias or cls.backend_alias)
102 Backend = get_backend(backend_alias or cls.backend_alias)
106 repo_path = get_new_dir(str(time.time()))
103 repo_path = get_new_dir(str(time.time()))
107 repo = Backend(repo_path, create=True)
104 repo = Backend(repo_path, create=True)
108 if hasattr(cls, '_get_commits'):
105 if hasattr(cls, "_get_commits"):
109 commits = cls._get_commits()
106 commits = cls._get_commits()
110 cls.tip = _add_commits_to_repo(repo, commits)
107 cls.tip = _add_commits_to_repo(repo, commits)
111
108
@@ -127,7 +124,7 b' def config():'
127 specific content is required.
124 specific content is required.
128 """
125 """
129 config = Config()
126 config = Config()
130 config.set('section-a', 'a-1', 'value-a-1')
127 config.set("section-a", "a-1", "value-a-1")
131 return config
128 return config
132
129
133
130
@@ -136,24 +133,24 b' def _add_commits_to_repo(repo, commits):'
136 tip = None
133 tip = None
137
134
138 for commit in commits:
135 for commit in commits:
139 for node in commit.get('added', []):
136 for node in commit.get("added", []):
140 if not isinstance(node, FileNode):
137 if not isinstance(node, FileNode):
141 node = FileNode(safe_bytes(node.path), content=node.content)
138 node = FileNode(safe_bytes(node.path), content=node.content)
142 imc.add(node)
139 imc.add(node)
143
140
144 for node in commit.get('changed', []):
141 for node in commit.get("changed", []):
145 if not isinstance(node, FileNode):
142 if not isinstance(node, FileNode):
146 node = FileNode(safe_bytes(node.path), content=node.content)
143 node = FileNode(safe_bytes(node.path), content=node.content)
147 imc.change(node)
144 imc.change(node)
148
145
149 for node in commit.get('removed', []):
146 for node in commit.get("removed", []):
150 imc.remove(FileNode(safe_bytes(node.path)))
147 imc.remove(FileNode(safe_bytes(node.path)))
151
148
152 tip = imc.commit(
149 tip = imc.commit(
153 message=str(commit['message']),
150 message=str(commit["message"]),
154 author=str(commit['author']),
151 author=str(commit["author"]),
155 date=commit['date'],
152 date=commit["date"],
156 branch=commit.get('branch')
153 branch=commit.get("branch"),
157 )
154 )
158
155
159 return tip
156 return tip
@@ -183,16 +180,15 b' def generate_repo_with_commits(vcs_repo)'
183 start_date = datetime.datetime(2010, 1, 1, 20)
180 start_date = datetime.datetime(2010, 1, 1, 20)
184 for x in range(num):
181 for x in range(num):
185 yield {
182 yield {
186 'message': 'Commit %d' % x,
183 "message": "Commit %d" % x,
187 'author': 'Joe Doe <joe.doe@example.com>',
184 "author": "Joe Doe <joe.doe@example.com>",
188 'date': start_date + datetime.timedelta(hours=12 * x),
185 "date": start_date + datetime.timedelta(hours=12 * x),
189 'added': [
186 "added": [
190 FileNode(b'file_%d.txt' % x, content=b'Foobar %d' % x),
187 FileNode(b"file_%d.txt" % x, content=b"Foobar %d" % x),
191 ],
188 ],
192 'modified': [
189 "modified": [
193 FileNode(b'file_%d.txt' % x,
190 FileNode(b"file_%d.txt" % x, content=b"Foobar %d modified" % (x - 1)),
194 content=b'Foobar %d modified' % (x-1)),
191 ],
195 ]
196 }
192 }
197
193
198 def commit_maker(num=5):
194 def commit_maker(num=5):
@@ -231,34 +227,33 b' class BackendTestMixin(object):'
231 created
227 created
232 before every single test. Defaults to ``True``.
228 before every single test. Defaults to ``True``.
233 """
229 """
230
234 recreate_repo_per_test = True
231 recreate_repo_per_test = True
235
232
236 @classmethod
233 @classmethod
237 def _get_commits(cls):
234 def _get_commits(cls):
238 commits = [
235 commits = [
239 {
236 {
240 'message': 'Initial commit',
237 "message": "Initial commit",
241 'author': 'Joe Doe <joe.doe@example.com>',
238 "author": "Joe Doe <joe.doe@example.com>",
242 'date': datetime.datetime(2010, 1, 1, 20),
239 "date": datetime.datetime(2010, 1, 1, 20),
243 'added': [
240 "added": [
244 FileNode(b'foobar', content=b'Foobar'),
241 FileNode(b"foobar", content=b"Foobar"),
245 FileNode(b'foobar2', content=b'Foobar II'),
242 FileNode(b"foobar2", content=b"Foobar II"),
246 FileNode(b'foo/bar/baz', content=b'baz here!'),
243 FileNode(b"foo/bar/baz", content=b"baz here!"),
247 ],
244 ],
248 },
245 },
249 {
246 {
250 'message': 'Changes...',
247 "message": "Changes...",
251 'author': 'Jane Doe <jane.doe@example.com>',
248 "author": "Jane Doe <jane.doe@example.com>",
252 'date': datetime.datetime(2010, 1, 1, 21),
249 "date": datetime.datetime(2010, 1, 1, 21),
253 'added': [
250 "added": [
254 FileNode(b'some/new.txt', content=b'news...'),
251 FileNode(b"some/new.txt", content=b"news..."),
255 ],
252 ],
256 'changed': [
253 "changed": [
257 FileNode(b'foobar', b'Foobar I'),
254 FileNode(b"foobar", b"Foobar I"),
258 ],
255 ],
259 'removed': [],
256 "removed": [],
260 },
257 },
261 ]
258 ]
262 return commits
259 return commits
263
264
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -43,121 +42,120 b' def d_cache_config():'
43
42
44 @pytest.mark.usefixtures("vcs_repository_support")
43 @pytest.mark.usefixtures("vcs_repository_support")
45 class TestArchives(BackendTestMixin):
44 class TestArchives(BackendTestMixin):
46
47 @classmethod
45 @classmethod
48 def _get_commits(cls):
46 def _get_commits(cls):
49 start_date = datetime.datetime(2010, 1, 1, 20)
47 start_date = datetime.datetime(2010, 1, 1, 20)
50 yield {
48 yield {
51 'message': 'Initial Commit',
49 "message": "Initial Commit",
52 'author': 'Joe Doe <joe.doe@example.com>',
50 "author": "Joe Doe <joe.doe@example.com>",
53 'date': start_date + datetime.timedelta(hours=12),
51 "date": start_date + datetime.timedelta(hours=12),
54 'added': [
52 "added": [
55 FileNode(b'executable_0o100755', b'mode_755', mode=0o100755),
53 FileNode(b"executable_0o100755", b"mode_755", mode=0o100755),
56 FileNode(b'executable_0o100500', b'mode_500', mode=0o100500),
54 FileNode(b"executable_0o100500", b"mode_500", mode=0o100500),
57 FileNode(b'not_executable', b'mode_644', mode=0o100644),
55 FileNode(b"not_executable", b"mode_644", mode=0o100644),
58 ],
56 ],
59 }
57 }
60 for x in range(5):
58 for x in range(5):
61 yield {
59 yield {
62 'message': 'Commit %d' % x,
60 "message": "Commit %d" % x,
63 'author': 'Joe Doe <joe.doe@example.com>',
61 "author": "Joe Doe <joe.doe@example.com>",
64 'date': start_date + datetime.timedelta(hours=12 * x),
62 "date": start_date + datetime.timedelta(hours=12 * x),
65 'added': [
63 "added": [
66 FileNode(b'%d/file_%d.txt' % (x, x), content=b'Foobar %d' % x),
64 FileNode(b"%d/file_%d.txt" % (x, x), content=b"Foobar %d" % x),
67 ],
65 ],
68 }
66 }
69
67
70 @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
68 @pytest.mark.parametrize("compressor", ["gz", "bz2"])
71 def test_archive_tar(self, compressor, tmpdir, tmp_path, d_cache_config):
69 def test_archive_tar(self, compressor, tmpdir, tmp_path, d_cache_config):
72
70 archive_node = tmp_path / "archive-node"
73 archive_node = tmp_path / 'archive-node'
74 archive_node.touch()
71 archive_node.touch()
75
72
76 archive_lnk = self.tip.archive_repo(
73 archive_lnk = self.tip.archive_repo(
77 str(archive_node), kind=f't{compressor}', archive_dir_name='repo', cache_config=d_cache_config)
74 str(archive_node), kind=f"t{compressor}", archive_dir_name="repo", cache_config=d_cache_config
75 )
78
76
79 out_dir = tmpdir
77 out_dir = tmpdir
80 out_file = tarfile.open(str(archive_lnk), f'r|{compressor}')
78 out_file = tarfile.open(str(archive_lnk), f"r|{compressor}")
81 out_file.extractall(out_dir)
79 out_file.extractall(out_dir)
82 out_file.close()
80 out_file.close()
83
81
84 for x in range(5):
82 for x in range(5):
85 node_path = '%d/file_%d.txt' % (x, x)
83 node_path = "%d/file_%d.txt" % (x, x)
86 with open(os.path.join(out_dir, 'repo/' + node_path), 'rb') as f:
84 with open(os.path.join(out_dir, "repo/" + node_path), "rb") as f:
87 file_content = f.read()
85 file_content = f.read()
88 assert file_content == self.tip.get_node(node_path).content
86 assert file_content == self.tip.get_node(node_path).content
89
87
90 shutil.rmtree(out_dir)
88 shutil.rmtree(out_dir)
91
89
92 @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
90 @pytest.mark.parametrize("compressor", ["gz", "bz2"])
93 def test_archive_tar_symlink(self, compressor):
91 def test_archive_tar_symlink(self, compressor):
94 pytest.skip('Not supported')
92 pytest.skip("Not supported")
95
93
96 @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
94 @pytest.mark.parametrize("compressor", ["gz", "bz2"])
97 def test_archive_tar_file_modes(self, compressor, tmpdir, tmp_path, d_cache_config):
95 def test_archive_tar_file_modes(self, compressor, tmpdir, tmp_path, d_cache_config):
98 archive_node = tmp_path / 'archive-node'
96 archive_node = tmp_path / "archive-node"
99 archive_node.touch()
97 archive_node.touch()
100
98
101 archive_lnk = self.tip.archive_repo(
99 archive_lnk = self.tip.archive_repo(
102 str(archive_node), kind='t{}'.format(compressor), archive_dir_name='repo', cache_config=d_cache_config)
100 str(archive_node), kind="t{}".format(compressor), archive_dir_name="repo", cache_config=d_cache_config
101 )
103
102
104 out_dir = tmpdir
103 out_dir = tmpdir
105 out_file = tarfile.open(str(archive_lnk), 'r|{}'.format(compressor))
104 out_file = tarfile.open(str(archive_lnk), "r|{}".format(compressor))
106 out_file.extractall(out_dir)
105 out_file.extractall(out_dir)
107 out_file.close()
106 out_file.close()
108
107
109 def dest(inp):
108 def dest(inp):
110 return os.path.join(out_dir, "repo/" + inp)
109 return os.path.join(out_dir, "repo/" + inp)
111
110
112 assert oct(os.stat(dest('not_executable')).st_mode) == '0o100644'
111 assert oct(os.stat(dest("not_executable")).st_mode) == "0o100644"
113
112
114 def test_archive_zip(self, tmp_path, d_cache_config):
113 def test_archive_zip(self, tmp_path, d_cache_config):
115 archive_node = tmp_path / 'archive-node'
114 archive_node = tmp_path / "archive-node"
116 archive_node.touch()
117
118 archive_lnk = self.tip.archive_repo(str(archive_node), kind='zip',
119 archive_dir_name='repo', cache_config=d_cache_config)
120 zip_file = zipfile.ZipFile(str(archive_lnk))
121
122 for x in range(5):
123 node_path = '%d/file_%d.txt' % (x, x)
124 data = zip_file.read(f'repo/{node_path}')
125
126 decompressed = io.BytesIO()
127 decompressed.write(data)
128 assert decompressed.getvalue() == \
129 self.tip.get_node(node_path).content
130 decompressed.close()
131
132 def test_archive_zip_with_metadata(self, tmp_path, d_cache_config):
133 archive_node = tmp_path / 'archive-node'
134 archive_node.touch()
115 archive_node.touch()
135
116
136 archive_lnk = self.tip.archive_repo(
117 archive_lnk = self.tip.archive_repo(
137 str(archive_node), kind='zip',
118 str(archive_node), kind="zip", archive_dir_name="repo", cache_config=d_cache_config
138 archive_dir_name='repo', write_metadata=True, cache_config=d_cache_config)
119 )
120 zip_file = zipfile.ZipFile(str(archive_lnk))
121
122 for x in range(5):
123 node_path = "%d/file_%d.txt" % (x, x)
124 data = zip_file.read(f"repo/{node_path}")
125
126 decompressed = io.BytesIO()
127 decompressed.write(data)
128 assert decompressed.getvalue() == self.tip.get_node(node_path).content
129 decompressed.close()
130
131 def test_archive_zip_with_metadata(self, tmp_path, d_cache_config):
132 archive_node = tmp_path / "archive-node"
133 archive_node.touch()
134
135 archive_lnk = self.tip.archive_repo(
136 str(archive_node), kind="zip", archive_dir_name="repo", write_metadata=True, cache_config=d_cache_config
137 )
139
138
140 zip_file = zipfile.ZipFile(str(archive_lnk))
139 zip_file = zipfile.ZipFile(str(archive_lnk))
141 metafile = zip_file.read('repo/.archival.txt')
140 metafile = zip_file.read("repo/.archival.txt")
142
141
143 raw_id = ascii_bytes(self.tip.raw_id)
142 raw_id = ascii_bytes(self.tip.raw_id)
144 assert b'commit_id:%b' % raw_id in metafile
143 assert b"commit_id:%b" % raw_id in metafile
145
144
146 for x in range(5):
145 for x in range(5):
147 node_path = '%d/file_%d.txt' % (x, x)
146 node_path = "%d/file_%d.txt" % (x, x)
148 data = zip_file.read(f'repo/{node_path}')
147 data = zip_file.read(f"repo/{node_path}")
149 decompressed = io.BytesIO()
148 decompressed = io.BytesIO()
150 decompressed.write(data)
149 decompressed.write(data)
151 assert decompressed.getvalue() == \
150 assert decompressed.getvalue() == self.tip.get_node(node_path).content
152 self.tip.get_node(node_path).content
153 decompressed.close()
151 decompressed.close()
154
152
155 def test_archive_wrong_kind(self, tmp_path, d_cache_config):
153 def test_archive_wrong_kind(self, tmp_path, d_cache_config):
156 archive_node = tmp_path / 'archive-node'
154 archive_node = tmp_path / "archive-node"
157 archive_node.touch()
155 archive_node.touch()
158
156
159 with pytest.raises(ImproperArchiveTypeError):
157 with pytest.raises(ImproperArchiveTypeError):
160 self.tip.archive_repo(str(archive_node), kind='wrong kind', cache_config=d_cache_config)
158 self.tip.archive_repo(str(archive_node), kind="wrong kind", cache_config=d_cache_config)
161
159
162
160
163 @pytest.fixture()
161 @pytest.fixture()
@@ -167,8 +165,8 b' def base_commit():'
167 """
165 """
168 commit = base.BaseCommit()
166 commit = base.BaseCommit()
169 commit.repository = mock.Mock()
167 commit.repository = mock.Mock()
170 commit.repository.name = 'fake_repo'
168 commit.repository.name = "fake_repo"
171 commit.short_id = 'fake_id'
169 commit.short_id = "fake_id"
172 return commit
170 return commit
173
171
174
172
@@ -180,19 +178,17 b' def test_validate_archive_prefix_enforce'
180 def test_validate_archive_prefix_empty_prefix(base_commit):
178 def test_validate_archive_prefix_empty_prefix(base_commit):
181 # TODO: johbo: Should raise a ValueError here.
179 # TODO: johbo: Should raise a ValueError here.
182 with pytest.raises(VCSError):
180 with pytest.raises(VCSError):
183 base_commit._validate_archive_prefix('')
181 base_commit._validate_archive_prefix("")
184
182
185
183
186 def test_validate_archive_prefix_with_leading_slash(base_commit):
184 def test_validate_archive_prefix_with_leading_slash(base_commit):
187 # TODO: johbo: Should raise a ValueError here.
185 # TODO: johbo: Should raise a ValueError here.
188 with pytest.raises(VCSError):
186 with pytest.raises(VCSError):
189 base_commit._validate_archive_prefix('/any')
187 base_commit._validate_archive_prefix("/any")
190
188
191
189
192 def test_validate_archive_prefix_falls_back_to_repository_name(base_commit):
190 def test_validate_archive_prefix_falls_back_to_repository_name(base_commit):
193 prefix = base_commit._validate_archive_prefix(None)
191 prefix = base_commit._validate_archive_prefix(None)
194 expected_prefix = base_commit._ARCHIVE_PREFIX_TEMPLATE.format(
192 expected_prefix = base_commit._ARCHIVE_PREFIX_TEMPLATE.format(repo_name="fake_repo", short_id="fake_id")
195 repo_name='fake_repo',
196 short_id='fake_id')
197 assert isinstance(prefix, str)
193 assert isinstance(prefix, str)
198 assert prefix == expected_prefix
194 assert prefix == expected_prefix
@@ -64,18 +64,14 b' class TestBranches(BackendTestMixin):'
64 def test_new_head(self):
64 def test_new_head(self):
65 tip = self.repo.get_commit()
65 tip = self.repo.get_commit()
66
66
67 self.imc.add(
67 self.imc.add(FileNode(b"docs/index.txt", content=b"Documentation\n"))
68 FileNode(b"docs/index.txt", content=b"Documentation\n")
69 )
70 foobar_tip = self.imc.commit(
68 foobar_tip = self.imc.commit(
71 message="New branch: foobar",
69 message="New branch: foobar",
72 author="joe <joe@rhodecode.com>",
70 author="joe <joe@rhodecode.com>",
73 branch="foobar",
71 branch="foobar",
74 parents=[tip],
72 parents=[tip],
75 )
73 )
76 self.imc.change(
74 self.imc.change(FileNode(b"docs/index.txt", content=b"Documentation\nand more...\n"))
77 FileNode(b"docs/index.txt", content=b"Documentation\nand more...\n")
78 )
79 assert foobar_tip.branch == "foobar"
75 assert foobar_tip.branch == "foobar"
80 newtip = self.imc.commit(
76 newtip = self.imc.commit(
81 message="At foobar_tip branch",
77 message="At foobar_tip branch",
@@ -96,21 +92,15 b' class TestBranches(BackendTestMixin):'
96 @pytest.mark.backends("git", "hg")
92 @pytest.mark.backends("git", "hg")
97 def test_branch_with_slash_in_name(self):
93 def test_branch_with_slash_in_name(self):
98 self.imc.add(FileNode(b"extrafile", content=b"Some data\n"))
94 self.imc.add(FileNode(b"extrafile", content=b"Some data\n"))
99 self.imc.commit(
95 self.imc.commit("Branch with a slash!", author="joe <joe@rhodecode.com>", branch="issue/123")
100 "Branch with a slash!", author="joe <joe@rhodecode.com>", branch="issue/123"
101 )
102 assert "issue/123" in self.repo.branches
96 assert "issue/123" in self.repo.branches
103
97
104 @pytest.mark.backends("git", "hg")
98 @pytest.mark.backends("git", "hg")
105 def test_branch_with_slash_in_name_and_similar_without(self):
99 def test_branch_with_slash_in_name_and_similar_without(self):
106 self.imc.add(FileNode(b"extrafile", content=b"Some data\n"))
100 self.imc.add(FileNode(b"extrafile", content=b"Some data\n"))
107 self.imc.commit(
101 self.imc.commit("Branch with a slash!", author="joe <joe@rhodecode.com>", branch="issue/123")
108 "Branch with a slash!", author="joe <joe@rhodecode.com>", branch="issue/123"
109 )
110 self.imc.add(FileNode(b"extrafile II", content=b"Some data\n"))
102 self.imc.add(FileNode(b"extrafile II", content=b"Some data\n"))
111 self.imc.commit(
103 self.imc.commit("Branch without a slash...", author="joe <joe@rhodecode.com>", branch="123")
112 "Branch without a slash...", author="joe <joe@rhodecode.com>", branch="123"
113 )
114 assert "issue/123" in self.repo.branches
104 assert "issue/123" in self.repo.branches
115 assert "123" in self.repo.branches
105 assert "123" in self.repo.branches
116
106
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -28,9 +27,7 b' from rhodecode.lib.vcs import client_htt'
28
27
29
28
30 def is_new_connection(logger, level, message):
29 def is_new_connection(logger, level, message):
31 return (
30 return logger == "requests.packages.urllib3.connectionpool" and message.startswith("Starting new HTTP")
32 logger == 'requests.packages.urllib3.connectionpool' and
33 message.startswith('Starting new HTTP'))
34
31
35
32
36 @pytest.fixture()
33 @pytest.fixture()
@@ -54,7 +51,7 b' def stub_fail_session():'
54 """
51 """
55 session = mock.Mock()
52 session = mock.Mock()
56 post = session.post()
53 post = session.post()
57 post.content = msgpack.packb({'error': '500'})
54 post.content = msgpack.packb({"error": "500"})
58 post.status_code = 500
55 post.status_code = 500
59
56
60 session.reset_mock()
57 session.reset_mock()
@@ -89,44 +86,37 b' def test_uses_persistent_http_connection'
89 for x in range(5):
86 for x in range(5):
90 remote_call(normal=True, closed=False)
87 remote_call(normal=True, closed=False)
91
88
92 new_connections = [
89 new_connections = [r for r in caplog.record_tuples if is_new_connection(*r)]
93 r for r in caplog.record_tuples if is_new_connection(*r)]
94 assert len(new_connections) <= 1
90 assert len(new_connections) <= 1
95
91
96
92
97 def test_repo_maker_uses_session_for_classmethods(stub_session_factory):
93 def test_repo_maker_uses_session_for_classmethods(stub_session_factory):
98 repo_maker = client_http.RemoteVCSMaker(
94 repo_maker = client_http.RemoteVCSMaker("server_and_port", "endpoint", "test_dummy_scm", stub_session_factory)
99 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
100 repo_maker.example_call()
95 repo_maker.example_call()
101 stub_session_factory().post.assert_called_with(
96 stub_session_factory().post.assert_called_with("http://server_and_port/endpoint", data=mock.ANY)
102 'http://server_and_port/endpoint', data=mock.ANY)
103
97
104
98
105 def test_repo_maker_uses_session_for_instance_methods(
99 def test_repo_maker_uses_session_for_instance_methods(stub_session_factory, config):
106 stub_session_factory, config):
100 repo_maker = client_http.RemoteVCSMaker("server_and_port", "endpoint", "test_dummy_scm", stub_session_factory)
107 repo_maker = client_http.RemoteVCSMaker(
101 repo = repo_maker("stub_path", "stub_repo_id", config)
108 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
109 repo = repo_maker('stub_path', 'stub_repo_id', config)
110 repo.example_call()
102 repo.example_call()
111 stub_session_factory().post.assert_called_with(
103 stub_session_factory().post.assert_called_with("http://server_and_port/endpoint", data=mock.ANY)
112 'http://server_and_port/endpoint', data=mock.ANY)
113
104
114
105
115 @mock.patch('rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory')
106 @mock.patch("rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory")
116 @mock.patch('rhodecode.lib.vcs.connection')
107 @mock.patch("rhodecode.lib.vcs.connection")
117 def test_connect_passes_in_the_same_session(
108 def test_connect_passes_in_the_same_session(connection, session_factory_class, stub_session):
118 connection, session_factory_class, stub_session):
119 session_factory = session_factory_class.return_value
109 session_factory = session_factory_class.return_value
120 session_factory.return_value = stub_session
110 session_factory.return_value = stub_session
121
111
122 vcs.connect_http('server_and_port')
112 vcs.connect_http("server_and_port")
123
113
124
114
125 def test_repo_maker_uses_session_that_throws_error(
115 def test_repo_maker_uses_session_that_throws_error(stub_session_failing_factory, config):
126 stub_session_failing_factory, config):
127 repo_maker = client_http.RemoteVCSMaker(
116 repo_maker = client_http.RemoteVCSMaker(
128 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_failing_factory)
117 "server_and_port", "endpoint", "test_dummy_scm", stub_session_failing_factory
129 repo = repo_maker('stub_path', 'stub_repo_id', config)
118 )
119 repo = repo_maker("stub_path", "stub_repo_id", config)
130
120
131 with pytest.raises(exceptions.HttpVCSCommunicationError):
121 with pytest.raises(exceptions.HttpVCSCommunicationError):
132 repo.example_call()
122 repo.example_call()
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -23,27 +22,31 b' import time'
23 import pytest
22 import pytest
24
23
25 from rhodecode.lib.str_utils import safe_bytes
24 from rhodecode.lib.str_utils import safe_bytes
26 from rhodecode.lib.vcs.backends.base import (
25 from rhodecode.lib.vcs.backends.base import CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit
27 CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit)
28 from rhodecode.lib.vcs.exceptions import (
26 from rhodecode.lib.vcs.exceptions import (
29 BranchDoesNotExistError, CommitDoesNotExistError,
27 BranchDoesNotExistError,
30 RepositoryError, EmptyRepositoryError)
28 CommitDoesNotExistError,
29 RepositoryError,
30 EmptyRepositoryError,
31 )
31 from rhodecode.lib.vcs.nodes import (
32 from rhodecode.lib.vcs.nodes import (
32 FileNode, AddedFileNodesGenerator,
33 FileNode,
33 ChangedFileNodesGenerator, RemovedFileNodesGenerator)
34 AddedFileNodesGenerator,
35 ChangedFileNodesGenerator,
36 RemovedFileNodesGenerator,
37 )
34 from rhodecode.tests import get_new_dir
38 from rhodecode.tests import get_new_dir
35 from rhodecode.tests.vcs.conftest import BackendTestMixin
39 from rhodecode.tests.vcs.conftest import BackendTestMixin
36
40
37
41
38 class TestBaseChangeset(object):
42 class TestBaseChangeset(object):
39
40 def test_is_deprecated(self):
43 def test_is_deprecated(self):
41 from rhodecode.lib.vcs.backends.base import BaseChangeset
44 from rhodecode.lib.vcs.backends.base import BaseChangeset
45
42 pytest.deprecated_call(BaseChangeset)
46 pytest.deprecated_call(BaseChangeset)
43
47
44
48
45 class TestEmptyCommit(object):
49 class TestEmptyCommit(object):
46
47 def test_branch_without_alias_returns_none(self):
50 def test_branch_without_alias_returns_none(self):
48 commit = EmptyCommit()
51 commit = EmptyCommit()
49 assert commit.branch is None
52 assert commit.branch is None
@@ -58,29 +61,28 b' class TestCommitsInNonEmptyRepo(BackendT'
58 start_date = datetime.datetime(2010, 1, 1, 20)
61 start_date = datetime.datetime(2010, 1, 1, 20)
59 for x in range(5):
62 for x in range(5):
60 yield {
63 yield {
61 'message': 'Commit %d' % x,
64 "message": "Commit %d" % x,
62 'author': 'Joe Doe <joe.doe@example.com>',
65 "author": "Joe Doe <joe.doe@example.com>",
63 'date': start_date + datetime.timedelta(hours=12 * x),
66 "date": start_date + datetime.timedelta(hours=12 * x),
64 'added': [
67 "added": [
65 FileNode(b'file_%d.txt' % x,
68 FileNode(b"file_%d.txt" % x, content=b"Foobar %d" % x),
66 content=b'Foobar %d' % x),
67 ],
69 ],
68 }
70 }
69
71
70 def test_walk_returns_empty_list_in_case_of_file(self):
72 def test_walk_returns_empty_list_in_case_of_file(self):
71 result = list(self.tip.walk('file_0.txt'))
73 result = list(self.tip.walk("file_0.txt"))
72 assert result == []
74 assert result == []
73
75
74 @pytest.mark.backends("git", "hg")
76 @pytest.mark.backends("git", "hg")
75 def test_new_branch(self):
77 def test_new_branch(self):
76 self.imc.add(FileNode(b'docs/index.txt', content=b'Documentation\n'))
78 self.imc.add(FileNode(b"docs/index.txt", content=b"Documentation\n"))
77 foobar_tip = self.imc.commit(
79 foobar_tip = self.imc.commit(
78 message='New branch: foobar',
80 message="New branch: foobar",
79 author='joe <joe@rhodecode.com>',
81 author="joe <joe@rhodecode.com>",
80 branch='foobar',
82 branch="foobar",
81 )
83 )
82 assert 'foobar' in self.repo.branches
84 assert "foobar" in self.repo.branches
83 assert foobar_tip.branch == 'foobar'
85 assert foobar_tip.branch == "foobar"
84 # 'foobar' should be the only branch that contains the new commit
86 # 'foobar' should be the only branch that contains the new commit
85 branch = list(self.repo.branches.values())
87 branch = list(self.repo.branches.values())
86 assert branch[0] != branch[1]
88 assert branch[0] != branch[1]
@@ -89,18 +91,14 b' class TestCommitsInNonEmptyRepo(BackendT'
89 def test_new_head_in_default_branch(self):
91 def test_new_head_in_default_branch(self):
90 tip = self.repo.get_commit()
92 tip = self.repo.get_commit()
91
93
92 self.imc.add(
94 self.imc.add(FileNode(b"docs/index.txt", content=b"Documentation\n"))
93 FileNode(b"docs/index.txt", content=b"Documentation\n")
94 )
95 foobar_tip = self.imc.commit(
95 foobar_tip = self.imc.commit(
96 message="New branch: foobar",
96 message="New branch: foobar",
97 author="joe <joe@rhodecode.com>",
97 author="joe <joe@rhodecode.com>",
98 branch="foobar",
98 branch="foobar",
99 parents=[tip],
99 parents=[tip],
100 )
100 )
101 self.imc.change(
101 self.imc.change(FileNode(b"docs/index.txt", content=b"Documentation\nand more...\n"))
102 FileNode(b"docs/index.txt", content=b"Documentation\nand more...\n")
103 )
104 assert foobar_tip.branch == "foobar"
102 assert foobar_tip.branch == "foobar"
105 newtip = self.imc.commit(
103 newtip = self.imc.commit(
106 message="At foobar_tip branch",
104 message="At foobar_tip branch",
@@ -132,51 +130,55 b' class TestCommitsInNonEmptyRepo(BackendT'
132 :return:
130 :return:
133 """
131 """
134 DEFAULT_BRANCH = self.repo.DEFAULT_BRANCH_NAME
132 DEFAULT_BRANCH = self.repo.DEFAULT_BRANCH_NAME
135 TEST_BRANCH = 'docs'
133 TEST_BRANCH = "docs"
136 org_tip = self.repo.get_commit()
134 org_tip = self.repo.get_commit()
137
135
138 self.imc.add(FileNode(b'readme.txt', content=b'Document\n'))
136 self.imc.add(FileNode(b"readme.txt", content=b"Document\n"))
139 initial = self.imc.commit(
137 initial = self.imc.commit(
140 message='Initial commit',
138 message="Initial commit",
141 author='joe <joe@rhodecode.com>',
139 author="joe <joe@rhodecode.com>",
142 parents=[org_tip],
140 parents=[org_tip],
143 branch=DEFAULT_BRANCH,)
141 branch=DEFAULT_BRANCH,
142 )
144
143
145 self.imc.add(FileNode(b'newdoc.txt', content=b'foobar\n'))
144 self.imc.add(FileNode(b"newdoc.txt", content=b"foobar\n"))
146 docs_branch_commit1 = self.imc.commit(
145 docs_branch_commit1 = self.imc.commit(
147 message='New branch: docs',
146 message="New branch: docs",
148 author='joe <joe@rhodecode.com>',
147 author="joe <joe@rhodecode.com>",
149 parents=[initial],
148 parents=[initial],
150 branch=TEST_BRANCH,)
149 branch=TEST_BRANCH,
150 )
151
151
152 self.imc.add(FileNode(b'newdoc2.txt', content=b'foobar2\n'))
152 self.imc.add(FileNode(b"newdoc2.txt", content=b"foobar2\n"))
153 docs_branch_commit2 = self.imc.commit(
153 docs_branch_commit2 = self.imc.commit(
154 message='New branch: docs2',
154 message="New branch: docs2",
155 author='joe <joe@rhodecode.com>',
155 author="joe <joe@rhodecode.com>",
156 parents=[docs_branch_commit1],
156 parents=[docs_branch_commit1],
157 branch=TEST_BRANCH,)
157 branch=TEST_BRANCH,
158 )
158
159
159 self.imc.add(FileNode(b'newfile', content=b'hello world\n'))
160 self.imc.add(FileNode(b"newfile", content=b"hello world\n"))
160 self.imc.commit(
161 self.imc.commit(
161 message='Back in default branch',
162 message="Back in default branch",
162 author='joe <joe@rhodecode.com>',
163 author="joe <joe@rhodecode.com>",
163 parents=[initial],
164 parents=[initial],
164 branch=DEFAULT_BRANCH,)
165 branch=DEFAULT_BRANCH,
166 )
165
167
166 default_branch_commits = self.repo.get_commits(branch_name=DEFAULT_BRANCH)
168 default_branch_commits = self.repo.get_commits(branch_name=DEFAULT_BRANCH)
167 assert docs_branch_commit1 not in list(default_branch_commits)
169 assert docs_branch_commit1 not in list(default_branch_commits)
168 assert docs_branch_commit2 not in list(default_branch_commits)
170 assert docs_branch_commit2 not in list(default_branch_commits)
169
171
170 docs_branch_commits = self.repo.get_commits(
172 docs_branch_commits = self.repo.get_commits(
171 start_id=self.repo.commit_ids[0], end_id=self.repo.commit_ids[-1],
173 start_id=self.repo.commit_ids[0], end_id=self.repo.commit_ids[-1], branch_name=TEST_BRANCH
172 branch_name=TEST_BRANCH)
174 )
173 assert docs_branch_commit1 in list(docs_branch_commits)
175 assert docs_branch_commit1 in list(docs_branch_commits)
174 assert docs_branch_commit2 in list(docs_branch_commits)
176 assert docs_branch_commit2 in list(docs_branch_commits)
175
177
176 @pytest.mark.backends("svn")
178 @pytest.mark.backends("svn")
177 def test_get_commits_respects_branch_name_svn(self, vcsbackend_svn):
179 def test_get_commits_respects_branch_name_svn(self, vcsbackend_svn):
178 repo = vcsbackend_svn['svn-simple-layout']
180 repo = vcsbackend_svn["svn-simple-layout"]
179 commits = repo.get_commits(branch_name='trunk')
181 commits = repo.get_commits(branch_name="trunk")
180 commit_indexes = [c.idx for c in commits]
182 commit_indexes = [c.idx for c in commits]
181 assert commit_indexes == [1, 2, 3, 7, 12, 15]
183 assert commit_indexes == [1, 2, 3, 7, 12, 15]
182
184
@@ -214,13 +216,10 b' class TestCommits(BackendTestMixin):'
214 start_date = datetime.datetime(2010, 1, 1, 20)
216 start_date = datetime.datetime(2010, 1, 1, 20)
215 for x in range(5):
217 for x in range(5):
216 yield {
218 yield {
217 'message': 'Commit %d' % x,
219 "message": "Commit %d" % x,
218 'author': 'Joe Doe <joe.doe@example.com>',
220 "author": "Joe Doe <joe.doe@example.com>",
219 'date': start_date + datetime.timedelta(hours=12 * x),
221 "date": start_date + datetime.timedelta(hours=12 * x),
220 'added': [
222 "added": [FileNode(b"file_%d.txt" % x, content=b"Foobar %d" % x)],
221 FileNode(b'file_%d.txt' % x,
222 content=b'Foobar %d' % x)
223 ],
224 }
223 }
225
224
226 def test_simple(self):
225 def test_simple(self):
@@ -231,11 +230,11 b' class TestCommits(BackendTestMixin):'
231 tip = self.repo.get_commit()
230 tip = self.repo.get_commit()
232 # json.dumps(tip) uses .__json__() method
231 # json.dumps(tip) uses .__json__() method
233 data = tip.__json__()
232 data = tip.__json__()
234 assert 'branch' in data
233 assert "branch" in data
235 assert data['revision']
234 assert data["revision"]
236
235
237 def test_retrieve_tip(self):
236 def test_retrieve_tip(self):
238 tip = self.repo.get_commit('tip')
237 tip = self.repo.get_commit("tip")
239 assert tip == self.repo.get_commit()
238 assert tip == self.repo.get_commit()
240
239
241 def test_invalid(self):
240 def test_invalid(self):
@@ -259,34 +258,34 b' class TestCommits(BackendTestMixin):'
259
258
260 def test_size(self):
259 def test_size(self):
261 tip = self.repo.get_commit()
260 tip = self.repo.get_commit()
262 size = 5 * len('Foobar N') # Size of 5 files
261 size = 5 * len("Foobar N") # Size of 5 files
263 assert tip.size == size
262 assert tip.size == size
264
263
265 def test_size_at_commit(self):
264 def test_size_at_commit(self):
266 tip = self.repo.get_commit()
265 tip = self.repo.get_commit()
267 size = 5 * len('Foobar N') # Size of 5 files
266 size = 5 * len("Foobar N") # Size of 5 files
268 assert self.repo.size_at_commit(tip.raw_id) == size
267 assert self.repo.size_at_commit(tip.raw_id) == size
269
268
270 def test_size_at_first_commit(self):
269 def test_size_at_first_commit(self):
271 commit = self.repo[0]
270 commit = self.repo[0]
272 size = len('Foobar N') # Size of 1 file
271 size = len("Foobar N") # Size of 1 file
273 assert self.repo.size_at_commit(commit.raw_id) == size
272 assert self.repo.size_at_commit(commit.raw_id) == size
274
273
275 def test_author(self):
274 def test_author(self):
276 tip = self.repo.get_commit()
275 tip = self.repo.get_commit()
277 assert_text_equal(tip.author, 'Joe Doe <joe.doe@example.com>')
276 assert_text_equal(tip.author, "Joe Doe <joe.doe@example.com>")
278
277
279 def test_author_name(self):
278 def test_author_name(self):
280 tip = self.repo.get_commit()
279 tip = self.repo.get_commit()
281 assert_text_equal(tip.author_name, 'Joe Doe')
280 assert_text_equal(tip.author_name, "Joe Doe")
282
281
283 def test_author_email(self):
282 def test_author_email(self):
284 tip = self.repo.get_commit()
283 tip = self.repo.get_commit()
285 assert_text_equal(tip.author_email, 'joe.doe@example.com')
284 assert_text_equal(tip.author_email, "joe.doe@example.com")
286
285
287 def test_message(self):
286 def test_message(self):
288 tip = self.repo.get_commit()
287 tip = self.repo.get_commit()
289 assert_text_equal(tip.message, 'Commit 4')
288 assert_text_equal(tip.message, "Commit 4")
290
289
291 def test_diff(self):
290 def test_diff(self):
292 tip = self.repo.get_commit()
291 tip = self.repo.get_commit()
@@ -296,7 +295,7 b' class TestCommits(BackendTestMixin):'
296 def test_prev(self):
295 def test_prev(self):
297 tip = self.repo.get_commit()
296 tip = self.repo.get_commit()
298 prev_commit = tip.prev()
297 prev_commit = tip.prev()
299 assert prev_commit.message == 'Commit 3'
298 assert prev_commit.message == "Commit 3"
300
299
301 def test_prev_raises_on_first_commit(self):
300 def test_prev_raises_on_first_commit(self):
302 commit = self.repo.get_commit(commit_idx=0)
301 commit = self.repo.get_commit(commit_idx=0)
@@ -311,7 +310,7 b' class TestCommits(BackendTestMixin):'
311 def test_next(self):
310 def test_next(self):
312 commit = self.repo.get_commit(commit_idx=2)
311 commit = self.repo.get_commit(commit_idx=2)
313 next_commit = commit.next()
312 next_commit = commit.next()
314 assert next_commit.message == 'Commit 3'
313 assert next_commit.message == "Commit 3"
315
314
316 def test_next_raises_on_tip(self):
315 def test_next_raises_on_tip(self):
317 commit = self.repo.get_commit()
316 commit = self.repo.get_commit()
@@ -320,36 +319,36 b' class TestCommits(BackendTestMixin):'
320
319
321 def test_get_path_commit(self):
320 def test_get_path_commit(self):
322 commit = self.repo.get_commit()
321 commit = self.repo.get_commit()
323 commit.get_path_commit('file_4.txt')
322 commit.get_path_commit("file_4.txt")
324 assert commit.message == 'Commit 4'
323 assert commit.message == "Commit 4"
325
324
326 def test_get_filenodes_generator(self):
325 def test_get_filenodes_generator(self):
327 tip = self.repo.get_commit()
326 tip = self.repo.get_commit()
328 filepaths = [node.path for node in tip.get_filenodes_generator()]
327 filepaths = [node.path for node in tip.get_filenodes_generator()]
329 assert filepaths == ['file_%d.txt' % x for x in range(5)]
328 assert filepaths == ["file_%d.txt" % x for x in range(5)]
330
329
331 def test_get_file_annotate(self):
330 def test_get_file_annotate(self):
332 file_added_commit = self.repo.get_commit(commit_idx=3)
331 file_added_commit = self.repo.get_commit(commit_idx=3)
333 annotations = list(file_added_commit.get_file_annotate('file_3.txt'))
332 annotations = list(file_added_commit.get_file_annotate("file_3.txt"))
334
333
335 line_no, commit_id, commit_loader, line = annotations[0]
334 line_no, commit_id, commit_loader, line = annotations[0]
336
335
337 assert line_no == 1
336 assert line_no == 1
338 assert commit_id == file_added_commit.raw_id
337 assert commit_id == file_added_commit.raw_id
339 assert commit_loader() == file_added_commit
338 assert commit_loader() == file_added_commit
340 assert b'Foobar 3' in line
339 assert b"Foobar 3" in line
341
340
342 def test_get_file_annotate_does_not_exist(self):
341 def test_get_file_annotate_does_not_exist(self):
343 file_added_commit = self.repo.get_commit(commit_idx=2)
342 file_added_commit = self.repo.get_commit(commit_idx=2)
344 # TODO: Should use a specific exception class here?
343 # TODO: Should use a specific exception class here?
345 with pytest.raises(Exception):
344 with pytest.raises(Exception):
346 list(file_added_commit.get_file_annotate('file_3.txt'))
345 list(file_added_commit.get_file_annotate("file_3.txt"))
347
346
348 def test_get_file_annotate_tip(self):
347 def test_get_file_annotate_tip(self):
349 tip = self.repo.get_commit()
348 tip = self.repo.get_commit()
350 commit = self.repo.get_commit(commit_idx=3)
349 commit = self.repo.get_commit(commit_idx=3)
351 expected_values = list(commit.get_file_annotate('file_3.txt'))
350 expected_values = list(commit.get_file_annotate("file_3.txt"))
352 annotations = list(tip.get_file_annotate('file_3.txt'))
351 annotations = list(tip.get_file_annotate("file_3.txt"))
353
352
354 # Note: Skip index 2 because the loader function is not the same
353 # Note: Skip index 2 because the loader function is not the same
355 for idx in (0, 1, 3):
354 for idx in (0, 1, 3):
@@ -398,7 +397,7 b' class TestCommits(BackendTestMixin):'
398 repo = self.Backend(repo_path, create=True)
397 repo = self.Backend(repo_path, create=True)
399
398
400 with pytest.raises(EmptyRepositoryError):
399 with pytest.raises(EmptyRepositoryError):
401 list(repo.get_commits(start_id='foobar'))
400 list(repo.get_commits(start_id="foobar"))
402
401
403 def test_get_commits_respects_hidden(self):
402 def test_get_commits_respects_hidden(self):
404 commits = self.repo.get_commits(show_hidden=True)
403 commits = self.repo.get_commits(show_hidden=True)
@@ -424,8 +423,7 b' class TestCommits(BackendTestMixin):'
424
423
425 def test_get_commits_respects_start_date_with_branch(self):
424 def test_get_commits_respects_start_date_with_branch(self):
426 start_date = datetime.datetime(2010, 1, 2)
425 start_date = datetime.datetime(2010, 1, 2)
427 commits = self.repo.get_commits(
426 commits = self.repo.get_commits(start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME)
428 start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME)
429 assert isinstance(commits, CollectionGenerator)
427 assert isinstance(commits, CollectionGenerator)
430 # Should be 4 commits after 2010-01-02 00:00:00
428 # Should be 4 commits after 2010-01-02 00:00:00
431 assert len(commits) == 4
429 assert len(commits) == 4
@@ -435,8 +433,7 b' class TestCommits(BackendTestMixin):'
435 def test_get_commits_respects_start_date_and_end_date(self):
433 def test_get_commits_respects_start_date_and_end_date(self):
436 start_date = datetime.datetime(2010, 1, 2)
434 start_date = datetime.datetime(2010, 1, 2)
437 end_date = datetime.datetime(2010, 1, 3)
435 end_date = datetime.datetime(2010, 1, 3)
438 commits = self.repo.get_commits(start_date=start_date,
436 commits = self.repo.get_commits(start_date=start_date, end_date=end_date)
439 end_date=end_date)
440 assert isinstance(commits, CollectionGenerator)
437 assert isinstance(commits, CollectionGenerator)
441 assert len(commits) == 2
438 assert len(commits) == 2
442 for c in commits:
439 for c in commits:
@@ -459,23 +456,22 b' class TestCommits(BackendTestMixin):'
459 assert list(commit_ids) == list(reversed(self.repo.commit_ids))
456 assert list(commit_ids) == list(reversed(self.repo.commit_ids))
460
457
461 def test_get_commits_slice_generator(self):
458 def test_get_commits_slice_generator(self):
462 commits = self.repo.get_commits(
459 commits = self.repo.get_commits(branch_name=self.repo.DEFAULT_BRANCH_NAME)
463 branch_name=self.repo.DEFAULT_BRANCH_NAME)
464 assert isinstance(commits, CollectionGenerator)
460 assert isinstance(commits, CollectionGenerator)
465 commit_slice = list(commits[1:3])
461 commit_slice = list(commits[1:3])
466 assert len(commit_slice) == 2
462 assert len(commit_slice) == 2
467
463
468 def test_get_commits_raise_commitdoesnotexist_for_wrong_start(self):
464 def test_get_commits_raise_commitdoesnotexist_for_wrong_start(self):
469 with pytest.raises(CommitDoesNotExistError):
465 with pytest.raises(CommitDoesNotExistError):
470 list(self.repo.get_commits(start_id='foobar'))
466 list(self.repo.get_commits(start_id="foobar"))
471
467
472 def test_get_commits_raise_commitdoesnotexist_for_wrong_end(self):
468 def test_get_commits_raise_commitdoesnotexist_for_wrong_end(self):
473 with pytest.raises(CommitDoesNotExistError):
469 with pytest.raises(CommitDoesNotExistError):
474 list(self.repo.get_commits(end_id='foobar'))
470 list(self.repo.get_commits(end_id="foobar"))
475
471
476 def test_get_commits_raise_branchdoesnotexist_for_wrong_branch_name(self):
472 def test_get_commits_raise_branchdoesnotexist_for_wrong_branch_name(self):
477 with pytest.raises(BranchDoesNotExistError):
473 with pytest.raises(BranchDoesNotExistError):
478 list(self.repo.get_commits(branch_name='foobar'))
474 list(self.repo.get_commits(branch_name="foobar"))
479
475
480 def test_get_commits_raise_repositoryerror_for_wrong_start_end(self):
476 def test_get_commits_raise_repositoryerror_for_wrong_start_end(self):
481 start_id = self.repo.commit_ids[-1]
477 start_id = self.repo.commit_ids[-1]
@@ -498,13 +494,16 b' class TestCommits(BackendTestMixin):'
498 assert commit1 is not None
494 assert commit1 is not None
499 assert commit2 is not None
495 assert commit2 is not None
500 assert 1 != commit1
496 assert 1 != commit1
501 assert 'string' != commit1
497 assert "string" != commit1
502
498
503
499
504 @pytest.mark.parametrize("filename, expected", [
500 @pytest.mark.parametrize(
505 ("README.rst", False),
501 "filename, expected",
506 ("README", True),
502 [
507 ])
503 ("README.rst", False),
504 ("README", True),
505 ],
506 )
508 def test_commit_is_link(vcsbackend, filename, expected):
507 def test_commit_is_link(vcsbackend, filename, expected):
509 commit = vcsbackend.repo.get_commit()
508 commit = vcsbackend.repo.get_commit()
510 link_status = commit.is_link(filename)
509 link_status = commit.is_link(filename)
@@ -519,75 +518,74 b' class TestCommitsChanges(BackendTestMixi'
519 def _get_commits(cls):
518 def _get_commits(cls):
520 return [
519 return [
521 {
520 {
522 'message': 'Initial',
521 "message": "Initial",
523 'author': 'Joe Doe <joe.doe@example.com>',
522 "author": "Joe Doe <joe.doe@example.com>",
524 'date': datetime.datetime(2010, 1, 1, 20),
523 "date": datetime.datetime(2010, 1, 1, 20),
525 'added': [
524 "added": [
526 FileNode(b'foo/bar', content=b'foo'),
525 FileNode(b"foo/bar", content=b"foo"),
527 FileNode(safe_bytes('foo/baΕ‚'), content=b'foo'),
526 FileNode(safe_bytes("foo/baΕ‚"), content=b"foo"),
528 FileNode(b'foobar', content=b'foo'),
527 FileNode(b"foobar", content=b"foo"),
529 FileNode(b'qwe', content=b'foo'),
528 FileNode(b"qwe", content=b"foo"),
530 ],
529 ],
531 },
530 },
532 {
531 {
533 'message': 'Massive changes',
532 "message": "Massive changes",
534 'author': 'Joe Doe <joe.doe@example.com>',
533 "author": "Joe Doe <joe.doe@example.com>",
535 'date': datetime.datetime(2010, 1, 1, 22),
534 "date": datetime.datetime(2010, 1, 1, 22),
536 'added': [FileNode(b'fallout', content=b'War never changes')],
535 "added": [FileNode(b"fallout", content=b"War never changes")],
537 'changed': [
536 "changed": [
538 FileNode(b'foo/bar', content=b'baz'),
537 FileNode(b"foo/bar", content=b"baz"),
539 FileNode(b'foobar', content=b'baz'),
538 FileNode(b"foobar", content=b"baz"),
540 ],
539 ],
541 'removed': [FileNode(b'qwe')],
540 "removed": [FileNode(b"qwe")],
542 },
541 },
543 ]
542 ]
544
543
545 def test_initial_commit(self, local_dt_to_utc):
544 def test_initial_commit(self, local_dt_to_utc):
546 commit = self.repo.get_commit(commit_idx=0)
545 commit = self.repo.get_commit(commit_idx=0)
547 assert set(commit.added) == {
546 assert set(commit.added) == {
548 commit.get_node('foo/bar'),
547 commit.get_node("foo/bar"),
549 commit.get_node('foo/baΕ‚'),
548 commit.get_node("foo/baΕ‚"),
550 commit.get_node('foobar'),
549 commit.get_node("foobar"),
551 commit.get_node('qwe')
550 commit.get_node("qwe"),
552 }
551 }
553 assert set(commit.changed) == set()
552 assert set(commit.changed) == set()
554 assert set(commit.removed) == set()
553 assert set(commit.removed) == set()
555 assert set(commit.affected_files) == {'foo/bar', 'foo/baΕ‚', 'foobar', 'qwe'}
554 assert set(commit.affected_files) == {"foo/bar", "foo/baΕ‚", "foobar", "qwe"}
556 assert commit.date == local_dt_to_utc(
555 assert commit.date == local_dt_to_utc(datetime.datetime(2010, 1, 1, 20, 0))
557 datetime.datetime(2010, 1, 1, 20, 0))
558
556
559 def test_head_added(self):
557 def test_head_added(self):
560 commit = self.repo.get_commit()
558 commit = self.repo.get_commit()
561 assert isinstance(commit.added, AddedFileNodesGenerator)
559 assert isinstance(commit.added, AddedFileNodesGenerator)
562 assert set(commit.added) == {commit.get_node('fallout')}
560 assert set(commit.added) == {commit.get_node("fallout")}
563 assert isinstance(commit.changed, ChangedFileNodesGenerator)
561 assert isinstance(commit.changed, ChangedFileNodesGenerator)
564 assert set(commit.changed) == {commit.get_node('foo/bar'), commit.get_node('foobar')}
562 assert set(commit.changed) == {commit.get_node("foo/bar"), commit.get_node("foobar")}
565 assert isinstance(commit.removed, RemovedFileNodesGenerator)
563 assert isinstance(commit.removed, RemovedFileNodesGenerator)
566 assert len(commit.removed) == 1
564 assert len(commit.removed) == 1
567 assert list(commit.removed)[0].path == 'qwe'
565 assert list(commit.removed)[0].path == "qwe"
568
566
569 def test_get_filemode(self):
567 def test_get_filemode(self):
570 commit = self.repo.get_commit()
568 commit = self.repo.get_commit()
571 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bar')
569 assert FILEMODE_DEFAULT == commit.get_file_mode("foo/bar")
572
570
573 def test_get_filemode_non_ascii(self):
571 def test_get_filemode_non_ascii(self):
574 commit = self.repo.get_commit()
572 commit = self.repo.get_commit()
575 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/baΕ‚')
573 assert FILEMODE_DEFAULT == commit.get_file_mode("foo/baΕ‚")
576 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/baΕ‚')
574 assert FILEMODE_DEFAULT == commit.get_file_mode("foo/baΕ‚")
577
575
578 def test_get_path_history(self):
576 def test_get_path_history(self):
579 commit = self.repo.get_commit()
577 commit = self.repo.get_commit()
580 history = commit.get_path_history('foo/bar')
578 history = commit.get_path_history("foo/bar")
581 assert len(history) == 2
579 assert len(history) == 2
582
580
583 def test_get_path_history_with_limit(self):
581 def test_get_path_history_with_limit(self):
584 commit = self.repo.get_commit()
582 commit = self.repo.get_commit()
585 history = commit.get_path_history('foo/bar', limit=1)
583 history = commit.get_path_history("foo/bar", limit=1)
586 assert len(history) == 1
584 assert len(history) == 1
587
585
588 def test_get_path_history_first_commit(self):
586 def test_get_path_history_first_commit(self):
589 commit = self.repo[0]
587 commit = self.repo[0]
590 history = commit.get_path_history('foo/bar')
588 history = commit.get_path_history("foo/bar")
591 assert len(history) == 1
589 assert len(history) == 1
592
590
593
591
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -21,14 +20,17 b' import pytest'
21
20
22
21
23 def test_get_existing_value(config):
22 def test_get_existing_value(config):
24 value = config.get('section-a', 'a-1')
23 value = config.get("section-a", "a-1")
25 assert value == 'value-a-1'
24 assert value == "value-a-1"
26
25
27
26
28 @pytest.mark.parametrize('section, option', [
27 @pytest.mark.parametrize(
29 ('section-a', 'does-not-exist'),
28 "section, option",
30 ('does-not-exist', 'does-not-exist'),
29 [
31 ])
30 ("section-a", "does-not-exist"),
31 ("does-not-exist", "does-not-exist"),
32 ],
33 )
32 def test_get_unset_value_returns_none(config, section, option):
34 def test_get_unset_value_returns_none(config, section, option):
33 value = config.get(section, option)
35 value = config.get(section, option)
34 assert value is None
36 assert value is None
@@ -41,11 +43,11 b' def test_allows_to_create_a_copy(config)'
41
43
42 def test_changes_in_the_copy_dont_affect_the_original(config):
44 def test_changes_in_the_copy_dont_affect_the_original(config):
43 clone = config.copy()
45 clone = config.copy()
44 clone.set('section-a', 'a-2', 'value-a-2')
46 clone.set("section-a", "a-2", "value-a-2")
45 assert set(config.serialize()) == {('section-a', 'a-1', 'value-a-1')}
47 assert set(config.serialize()) == {("section-a", "a-1", "value-a-1")}
46
48
47
49
48 def test_changes_in_the_original_dont_affect_the_copy(config):
50 def test_changes_in_the_original_dont_affect_the_copy(config):
49 clone = config.copy()
51 clone = config.copy()
50 config.set('section-a', 'a-2', 'value-a-2')
52 config.set("section-a", "a-2", "value-a-2")
51 assert set(clone.serialize()) == {('section-a', 'a-1', 'value-a-1')}
53 assert set(clone.serialize()) == {("section-a", "a-1", "value-a-1")}
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -25,7 +24,6 b' from rhodecode.tests.vcs.conftest import'
25
24
26
25
27 class TestGetDiffValidation:
26 class TestGetDiffValidation:
28
29 def test_raises_on_string_input(self, vcsbackend):
27 def test_raises_on_string_input(self, vcsbackend):
30 repo = vcsbackend.repo
28 repo = vcsbackend.repo
31 with pytest.raises(TypeError):
29 with pytest.raises(TypeError):
@@ -54,63 +52,58 b' class TestGetDiffValidation:'
54 def test_supports_path1_parameter(self, vcsbackend):
52 def test_supports_path1_parameter(self, vcsbackend):
55 repo = vcsbackend.repo
53 repo = vcsbackend.repo
56 commit = repo[1]
54 commit = repo[1]
57 repo.get_diff(
55 repo.get_diff(repo.EMPTY_COMMIT, commit, path="vcs/__init__.py", path1="vcs/__init__.py")
58 repo.EMPTY_COMMIT, commit,
59 path='vcs/__init__.py', path1='vcs/__init__.py')
60
56
61 @pytest.mark.backends("git", "hg")
57 @pytest.mark.backends("git", "hg")
62 def test_raises_value_error_if_paths_not_supported(self, vcsbackend):
58 def test_raises_value_error_if_paths_not_supported(self, vcsbackend):
63 repo = vcsbackend.repo
59 repo = vcsbackend.repo
64 commit = repo[1]
60 commit = repo[1]
65 with pytest.raises(ValueError):
61 with pytest.raises(ValueError):
66 repo.get_diff(
62 repo.get_diff(repo.EMPTY_COMMIT, commit, path="trunk/example.py", path1="branches/argparse/example.py")
67 repo.EMPTY_COMMIT, commit,
68 path='trunk/example.py', path1='branches/argparse/example.py')
69
63
70
64
71 @pytest.mark.usefixtures("vcs_repository_support")
65 @pytest.mark.usefixtures("vcs_repository_support")
72 class TestRepositoryGetDiff(BackendTestMixin):
66 class TestRepositoryGetDiff(BackendTestMixin):
73
74 recreate_repo_per_test = False
67 recreate_repo_per_test = False
75
68
76 @classmethod
69 @classmethod
77 def _get_commits(cls):
70 def _get_commits(cls):
78 commits = [
71 commits = [
79 {
72 {
80 'message': 'Initial commit',
73 "message": "Initial commit",
81 'author': 'Joe Doe <joe.doe@example.com>',
74 "author": "Joe Doe <joe.doe@example.com>",
82 'date': datetime.datetime(2010, 1, 1, 20),
75 "date": datetime.datetime(2010, 1, 1, 20),
83 'added': [
76 "added": [
84 FileNode(b'foobar', content=b'foobar'),
77 FileNode(b"foobar", content=b"foobar"),
85 FileNode(b'foobar2', content=b'foobar2'),
78 FileNode(b"foobar2", content=b"foobar2"),
86 ],
79 ],
87 },
80 },
88 {
81 {
89 'message': 'Changed foobar, added foobar3',
82 "message": "Changed foobar, added foobar3",
90 'author': 'Jane Doe <jane.doe@example.com>',
83 "author": "Jane Doe <jane.doe@example.com>",
91 'date': datetime.datetime(2010, 1, 1, 21),
84 "date": datetime.datetime(2010, 1, 1, 21),
92 'added': [
85 "added": [
93 FileNode(b'foobar3', content=b'foobar3'),
86 FileNode(b"foobar3", content=b"foobar3"),
94 ],
87 ],
95 'changed': [
88 "changed": [
96 FileNode(b'foobar', b'FOOBAR'),
89 FileNode(b"foobar", b"FOOBAR"),
97 ],
90 ],
98 },
91 },
99 {
92 {
100 'message': 'Removed foobar, changed foobar3',
93 "message": "Removed foobar, changed foobar3",
101 'author': 'Jane Doe <jane.doe@example.com>',
94 "author": "Jane Doe <jane.doe@example.com>",
102 'date': datetime.datetime(2010, 1, 1, 22),
95 "date": datetime.datetime(2010, 1, 1, 22),
103 'changed': [
96 "changed": [
104 FileNode(b'foobar3', content=b'FOOBAR\nFOOBAR\nFOOBAR\n'),
97 FileNode(b"foobar3", content=b"FOOBAR\nFOOBAR\nFOOBAR\n"),
105 ],
98 ],
106 'removed': [FileNode(b'foobar')],
99 "removed": [FileNode(b"foobar")],
107 },
100 },
108 {
101 {
109 'message': 'Whitespace changes',
102 "message": "Whitespace changes",
110 'author': 'Jane Doe <jane.doe@example.com>',
103 "author": "Jane Doe <jane.doe@example.com>",
111 'date': datetime.datetime(2010, 1, 1, 23),
104 "date": datetime.datetime(2010, 1, 1, 23),
112 'changed': [
105 "changed": [
113 FileNode(b'foobar3', content=b'FOOBAR \nFOOBAR\nFOOBAR\n'),
106 FileNode(b"foobar3", content=b"FOOBAR \nFOOBAR\nFOOBAR\n"),
114 ],
107 ],
115 },
108 },
116 ]
109 ]
@@ -130,28 +123,24 b' class TestRepositoryGetDiff(BackendTestM'
130 assert diff.raw.tobytes() == self.third_commit_diffs[self.repo.alias]
123 assert diff.raw.tobytes() == self.third_commit_diffs[self.repo.alias]
131
124
132 def test_ignore_whitespace(self):
125 def test_ignore_whitespace(self):
133 diff = self.repo.get_diff(
126 diff = self.repo.get_diff(self.repo[2], self.repo[3], ignore_whitespace=True)
134 self.repo[2], self.repo[3], ignore_whitespace=True)
127 assert b"@@" not in diff.raw.tobytes()
135 assert b'@@' not in diff.raw.tobytes()
136
128
137 def test_only_one_file(self):
129 def test_only_one_file(self):
138 diff = self.repo.get_diff(
130 diff = self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[0], path="foobar")
139 self.repo.EMPTY_COMMIT, self.repo[0], path='foobar')
131 assert b"foobar2" not in diff.raw.tobytes()
140 assert b'foobar2' not in diff.raw.tobytes()
141
132
142 def test_context_parameter(self):
133 def test_context_parameter(self):
143 first_commit = self.repo.get_commit(commit_idx=0)
134 first_commit = self.repo.get_commit(commit_idx=0)
144 diff = self.repo.get_diff(
135 diff = self.repo.get_diff(self.repo.EMPTY_COMMIT, first_commit, context=2)
145 self.repo.EMPTY_COMMIT, first_commit, context=2)
146 assert diff.raw.tobytes() == self.first_commit_diffs[self.repo.alias]
136 assert diff.raw.tobytes() == self.first_commit_diffs[self.repo.alias]
147
137
148 def test_context_only_one_file(self):
138 def test_context_only_one_file(self):
149 diff = self.repo.get_diff(
139 diff = self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[0], path="foobar", context=2)
150 self.repo.EMPTY_COMMIT, self.repo[0], path='foobar', context=2)
151 assert diff.raw.tobytes() == self.first_commit_one_file[self.repo.alias]
140 assert diff.raw.tobytes() == self.first_commit_one_file[self.repo.alias]
152
141
153 first_commit_diffs = {
142 first_commit_diffs = {
154 'git': br"""diff --git a/foobar b/foobar
143 "git": rb"""diff --git a/foobar b/foobar
155 new file mode 100644
144 new file mode 100644
156 index 0000000..f6ea049
145 index 0000000..f6ea049
157 --- /dev/null
146 --- /dev/null
@@ -168,7 +157,7 b' index 0000000..e8c9d6b'
168 +foobar2
157 +foobar2
169 \ No newline at end of file
158 \ No newline at end of file
170 """,
159 """,
171 'hg': br"""diff --git a/foobar b/foobar
160 "hg": rb"""diff --git a/foobar b/foobar
172 new file mode 100644
161 new file mode 100644
173 --- /dev/null
162 --- /dev/null
174 +++ b/foobar
163 +++ b/foobar
@@ -183,7 +172,7 b' new file mode 100644'
183 +foobar2
172 +foobar2
184 \ No newline at end of file
173 \ No newline at end of file
185 """,
174 """,
186 'svn': b"""Index: foobar
175 "svn": b"""Index: foobar
187 ===================================================================
176 ===================================================================
188 diff --git a/foobar b/foobar
177 diff --git a/foobar b/foobar
189 new file mode 10644
178 new file mode 10644
@@ -205,7 +194,7 b' new file mode 10644'
205 }
194 }
206
195
207 second_commit_diffs = {
196 second_commit_diffs = {
208 'git': br"""diff --git a/foobar b/foobar
197 "git": rb"""diff --git a/foobar b/foobar
209 index f6ea049..389865b 100644
198 index f6ea049..389865b 100644
210 --- a/foobar
199 --- a/foobar
211 +++ b/foobar
200 +++ b/foobar
@@ -223,7 +212,7 b' index 0000000..c11c37d'
223 +foobar3
212 +foobar3
224 \ No newline at end of file
213 \ No newline at end of file
225 """,
214 """,
226 'hg': br"""diff --git a/foobar b/foobar
215 "hg": rb"""diff --git a/foobar b/foobar
227 --- a/foobar
216 --- a/foobar
228 +++ b/foobar
217 +++ b/foobar
229 @@ -1,1 +1,1 @@
218 @@ -1,1 +1,1 @@
@@ -239,7 +228,7 b' new file mode 100644'
239 +foobar3
228 +foobar3
240 \ No newline at end of file
229 \ No newline at end of file
241 """,
230 """,
242 'svn': b"""Index: foobar
231 "svn": b"""Index: foobar
243 ===================================================================
232 ===================================================================
244 diff --git a/foobar b/foobar
233 diff --git a/foobar b/foobar
245 --- a/foobar\t(revision 1)
234 --- a/foobar\t(revision 1)
@@ -262,7 +251,7 b' new file mode 10644'
262 }
251 }
263
252
264 third_commit_diffs = {
253 third_commit_diffs = {
265 'git': br"""diff --git a/foobar b/foobar
254 "git": rb"""diff --git a/foobar b/foobar
266 deleted file mode 100644
255 deleted file mode 100644
267 index 389865b..0000000
256 index 389865b..0000000
268 --- a/foobar
257 --- a/foobar
@@ -281,7 +270,7 b' index c11c37d..f932447 100644'
281 +FOOBAR
270 +FOOBAR
282 +FOOBAR
271 +FOOBAR
283 """,
272 """,
284 'hg': br"""diff --git a/foobar b/foobar
273 "hg": rb"""diff --git a/foobar b/foobar
285 deleted file mode 100644
274 deleted file mode 100644
286 --- a/foobar
275 --- a/foobar
287 +++ /dev/null
276 +++ /dev/null
@@ -298,7 +287,7 b' diff --git a/foobar3 b/foobar3'
298 +FOOBAR
287 +FOOBAR
299 +FOOBAR
288 +FOOBAR
300 """,
289 """,
301 'svn': b"""Index: foobar
290 "svn": b"""Index: foobar
302 ===================================================================
291 ===================================================================
303 diff --git a/foobar b/foobar
292 diff --git a/foobar b/foobar
304 deleted file mode 10644
293 deleted file mode 10644
@@ -322,7 +311,7 b' diff --git a/foobar3 b/foobar3'
322 }
311 }
323
312
324 first_commit_one_file = {
313 first_commit_one_file = {
325 'git': br"""diff --git a/foobar b/foobar
314 "git": rb"""diff --git a/foobar b/foobar
326 new file mode 100644
315 new file mode 100644
327 index 0000000..f6ea049
316 index 0000000..f6ea049
328 --- /dev/null
317 --- /dev/null
@@ -331,7 +320,7 b' index 0000000..f6ea049'
331 +foobar
320 +foobar
332 \ No newline at end of file
321 \ No newline at end of file
333 """,
322 """,
334 'hg': br"""diff --git a/foobar b/foobar
323 "hg": rb"""diff --git a/foobar b/foobar
335 new file mode 100644
324 new file mode 100644
336 --- /dev/null
325 --- /dev/null
337 +++ b/foobar
326 +++ b/foobar
@@ -339,7 +328,7 b' new file mode 100644'
339 +foobar
328 +foobar
340 \ No newline at end of file
329 \ No newline at end of file
341 """,
330 """,
342 'svn': b"""Index: foobar
331 "svn": b"""Index: foobar
343 ===================================================================
332 ===================================================================
344 diff --git a/foobar b/foobar
333 diff --git a/foobar b/foobar
345 new file mode 10644
334 new file mode 10644
@@ -353,13 +342,11 b' new file mode 10644'
353
342
354
343
355 class TestSvnGetDiff(object):
344 class TestSvnGetDiff(object):
356
345 @pytest.mark.parametrize(
357 @pytest.mark.parametrize('path, path1', [
346 "path, path1", [("trunk/example.py", "tags/v0.2/example.py"), ("trunk", "tags/v0.2")], ids=["file", "dir"]
358 ('trunk/example.py', 'tags/v0.2/example.py'),
347 )
359 ('trunk', 'tags/v0.2')
360 ], ids=['file', 'dir'])
361 def test_diff_to_tagged_version(self, vcsbackend_svn, path, path1):
348 def test_diff_to_tagged_version(self, vcsbackend_svn, path, path1):
362 repo = vcsbackend_svn['svn-simple-layout']
349 repo = vcsbackend_svn["svn-simple-layout"]
363 commit1 = repo[-2]
350 commit1 = repo[-2]
364 commit2 = repo[-1]
351 commit2 = repo[-1]
365 diff = repo.get_diff(commit1, commit2, path=path, path1=path1)
352 diff = repo.get_diff(commit1, commit2, path=path, path1=path1)
@@ -386,7 +373,7 b' diff --git a/example.py b/example.py'
386 '''
373 '''
387
374
388 def test_diff_of_moved_directory(self, vcsbackend_svn):
375 def test_diff_of_moved_directory(self, vcsbackend_svn):
389 repo = vcsbackend_svn['svn-move-directory']
376 repo = vcsbackend_svn["svn-move-directory"]
390 diff = repo.get_diff(repo[0], repo[1])
377 diff = repo.get_diff(repo[0], repo[1])
391 # TODO: johbo: Think about supporting svn directory nodes
378 # TODO: johbo: Think about supporting svn directory nodes
392 # a little bit better, source is here like a file
379 # a little bit better, source is here like a file
@@ -408,7 +395,6 b' new file mode 10644'
408
395
409 @pytest.mark.usefixtures("vcs_repository_support")
396 @pytest.mark.usefixtures("vcs_repository_support")
410 class TestGetDiffBinary(BackendTestMixin):
397 class TestGetDiffBinary(BackendTestMixin):
411
412 recreate_repo_per_test = False
398 recreate_repo_per_test = False
413
399
414 # Note: "Fake" PNG files, has the correct magic as prefix
400 # Note: "Fake" PNG files, has the correct magic as prefix
@@ -419,26 +405,29 b' class TestGetDiffBinary(BackendTestMixin'
419 def _get_commits():
405 def _get_commits():
420 commits = [
406 commits = [
421 {
407 {
422 'message': 'Add binary file image.png',
408 "message": "Add binary file image.png",
423 'author': 'Joe Doe <joe.deo@example.com>',
409 "author": "Joe Doe <joe.deo@example.com>",
424 'date': datetime.datetime(2010, 1, 1, 20),
410 "date": datetime.datetime(2010, 1, 1, 20),
425 'added': [
411 "added": [
426 FileNode(b'image.png', content=TestGetDiffBinary.BINARY),
412 FileNode(b"image.png", content=TestGetDiffBinary.BINARY),
427 ]},
413 ],
414 },
428 {
415 {
429 'message': 'Modify image.png',
416 "message": "Modify image.png",
430 'author': 'Joe Doe <joe.deo@example.com>',
417 "author": "Joe Doe <joe.deo@example.com>",
431 'date': datetime.datetime(2010, 1, 1, 21),
418 "date": datetime.datetime(2010, 1, 1, 21),
432 'changed': [
419 "changed": [
433 FileNode(b'image.png', content=TestGetDiffBinary.BINARY2),
420 FileNode(b"image.png", content=TestGetDiffBinary.BINARY2),
434 ]},
421 ],
422 },
435 {
423 {
436 'message': 'Remove image.png',
424 "message": "Remove image.png",
437 'author': 'Joe Doe <joe.deo@example.com>',
425 "author": "Joe Doe <joe.deo@example.com>",
438 'date': datetime.datetime(2010, 1, 1, 21),
426 "date": datetime.datetime(2010, 1, 1, 21),
439 'removed': [
427 "removed": [
440 FileNode(b'image.png'),
428 FileNode(b"image.png"),
441 ]},
429 ],
430 },
442 ]
431 ]
443 return commits
432 return commits
444
433
@@ -446,7 +435,7 b' class TestGetDiffBinary(BackendTestMixin'
446 diff = self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[0])
435 diff = self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[0])
447
436
448 expected = {
437 expected = {
449 'git': b"""diff --git a/image.png b/image.png
438 "git": b"""diff --git a/image.png b/image.png
450 new file mode 100644
439 new file mode 100644
451 index 0000000000000000000000000000000000000000..28380fd4a25c58be1b68b523ba2a314f4459ee9c
440 index 0000000000000000000000000000000000000000..28380fd4a25c58be1b68b523ba2a314f4459ee9c
452 GIT binary patch
441 GIT binary patch
@@ -457,7 +446,7 b' literal 0'
457 Hc$@<O00001
446 Hc$@<O00001
458
447
459 """,
448 """,
460 'hg': b"""diff --git a/image.png b/image.png
449 "hg": b"""diff --git a/image.png b/image.png
461 new file mode 100644
450 new file mode 100644
462 index 0000000000000000000000000000000000000000..28380fd4a25c58be1b68b523ba2a314f4459ee9c
451 index 0000000000000000000000000000000000000000..28380fd4a25c58be1b68b523ba2a314f4459ee9c
463 GIT binary patch
452 GIT binary patch
@@ -465,7 +454,7 b' literal 19'
465 Yc%17D@N?(olHy`uVBq!ia0vp^03%2O-T(jq
454 Yc%17D@N?(olHy`uVBq!ia0vp^03%2O-T(jq
466
455
467 """,
456 """,
468 'svn': b"""===================================================================
457 "svn": b"""===================================================================
469 Cannot display: file marked as a binary type.
458 Cannot display: file marked as a binary type.
470 svn:mime-type = application/octet-stream
459 svn:mime-type = application/octet-stream
471 Index: image.png
460 Index: image.png
@@ -482,7 +471,7 b' new file mode 10644'
482 diff = self.repo.get_diff(self.repo[0], self.repo[1])
471 diff = self.repo.get_diff(self.repo[0], self.repo[1])
483
472
484 expected = {
473 expected = {
485 'git': b"""diff --git a/image.png b/image.png
474 "git": b"""diff --git a/image.png b/image.png
486 index 28380fd4a25c58be1b68b523ba2a314f4459ee9c..1008a77cd372386a1c24fbd96019333f67ad0065 100644
475 index 28380fd4a25c58be1b68b523ba2a314f4459ee9c..1008a77cd372386a1c24fbd96019333f67ad0065 100644
487 GIT binary patch
476 GIT binary patch
488 literal 19
477 literal 19
@@ -492,14 +481,14 b' literal 19'
492 Yc%17D@N?(olHy`uVBq!ia0vp^03%2O-T(jq
481 Yc%17D@N?(olHy`uVBq!ia0vp^03%2O-T(jq
493
482
494 """,
483 """,
495 'hg': b"""diff --git a/image.png b/image.png
484 "hg": b"""diff --git a/image.png b/image.png
496 index 28380fd4a25c58be1b68b523ba2a314f4459ee9c..1008a77cd372386a1c24fbd96019333f67ad0065
485 index 28380fd4a25c58be1b68b523ba2a314f4459ee9c..1008a77cd372386a1c24fbd96019333f67ad0065
497 GIT binary patch
486 GIT binary patch
498 literal 19
487 literal 19
499 ac%17D@N?(olHy`uVBq!ia0y~$U;qFkO9I~j
488 ac%17D@N?(olHy`uVBq!ia0y~$U;qFkO9I~j
500
489
501 """,
490 """,
502 'svn': b"""===================================================================
491 "svn": b"""===================================================================
503 Cannot display: file marked as a binary type.
492 Cannot display: file marked as a binary type.
504 svn:mime-type = application/octet-stream
493 svn:mime-type = application/octet-stream
505 Index: image.png
494 Index: image.png
@@ -515,7 +504,7 b' diff --git a/image.png b/image.png'
515 diff = self.repo.get_diff(self.repo[1], self.repo[2])
504 diff = self.repo.get_diff(self.repo[1], self.repo[2])
516
505
517 expected = {
506 expected = {
518 'git': b"""diff --git a/image.png b/image.png
507 "git": b"""diff --git a/image.png b/image.png
519 deleted file mode 100644
508 deleted file mode 100644
520 index 1008a77cd372386a1c24fbd96019333f67ad0065..0000000000000000000000000000000000000000
509 index 1008a77cd372386a1c24fbd96019333f67ad0065..0000000000000000000000000000000000000000
521 GIT binary patch
510 GIT binary patch
@@ -526,7 +515,7 b' literal 19'
526 ac%17D@N?(olHy`uVBq!ia0y~$U;qFkO9I~j
515 ac%17D@N?(olHy`uVBq!ia0y~$U;qFkO9I~j
527
516
528 """,
517 """,
529 'hg': b"""diff --git a/image.png b/image.png
518 "hg": b"""diff --git a/image.png b/image.png
530 deleted file mode 100644
519 deleted file mode 100644
531 index 1008a77cd372386a1c24fbd96019333f67ad0065..0000000000000000000000000000000000000000
520 index 1008a77cd372386a1c24fbd96019333f67ad0065..0000000000000000000000000000000000000000
532 GIT binary patch
521 GIT binary patch
@@ -534,7 +523,7 b' literal 0'
534 Hc$@<O00001
523 Hc$@<O00001
535
524
536 """,
525 """,
537 'svn': b"""===================================================================
526 "svn": b"""===================================================================
538 Cannot display: file marked as a binary type.
527 Cannot display: file marked as a binary type.
539 svn:mime-type = application/octet-stream
528 svn:mime-type = application/octet-stream
540 Index: image.png
529 Index: image.png
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -23,14 +22,13 b' from rhodecode.lib.vcs import exceptions'
23
22
24
23
25 class TestMapVcsExceptions:
24 class TestMapVcsExceptions:
26
27 def test_maps_exceptions_based_on_exception_map(self):
25 def test_maps_exceptions_based_on_exception_map(self):
28 with pytest.raises(exceptions.RepositoryError):
26 with pytest.raises(exceptions.RepositoryError):
29 self.func(kind='abort')
27 self.func(kind="abort")
30
28
31 def test_raises_key_error_if_kind_is_unknown(self):
29 def test_raises_key_error_if_kind_is_unknown(self):
32 with pytest.raises(KeyError):
30 with pytest.raises(KeyError):
33 self.func(kind='not_existing_kind')
31 self.func(kind="not_existing_kind")
34
32
35 def test_raises_exception_unchanged_if_no_vcs_kind(self):
33 def test_raises_exception_unchanged_if_no_vcs_kind(self):
36 with pytest.raises(Exception) as exc:
34 with pytest.raises(Exception) as exc:
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -28,22 +27,21 b' from rhodecode.tests.vcs.conftest import'
28
27
29 @pytest.mark.usefixtures("vcs_repository_support")
28 @pytest.mark.usefixtures("vcs_repository_support")
30 class TestFileNodeUnicodePath(BackendTestMixin):
29 class TestFileNodeUnicodePath(BackendTestMixin):
31
30 fname = safe_bytes("Δ…Ε›Γ°Δ…Δ™Ε‚Δ…Δ‡.txt")
32 fname = safe_bytes('Δ…Ε›Γ°Δ…Δ™Ε‚Δ…Δ‡.txt')
33 ufname = fname
31 ufname = fname
34
32
35 @classmethod
33 @classmethod
36 def _get_commits(cls):
34 def _get_commits(cls):
37 nodes = [
35 nodes = [
38 FileNode(cls.fname, content=b'Foobar'),
36 FileNode(cls.fname, content=b"Foobar"),
39 ]
37 ]
40
38
41 commits = [
39 commits = [
42 {
40 {
43 'message': 'Initial commit',
41 "message": "Initial commit",
44 'author': 'Joe Doe <joe.doe@example.com>',
42 "author": "Joe Doe <joe.doe@example.com>",
45 'date': datetime.datetime(2010, 1, 1, 20),
43 "date": datetime.datetime(2010, 1, 1, 20),
46 'added': nodes,
44 "added": nodes,
47 },
45 },
48 ]
46 ]
49 return commits
47 return commits
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -28,28 +27,30 b' from rhodecode.tests.vcs.conftest import'
28
27
29 @pytest.mark.usefixtures("vcs_repository_support")
28 @pytest.mark.usefixtures("vcs_repository_support")
30 class TestGetitem(BackendTestMixin):
29 class TestGetitem(BackendTestMixin):
31
32 @classmethod
30 @classmethod
33 def _get_commits(cls):
31 def _get_commits(cls):
34 start_date = datetime.datetime(2010, 1, 1, 20)
32 start_date = datetime.datetime(2010, 1, 1, 20)
35 for x in range(5):
33 for x in range(5):
36 yield {
34 yield {
37 'message': 'Commit %d' % x,
35 "message": "Commit %d" % x,
38 'author': 'Joe Doe <joe.doe@example.com>',
36 "author": "Joe Doe <joe.doe@example.com>",
39 'date': start_date + datetime.timedelta(hours=12 * x),
37 "date": start_date + datetime.timedelta(hours=12 * x),
40 'added': [
38 "added": [
41 FileNode(b'file_%d.txt' % x, content='Foobar %d' % x),
39 FileNode(b"file_%d.txt" % x, content="Foobar %d" % x),
42 ],
40 ],
43 }
41 }
44
42
45 def test_last_item_is_tip(self):
43 def test_last_item_is_tip(self):
46 assert self.repo[-1] == self.repo.get_commit()
44 assert self.repo[-1] == self.repo.get_commit()
47
45
48 @pytest.mark.parametrize("offset, message", [
46 @pytest.mark.parametrize(
49 (-1, 'Commit 4'),
47 "offset, message",
50 (-2, 'Commit 3'),
48 [
51 (-5, 'Commit 0'),
49 (-1, "Commit 4"),
52 ])
50 (-2, "Commit 3"),
51 (-5, "Commit 0"),
52 ],
53 )
53 def test_negative_offset_fetches_correct_commit(self, offset, message):
54 def test_negative_offset_fetches_correct_commit(self, offset, message):
54 assert self.repo[offset].message == message
55 assert self.repo[offset].message == message
55
56
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -25,17 +24,16 b' from rhodecode.tests.vcs.conftest import'
25
24
26 @pytest.mark.usefixtures("vcs_repository_support")
25 @pytest.mark.usefixtures("vcs_repository_support")
27 class TestGetslice(BackendTestMixin):
26 class TestGetslice(BackendTestMixin):
28
29 @classmethod
27 @classmethod
30 def _get_commits(cls):
28 def _get_commits(cls):
31 start_date = datetime.datetime(2010, 1, 1, 20)
29 start_date = datetime.datetime(2010, 1, 1, 20)
32 for x in range(5):
30 for x in range(5):
33 yield {
31 yield {
34 'message': 'Commit %d' % x,
32 "message": "Commit %d" % x,
35 'author': 'Joe Doe <joe.doe@example.com>',
33 "author": "Joe Doe <joe.doe@example.com>",
36 'date': start_date + datetime.timedelta(hours=12 * x),
34 "date": start_date + datetime.timedelta(hours=12 * x),
37 'added': [
35 "added": [
38 FileNode(b'file_%d.txt' % x, content='Foobar %d' % x),
36 FileNode(b"file_%d.txt" % x, content="Foobar %d" % x),
39 ],
37 ],
40 }
38 }
41
39
@@ -43,34 +41,24 b' class TestGetslice(BackendTestMixin):'
43 assert list(self.repo[-1:])[0] == self.repo.get_commit()
41 assert list(self.repo[-1:])[0] == self.repo.get_commit()
44
42
45 def test__getslice__respects_start_index(self):
43 def test__getslice__respects_start_index(self):
46 assert list(self.repo[2:]) == \
44 assert list(self.repo[2:]) == [self.repo.get_commit(commit_id) for commit_id in self.repo.commit_ids[2:]]
47 [self.repo.get_commit(commit_id)
48 for commit_id in self.repo.commit_ids[2:]]
49
45
50 def test__getslice__respects_negative_start_index(self):
46 def test__getslice__respects_negative_start_index(self):
51 assert list(self.repo[-2:]) == \
47 assert list(self.repo[-2:]) == [self.repo.get_commit(commit_id) for commit_id in self.repo.commit_ids[-2:]]
52 [self.repo.get_commit(commit_id)
53 for commit_id in self.repo.commit_ids[-2:]]
54
48
55 def test__getslice__respects_end_index(self):
49 def test__getslice__respects_end_index(self):
56 assert list(self.repo[:2]) == \
50 assert list(self.repo[:2]) == [self.repo.get_commit(commit_id) for commit_id in self.repo.commit_ids[:2]]
57 [self.repo.get_commit(commit_id)
58 for commit_id in self.repo.commit_ids[:2]]
59
51
60 def test__getslice__respects_negative_end_index(self):
52 def test__getslice__respects_negative_end_index(self):
61 assert list(self.repo[:-2]) == \
53 assert list(self.repo[:-2]) == [self.repo.get_commit(commit_id) for commit_id in self.repo.commit_ids[:-2]]
62 [self.repo.get_commit(commit_id)
63 for commit_id in self.repo.commit_ids[:-2]]
64
54
65 def test__getslice__start_grater_than_end(self):
55 def test__getslice__start_grater_than_end(self):
66 assert list(self.repo[10:0]) == []
56 assert list(self.repo[10:0]) == []
67
57
68 def test__getslice__negative_iteration(self):
58 def test__getslice__negative_iteration(self):
69 assert list(self.repo[::-1]) == \
59 assert list(self.repo[::-1]) == [self.repo.get_commit(commit_id) for commit_id in self.repo.commit_ids[::-1]]
70 [self.repo.get_commit(commit_id)
71 for commit_id in self.repo.commit_ids[::-1]]
72
60
73 def test__getslice__iterate_even(self):
61 def test__getslice__iterate_even(self):
74 assert list(self.repo[0:10:2]) == \
62 assert list(self.repo[0:10:2]) == [
75 [self.repo.get_commit(commit_id)
63 self.repo.get_commit(commit_id) for commit_id in self.repo.commit_ids[0:10:2]
76 for commit_id in self.repo.commit_ids[0:10:2]]
64 ]
This diff has been collapsed as it changes many lines, (1011 lines changed) Show them Hide them
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -27,12 +26,9 b' import pytest'
27
26
28 from rhodecode.lib.utils import make_db_config
27 from rhodecode.lib.utils import make_db_config
29 from rhodecode.lib.vcs.backends.base import Reference
28 from rhodecode.lib.vcs.backends.base import Reference
30 from rhodecode.lib.vcs.backends.git import (
29 from rhodecode.lib.vcs.backends.git import GitRepository, GitCommit, discover_git_version
31 GitRepository, GitCommit, discover_git_version)
30 from rhodecode.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError
32 from rhodecode.lib.vcs.exceptions import (
31 from rhodecode.lib.vcs.nodes import NodeKind, FileNode, DirNode, NodeState, SubModuleNode
33 RepositoryError, VCSError, NodeDoesNotExistError)
34 from rhodecode.lib.vcs.nodes import (
35 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
36 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
32 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
37 from rhodecode.tests.vcs.conftest import BackendTestMixin
33 from rhodecode.tests.vcs.conftest import BackendTestMixin
38
34
@@ -40,7 +36,7 b' from rhodecode.tests.vcs.conftest import'
40 pytestmark = pytest.mark.backends("git")
36 pytestmark = pytest.mark.backends("git")
41
37
42
38
43 DIFF_FROM_REMOTE = br"""diff --git a/foobar b/foobar
39 DIFF_FROM_REMOTE = rb"""diff --git a/foobar b/foobar
44 new file mode 100644
40 new file mode 100644
45 index 0000000..f6ea049
41 index 0000000..f6ea049
46 --- /dev/null
42 --- /dev/null
@@ -64,7 +60,6 b' def callable_get_diff(*args, **kwargs):'
64
60
65
61
66 class TestGitRepository(object):
62 class TestGitRepository(object):
67
68 @pytest.fixture(autouse=True)
63 @pytest.fixture(autouse=True)
69 def prepare(self, request, baseapp):
64 def prepare(self, request, baseapp):
70 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
65 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
@@ -74,9 +69,8 b' class TestGitRepository(object):'
74 """
69 """
75 Return a non bare clone of the base repo.
70 Return a non bare clone of the base repo.
76 """
71 """
77 clone_path = str(tmpdir.join('clone-repo'))
72 clone_path = str(tmpdir.join("clone-repo"))
78 repo_clone = GitRepository(
73 repo_clone = GitRepository(clone_path, create=True, src_url=self.repo.path, bare=False)
79 clone_path, create=True, src_url=self.repo.path, bare=False)
80
74
81 return repo_clone
75 return repo_clone
82
76
@@ -84,20 +78,18 b' class TestGitRepository(object):'
84 """
78 """
85 Return a non bare empty repo.
79 Return a non bare empty repo.
86 """
80 """
87 clone_path = str(tmpdir.join('empty-repo'))
81 clone_path = str(tmpdir.join("empty-repo"))
88 return GitRepository(clone_path, create=True, bare=bare)
82 return GitRepository(clone_path, create=True, bare=bare)
89
83
90 def test_wrong_repo_path(self):
84 def test_wrong_repo_path(self):
91 wrong_repo_path = '/tmp/errorrepo_git'
85 wrong_repo_path = "/tmp/errorrepo_git"
92 with pytest.raises(RepositoryError):
86 with pytest.raises(RepositoryError):
93 GitRepository(wrong_repo_path)
87 GitRepository(wrong_repo_path)
94
88
95 def test_repo_clone(self, tmp_path_factory):
89 def test_repo_clone(self, tmp_path_factory):
96 repo = GitRepository(TEST_GIT_REPO)
90 repo = GitRepository(TEST_GIT_REPO)
97 clone_path = '{}_{}'.format(tmp_path_factory.mktemp('_'), TEST_GIT_REPO_CLONE)
91 clone_path = f"{tmp_path_factory.mktemp('_')}_{TEST_GIT_REPO_CLONE}"
98 repo_clone = GitRepository(
92 repo_clone = GitRepository(clone_path, src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
99 clone_path,
100 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
101
93
102 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
94 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
103 # Checking hashes of commits should be enough
95 # Checking hashes of commits should be enough
@@ -107,48 +99,42 b' class TestGitRepository(object):'
107
99
108 def test_repo_clone_without_create(self):
100 def test_repo_clone_without_create(self):
109 with pytest.raises(RepositoryError):
101 with pytest.raises(RepositoryError):
110 GitRepository(
102 GitRepository(TEST_GIT_REPO_CLONE + "_wo_create", src_url=TEST_GIT_REPO)
111 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
112
103
113 def test_repo_clone_with_update(self, tmp_path_factory):
104 def test_repo_clone_with_update(self, tmp_path_factory):
114 repo = GitRepository(TEST_GIT_REPO)
105 repo = GitRepository(TEST_GIT_REPO)
115 clone_path = '{}_{}_update'.format(tmp_path_factory.mktemp('_'), TEST_GIT_REPO_CLONE)
106 clone_path = "{}_{}_update".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE)
116
107
117 repo_clone = GitRepository(
108 repo_clone = GitRepository(clone_path, create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
118 clone_path,
119 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
120 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
109 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
121
110
122 # check if current workdir was updated
111 # check if current workdir was updated
123 fpath = os.path.join(clone_path, 'MANIFEST.in')
112 fpath = os.path.join(clone_path, "MANIFEST.in")
124 assert os.path.isfile(fpath)
113 assert os.path.isfile(fpath)
125
114
126 def test_repo_clone_without_update(self, tmp_path_factory):
115 def test_repo_clone_without_update(self, tmp_path_factory):
127 repo = GitRepository(TEST_GIT_REPO)
116 repo = GitRepository(TEST_GIT_REPO)
128 clone_path = '{}_{}_without_update'.format(tmp_path_factory.mktemp('_'), TEST_GIT_REPO_CLONE)
117 clone_path = "{}_{}_without_update".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE)
129 repo_clone = GitRepository(
118 repo_clone = GitRepository(clone_path, create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
130 clone_path,
131 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
132 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
119 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
133 # check if current workdir was *NOT* updated
120 # check if current workdir was *NOT* updated
134 fpath = os.path.join(clone_path, 'MANIFEST.in')
121 fpath = os.path.join(clone_path, "MANIFEST.in")
135 # Make sure it's not bare repo
122 # Make sure it's not bare repo
136 assert not repo_clone.bare
123 assert not repo_clone.bare
137 assert not os.path.isfile(fpath)
124 assert not os.path.isfile(fpath)
138
125
139 def test_repo_clone_into_bare_repo(self, tmp_path_factory):
126 def test_repo_clone_into_bare_repo(self, tmp_path_factory):
140 repo = GitRepository(TEST_GIT_REPO)
127 repo = GitRepository(TEST_GIT_REPO)
141 clone_path = '{}_{}_bare.git'.format(tmp_path_factory.mktemp('_'), TEST_GIT_REPO_CLONE)
128 clone_path = "{}_{}_bare.git".format(tmp_path_factory.mktemp("_"), TEST_GIT_REPO_CLONE)
142 repo_clone = GitRepository(
129 repo_clone = GitRepository(clone_path, create=True, src_url=repo.path, bare=True)
143 clone_path, create=True, src_url=repo.path, bare=True)
144 assert repo_clone.bare
130 assert repo_clone.bare
145
131
146 def test_create_repo_is_not_bare_by_default(self):
132 def test_create_repo_is_not_bare_by_default(self):
147 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
133 repo = GitRepository(get_new_dir("not-bare-by-default"), create=True)
148 assert not repo.bare
134 assert not repo.bare
149
135
150 def test_create_bare_repo(self):
136 def test_create_bare_repo(self):
151 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
137 repo = GitRepository(get_new_dir("bare-repo"), create=True, bare=True)
152 assert repo.bare
138 assert repo.bare
153
139
154 def test_update_server_info(self):
140 def test_update_server_info(self):
@@ -167,37 +153,38 b' class TestGitRepository(object):'
167 def test_commit_ids(self):
153 def test_commit_ids(self):
168 # there are 112 commits (by now)
154 # there are 112 commits (by now)
169 # so we can assume they would be available from now on
155 # so we can assume they would be available from now on
170 subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
156 subset = {
171 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
157 "c1214f7e79e02fc37156ff215cd71275450cffc3",
172 'fa6600f6848800641328adbf7811fd2372c02ab2',
158 "38b5fe81f109cb111f549bfe9bb6b267e10bc557",
173 '102607b09cdd60e2793929c4f90478be29f85a17',
159 "fa6600f6848800641328adbf7811fd2372c02ab2",
174 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
160 "102607b09cdd60e2793929c4f90478be29f85a17",
175 '2d1028c054665b962fa3d307adfc923ddd528038',
161 "49d3fd156b6f7db46313fac355dca1a0b94a0017",
176 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
162 "2d1028c054665b962fa3d307adfc923ddd528038",
177 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
163 "d7e0d30fbcae12c90680eb095a4f5f02505ce501",
178 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
164 "ff7ca51e58c505fec0dd2491de52c622bb7a806b",
179 '8430a588b43b5d6da365400117c89400326e7992',
165 "dd80b0f6cf5052f17cc738c2951c4f2070200d7f",
180 'd955cd312c17b02143c04fa1099a352b04368118',
166 "8430a588b43b5d6da365400117c89400326e7992",
181 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
167 "d955cd312c17b02143c04fa1099a352b04368118",
182 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
168 "f67b87e5c629c2ee0ba58f85197e423ff28d735b",
183 'f298fe1189f1b69779a4423f40b48edf92a703fc',
169 "add63e382e4aabc9e1afdc4bdc24506c269b7618",
184 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
170 "f298fe1189f1b69779a4423f40b48edf92a703fc",
185 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
171 "bd9b619eb41994cac43d67cf4ccc8399c1125808",
186 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
172 "6e125e7c890379446e98980d8ed60fba87d0f6d1",
187 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
173 "d4a54db9f745dfeba6933bf5b1e79e15d0af20bd",
188 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
174 "0b05e4ed56c802098dfc813cbe779b2f49e92500",
189 '45223f8f114c64bf4d6f853e3c35a369a6305520',
175 "191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e",
190 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
176 "45223f8f114c64bf4d6f853e3c35a369a6305520",
191 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
177 "ca1eb7957a54bce53b12d1a51b13452f95bc7c7e",
192 '27d48942240f5b91dfda77accd2caac94708cc7d',
178 "f5ea29fc42ef67a2a5a7aecff10e1566699acd68",
193 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
179 "27d48942240f5b91dfda77accd2caac94708cc7d",
194 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
180 "622f0eb0bafd619d2560c26f80f09e3b0b0d78af",
181 "e686b958768ee96af8029fe19c6050b1a8dd3b2b",
182 }
195 assert subset.issubset(set(self.repo.commit_ids))
183 assert subset.issubset(set(self.repo.commit_ids))
196
184
197 def test_slicing(self):
185 def test_slicing(self):
198 # 4 1 5 10 95
186 # 4 1 5 10 95
199 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
187 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), (10, 20, 10), (5, 100, 95)]:
200 (10, 20, 10), (5, 100, 95)]:
201 commit_ids = list(self.repo[sfrom:sto])
188 commit_ids = list(self.repo[sfrom:sto])
202 assert len(commit_ids) == size
189 assert len(commit_ids) == size
203 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
190 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
@@ -214,8 +201,8 b' class TestGitRepository(object):'
214
201
215 def test_tags(self):
202 def test_tags(self):
216 # TODO: Need more tests here
203 # TODO: Need more tests here
217 assert 'v0.1.1' in self.repo.tags
204 assert "v0.1.1" in self.repo.tags
218 assert 'v0.1.2' in self.repo.tags
205 assert "v0.1.2" in self.repo.tags
219 for __, commit_id in self.repo.tags.items():
206 for __, commit_id in self.repo.tags.items():
220 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
207 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
221
208
@@ -229,37 +216,34 b' class TestGitRepository(object):'
229 init_commit = self.repo.get_commit(commit_id)
216 init_commit = self.repo.get_commit(commit_id)
230 init_author = init_commit.author
217 init_author = init_commit.author
231
218
232 assert init_commit.message == 'initial import\n'
219 assert init_commit.message == "initial import\n"
233 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
220 assert init_author == "Marcin Kuzminski <marcin@python-blog.com>"
234 assert init_author == init_commit.committer
221 assert init_author == init_commit.committer
235 for path in ('vcs/__init__.py',
222 for path in ("vcs/__init__.py", "vcs/backends/BaseRepository.py", "vcs/backends/__init__.py"):
236 'vcs/backends/BaseRepository.py',
237 'vcs/backends/__init__.py'):
238 assert isinstance(init_commit.get_node(path), FileNode)
223 assert isinstance(init_commit.get_node(path), FileNode)
239 for path in ('', 'vcs', 'vcs/backends'):
224 for path in ("", "vcs", "vcs/backends"):
240 assert isinstance(init_commit.get_node(path), DirNode)
225 assert isinstance(init_commit.get_node(path), DirNode)
241
226
242 with pytest.raises(NodeDoesNotExistError):
227 with pytest.raises(NodeDoesNotExistError):
243 init_commit.get_node(path='foobar')
228 init_commit.get_node(path="foobar")
244
229
245 node = init_commit.get_node('vcs/')
230 node = init_commit.get_node("vcs/")
246 assert hasattr(node, 'kind')
231 assert hasattr(node, "kind")
247 assert node.kind == NodeKind.DIR
232 assert node.kind == NodeKind.DIR
248
233
249 node = init_commit.get_node('vcs')
234 node = init_commit.get_node("vcs")
250 assert hasattr(node, 'kind')
235 assert hasattr(node, "kind")
251 assert node.kind == NodeKind.DIR
236 assert node.kind == NodeKind.DIR
252
237
253 node = init_commit.get_node('vcs/__init__.py')
238 node = init_commit.get_node("vcs/__init__.py")
254 assert hasattr(node, 'kind')
239 assert hasattr(node, "kind")
255 assert node.kind == NodeKind.FILE
240 assert node.kind == NodeKind.FILE
256
241
257 def test_not_existing_commit(self):
242 def test_not_existing_commit(self):
258 with pytest.raises(RepositoryError):
243 with pytest.raises(RepositoryError):
259 self.repo.get_commit('f' * 40)
244 self.repo.get_commit("f" * 40)
260
245
261 def test_commit10(self):
246 def test_commit10(self):
262
263 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
247 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
264 README = """===
248 README = """===
265 VCS
249 VCS
@@ -273,7 +257,7 b' Introduction'
273 TODO: To be written...
257 TODO: To be written...
274
258
275 """
259 """
276 node = commit10.get_node('README.rst')
260 node = commit10.get_node("README.rst")
277 assert node.kind == NodeKind.FILE
261 assert node.kind == NodeKind.FILE
278 assert node.str_content == README
262 assert node.str_content == README
279
263
@@ -283,39 +267,39 b' TODO: To be written...'
283 def test_checkout_with_create(self, tmpdir):
267 def test_checkout_with_create(self, tmpdir):
284 repo_clone = self.get_clone_repo(tmpdir)
268 repo_clone = self.get_clone_repo(tmpdir)
285
269
286 new_branch = 'new_branch'
270 new_branch = "new_branch"
287 assert repo_clone._current_branch() == 'master'
271 assert repo_clone._current_branch() == "master"
288 assert set(repo_clone.branches) == {'master'}
272 assert set(repo_clone.branches) == {"master"}
289 repo_clone._checkout(new_branch, create=True)
273 repo_clone._checkout(new_branch, create=True)
290
274
291 # Branches is a lazy property so we need to recrete the Repo object.
275 # Branches is a lazy property so we need to recrete the Repo object.
292 repo_clone = GitRepository(repo_clone.path)
276 repo_clone = GitRepository(repo_clone.path)
293 assert set(repo_clone.branches) == {'master', new_branch}
277 assert set(repo_clone.branches) == {"master", new_branch}
294 assert repo_clone._current_branch() == new_branch
278 assert repo_clone._current_branch() == new_branch
295
279
296 def test_checkout(self, tmpdir):
280 def test_checkout(self, tmpdir):
297 repo_clone = self.get_clone_repo(tmpdir)
281 repo_clone = self.get_clone_repo(tmpdir)
298
282
299 repo_clone._checkout('new_branch', create=True)
283 repo_clone._checkout("new_branch", create=True)
300 repo_clone._checkout('master')
284 repo_clone._checkout("master")
301
285
302 assert repo_clone._current_branch() == 'master'
286 assert repo_clone._current_branch() == "master"
303
287
304 def test_checkout_same_branch(self, tmpdir):
288 def test_checkout_same_branch(self, tmpdir):
305 repo_clone = self.get_clone_repo(tmpdir)
289 repo_clone = self.get_clone_repo(tmpdir)
306
290
307 repo_clone._checkout('master')
291 repo_clone._checkout("master")
308 assert repo_clone._current_branch() == 'master'
292 assert repo_clone._current_branch() == "master"
309
293
310 def test_checkout_branch_already_exists(self, tmpdir):
294 def test_checkout_branch_already_exists(self, tmpdir):
311 repo_clone = self.get_clone_repo(tmpdir)
295 repo_clone = self.get_clone_repo(tmpdir)
312
296
313 with pytest.raises(RepositoryError):
297 with pytest.raises(RepositoryError):
314 repo_clone._checkout('master', create=True)
298 repo_clone._checkout("master", create=True)
315
299
316 def test_checkout_bare_repo(self):
300 def test_checkout_bare_repo(self):
317 with pytest.raises(RepositoryError):
301 with pytest.raises(RepositoryError):
318 self.repo._checkout('master')
302 self.repo._checkout("master")
319
303
320 def test_current_branch_bare_repo(self):
304 def test_current_branch_bare_repo(self):
321 with pytest.raises(RepositoryError):
305 with pytest.raises(RepositoryError):
@@ -326,8 +310,8 b' TODO: To be written...'
326 assert repo._current_branch() is None
310 assert repo._current_branch() is None
327
311
328 def test_local_clone(self, tmp_path_factory):
312 def test_local_clone(self, tmp_path_factory):
329 clone_path = str(tmp_path_factory.mktemp('test-local-clone'))
313 clone_path = str(tmp_path_factory.mktemp("test-local-clone"))
330 self.repo._local_clone(clone_path, 'master')
314 self.repo._local_clone(clone_path, "master")
331 repo_clone = GitRepository(clone_path)
315 repo_clone = GitRepository(clone_path)
332
316
333 assert self.repo.commit_ids == repo_clone.commit_ids
317 assert self.repo.commit_ids == repo_clone.commit_ids
@@ -338,23 +322,23 b' TODO: To be written...'
338 # Create a new branch in source repo
322 # Create a new branch in source repo
339 new_branch_commit = source_repo.commit_ids[-3]
323 new_branch_commit = source_repo.commit_ids[-3]
340 source_repo._checkout(new_branch_commit)
324 source_repo._checkout(new_branch_commit)
341 source_repo._checkout('new_branch', create=True)
325 source_repo._checkout("new_branch", create=True)
342
326
343 clone_path = str(tmpdir.join('git-clone-path-1'))
327 clone_path = str(tmpdir.join("git-clone-path-1"))
344 source_repo._local_clone(clone_path, 'new_branch')
328 source_repo._local_clone(clone_path, "new_branch")
345 repo_clone = GitRepository(clone_path)
329 repo_clone = GitRepository(clone_path)
346
330
347 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
331 assert source_repo.commit_ids[: -3 + 1] == repo_clone.commit_ids
348
332
349 clone_path = str(tmpdir.join('git-clone-path-2'))
333 clone_path = str(tmpdir.join("git-clone-path-2"))
350 source_repo._local_clone(clone_path, 'master')
334 source_repo._local_clone(clone_path, "master")
351 repo_clone = GitRepository(clone_path)
335 repo_clone = GitRepository(clone_path)
352
336
353 assert source_repo.commit_ids == repo_clone.commit_ids
337 assert source_repo.commit_ids == repo_clone.commit_ids
354
338
355 def test_local_clone_fails_if_target_exists(self):
339 def test_local_clone_fails_if_target_exists(self):
356 with pytest.raises(RepositoryError):
340 with pytest.raises(RepositoryError):
357 self.repo._local_clone(self.repo.path, 'master')
341 self.repo._local_clone(self.repo.path, "master")
358
342
359 def test_local_fetch(self, tmpdir):
343 def test_local_fetch(self, tmpdir):
360 target_repo = self.get_empty_repo(tmpdir)
344 target_repo = self.get_empty_repo(tmpdir)
@@ -364,30 +348,30 b' TODO: To be written...'
364 master_commit = source_repo.commit_ids[-1]
348 master_commit = source_repo.commit_ids[-1]
365 new_branch_commit = source_repo.commit_ids[-3]
349 new_branch_commit = source_repo.commit_ids[-3]
366 source_repo._checkout(new_branch_commit)
350 source_repo._checkout(new_branch_commit)
367 source_repo._checkout('new_branch', create=True)
351 source_repo._checkout("new_branch", create=True)
368
352
369 target_repo._local_fetch(source_repo.path, 'new_branch')
353 target_repo._local_fetch(source_repo.path, "new_branch")
370 assert target_repo._last_fetch_heads() == [new_branch_commit]
354 assert target_repo._last_fetch_heads() == [new_branch_commit]
371
355
372 target_repo._local_fetch(source_repo.path, 'master')
356 target_repo._local_fetch(source_repo.path, "master")
373 assert target_repo._last_fetch_heads() == [master_commit]
357 assert target_repo._last_fetch_heads() == [master_commit]
374
358
375 def test_local_fetch_from_bare_repo(self, tmpdir):
359 def test_local_fetch_from_bare_repo(self, tmpdir):
376 target_repo = self.get_empty_repo(tmpdir)
360 target_repo = self.get_empty_repo(tmpdir)
377 target_repo._local_fetch(self.repo.path, 'master')
361 target_repo._local_fetch(self.repo.path, "master")
378
362
379 master_commit = self.repo.commit_ids[-1]
363 master_commit = self.repo.commit_ids[-1]
380 assert target_repo._last_fetch_heads() == [master_commit]
364 assert target_repo._last_fetch_heads() == [master_commit]
381
365
382 def test_local_fetch_from_same_repo(self):
366 def test_local_fetch_from_same_repo(self):
383 with pytest.raises(ValueError):
367 with pytest.raises(ValueError):
384 self.repo._local_fetch(self.repo.path, 'master')
368 self.repo._local_fetch(self.repo.path, "master")
385
369
386 def test_local_fetch_branch_does_not_exist(self, tmpdir):
370 def test_local_fetch_branch_does_not_exist(self, tmpdir):
387 target_repo = self.get_empty_repo(tmpdir)
371 target_repo = self.get_empty_repo(tmpdir)
388
372
389 with pytest.raises(RepositoryError):
373 with pytest.raises(RepositoryError):
390 target_repo._local_fetch(self.repo.path, 'new_branch')
374 target_repo._local_fetch(self.repo.path, "new_branch")
391
375
392 def test_local_pull(self, tmpdir):
376 def test_local_pull(self, tmpdir):
393 target_repo = self.get_empty_repo(tmpdir)
377 target_repo = self.get_empty_repo(tmpdir)
@@ -397,19 +381,19 b' TODO: To be written...'
397 master_commit = source_repo.commit_ids[-1]
381 master_commit = source_repo.commit_ids[-1]
398 new_branch_commit = source_repo.commit_ids[-3]
382 new_branch_commit = source_repo.commit_ids[-3]
399 source_repo._checkout(new_branch_commit)
383 source_repo._checkout(new_branch_commit)
400 source_repo._checkout('new_branch', create=True)
384 source_repo._checkout("new_branch", create=True)
401
385
402 target_repo._local_pull(source_repo.path, 'new_branch')
386 target_repo._local_pull(source_repo.path, "new_branch")
403 target_repo = GitRepository(target_repo.path)
387 target_repo = GitRepository(target_repo.path)
404 assert target_repo.head == new_branch_commit
388 assert target_repo.head == new_branch_commit
405
389
406 target_repo._local_pull(source_repo.path, 'master')
390 target_repo._local_pull(source_repo.path, "master")
407 target_repo = GitRepository(target_repo.path)
391 target_repo = GitRepository(target_repo.path)
408 assert target_repo.head == master_commit
392 assert target_repo.head == master_commit
409
393
410 def test_local_pull_in_bare_repo(self):
394 def test_local_pull_in_bare_repo(self):
411 with pytest.raises(RepositoryError):
395 with pytest.raises(RepositoryError):
412 self.repo._local_pull(self.repo.path, 'master')
396 self.repo._local_pull(self.repo.path, "master")
413
397
414 def test_local_merge(self, tmpdir):
398 def test_local_merge(self, tmpdir):
415 target_repo = self.get_empty_repo(tmpdir)
399 target_repo = self.get_empty_repo(tmpdir)
@@ -419,159 +403,144 b' TODO: To be written...'
419 master_commit = source_repo.commit_ids[-1]
403 master_commit = source_repo.commit_ids[-1]
420 new_branch_commit = source_repo.commit_ids[-3]
404 new_branch_commit = source_repo.commit_ids[-3]
421 source_repo._checkout(new_branch_commit)
405 source_repo._checkout(new_branch_commit)
422 source_repo._checkout('new_branch', create=True)
406 source_repo._checkout("new_branch", create=True)
423
407
424 # This is required as one cannot do a -ff-only merge in an empty repo.
408 # This is required as one cannot do a -ff-only merge in an empty repo.
425 target_repo._local_pull(source_repo.path, 'new_branch')
409 target_repo._local_pull(source_repo.path, "new_branch")
426
410
427 target_repo._local_fetch(source_repo.path, 'master')
411 target_repo._local_fetch(source_repo.path, "master")
428 merge_message = 'Merge message\n\nDescription:...'
412 merge_message = "Merge message\n\nDescription:..."
429 user_name = 'Albert Einstein'
413 user_name = "Albert Einstein"
430 user_email = 'albert@einstein.com'
414 user_email = "albert@einstein.com"
431 target_repo._local_merge(merge_message, user_name, user_email,
415 target_repo._local_merge(merge_message, user_name, user_email, target_repo._last_fetch_heads())
432 target_repo._last_fetch_heads())
433
416
434 target_repo = GitRepository(target_repo.path)
417 target_repo = GitRepository(target_repo.path)
435 assert target_repo.commit_ids[-2] == master_commit
418 assert target_repo.commit_ids[-2] == master_commit
436 last_commit = target_repo.get_commit(target_repo.head)
419 last_commit = target_repo.get_commit(target_repo.head)
437 assert last_commit.message.strip() == merge_message
420 assert last_commit.message.strip() == merge_message
438 assert last_commit.author == '%s <%s>' % (user_name, user_email)
421 assert last_commit.author == "%s <%s>" % (user_name, user_email)
439
422
440 assert not os.path.exists(
423 assert not os.path.exists(os.path.join(target_repo.path, ".git", "MERGE_HEAD"))
441 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
442
424
443 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
425 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
444 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
426 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
445 vcsbackend_git.ensure_file(b'README', b'I will conflict with you!!!')
427 vcsbackend_git.ensure_file(b"README", b"I will conflict with you!!!")
446
428
447 target_repo._local_fetch(self.repo.path, 'master')
429 target_repo._local_fetch(self.repo.path, "master")
448 with pytest.raises(RepositoryError):
430 with pytest.raises(RepositoryError):
449 target_repo._local_merge(
431 target_repo._local_merge("merge_message", "user name", "user@name.com", target_repo._last_fetch_heads())
450 'merge_message', 'user name', 'user@name.com',
451 target_repo._last_fetch_heads())
452
432
453 # Check we are not left in an intermediate merge state
433 # Check we are not left in an intermediate merge state
454 assert not os.path.exists(
434 assert not os.path.exists(os.path.join(target_repo.path, ".git", "MERGE_HEAD"))
455 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
456
435
457 def test_local_merge_into_empty_repo(self, tmpdir):
436 def test_local_merge_into_empty_repo(self, tmpdir):
458 target_repo = self.get_empty_repo(tmpdir)
437 target_repo = self.get_empty_repo(tmpdir)
459
438
460 # This is required as one cannot do a -ff-only merge in an empty repo.
439 # This is required as one cannot do a -ff-only merge in an empty repo.
461 target_repo._local_fetch(self.repo.path, 'master')
440 target_repo._local_fetch(self.repo.path, "master")
462 with pytest.raises(RepositoryError):
441 with pytest.raises(RepositoryError):
463 target_repo._local_merge(
442 target_repo._local_merge("merge_message", "user name", "user@name.com", target_repo._last_fetch_heads())
464 'merge_message', 'user name', 'user@name.com',
465 target_repo._last_fetch_heads())
466
443
467 def test_local_merge_in_bare_repo(self):
444 def test_local_merge_in_bare_repo(self):
468 with pytest.raises(RepositoryError):
445 with pytest.raises(RepositoryError):
469 self.repo._local_merge(
446 self.repo._local_merge("merge_message", "user name", "user@name.com", None)
470 'merge_message', 'user name', 'user@name.com', None)
471
447
472 def test_local_push_non_bare(self, tmpdir):
448 def test_local_push_non_bare(self, tmpdir):
473 target_repo = self.get_empty_repo(tmpdir)
449 target_repo = self.get_empty_repo(tmpdir)
474
450
475 pushed_branch = 'pushed_branch'
451 pushed_branch = "pushed_branch"
476 self.repo._local_push('master', target_repo.path, pushed_branch)
452 self.repo._local_push("master", target_repo.path, pushed_branch)
477 # Fix the HEAD of the target repo, or otherwise GitRepository won't
453 # Fix the HEAD of the target repo, or otherwise GitRepository won't
478 # report any branches.
454 # report any branches.
479 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
455 with open(os.path.join(target_repo.path, ".git", "HEAD"), "w") as f:
480 f.write('ref: refs/heads/%s' % pushed_branch)
456 f.write("ref: refs/heads/%s" % pushed_branch)
481
457
482 target_repo = GitRepository(target_repo.path)
458 target_repo = GitRepository(target_repo.path)
483
459
484 assert (target_repo.branches[pushed_branch] ==
460 assert target_repo.branches[pushed_branch] == self.repo.branches["master"]
485 self.repo.branches['master'])
486
461
487 def test_local_push_bare(self, tmpdir):
462 def test_local_push_bare(self, tmpdir):
488 target_repo = self.get_empty_repo(tmpdir, bare=True)
463 target_repo = self.get_empty_repo(tmpdir, bare=True)
489
464
490 pushed_branch = 'pushed_branch'
465 pushed_branch = "pushed_branch"
491 self.repo._local_push('master', target_repo.path, pushed_branch)
466 self.repo._local_push("master", target_repo.path, pushed_branch)
492 # Fix the HEAD of the target repo, or otherwise GitRepository won't
467 # Fix the HEAD of the target repo, or otherwise GitRepository won't
493 # report any branches.
468 # report any branches.
494 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
469 with open(os.path.join(target_repo.path, "HEAD"), "w") as f:
495 f.write('ref: refs/heads/%s' % pushed_branch)
470 f.write("ref: refs/heads/%s" % pushed_branch)
496
471
497 target_repo = GitRepository(target_repo.path)
472 target_repo = GitRepository(target_repo.path)
498
473
499 assert (target_repo.branches[pushed_branch] ==
474 assert target_repo.branches[pushed_branch] == self.repo.branches["master"]
500 self.repo.branches['master'])
501
475
502 def test_local_push_non_bare_target_branch_is_checked_out(self, tmpdir):
476 def test_local_push_non_bare_target_branch_is_checked_out(self, tmpdir):
503 target_repo = self.get_clone_repo(tmpdir)
477 target_repo = self.get_clone_repo(tmpdir)
504
478
505 pushed_branch = 'pushed_branch'
479 pushed_branch = "pushed_branch"
506 # Create a new branch in source repo
480 # Create a new branch in source repo
507 new_branch_commit = target_repo.commit_ids[-3]
481 new_branch_commit = target_repo.commit_ids[-3]
508 target_repo._checkout(new_branch_commit)
482 target_repo._checkout(new_branch_commit)
509 target_repo._checkout(pushed_branch, create=True)
483 target_repo._checkout(pushed_branch, create=True)
510
484
511 self.repo._local_push('master', target_repo.path, pushed_branch)
485 self.repo._local_push("master", target_repo.path, pushed_branch)
512
486
513 target_repo = GitRepository(target_repo.path)
487 target_repo = GitRepository(target_repo.path)
514
488
515 assert (target_repo.branches[pushed_branch] ==
489 assert target_repo.branches[pushed_branch] == self.repo.branches["master"]
516 self.repo.branches['master'])
517
490
518 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
491 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
519 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
492 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
520 with pytest.raises(RepositoryError):
493 with pytest.raises(RepositoryError):
521 self.repo._local_push('master', target_repo.path, 'master')
494 self.repo._local_push("master", target_repo.path, "master")
522
495
523 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self, tmpdir):
496 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self, tmpdir):
524 target_repo = self.get_empty_repo(tmpdir, bare=True)
497 target_repo = self.get_empty_repo(tmpdir, bare=True)
525
498
526 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
499 with mock.patch.object(self.repo, "run_git_command") as run_mock:
527 self.repo._local_push(
500 self.repo._local_push("master", target_repo.path, "master", enable_hooks=True)
528 'master', target_repo.path, 'master', enable_hooks=True)
501 env = run_mock.call_args[1]["extra_env"]
529 env = run_mock.call_args[1]['extra_env']
502 assert "RC_SKIP_HOOKS" not in env
530 assert 'RC_SKIP_HOOKS' not in env
531
503
532 def _add_failing_hook(self, repo_path, hook_name, bare=False):
504 def _add_failing_hook(self, repo_path, hook_name, bare=False):
533 path_components = (
505 path_components = ["hooks", hook_name] if bare else [".git", "hooks", hook_name]
534 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
535 hook_path = os.path.join(repo_path, *path_components)
506 hook_path = os.path.join(repo_path, *path_components)
536 with open(hook_path, 'w') as f:
507 with open(hook_path, "w") as f:
537 script_lines = [
508 script_lines = [
538 '#!%s' % sys.executable,
509 "#!%s" % sys.executable,
539 'import os',
510 "import os",
540 'import sys',
511 "import sys",
541 'if os.environ.get("RC_SKIP_HOOKS"):',
512 'if os.environ.get("RC_SKIP_HOOKS"):',
542 ' sys.exit(0)',
513 " sys.exit(0)",
543 'sys.exit(1)',
514 "sys.exit(1)",
544 ]
515 ]
545 f.write('\n'.join(script_lines))
516 f.write("\n".join(script_lines))
546 os.chmod(hook_path, 0o755)
517 os.chmod(hook_path, 0o755)
547
518
548 def test_local_push_does_not_execute_hook(self, tmpdir):
519 def test_local_push_does_not_execute_hook(self, tmpdir):
549 target_repo = self.get_empty_repo(tmpdir)
520 target_repo = self.get_empty_repo(tmpdir)
550
521
551 pushed_branch = 'pushed_branch'
522 pushed_branch = "pushed_branch"
552 self._add_failing_hook(target_repo.path, 'pre-receive')
523 self._add_failing_hook(target_repo.path, "pre-receive")
553 self.repo._local_push('master', target_repo.path, pushed_branch)
524 self.repo._local_push("master", target_repo.path, pushed_branch)
554 # Fix the HEAD of the target repo, or otherwise GitRepository won't
525 # Fix the HEAD of the target repo, or otherwise GitRepository won't
555 # report any branches.
526 # report any branches.
556 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
527 with open(os.path.join(target_repo.path, ".git", "HEAD"), "w") as f:
557 f.write('ref: refs/heads/%s' % pushed_branch)
528 f.write("ref: refs/heads/%s" % pushed_branch)
558
529
559 target_repo = GitRepository(target_repo.path)
530 target_repo = GitRepository(target_repo.path)
560
531
561 assert (target_repo.branches[pushed_branch] ==
532 assert target_repo.branches[pushed_branch] == self.repo.branches["master"]
562 self.repo.branches['master'])
563
533
564 def test_local_push_executes_hook(self, tmpdir):
534 def test_local_push_executes_hook(self, tmpdir):
565 target_repo = self.get_empty_repo(tmpdir, bare=True)
535 target_repo = self.get_empty_repo(tmpdir, bare=True)
566 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
536 self._add_failing_hook(target_repo.path, "pre-receive", bare=True)
567 with pytest.raises(RepositoryError):
537 with pytest.raises(RepositoryError):
568 self.repo._local_push(
538 self.repo._local_push("master", target_repo.path, "master", enable_hooks=True)
569 'master', target_repo.path, 'master', enable_hooks=True)
570
539
571 def test_maybe_prepare_merge_workspace(self):
540 def test_maybe_prepare_merge_workspace(self):
572 workspace = self.repo._maybe_prepare_merge_workspace(
541 workspace = self.repo._maybe_prepare_merge_workspace(
573 2, 'pr2', Reference('branch', 'master', 'unused'),
542 2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "master", "unused")
574 Reference('branch', 'master', 'unused'))
543 )
575
544
576 assert os.path.isdir(workspace)
545 assert os.path.isdir(workspace)
577 workspace_repo = GitRepository(workspace)
546 workspace_repo = GitRepository(workspace)
@@ -579,14 +548,14 b' TODO: To be written...'
579
548
580 # Calling it a second time should also succeed
549 # Calling it a second time should also succeed
581 workspace = self.repo._maybe_prepare_merge_workspace(
550 workspace = self.repo._maybe_prepare_merge_workspace(
582 2, 'pr2', Reference('branch', 'master', 'unused'),
551 2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "master", "unused")
583 Reference('branch', 'master', 'unused'))
552 )
584 assert os.path.isdir(workspace)
553 assert os.path.isdir(workspace)
585
554
586 def test_maybe_prepare_merge_workspace_different_refs(self):
555 def test_maybe_prepare_merge_workspace_different_refs(self):
587 workspace = self.repo._maybe_prepare_merge_workspace(
556 workspace = self.repo._maybe_prepare_merge_workspace(
588 2, 'pr2', Reference('branch', 'master', 'unused'),
557 2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "develop", "unused")
589 Reference('branch', 'develop', 'unused'))
558 )
590
559
591 assert os.path.isdir(workspace)
560 assert os.path.isdir(workspace)
592 workspace_repo = GitRepository(workspace)
561 workspace_repo = GitRepository(workspace)
@@ -594,48 +563,47 b' TODO: To be written...'
594
563
595 # Calling it a second time should also succeed
564 # Calling it a second time should also succeed
596 workspace = self.repo._maybe_prepare_merge_workspace(
565 workspace = self.repo._maybe_prepare_merge_workspace(
597 2, 'pr2', Reference('branch', 'master', 'unused'),
566 2, "pr2", Reference("branch", "master", "unused"), Reference("branch", "develop", "unused")
598 Reference('branch', 'develop', 'unused'))
567 )
599 assert os.path.isdir(workspace)
568 assert os.path.isdir(workspace)
600
569
601 def test_cleanup_merge_workspace(self):
570 def test_cleanup_merge_workspace(self):
602 workspace = self.repo._maybe_prepare_merge_workspace(
571 workspace = self.repo._maybe_prepare_merge_workspace(
603 2, 'pr3', Reference('branch', 'master', 'unused'),
572 2, "pr3", Reference("branch", "master", "unused"), Reference("branch", "master", "unused")
604 Reference('branch', 'master', 'unused'))
573 )
605 self.repo.cleanup_merge_workspace(2, 'pr3')
574 self.repo.cleanup_merge_workspace(2, "pr3")
606
575
607 assert not os.path.exists(workspace)
576 assert not os.path.exists(workspace)
608
577
609 def test_cleanup_merge_workspace_invalid_workspace_id(self):
578 def test_cleanup_merge_workspace_invalid_workspace_id(self):
610 # No assert: because in case of an inexistent workspace this function
579 # No assert: because in case of an inexistent workspace this function
611 # should still succeed.
580 # should still succeed.
612 self.repo.cleanup_merge_workspace(1, 'pr4')
581 self.repo.cleanup_merge_workspace(1, "pr4")
613
582
614 def test_set_refs(self):
583 def test_set_refs(self):
615 test_ref = 'refs/test-refs/abcde'
584 test_ref = "refs/test-refs/abcde"
616 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
585 test_commit_id = "ecb86e1f424f2608262b130db174a7dfd25a6623"
617
586
618 self.repo.set_refs(test_ref, test_commit_id)
587 self.repo.set_refs(test_ref, test_commit_id)
619 stdout, _ = self.repo.run_git_command(['show-ref'])
588 stdout, _ = self.repo.run_git_command(["show-ref"])
620 assert test_ref in stdout
589 assert test_ref in stdout
621 assert test_commit_id in stdout
590 assert test_commit_id in stdout
622
591
623 def test_remove_ref(self):
592 def test_remove_ref(self):
624 test_ref = 'refs/test-refs/abcde'
593 test_ref = "refs/test-refs/abcde"
625 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
594 test_commit_id = "ecb86e1f424f2608262b130db174a7dfd25a6623"
626 self.repo.set_refs(test_ref, test_commit_id)
595 self.repo.set_refs(test_ref, test_commit_id)
627 stdout, _ = self.repo.run_git_command(['show-ref'])
596 stdout, _ = self.repo.run_git_command(["show-ref"])
628 assert test_ref in stdout
597 assert test_ref in stdout
629 assert test_commit_id in stdout
598 assert test_commit_id in stdout
630
599
631 self.repo.remove_ref(test_ref)
600 self.repo.remove_ref(test_ref)
632 stdout, _ = self.repo.run_git_command(['show-ref'])
601 stdout, _ = self.repo.run_git_command(["show-ref"])
633 assert test_ref not in stdout
602 assert test_ref not in stdout
634 assert test_commit_id not in stdout
603 assert test_commit_id not in stdout
635
604
636
605
637 class TestGitCommit(object):
606 class TestGitCommit(object):
638
639 @pytest.fixture(autouse=True)
607 @pytest.fixture(autouse=True)
640 def prepare(self):
608 def prepare(self):
641 self.repo = GitRepository(TEST_GIT_REPO)
609 self.repo = GitRepository(TEST_GIT_REPO)
@@ -643,11 +611,11 b' class TestGitCommit(object):'
643 def test_default_commit(self):
611 def test_default_commit(self):
644 tip = self.repo.get_commit()
612 tip = self.repo.get_commit()
645 assert tip == self.repo.get_commit(None)
613 assert tip == self.repo.get_commit(None)
646 assert tip == self.repo.get_commit('tip')
614 assert tip == self.repo.get_commit("tip")
647
615
648 def test_root_node(self):
616 def test_root_node(self):
649 tip = self.repo.get_commit()
617 tip = self.repo.get_commit()
650 assert tip.root is tip.get_node('')
618 assert tip.root is tip.get_node("")
651
619
652 def test_lazy_fetch(self):
620 def test_lazy_fetch(self):
653 """
621 """
@@ -655,7 +623,7 b' class TestGitCommit(object):'
655 the commit. This test is somewhat hard to write as order of tests
623 the commit. This test is somewhat hard to write as order of tests
656 is a key here. Written by running command after command in a shell.
624 is a key here. Written by running command after command in a shell.
657 """
625 """
658 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
626 commit_id = "2a13f185e4525f9d4b59882791a2d397b90d5ddc"
659 assert commit_id in self.repo.commit_ids
627 assert commit_id in self.repo.commit_ids
660 commit = self.repo.get_commit(commit_id)
628 commit = self.repo.get_commit(commit_id)
661 assert len(commit.nodes) == 0
629 assert len(commit.nodes) == 0
@@ -665,31 +633,29 b' class TestGitCommit(object):'
665 # accessing root.nodes updates commit.nodes
633 # accessing root.nodes updates commit.nodes
666 assert len(commit.nodes) == 9
634 assert len(commit.nodes) == 9
667
635
668 docs = root.get_node('docs')
636 docs = root.get_node("docs")
669 # we haven't yet accessed anything new as docs dir was already cached
637 # we haven't yet accessed anything new as docs dir was already cached
670 assert len(commit.nodes) == 9
638 assert len(commit.nodes) == 9
671 assert len(docs.nodes) == 8
639 assert len(docs.nodes) == 8
672 # accessing docs.nodes updates commit.nodes
640 # accessing docs.nodes updates commit.nodes
673 assert len(commit.nodes) == 17
641 assert len(commit.nodes) == 17
674
642
675 assert docs is commit.get_node('docs')
643 assert docs is commit.get_node("docs")
676 assert docs is root.nodes[0]
644 assert docs is root.nodes[0]
677 assert docs is root.dirs[0]
645 assert docs is root.dirs[0]
678 assert docs is commit.get_node('docs')
646 assert docs is commit.get_node("docs")
679
647
680 def test_nodes_with_commit(self):
648 def test_nodes_with_commit(self):
681 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
649 commit_id = "2a13f185e4525f9d4b59882791a2d397b90d5ddc"
682 commit = self.repo.get_commit(commit_id)
650 commit = self.repo.get_commit(commit_id)
683 root = commit.root
651 root = commit.root
684 docs = root.get_node('docs')
652 docs = root.get_node("docs")
685 assert docs is commit.get_node('docs')
653 assert docs is commit.get_node("docs")
686 api = docs.get_node('api')
654 api = docs.get_node("api")
687 assert api is commit.get_node('docs/api')
655 assert api is commit.get_node("docs/api")
688 index = api.get_node('index.rst')
656 index = api.get_node("index.rst")
689 assert index is commit.get_node('docs/api/index.rst')
657 assert index is commit.get_node("docs/api/index.rst")
690 assert index is commit.get_node('docs')\
658 assert index is commit.get_node("docs").get_node("api").get_node("index.rst")
691 .get_node('api')\
692 .get_node('index.rst')
693
659
694 def test_branch_and_tags(self):
660 def test_branch_and_tags(self):
695 """
661 """
@@ -716,19 +682,12 b' class TestGitCommit(object):'
716
682
717 def test_file_size(self):
683 def test_file_size(self):
718 to_check = (
684 to_check = (
719 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
685 ("c1214f7e79e02fc37156ff215cd71275450cffc3", "vcs/backends/BaseRepository.py", 502),
720 'vcs/backends/BaseRepository.py', 502),
686 ("d7e0d30fbcae12c90680eb095a4f5f02505ce501", "vcs/backends/hg.py", 854),
721 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
687 ("6e125e7c890379446e98980d8ed60fba87d0f6d1", "setup.py", 1068),
722 'vcs/backends/hg.py', 854),
688 ("d955cd312c17b02143c04fa1099a352b04368118", "vcs/backends/base.py", 2921),
723 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
689 ("ca1eb7957a54bce53b12d1a51b13452f95bc7c7e", "vcs/backends/base.py", 3936),
724 'setup.py', 1068),
690 ("f50f42baeed5af6518ef4b0cb2f1423f3851a941", "vcs/backends/base.py", 6189),
725
726 ('d955cd312c17b02143c04fa1099a352b04368118',
727 'vcs/backends/base.py', 2921),
728 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
729 'vcs/backends/base.py', 3936),
730 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
731 'vcs/backends/base.py', 6189),
732 )
691 )
733 for commit_id, path, size in to_check:
692 for commit_id, path, size in to_check:
734 node = self.repo.get_commit(commit_id).get_node(path)
693 node = self.repo.get_commit(commit_id).get_node(path)
@@ -736,80 +695,77 b' class TestGitCommit(object):'
736 assert node.size == size
695 assert node.size == size
737
696
738 def test_file_history_from_commits(self):
697 def test_file_history_from_commits(self):
739 node = self.repo[10].get_node('setup.py')
698 node = self.repo[10].get_node("setup.py")
740 commit_ids = [commit.raw_id for commit in node.history]
699 commit_ids = [commit.raw_id for commit in node.history]
741 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
700 assert ["ff7ca51e58c505fec0dd2491de52c622bb7a806b"] == commit_ids
742
701
743 node = self.repo[20].get_node('setup.py')
702 node = self.repo[20].get_node("setup.py")
744 node_ids = [commit.raw_id for commit in node.history]
703 node_ids = [commit.raw_id for commit in node.history]
745 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
704 assert ["191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"] == node_ids
746 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
747
705
748 # special case we check history from commit that has this particular
706 # special case we check history from commit that has this particular
749 # file changed this means we check if it's included as well
707 # file changed this means we check if it's included as well
750 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
708 node = self.repo.get_commit("191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e").get_node("setup.py")
751 .get_node('setup.py')
752 node_ids = [commit.raw_id for commit in node.history]
709 node_ids = [commit.raw_id for commit in node.history]
753 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
710 assert ["191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"] == node_ids
754 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
755
711
756 def test_file_history(self):
712 def test_file_history(self):
757 # we can only check if those commits are present in the history
713 # we can only check if those commits are present in the history
758 # as we cannot update this test every time file is changed
714 # as we cannot update this test every time file is changed
759 files = {
715 files = {
760 'setup.py': [
716 "setup.py": [
761 '54386793436c938cff89326944d4c2702340037d',
717 "54386793436c938cff89326944d4c2702340037d",
762 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
718 "51d254f0ecf5df2ce50c0b115741f4cf13985dab",
763 '998ed409c795fec2012b1c0ca054d99888b22090',
719 "998ed409c795fec2012b1c0ca054d99888b22090",
764 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
720 "5e0eb4c47f56564395f76333f319d26c79e2fb09",
765 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
721 "0115510b70c7229dbc5dc49036b32e7d91d23acd",
766 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
722 "7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e",
767 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
723 "2a13f185e4525f9d4b59882791a2d397b90d5ddc",
768 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
724 "191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e",
769 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
725 "ff7ca51e58c505fec0dd2491de52c622bb7a806b",
770 ],
726 ],
771 'vcs/nodes.py': [
727 "vcs/nodes.py": [
772 '33fa3223355104431402a888fa77a4e9956feb3e',
728 "33fa3223355104431402a888fa77a4e9956feb3e",
773 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
729 "fa014c12c26d10ba682fadb78f2a11c24c8118e1",
774 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
730 "e686b958768ee96af8029fe19c6050b1a8dd3b2b",
775 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
731 "ab5721ca0a081f26bf43d9051e615af2cc99952f",
776 'c877b68d18e792a66b7f4c529ea02c8f80801542',
732 "c877b68d18e792a66b7f4c529ea02c8f80801542",
777 '4313566d2e417cb382948f8d9d7c765330356054',
733 "4313566d2e417cb382948f8d9d7c765330356054",
778 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
734 "6c2303a793671e807d1cfc70134c9ca0767d98c2",
779 '54386793436c938cff89326944d4c2702340037d',
735 "54386793436c938cff89326944d4c2702340037d",
780 '54000345d2e78b03a99d561399e8e548de3f3203',
736 "54000345d2e78b03a99d561399e8e548de3f3203",
781 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
737 "1c6b3677b37ea064cb4b51714d8f7498f93f4b2b",
782 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
738 "2d03ca750a44440fb5ea8b751176d1f36f8e8f46",
783 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
739 "2a08b128c206db48c2f0b8f70df060e6db0ae4f8",
784 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
740 "30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b",
785 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
741 "ac71e9503c2ca95542839af0ce7b64011b72ea7c",
786 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
742 "12669288fd13adba2a9b7dd5b870cc23ffab92d2",
787 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
743 "5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382",
788 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
744 "12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5",
789 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
745 "5eab1222a7cd4bfcbabc218ca6d04276d4e27378",
790 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
746 "f50f42baeed5af6518ef4b0cb2f1423f3851a941",
791 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
747 "d7e390a45f6aa96f04f5e7f583ad4f867431aa25",
792 'f15c21f97864b4f071cddfbf2750ec2e23859414',
748 "f15c21f97864b4f071cddfbf2750ec2e23859414",
793 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
749 "e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade",
794 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
750 "ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b",
795 '84dec09632a4458f79f50ddbbd155506c460b4f9',
751 "84dec09632a4458f79f50ddbbd155506c460b4f9",
796 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
752 "0115510b70c7229dbc5dc49036b32e7d91d23acd",
797 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
753 "2a13f185e4525f9d4b59882791a2d397b90d5ddc",
798 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
754 "3bf1c5868e570e39569d094f922d33ced2fa3b2b",
799 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
755 "b8d04012574729d2c29886e53b1a43ef16dd00a1",
800 '6970b057cffe4aab0a792aa634c89f4bebf01441',
756 "6970b057cffe4aab0a792aa634c89f4bebf01441",
801 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
757 "dd80b0f6cf5052f17cc738c2951c4f2070200d7f",
802 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
758 "ff7ca51e58c505fec0dd2491de52c622bb7a806b",
803 ],
759 ],
804 'vcs/backends/git.py': [
760 "vcs/backends/git.py": [
805 '4cf116ad5a457530381135e2f4c453e68a1b0105',
761 "4cf116ad5a457530381135e2f4c453e68a1b0105",
806 '9a751d84d8e9408e736329767387f41b36935153',
762 "9a751d84d8e9408e736329767387f41b36935153",
807 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
763 "cb681fb539c3faaedbcdf5ca71ca413425c18f01",
808 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
764 "428f81bb652bcba8d631bce926e8834ff49bdcc6",
809 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
765 "180ab15aebf26f98f714d8c68715e0f05fa6e1c7",
810 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
766 "2b8e07312a2e89e92b90426ab97f349f4bce2a3a",
811 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
767 "50e08c506174d8645a4bb517dd122ac946a0f3bf",
812 '54000345d2e78b03a99d561399e8e548de3f3203',
768 "54000345d2e78b03a99d561399e8e548de3f3203",
813 ],
769 ],
814 }
770 }
815 for path, commit_ids in files.items():
771 for path, commit_ids in files.items():
@@ -817,79 +773,79 b' class TestGitCommit(object):'
817 node_ids = [commit.raw_id for commit in node.history]
773 node_ids = [commit.raw_id for commit in node.history]
818 assert set(commit_ids).issubset(set(node_ids)), (
774 assert set(commit_ids).issubset(set(node_ids)), (
819 "We assumed that %s is subset of commit_ids for which file %s "
775 "We assumed that %s is subset of commit_ids for which file %s "
820 "has been changed, and history of that node returned: %s"
776 "has been changed, and history of that node returned: %s" % (commit_ids, path, node_ids)
821 % (commit_ids, path, node_ids))
777 )
822
778
823 def test_file_annotate(self):
779 def test_file_annotate(self):
824 files = {
780 files = {
825 'vcs/backends/__init__.py': {
781 "vcs/backends/__init__.py": {
826 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
782 "c1214f7e79e02fc37156ff215cd71275450cffc3": {
827 'lines_no': 1,
783 "lines_no": 1,
828 'commits': [
784 "commits": [
829 'c1214f7e79e02fc37156ff215cd71275450cffc3',
785 "c1214f7e79e02fc37156ff215cd71275450cffc3",
830 ],
786 ],
831 },
787 },
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
788 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647": {
833 'lines_no': 21,
789 "lines_no": 21,
834 'commits': [
790 "commits": [
835 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
791 "49d3fd156b6f7db46313fac355dca1a0b94a0017",
836 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
792 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
837 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
793 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
794 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
795 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
796 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
797 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
798 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
799 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
800 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
801 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
802 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
803 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
804 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
849 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
805 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
850 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
806 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
851 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
807 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
852 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
808 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
853 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
809 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
854 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
810 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
855 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
811 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
856 ],
812 ],
857 },
813 },
858 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
814 "e29b67bd158580fc90fc5e9111240b90e6e86064": {
859 'lines_no': 32,
815 "lines_no": 32,
860 'commits': [
816 "commits": [
861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
817 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
818 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
863 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
819 "5eab1222a7cd4bfcbabc218ca6d04276d4e27378",
864 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
820 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
821 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
866 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
822 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
867 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
823 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
868 '54000345d2e78b03a99d561399e8e548de3f3203',
824 "54000345d2e78b03a99d561399e8e548de3f3203",
869 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
825 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
870 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
826 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
871 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
827 "78c3f0c23b7ee935ec276acb8b8212444c33c396",
872 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
828 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
873 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
829 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
874 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
830 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
875 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
831 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
876 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
832 "2a13f185e4525f9d4b59882791a2d397b90d5ddc",
877 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
833 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
878 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
834 "78c3f0c23b7ee935ec276acb8b8212444c33c396",
879 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
835 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
880 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
836 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
881 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
837 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
882 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
838 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
883 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
884 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
885 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
886 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
842 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
887 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
888 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
844 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
889 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
845 "992f38217b979d0b0987d0bae3cc26dac85d9b19",
890 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
891 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
892 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 "16fba1ae9334d79b66d7afed2c2dfbfa2ae53647",
893 ],
849 ],
894 },
850 },
895 },
851 },
@@ -903,37 +859,32 b' class TestGitCommit(object):'
903 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
859 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
904 assert l1_1 == l1_2
860 assert l1_1 == l1_2
905 l1 = l1_1
861 l1 = l1_1
906 l2 = files[fname][commit_id]['commits']
862 l2 = files[fname][commit_id]["commits"]
907 assert l1 == l2, (
863 assert l1 == l2, (
908 "The lists of commit_ids for %s@commit_id %s"
864 "The lists of commit_ids for %s@commit_id %s"
909 "from annotation list should match each other, "
865 "from annotation list should match each other, "
910 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
866 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2)
867 )
911
868
912 def test_files_state(self):
869 def test_files_state(self):
913 """
870 """
914 Tests state of FileNodes.
871 Tests state of FileNodes.
915 """
872 """
916 node = self.repo\
873 node = self.repo.get_commit("e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0").get_node("vcs/utils/diffs.py")
917 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
918 .get_node('vcs/utils/diffs.py')
919 assert node.state, NodeState.ADDED
874 assert node.state, NodeState.ADDED
920 assert node.added
875 assert node.added
921 assert not node.changed
876 assert not node.changed
922 assert not node.not_changed
877 assert not node.not_changed
923 assert not node.removed
878 assert not node.removed
924
879
925 node = self.repo\
880 node = self.repo.get_commit("33fa3223355104431402a888fa77a4e9956feb3e").get_node(".hgignore")
926 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
927 .get_node('.hgignore')
928 assert node.state, NodeState.CHANGED
881 assert node.state, NodeState.CHANGED
929 assert not node.added
882 assert not node.added
930 assert node.changed
883 assert node.changed
931 assert not node.not_changed
884 assert not node.not_changed
932 assert not node.removed
885 assert not node.removed
933
886
934 node = self.repo\
887 node = self.repo.get_commit("e29b67bd158580fc90fc5e9111240b90e6e86064").get_node("setup.py")
935 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
936 .get_node('setup.py')
937 assert node.state, NodeState.NOT_CHANGED
888 assert node.state, NodeState.NOT_CHANGED
938 assert not node.added
889 assert not node.added
939 assert not node.changed
890 assert not node.changed
@@ -942,48 +893,38 b' class TestGitCommit(object):'
942
893
943 # If node has REMOVED state then trying to fetch it would raise
894 # If node has REMOVED state then trying to fetch it would raise
944 # CommitError exception
895 # CommitError exception
945 commit = self.repo.get_commit(
896 commit = self.repo.get_commit("fa6600f6848800641328adbf7811fd2372c02ab2")
946 'fa6600f6848800641328adbf7811fd2372c02ab2')
897 path = "vcs/backends/BaseRepository.py"
947 path = 'vcs/backends/BaseRepository.py'
948 with pytest.raises(NodeDoesNotExistError):
898 with pytest.raises(NodeDoesNotExistError):
949 commit.get_node(path)
899 commit.get_node(path)
950 # but it would be one of ``removed`` (commit's attribute)
900 # but it would be one of ``removed`` (commit's attribute)
951 assert path in [rf.path for rf in commit.removed]
901 assert path in [rf.path for rf in commit.removed]
952
902
953 commit = self.repo.get_commit(
903 commit = self.repo.get_commit("54386793436c938cff89326944d4c2702340037d")
954 '54386793436c938cff89326944d4c2702340037d')
904 changed = ["setup.py", "tests/test_nodes.py", "vcs/backends/hg.py", "vcs/nodes.py"]
955 changed = [
956 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
957 'vcs/nodes.py']
958 assert set(changed) == set([f.path for f in commit.changed])
905 assert set(changed) == set([f.path for f in commit.changed])
959
906
960 def test_unicode_branch_refs(self):
907 def test_unicode_branch_refs(self):
961 unicode_branches = {
908 unicode_branches = {
962 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
909 "refs/heads/unicode": "6c0ce52b229aa978889e91b38777f800e85f330b",
963 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
910 "refs/heads/uniΓ§ΓΆβˆ‚e": "ΓΌrl",
964 }
911 }
965 with mock.patch(
912 with mock.patch(("rhodecode.lib.vcs.backends.git.repository" ".GitRepository._refs"), unicode_branches):
966 ("rhodecode.lib.vcs.backends.git.repository"
967 ".GitRepository._refs"),
968 unicode_branches):
969 branches = self.repo.branches
913 branches = self.repo.branches
970
914
971 assert 'unicode' in branches
915 assert "unicode" in branches
972 assert 'uniΓ§ΓΆβˆ‚e' in branches
916 assert "uniΓ§ΓΆβˆ‚e" in branches
973
917
974 def test_unicode_tag_refs(self):
918 def test_unicode_tag_refs(self):
975 unicode_tags = {
919 unicode_tags = {
976 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
920 "refs/tags/unicode": "6c0ce52b229aa978889e91b38777f800e85f330b",
977 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
921 "refs/tags/uniΓ§ΓΆβˆ‚e": "6c0ce52b229aa978889e91b38777f800e85f330b",
978 }
922 }
979 with mock.patch(
923 with mock.patch(("rhodecode.lib.vcs.backends.git.repository" ".GitRepository._refs"), unicode_tags):
980 ("rhodecode.lib.vcs.backends.git.repository"
981 ".GitRepository._refs"),
982 unicode_tags):
983 tags = self.repo.tags
924 tags = self.repo.tags
984
925
985 assert 'unicode' in tags
926 assert "unicode" in tags
986 assert 'uniΓ§ΓΆβˆ‚e' in tags
927 assert "uniΓ§ΓΆβˆ‚e" in tags
987
928
988 def test_commit_message_is_unicode(self):
929 def test_commit_message_is_unicode(self):
989 for commit in self.repo:
930 for commit in self.repo:
@@ -995,190 +936,186 b' class TestGitCommit(object):'
995
936
996 def test_repo_files_content_types(self):
937 def test_repo_files_content_types(self):
997 commit = self.repo.get_commit()
938 commit = self.repo.get_commit()
998 for node in commit.get_node('/'):
939 for node in commit.get_node("/"):
999 if node.is_file():
940 if node.is_file():
1000 assert type(node.content) == bytes
941 assert type(node.content) == bytes
1001 assert type(node.str_content) == str
942 assert type(node.str_content) == str
1002
943
1003 def test_wrong_path(self):
944 def test_wrong_path(self):
1004 # There is 'setup.py' in the root dir but not there:
945 # There is 'setup.py' in the root dir but not there:
1005 path = 'foo/bar/setup.py'
946 path = "foo/bar/setup.py"
1006 tip = self.repo.get_commit()
947 tip = self.repo.get_commit()
1007 with pytest.raises(VCSError):
948 with pytest.raises(VCSError):
1008 tip.get_node(path)
949 tip.get_node(path)
1009
950
1010 @pytest.mark.parametrize("author_email, commit_id", [
951 @pytest.mark.parametrize(
1011 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
952 "author_email, commit_id",
1012 ('lukasz.balcerzak@python-center.pl',
953 [
1013 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
954 ("marcin@python-blog.com", "c1214f7e79e02fc37156ff215cd71275450cffc3"),
1014 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
955 ("lukasz.balcerzak@python-center.pl", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"),
1015 ])
956 ("none@none", "8430a588b43b5d6da365400117c89400326e7992"),
957 ],
958 )
1016 def test_author_email(self, author_email, commit_id):
959 def test_author_email(self, author_email, commit_id):
1017 commit = self.repo.get_commit(commit_id)
960 commit = self.repo.get_commit(commit_id)
1018 assert author_email == commit.author_email
961 assert author_email == commit.author_email
1019
962
1020 @pytest.mark.parametrize("author, commit_id", [
963 @pytest.mark.parametrize(
1021 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
964 "author, commit_id",
1022 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
965 [
1023 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
966 ("Marcin Kuzminski", "c1214f7e79e02fc37156ff215cd71275450cffc3"),
1024 ])
967 ("Lukasz Balcerzak", "ff7ca51e58c505fec0dd2491de52c622bb7a806b"),
968 ("marcink", "8430a588b43b5d6da365400117c89400326e7992"),
969 ],
970 )
1025 def test_author_username(self, author, commit_id):
971 def test_author_username(self, author, commit_id):
1026 commit = self.repo.get_commit(commit_id)
972 commit = self.repo.get_commit(commit_id)
1027 assert author == commit.author_name
973 assert author == commit.author_name
1028
974
1029
975
1030 class TestLargeFileRepo(object):
976 class TestLargeFileRepo(object):
1031
1032 def test_large_file(self, backend_git):
977 def test_large_file(self, backend_git):
1033 conf = make_db_config()
978 conf = make_db_config()
1034 repo = backend_git.create_test_repo('largefiles', conf)
979 git_largefiles_store = conf.get("vcs_git_lfs", "store_location")
980
981 repo = backend_git.create_test_repo("largefiles", conf)
1035
982
1036 tip = repo.scm_instance().get_commit()
983 tip = repo.scm_instance().get_commit()
984 node = tip.get_node("1MB.zip")
985
1037
986
1038 # extract stored LF node into the origin cache
987 # extract stored LF node into the origin cache
1039 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
988 repo_lfs_store: str = os.path.join(repo.repo_path, repo.repo_name, "lfs_store")
1040
989
1041 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
990 oid: str = "7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf"
1042 oid_path = os.path.join(lfs_store, oid)
991 # where the OID actually is INSIDE the repo...
1043 # Todo: oid path depends on LFSOidStorage.store_suffix. Once it will be changed update below line accordingly
992 oid_path = os.path.join(repo_lfs_store, oid)
1044 oid_destination = os.path.join(
1045 conf.get('vcs_git_lfs', 'store_location'), f'objects/{oid[:2]}/{oid[2:4]}/{oid}')
1046
993
1047 os.makedirs(os.path.dirname(oid_destination))
994 # Note: oid path depends on LFSOidStore.store_suffix. Once it will be changed update below line accordingly
995 oid_destination = os.path.join(git_largefiles_store, f"objects/{oid[:2]}/{oid[2:4]}/{oid}")
996
1048 shutil.copy(oid_path, oid_destination)
997 shutil.copy(oid_path, oid_destination)
1049
998
1050 node = tip.get_node('1MB.zip')
1051
1052 lf_node = node.get_largefile_node()
999 lf_node = node.get_largefile_node()
1053
1000
1054 assert lf_node.is_largefile() is True
1001 assert lf_node.is_largefile() is True
1055 assert lf_node.size == 1024000
1002 assert lf_node.size == 1024000
1056 assert lf_node.name == '1MB.zip'
1003 assert lf_node.name == "1MB.zip"
1057
1004
1058
1005
1059 @pytest.mark.usefixtures("vcs_repository_support")
1006 @pytest.mark.usefixtures("vcs_repository_support")
1060 class TestGitSpecificWithRepo(BackendTestMixin):
1007 class TestGitSpecificWithRepo(BackendTestMixin):
1061
1062 @classmethod
1008 @classmethod
1063 def _get_commits(cls):
1009 def _get_commits(cls):
1064 return [
1010 return [
1065 {
1011 {
1066 'message': 'Initial',
1012 "message": "Initial",
1067 'author': 'Joe Doe <joe.doe@example.com>',
1013 "author": "Joe Doe <joe.doe@example.com>",
1068 'date': datetime.datetime(2010, 1, 1, 20),
1014 "date": datetime.datetime(2010, 1, 1, 20),
1069 'added': [
1015 "added": [
1070 FileNode(b'foobar/static/js/admin/base.js', content=b'base'),
1016 FileNode(b"foobar/static/js/admin/base.js", content=b"base"),
1071 FileNode(b'foobar/static/admin', content=b'admin', mode=0o120000), # this is a link
1017 FileNode(b"foobar/static/admin", content=b"admin", mode=0o120000), # this is a link
1072 FileNode(b'foo', content=b'foo'),
1018 FileNode(b"foo", content=b"foo"),
1073 ],
1019 ],
1074 },
1020 },
1075 {
1021 {
1076 'message': 'Second',
1022 "message": "Second",
1077 'author': 'Joe Doe <joe.doe@example.com>',
1023 "author": "Joe Doe <joe.doe@example.com>",
1078 'date': datetime.datetime(2010, 1, 1, 22),
1024 "date": datetime.datetime(2010, 1, 1, 22),
1079 'added': [
1025 "added": [
1080 FileNode(b'foo2', content=b'foo2'),
1026 FileNode(b"foo2", content=b"foo2"),
1081 ],
1027 ],
1082 },
1028 },
1083 ]
1029 ]
1084
1030
1085 def test_paths_slow_traversing(self):
1031 def test_paths_slow_traversing(self):
1086 commit = self.repo.get_commit()
1032 commit = self.repo.get_commit()
1087 assert commit.get_node('foobar').get_node('static').get_node('js')\
1033 assert (
1088 .get_node('admin').get_node('base.js').content == b'base'
1034 commit.get_node("foobar").get_node("static").get_node("js").get_node("admin").get_node("base.js").content
1035 == b"base"
1036 )
1089
1037
1090 def test_paths_fast_traversing(self):
1038 def test_paths_fast_traversing(self):
1091 commit = self.repo.get_commit()
1039 commit = self.repo.get_commit()
1092 assert commit.get_node('foobar/static/js/admin/base.js').content == b'base'
1040 assert commit.get_node("foobar/static/js/admin/base.js").content == b"base"
1093
1041
1094 def test_get_diff_runs_git_command_with_hashes(self):
1042 def test_get_diff_runs_git_command_with_hashes(self):
1095 comm1 = self.repo[0]
1043 comm1 = self.repo[0]
1096 comm2 = self.repo[1]
1044 comm2 = self.repo[1]
1097
1045
1098 with mock.patch.object(self.repo, '_remote', return_value=mock.Mock()) as remote_mock:
1046 with mock.patch.object(self.repo, "_remote", return_value=mock.Mock()) as remote_mock:
1099 remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff)
1047 remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff)
1100 self.repo.get_diff(comm1, comm2)
1048 self.repo.get_diff(comm1, comm2)
1101
1049
1102 remote_mock.diff.assert_called_once_with(
1050 remote_mock.diff.assert_called_once_with(
1103 comm1.raw_id, comm2.raw_id,
1051 comm1.raw_id, comm2.raw_id, file_filter=None, opt_ignorews=False, context=3
1104 file_filter=None, opt_ignorews=False, context=3)
1052 )
1105
1053
1106 def test_get_diff_runs_git_command_with_str_hashes(self):
1054 def test_get_diff_runs_git_command_with_str_hashes(self):
1107 comm2 = self.repo[1]
1055 comm2 = self.repo[1]
1108
1056
1109 with mock.patch.object(self.repo, '_remote', return_value=mock.Mock()) as remote_mock:
1057 with mock.patch.object(self.repo, "_remote", return_value=mock.Mock()) as remote_mock:
1110 remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff)
1058 remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff)
1111 self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2)
1059 self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2)
1112
1060
1113 remote_mock.diff.assert_called_once_with(
1061 remote_mock.diff.assert_called_once_with(
1114 self.repo.EMPTY_COMMIT.raw_id, comm2.raw_id,
1062 self.repo.EMPTY_COMMIT.raw_id, comm2.raw_id, file_filter=None, opt_ignorews=False, context=3
1115 file_filter=None, opt_ignorews=False, context=3)
1063 )
1116
1064
1117 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1065 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1118 comm1 = self.repo[0]
1066 comm1 = self.repo[0]
1119 comm2 = self.repo[1]
1067 comm2 = self.repo[1]
1120
1068
1121 with mock.patch.object(self.repo, '_remote', return_value=mock.Mock()) as remote_mock:
1069 with mock.patch.object(self.repo, "_remote", return_value=mock.Mock()) as remote_mock:
1122 remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff)
1070 remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff)
1123 self.repo.get_diff(comm1, comm2, 'foo')
1071 self.repo.get_diff(comm1, comm2, "foo")
1124
1072
1125 remote_mock.diff.assert_called_once_with(
1073 remote_mock.diff.assert_called_once_with(
1126 self.repo._lookup_commit(0), comm2.raw_id,
1074 self.repo._lookup_commit(0), comm2.raw_id, file_filter="foo", opt_ignorews=False, context=3
1127 file_filter='foo', opt_ignorews=False, context=3)
1075 )
1128
1076
1129
1077
1130 @pytest.mark.usefixtures("vcs_repository_support")
1078 @pytest.mark.usefixtures("vcs_repository_support")
1131 class TestGitRegression(BackendTestMixin):
1079 class TestGitRegression(BackendTestMixin):
1132
1133 @classmethod
1080 @classmethod
1134 def _get_commits(cls):
1081 def _get_commits(cls):
1135 return [
1082 return [
1136 {
1083 {
1137 'message': 'Initial',
1084 "message": "Initial",
1138 'author': 'Joe Doe <joe.doe@example.com>',
1085 "author": "Joe Doe <joe.doe@example.com>",
1139 'date': datetime.datetime(2010, 1, 1, 20),
1086 "date": datetime.datetime(2010, 1, 1, 20),
1140 'added': [
1087 "added": [
1141 FileNode(b'bot/__init__.py', content=b'base'),
1088 FileNode(b"bot/__init__.py", content=b"base"),
1142 FileNode(b'bot/templates/404.html', content=b'base'),
1089 FileNode(b"bot/templates/404.html", content=b"base"),
1143 FileNode(b'bot/templates/500.html', content=b'base'),
1090 FileNode(b"bot/templates/500.html", content=b"base"),
1144 ],
1091 ],
1145 },
1092 },
1146 {
1093 {
1147 'message': 'Second',
1094 "message": "Second",
1148 'author': 'Joe Doe <joe.doe@example.com>',
1095 "author": "Joe Doe <joe.doe@example.com>",
1149 'date': datetime.datetime(2010, 1, 1, 22),
1096 "date": datetime.datetime(2010, 1, 1, 22),
1150 'added': [
1097 "added": [
1151 FileNode(b'bot/build/migrations/1.py', content=b'foo2'),
1098 FileNode(b"bot/build/migrations/1.py", content=b"foo2"),
1152 FileNode(b'bot/build/migrations/2.py', content=b'foo2'),
1099 FileNode(b"bot/build/migrations/2.py", content=b"foo2"),
1153 FileNode(b'bot/build/static/templates/f.html', content=b'foo2'),
1100 FileNode(b"bot/build/static/templates/f.html", content=b"foo2"),
1154 FileNode(b'bot/build/static/templates/f1.html', content=b'foo2'),
1101 FileNode(b"bot/build/static/templates/f1.html", content=b"foo2"),
1155 FileNode(b'bot/build/templates/err.html', content=b'foo2'),
1102 FileNode(b"bot/build/templates/err.html", content=b"foo2"),
1156 FileNode(b'bot/build/templates/err2.html', content=b'foo2'),
1103 FileNode(b"bot/build/templates/err2.html", content=b"foo2"),
1157 ],
1104 ],
1158 },
1105 },
1159 ]
1106 ]
1160
1107
1161 @pytest.mark.parametrize("path, expected_paths", [
1108 @pytest.mark.parametrize(
1162 ('bot', [
1109 "path, expected_paths",
1163 'bot/build',
1110 [
1164 'bot/templates',
1111 ("bot", ["bot/build", "bot/templates", "bot/__init__.py"]),
1165 'bot/__init__.py']),
1112 ("bot/build", ["bot/build/migrations", "bot/build/static", "bot/build/templates"]),
1166 ('bot/build', [
1113 ("bot/build/static", ["bot/build/static/templates"]),
1167 'bot/build/migrations',
1114 ("bot/build/static/templates", ["bot/build/static/templates/f.html", "bot/build/static/templates/f1.html"]),
1168 'bot/build/static',
1115 ("bot/build/templates", ["bot/build/templates/err.html", "bot/build/templates/err2.html"]),
1169 'bot/build/templates']),
1116 ("bot/templates/", ["bot/templates/404.html", "bot/templates/500.html"]),
1170 ('bot/build/static', [
1117 ],
1171 'bot/build/static/templates']),
1118 )
1172 ('bot/build/static/templates', [
1173 'bot/build/static/templates/f.html',
1174 'bot/build/static/templates/f1.html']),
1175 ('bot/build/templates', [
1176 'bot/build/templates/err.html',
1177 'bot/build/templates/err2.html']),
1178 ('bot/templates/', [
1179 'bot/templates/404.html',
1180 'bot/templates/500.html']),
1181 ])
1182 def test_similar_paths(self, path, expected_paths):
1119 def test_similar_paths(self, path, expected_paths):
1183 commit = self.repo.get_commit()
1120 commit = self.repo.get_commit()
1184 paths = [n.path for n in commit.get_nodes(path)]
1121 paths = [n.path for n in commit.get_nodes(path)]
@@ -1186,122 +1123,120 b' class TestGitRegression(BackendTestMixin'
1186
1123
1187
1124
1188 class TestDiscoverGitVersion(object):
1125 class TestDiscoverGitVersion(object):
1189
1190 def test_returns_git_version(self, baseapp):
1126 def test_returns_git_version(self, baseapp):
1191 version = discover_git_version()
1127 version = discover_git_version()
1192 assert version
1128 assert version
1193
1129
1194 def test_returns_empty_string_without_vcsserver(self):
1130 def test_returns_empty_string_without_vcsserver(self):
1195 mock_connection = mock.Mock()
1131 mock_connection = mock.Mock()
1196 mock_connection.discover_git_version = mock.Mock(
1132 mock_connection.discover_git_version = mock.Mock(side_effect=Exception)
1197 side_effect=Exception)
1133 with mock.patch("rhodecode.lib.vcs.connection.Git", mock_connection):
1198 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1199 version = discover_git_version()
1134 version = discover_git_version()
1200 assert version == ''
1135 assert version == ""
1201
1136
1202
1137
1203 class TestGetSubmoduleUrl(object):
1138 class TestGetSubmoduleUrl(object):
1204 def test_submodules_file_found(self):
1139 def test_submodules_file_found(self):
1205 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1140 commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
1206 node = mock.Mock()
1141 node = mock.Mock()
1207
1142
1208 with mock.patch.object(
1143 with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock:
1209 commit, 'get_node', return_value=node) as get_node_mock:
1210 node.str_content = (
1144 node.str_content = (
1211 '[submodule "subrepo1"]\n'
1145 '[submodule "subrepo1"]\n' "\tpath = subrepo1\n" "\turl = https://code.rhodecode.com/dulwich\n"
1212 '\tpath = subrepo1\n'
1213 '\turl = https://code.rhodecode.com/dulwich\n'
1214 )
1146 )
1215 result = commit._get_submodule_url('subrepo1')
1147 result = commit._get_submodule_url("subrepo1")
1216 get_node_mock.assert_called_once_with('.gitmodules')
1148 get_node_mock.assert_called_once_with(".gitmodules")
1217 assert result == 'https://code.rhodecode.com/dulwich'
1149 assert result == "https://code.rhodecode.com/dulwich"
1218
1150
1219 def test_complex_submodule_path(self):
1151 def test_complex_submodule_path(self):
1220 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1152 commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
1221 node = mock.Mock()
1153 node = mock.Mock()
1222
1154
1223 with mock.patch.object(
1155 with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock:
1224 commit, 'get_node', return_value=node) as get_node_mock:
1225 node.str_content = (
1156 node.str_content = (
1226 '[submodule "complex/subrepo/path"]\n'
1157 '[submodule "complex/subrepo/path"]\n'
1227 '\tpath = complex/subrepo/path\n'
1158 "\tpath = complex/subrepo/path\n"
1228 '\turl = https://code.rhodecode.com/dulwich\n'
1159 "\turl = https://code.rhodecode.com/dulwich\n"
1229 )
1160 )
1230 result = commit._get_submodule_url('complex/subrepo/path')
1161 result = commit._get_submodule_url("complex/subrepo/path")
1231 get_node_mock.assert_called_once_with('.gitmodules')
1162 get_node_mock.assert_called_once_with(".gitmodules")
1232 assert result == 'https://code.rhodecode.com/dulwich'
1163 assert result == "https://code.rhodecode.com/dulwich"
1233
1164
1234 def test_submodules_file_not_found(self):
1165 def test_submodules_file_not_found(self):
1235 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1166 commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
1236 with mock.patch.object(
1167 with mock.patch.object(commit, "get_node", side_effect=NodeDoesNotExistError):
1237 commit, 'get_node', side_effect=NodeDoesNotExistError):
1168 result = commit._get_submodule_url("complex/subrepo/path")
1238 result = commit._get_submodule_url('complex/subrepo/path')
1239 assert result is None
1169 assert result is None
1240
1170
1241 def test_path_not_found(self):
1171 def test_path_not_found(self):
1242 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1172 commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
1243 node = mock.Mock()
1173 node = mock.Mock()
1244
1174
1245 with mock.patch.object(
1175 with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock:
1246 commit, 'get_node', return_value=node) as get_node_mock:
1247 node.str_content = (
1176 node.str_content = (
1248 '[submodule "subrepo1"]\n'
1177 '[submodule "subrepo1"]\n' "\tpath = subrepo1\n" "\turl = https://code.rhodecode.com/dulwich\n"
1249 '\tpath = subrepo1\n'
1250 '\turl = https://code.rhodecode.com/dulwich\n'
1251 )
1178 )
1252 result = commit._get_submodule_url('subrepo2')
1179 result = commit._get_submodule_url("subrepo2")
1253 get_node_mock.assert_called_once_with('.gitmodules')
1180 get_node_mock.assert_called_once_with(".gitmodules")
1254 assert result is None
1181 assert result is None
1255
1182
1256 def test_returns_cached_values(self):
1183 def test_returns_cached_values(self):
1257 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1184 commit = GitCommit(repository=mock.Mock(), raw_id="abcdef12", idx=1)
1258 node = mock.Mock()
1185 node = mock.Mock()
1259
1186
1260 with mock.patch.object(
1187 with mock.patch.object(commit, "get_node", return_value=node) as get_node_mock:
1261 commit, 'get_node', return_value=node) as get_node_mock:
1262 node.str_content = (
1188 node.str_content = (
1263 '[submodule "subrepo1"]\n'
1189 '[submodule "subrepo1"]\n' "\tpath = subrepo1\n" "\turl = https://code.rhodecode.com/dulwich\n"
1264 '\tpath = subrepo1\n'
1265 '\turl = https://code.rhodecode.com/dulwich\n'
1266 )
1190 )
1267 for _ in range(3):
1191 for _ in range(3):
1268 commit._get_submodule_url('subrepo1')
1192 commit._get_submodule_url("subrepo1")
1269 get_node_mock.assert_called_once_with('.gitmodules')
1193 get_node_mock.assert_called_once_with(".gitmodules")
1270
1194
1271 def test_get_node_returns_a_link(self):
1195 def test_get_node_returns_a_link(self):
1272 repository = mock.Mock()
1196 repository = mock.Mock()
1273 repository.alias = 'git'
1197 repository.alias = "git"
1274 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1198 commit = GitCommit(repository=repository, raw_id="abcdef12", idx=1)
1275 submodule_url = 'https://code.rhodecode.com/dulwich'
1199 submodule_url = "https://code.rhodecode.com/dulwich"
1276 get_id_patch = mock.patch.object(
1200 get_id_patch = mock.patch.object(commit, "_get_tree_id_for_path", return_value=(1, "link"))
1277 commit, '_get_tree_id_for_path', return_value=(1, 'link'))
1201 get_submodule_patch = mock.patch.object(commit, "_get_submodule_url", return_value=submodule_url)
1278 get_submodule_patch = mock.patch.object(
1279 commit, '_get_submodule_url', return_value=submodule_url)
1280
1202
1281 with get_id_patch, get_submodule_patch as submodule_mock:
1203 with get_id_patch, get_submodule_patch as submodule_mock:
1282 node = commit.get_node('/abcde')
1204 node = commit.get_node("/abcde")
1283
1205
1284 submodule_mock.assert_called_once_with('/abcde')
1206 submodule_mock.assert_called_once_with("/abcde")
1285 assert type(node) == SubModuleNode
1207 assert type(node) == SubModuleNode
1286 assert node.url == submodule_url
1208 assert node.url == submodule_url
1287
1209
1288 def test_get_nodes_returns_links(self):
1210 def test_get_nodes_returns_links(self):
1289 repository = mock.MagicMock()
1211 repository = mock.MagicMock()
1290 repository.alias = 'git'
1212 repository.alias = "git"
1291 repository._remote.tree_items.return_value = [
1213 repository._remote.tree_items.return_value = [("subrepo", "stat", 1, "link")]
1292 ('subrepo', 'stat', 1, 'link')
1214 commit = GitCommit(repository=repository, raw_id="abcdef12", idx=1)
1293 ]
1215 submodule_url = "https://code.rhodecode.com/dulwich"
1294 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1216 get_id_patch = mock.patch.object(commit, "_get_tree_id_for_path", return_value=(1, "tree"))
1295 submodule_url = 'https://code.rhodecode.com/dulwich'
1217 get_submodule_patch = mock.patch.object(commit, "_get_submodule_url", return_value=submodule_url)
1296 get_id_patch = mock.patch.object(
1297 commit, '_get_tree_id_for_path', return_value=(1, 'tree'))
1298 get_submodule_patch = mock.patch.object(
1299 commit, '_get_submodule_url', return_value=submodule_url)
1300
1218
1301 with get_id_patch, get_submodule_patch as submodule_mock:
1219 with get_id_patch, get_submodule_patch as submodule_mock:
1302 nodes = commit.get_nodes('/abcde')
1220 nodes = commit.get_nodes("/abcde")
1303
1221
1304 submodule_mock.assert_called_once_with('/abcde/subrepo')
1222 submodule_mock.assert_called_once_with("/abcde/subrepo")
1305 assert len(nodes) == 1
1223 assert len(nodes) == 1
1306 assert type(nodes[0]) == SubModuleNode
1224 assert type(nodes[0]) == SubModuleNode
1307 assert nodes[0].url == submodule_url
1225 assert nodes[0].url == submodule_url
1226
1227
1228 class TestGetShadowInstance(object):
1229
1230 @pytest.fixture()
1231 def repo(self, vcsbackend_git):
1232 _git_repo = vcsbackend_git.repo
1233
1234 mock.patch.object(_git_repo, "config", mock.Mock())
1235 connection_mock = mock.Mock(unsafe=True, name="connection.Hg")
1236
1237 mock.patch("rhodecode.lib.vcs.connection.Git", connection_mock)
1238 return _git_repo
1239
1240 def test_getting_shadow_instance_copies_config(self, repo):
1241 shadow = repo.get_shadow_instance(repo.path)
1242 assert shadow.config.serialize() == repo.config.serialize()
This diff has been collapsed as it changes many lines, (992 lines changed) Show them Hide them
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -25,11 +24,9 b' import pytest'
25 from rhodecode.lib.str_utils import safe_bytes
24 from rhodecode.lib.str_utils import safe_bytes
26 from rhodecode.lib.utils import make_db_config
25 from rhodecode.lib.utils import make_db_config
27 from rhodecode.lib.vcs import backends
26 from rhodecode.lib.vcs import backends
28 from rhodecode.lib.vcs.backends.base import (
27 from rhodecode.lib.vcs.backends.base import Reference, MergeResponse, MergeFailureReason
29 Reference, MergeResponse, MergeFailureReason)
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
28 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 from rhodecode.lib.vcs.exceptions import (
29 from rhodecode.lib.vcs.exceptions import RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
30 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
31 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
35
32
@@ -44,12 +41,13 b' def repo_path_generator():'
44 i = 0
41 i = 0
45 while True:
42 while True:
46 i += 1
43 i += 1
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
44 yield "%s-%d" % (TEST_HG_REPO_CLONE, i)
45
48
46
49 REPO_PATH_GENERATOR = repo_path_generator()
47 REPO_PATH_GENERATOR = repo_path_generator()
50
48
51
49
52 @pytest.fixture(scope='class', autouse=True)
50 @pytest.fixture(scope="class", autouse=True)
53 def repo(request, baseapp):
51 def repo(request, baseapp):
54 repo = MercurialRepository(TEST_HG_REPO)
52 repo = MercurialRepository(TEST_HG_REPO)
55 if request.cls:
53 if request.cls:
@@ -58,7 +56,6 b' def repo(request, baseapp):'
58
56
59
57
60 class TestMercurialRepository(object):
58 class TestMercurialRepository(object):
61
62 # pylint: disable=protected-access
59 # pylint: disable=protected-access
63
60
64 def get_clone_repo(self):
61 def get_clone_repo(self):
@@ -66,8 +63,7 b' class TestMercurialRepository(object):'
66 Return a clone of the base repo.
63 Return a clone of the base repo.
67 """
64 """
68 clone_path = next(REPO_PATH_GENERATOR)
65 clone_path = next(REPO_PATH_GENERATOR)
69 repo_clone = MercurialRepository(
66 repo_clone = MercurialRepository(clone_path, create=True, src_url=self.repo.path)
70 clone_path, create=True, src_url=self.repo.path)
71
67
72 return repo_clone
68 return repo_clone
73
69
@@ -78,40 +74,39 b' class TestMercurialRepository(object):'
78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
74 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
79
75
80 def test_wrong_repo_path(self):
76 def test_wrong_repo_path(self):
81 wrong_repo_path = '/tmp/errorrepo_hg'
77 wrong_repo_path = "/tmp/errorrepo_hg"
82 with pytest.raises(RepositoryError):
78 with pytest.raises(RepositoryError):
83 MercurialRepository(wrong_repo_path)
79 MercurialRepository(wrong_repo_path)
84
80
85 def test_unicode_path_repo(self):
81 def test_unicode_path_repo(self):
86 with pytest.raises(VCSError):
82 with pytest.raises(VCSError):
87 MercurialRepository('iShouldFail')
83 MercurialRepository("iShouldFail")
88
84
89 def test_unicode_commit_id(self):
85 def test_unicode_commit_id(self):
90 with pytest.raises(CommitDoesNotExistError):
86 with pytest.raises(CommitDoesNotExistError):
91 self.repo.get_commit('unicode-commit-id')
87 self.repo.get_commit("unicode-commit-id")
92 with pytest.raises(CommitDoesNotExistError):
88 with pytest.raises(CommitDoesNotExistError):
93 self.repo.get_commit('unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-commit-id')
89 self.repo.get_commit("unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-commit-id")
94
90
95 def test_unicode_bookmark(self):
91 def test_unicode_bookmark(self):
96 self.repo.bookmark('unicode-bookmark')
92 self.repo.bookmark("unicode-bookmark")
97 self.repo.bookmark('unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-bookmark')
93 self.repo.bookmark("unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-bookmark")
98
94
99 def test_unicode_branch(self):
95 def test_unicode_branch(self):
100 with pytest.raises(KeyError):
96 with pytest.raises(KeyError):
101 assert self.repo.branches['unicode-branch']
97 assert self.repo.branches["unicode-branch"]
102 with pytest.raises(KeyError):
98 with pytest.raises(KeyError):
103 assert self.repo.branches['unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-branch']
99 assert self.repo.branches["unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-branch"]
104
100
105 def test_repo_clone(self):
101 def test_repo_clone(self):
106 if os.path.exists(TEST_HG_REPO_CLONE):
102 if os.path.exists(TEST_HG_REPO_CLONE):
107 self.fail(
103 self.fail(
108 'Cannot test mercurial clone repo as location %s already '
104 "Cannot test mercurial clone repo as location %s already "
109 'exists. You should manually remove it first.'
105 "exists. You should manually remove it first." % TEST_HG_REPO_CLONE
110 % TEST_HG_REPO_CLONE)
106 )
111
107
112 repo = MercurialRepository(TEST_HG_REPO)
108 repo = MercurialRepository(TEST_HG_REPO)
113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
109 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE, create=True, src_url=TEST_HG_REPO)
114 src_url=TEST_HG_REPO)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
110 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 # Checking hashes of commits should be enough
111 # Checking hashes of commits should be enough
117 for commit in repo.get_commits():
112 for commit in repo.get_commits():
@@ -121,72 +116,80 b' class TestMercurialRepository(object):'
121 def test_repo_clone_with_update(self):
116 def test_repo_clone_with_update(self):
122 repo = MercurialRepository(TEST_HG_REPO)
117 repo = MercurialRepository(TEST_HG_REPO)
123 repo_clone = MercurialRepository(
118 repo_clone = MercurialRepository(
124 TEST_HG_REPO_CLONE + '_w_update',
119 TEST_HG_REPO_CLONE + "_w_update", create=True, src_url=TEST_HG_REPO, do_workspace_checkout=True
125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
120 )
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
121 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127
122
128 # check if current workdir was updated
123 # check if current workdir was updated
129 assert os.path.isfile(
124 assert os.path.isfile(os.path.join(TEST_HG_REPO_CLONE + "_w_update", "MANIFEST.in"))
130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
131
125
132 def test_repo_clone_without_update(self):
126 def test_repo_clone_without_update(self):
133 repo = MercurialRepository(TEST_HG_REPO)
127 repo = MercurialRepository(TEST_HG_REPO)
134 repo_clone = MercurialRepository(
128 repo_clone = MercurialRepository(
135 TEST_HG_REPO_CLONE + '_wo_update',
129 TEST_HG_REPO_CLONE + "_wo_update", create=True, src_url=TEST_HG_REPO, do_workspace_checkout=False
136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
130 )
137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
131 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
138 assert not os.path.isfile(
132 assert not os.path.isfile(os.path.join(TEST_HG_REPO_CLONE + "_wo_update", "MANIFEST.in"))
139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
140
133
141 def test_commit_ids(self):
134 def test_commit_ids(self):
142 # there are 21 commits at bitbucket now
135 # there are 21 commits at bitbucket now
143 # so we can assume they would be available from now on
136 # so we can assume they would be available from now on
144 subset = {'b986218ba1c9b0d6a259fac9b050b1724ed8e545', '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
137 subset = {
145 '6cba7170863a2411822803fa77a0a264f1310b35', '56349e29c2af3ac913b28bde9a2c6154436e615b',
138 "b986218ba1c9b0d6a259fac9b050b1724ed8e545",
146 '2dda4e345facb0ccff1a191052dd1606dba6781d', '6fff84722075f1607a30f436523403845f84cd9e',
139 "3d8f361e72ab303da48d799ff1ac40d5ac37c67e",
147 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
140 "6cba7170863a2411822803fa77a0a264f1310b35",
148 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', 'be90031137367893f1c406e0a8683010fd115b79',
141 "56349e29c2af3ac913b28bde9a2c6154436e615b",
149 'db8e58be770518cbb2b1cdfa69146e47cd481481', '84478366594b424af694a6c784cb991a16b87c21',
142 "2dda4e345facb0ccff1a191052dd1606dba6781d",
150 '17f8e105dddb9f339600389c6dc7175d395a535c', '20a662e756499bde3095ffc9bc0643d1def2d0eb',
143 "6fff84722075f1607a30f436523403845f84cd9e",
151 '2e319b85e70a707bba0beff866d9f9de032aa4f9', '786facd2c61deb9cf91e9534735124fb8fc11842',
144 "7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7",
152 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', 'aa6a0de05b7612707db567078e130a6cd114a9a7',
145 "3803844fdbd3b711175fc3da9bdacfcd6d29a6fb",
153 'eada5a770da98ab0dd7325e29d00e0714f228d09'
146 "dc5d2c0661b61928834a785d3e64a3f80d3aad9c",
154 }
147 "be90031137367893f1c406e0a8683010fd115b79",
148 "db8e58be770518cbb2b1cdfa69146e47cd481481",
149 "84478366594b424af694a6c784cb991a16b87c21",
150 "17f8e105dddb9f339600389c6dc7175d395a535c",
151 "20a662e756499bde3095ffc9bc0643d1def2d0eb",
152 "2e319b85e70a707bba0beff866d9f9de032aa4f9",
153 "786facd2c61deb9cf91e9534735124fb8fc11842",
154 "94593d2128d38210a2fcd1aabff6dda0d6d9edf8",
155 "aa6a0de05b7612707db567078e130a6cd114a9a7",
156 "eada5a770da98ab0dd7325e29d00e0714f228d09",
157 }
155 assert subset.issubset(set(self.repo.commit_ids))
158 assert subset.issubset(set(self.repo.commit_ids))
156
159
157 # check if we have the proper order of commits
160 # check if we have the proper order of commits
158 org = [
161 org = [
159 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
162 "b986218ba1c9b0d6a259fac9b050b1724ed8e545",
160 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
163 "3d8f361e72ab303da48d799ff1ac40d5ac37c67e",
161 '6cba7170863a2411822803fa77a0a264f1310b35',
164 "6cba7170863a2411822803fa77a0a264f1310b35",
162 '56349e29c2af3ac913b28bde9a2c6154436e615b',
165 "56349e29c2af3ac913b28bde9a2c6154436e615b",
163 '2dda4e345facb0ccff1a191052dd1606dba6781d',
166 "2dda4e345facb0ccff1a191052dd1606dba6781d",
164 '6fff84722075f1607a30f436523403845f84cd9e',
167 "6fff84722075f1607a30f436523403845f84cd9e",
165 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
168 "7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7",
166 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
169 "3803844fdbd3b711175fc3da9bdacfcd6d29a6fb",
167 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
170 "dc5d2c0661b61928834a785d3e64a3f80d3aad9c",
168 'be90031137367893f1c406e0a8683010fd115b79',
171 "be90031137367893f1c406e0a8683010fd115b79",
169 'db8e58be770518cbb2b1cdfa69146e47cd481481',
172 "db8e58be770518cbb2b1cdfa69146e47cd481481",
170 '84478366594b424af694a6c784cb991a16b87c21',
173 "84478366594b424af694a6c784cb991a16b87c21",
171 '17f8e105dddb9f339600389c6dc7175d395a535c',
174 "17f8e105dddb9f339600389c6dc7175d395a535c",
172 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
175 "20a662e756499bde3095ffc9bc0643d1def2d0eb",
173 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
176 "2e319b85e70a707bba0beff866d9f9de032aa4f9",
174 '786facd2c61deb9cf91e9534735124fb8fc11842',
177 "786facd2c61deb9cf91e9534735124fb8fc11842",
175 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
178 "94593d2128d38210a2fcd1aabff6dda0d6d9edf8",
176 'aa6a0de05b7612707db567078e130a6cd114a9a7',
179 "aa6a0de05b7612707db567078e130a6cd114a9a7",
177 'eada5a770da98ab0dd7325e29d00e0714f228d09',
180 "eada5a770da98ab0dd7325e29d00e0714f228d09",
178 '2c1885c735575ca478bf9e17b0029dca68824458',
181 "2c1885c735575ca478bf9e17b0029dca68824458",
179 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
182 "d9bcd465040bf869799b09ad732c04e0eea99fe9",
180 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
183 "469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7",
181 '4fb8326d78e5120da2c7468dcf7098997be385da',
184 "4fb8326d78e5120da2c7468dcf7098997be385da",
182 '62b4a097164940bd66030c4db51687f3ec035eed',
185 "62b4a097164940bd66030c4db51687f3ec035eed",
183 '536c1a19428381cfea92ac44985304f6a8049569',
186 "536c1a19428381cfea92ac44985304f6a8049569",
184 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
187 "965e8ab3c44b070cdaa5bf727ddef0ada980ecc4",
185 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
188 "9bb326a04ae5d98d437dece54be04f830cf1edd9",
186 'f8940bcb890a98c4702319fbe36db75ea309b475',
189 "f8940bcb890a98c4702319fbe36db75ea309b475",
187 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
190 "ff5ab059786ebc7411e559a2cc309dfae3625a3b",
188 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
191 "6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08",
189 'ee87846a61c12153b51543bf860e1026c6d3dcba',
192 "ee87846a61c12153b51543bf860e1026c6d3dcba",
190 ]
193 ]
191 assert org == self.repo.commit_ids[:31]
194 assert org == self.repo.commit_ids[:31]
192
195
@@ -197,8 +200,7 b' class TestMercurialRepository(object):'
197
200
198 def test_slicing(self):
201 def test_slicing(self):
199 # 4 1 5 10 95
202 # 4 1 5 10 95
200 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
203 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5), (10, 20, 10), (5, 100, 95)]:
201 (10, 20, 10), (5, 100, 95)]:
202 indexes = list(self.repo[sfrom:sto])
204 indexes = list(self.repo[sfrom:sto])
203 assert len(indexes) == size
205 assert len(indexes) == size
204 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
206 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
@@ -208,64 +210,64 b' class TestMercurialRepository(object):'
208 # TODO: Need more tests here
210 # TODO: Need more tests here
209
211
210 # active branches
212 # active branches
211 assert 'default' in self.repo.branches
213 assert "default" in self.repo.branches
212 assert 'stable' in self.repo.branches
214 assert "stable" in self.repo.branches
213
215
214 # closed
216 # closed
215 assert 'git' in self.repo._get_branches(closed=True)
217 assert "git" in self.repo._get_branches(closed=True)
216 assert 'web' in self.repo._get_branches(closed=True)
218 assert "web" in self.repo._get_branches(closed=True)
217
219
218 for name, id in self.repo.branches.items():
220 for name, id in self.repo.branches.items():
219 assert isinstance(self.repo.get_commit(id), MercurialCommit)
221 assert isinstance(self.repo.get_commit(id), MercurialCommit)
220
222
221 def test_tip_in_tags(self):
223 def test_tip_in_tags(self):
222 # tip is always a tag
224 # tip is always a tag
223 assert 'tip' in self.repo.tags
225 assert "tip" in self.repo.tags
224
226
225 def test_tip_commit_in_tags(self):
227 def test_tip_commit_in_tags(self):
226 tip = self.repo.get_commit()
228 tip = self.repo.get_commit()
227 assert self.repo.tags['tip'] == tip.raw_id
229 assert self.repo.tags["tip"] == tip.raw_id
228
230
229 def test_initial_commit(self):
231 def test_initial_commit(self):
230 init_commit = self.repo.get_commit(commit_idx=0)
232 init_commit = self.repo.get_commit(commit_idx=0)
231 init_author = init_commit.author
233 init_author = init_commit.author
232
234
233 assert init_commit.message == 'initial import'
235 assert init_commit.message == "initial import"
234 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
236 assert init_author == "Marcin Kuzminski <marcin@python-blog.com>"
235 assert init_author == init_commit.committer
237 assert init_author == init_commit.committer
236 assert sorted(init_commit._file_paths) == sorted([
238 assert sorted(init_commit._file_paths) == sorted(
237 'vcs/__init__.py',
239 [
238 'vcs/backends/BaseRepository.py',
240 "vcs/__init__.py",
239 'vcs/backends/__init__.py',
241 "vcs/backends/BaseRepository.py",
240 ])
242 "vcs/backends/__init__.py",
241 assert sorted(init_commit._dir_paths) == sorted(
243 ]
242 ['', 'vcs', 'vcs/backends'])
244 )
245 assert sorted(init_commit._dir_paths) == sorted(["", "vcs", "vcs/backends"])
243
246
244 assert init_commit._dir_paths + init_commit._file_paths == \
247 assert init_commit._dir_paths + init_commit._file_paths == init_commit._paths
245 init_commit._paths
246
248
247 with pytest.raises(NodeDoesNotExistError):
249 with pytest.raises(NodeDoesNotExistError):
248 init_commit.get_node(path='foobar')
250 init_commit.get_node(path="foobar")
249
251
250 node = init_commit.get_node('vcs/')
252 node = init_commit.get_node("vcs/")
251 assert hasattr(node, 'kind')
253 assert hasattr(node, "kind")
252 assert node.kind == NodeKind.DIR
254 assert node.kind == NodeKind.DIR
253
255
254 node = init_commit.get_node('vcs')
256 node = init_commit.get_node("vcs")
255 assert hasattr(node, 'kind')
257 assert hasattr(node, "kind")
256 assert node.kind == NodeKind.DIR
258 assert node.kind == NodeKind.DIR
257
259
258 node = init_commit.get_node('vcs/__init__.py')
260 node = init_commit.get_node("vcs/__init__.py")
259 assert hasattr(node, 'kind')
261 assert hasattr(node, "kind")
260 assert node.kind == NodeKind.FILE
262 assert node.kind == NodeKind.FILE
261
263
262 def test_not_existing_commit(self):
264 def test_not_existing_commit(self):
263 # rawid
265 # rawid
264 with pytest.raises(RepositoryError):
266 with pytest.raises(RepositoryError):
265 self.repo.get_commit('abcd' * 10)
267 self.repo.get_commit("abcd" * 10)
266 # shortid
268 # shortid
267 with pytest.raises(RepositoryError):
269 with pytest.raises(RepositoryError):
268 self.repo.get_commit('erro' * 4)
270 self.repo.get_commit("erro" * 4)
269 # numeric
271 # numeric
270 with pytest.raises(RepositoryError):
272 with pytest.raises(RepositoryError):
271 self.repo.get_commit(commit_idx=self.repo.count() + 1)
273 self.repo.get_commit(commit_idx=self.repo.count() + 1)
@@ -289,7 +291,7 b' Introduction'
289 TODO: To be written...
291 TODO: To be written...
290
292
291 """
293 """
292 node = commit10.get_node('README.rst')
294 node = commit10.get_node("README.rst")
293 assert node.kind == NodeKind.FILE
295 assert node.kind == NodeKind.FILE
294 assert node.str_content == README
296 assert node.str_content == README
295
297
@@ -308,77 +310,73 b' TODO: To be written...'
308 repo_clone = self.get_clone_repo()
310 repo_clone = self.get_clone_repo()
309 branches = repo_clone.branches
311 branches = repo_clone.branches
310
312
311 repo_clone._update('default')
313 repo_clone._update("default")
312 assert branches['default'] == repo_clone._identify()
314 assert branches["default"] == repo_clone._identify()
313 repo_clone._update('stable')
315 repo_clone._update("stable")
314 assert branches['stable'] == repo_clone._identify()
316 assert branches["stable"] == repo_clone._identify()
315
317
316 def test_local_pull_branch(self):
318 def test_local_pull_branch(self):
317 target_repo = self.get_empty_repo()
319 target_repo = self.get_empty_repo()
318 source_repo = self.get_clone_repo()
320 source_repo = self.get_clone_repo()
319
321
320 default = Reference(
322 default = Reference("branch", "default", source_repo.branches["default"])
321 'branch', 'default', source_repo.branches['default'])
322 target_repo._local_pull(source_repo.path, default)
323 target_repo._local_pull(source_repo.path, default)
323 target_repo = MercurialRepository(target_repo.path)
324 target_repo = MercurialRepository(target_repo.path)
324 assert (target_repo.branches['default'] ==
325 assert target_repo.branches["default"] == source_repo.branches["default"]
325 source_repo.branches['default'])
326
326
327 stable = Reference('branch', 'stable', source_repo.branches['stable'])
327 stable = Reference("branch", "stable", source_repo.branches["stable"])
328 target_repo._local_pull(source_repo.path, stable)
328 target_repo._local_pull(source_repo.path, stable)
329 target_repo = MercurialRepository(target_repo.path)
329 target_repo = MercurialRepository(target_repo.path)
330 assert target_repo.branches['stable'] == source_repo.branches['stable']
330 assert target_repo.branches["stable"] == source_repo.branches["stable"]
331
331
332 def test_local_pull_bookmark(self):
332 def test_local_pull_bookmark(self):
333 target_repo = self.get_empty_repo()
333 target_repo = self.get_empty_repo()
334 source_repo = self.get_clone_repo()
334 source_repo = self.get_clone_repo()
335
335
336 commits = list(source_repo.get_commits(branch_name='default'))
336 commits = list(source_repo.get_commits(branch_name="default"))
337 foo1_id = commits[-5].raw_id
337 foo1_id = commits[-5].raw_id
338 foo1 = Reference('book', 'foo1', foo1_id)
338 foo1 = Reference("book", "foo1", foo1_id)
339 source_repo._update(foo1_id)
339 source_repo._update(foo1_id)
340 source_repo.bookmark('foo1')
340 source_repo.bookmark("foo1")
341
341
342 foo2_id = commits[-3].raw_id
342 foo2_id = commits[-3].raw_id
343 foo2 = Reference('book', 'foo2', foo2_id)
343 foo2 = Reference("book", "foo2", foo2_id)
344 source_repo._update(foo2_id)
344 source_repo._update(foo2_id)
345 source_repo.bookmark('foo2')
345 source_repo.bookmark("foo2")
346
346
347 target_repo._local_pull(source_repo.path, foo1)
347 target_repo._local_pull(source_repo.path, foo1)
348 target_repo = MercurialRepository(target_repo.path)
348 target_repo = MercurialRepository(target_repo.path)
349 assert target_repo.branches['default'] == commits[-5].raw_id
349 assert target_repo.branches["default"] == commits[-5].raw_id
350
350
351 target_repo._local_pull(source_repo.path, foo2)
351 target_repo._local_pull(source_repo.path, foo2)
352 target_repo = MercurialRepository(target_repo.path)
352 target_repo = MercurialRepository(target_repo.path)
353 assert target_repo.branches['default'] == commits[-3].raw_id
353 assert target_repo.branches["default"] == commits[-3].raw_id
354
354
355 def test_local_pull_commit(self):
355 def test_local_pull_commit(self):
356 target_repo = self.get_empty_repo()
356 target_repo = self.get_empty_repo()
357 source_repo = self.get_clone_repo()
357 source_repo = self.get_clone_repo()
358
358
359 commits = list(source_repo.get_commits(branch_name='default'))
359 commits = list(source_repo.get_commits(branch_name="default"))
360 commit_id = commits[-5].raw_id
360 commit_id = commits[-5].raw_id
361 commit = Reference('rev', commit_id, commit_id)
361 commit = Reference("rev", commit_id, commit_id)
362 target_repo._local_pull(source_repo.path, commit)
362 target_repo._local_pull(source_repo.path, commit)
363 target_repo = MercurialRepository(target_repo.path)
363 target_repo = MercurialRepository(target_repo.path)
364 assert target_repo.branches['default'] == commit_id
364 assert target_repo.branches["default"] == commit_id
365
365
366 commit_id = commits[-3].raw_id
366 commit_id = commits[-3].raw_id
367 commit = Reference('rev', commit_id, commit_id)
367 commit = Reference("rev", commit_id, commit_id)
368 target_repo._local_pull(source_repo.path, commit)
368 target_repo._local_pull(source_repo.path, commit)
369 target_repo = MercurialRepository(target_repo.path)
369 target_repo = MercurialRepository(target_repo.path)
370 assert target_repo.branches['default'] == commit_id
370 assert target_repo.branches["default"] == commit_id
371
371
372 def test_local_pull_from_same_repo(self):
372 def test_local_pull_from_same_repo(self):
373 reference = Reference('branch', 'default', None)
373 reference = Reference("branch", "default", None)
374 with pytest.raises(ValueError):
374 with pytest.raises(ValueError):
375 self.repo._local_pull(self.repo.path, reference)
375 self.repo._local_pull(self.repo.path, reference)
376
376
377 def test_validate_pull_reference_raises_on_missing_reference(
377 def test_validate_pull_reference_raises_on_missing_reference(self, vcsbackend_hg):
378 self, vcsbackend_hg):
379 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
378 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
380 reference = Reference(
379 reference = Reference("book", "invalid_reference", "a" * 40)
381 'book', 'invalid_reference', 'a' * 40)
382
380
383 with pytest.raises(CommitDoesNotExistError):
381 with pytest.raises(CommitDoesNotExistError):
384 target_repo._validate_pull_reference(reference)
382 target_repo._validate_pull_reference(reference)
@@ -387,51 +385,48 b' TODO: To be written...'
387 assert set(self.repo._heads()) == set(self.repo.branches.values())
385 assert set(self.repo._heads()) == set(self.repo.branches.values())
388
386
389 def test_ancestor(self):
387 def test_ancestor(self):
390 commits = [
388 commits = [c.raw_id for c in self.repo.get_commits(branch_name="default")]
391 c.raw_id for c in self.repo.get_commits(branch_name='default')]
392 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
389 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
393 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
390 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
394
391
395 def test_local_push(self):
392 def test_local_push(self):
396 target_repo = self.get_empty_repo()
393 target_repo = self.get_empty_repo()
397
394
398 revisions = list(self.repo.get_commits(branch_name='default'))
395 revisions = list(self.repo.get_commits(branch_name="default"))
399 revision = revisions[-5].raw_id
396 revision = revisions[-5].raw_id
400 self.repo._local_push(revision, target_repo.path)
397 self.repo._local_push(revision, target_repo.path)
401
398
402 target_repo = MercurialRepository(target_repo.path)
399 target_repo = MercurialRepository(target_repo.path)
403
400
404 assert target_repo.branches['default'] == revision
401 assert target_repo.branches["default"] == revision
405
402
406 def test_hooks_can_be_enabled_for_local_push(self):
403 def test_hooks_can_be_enabled_for_local_push(self):
407 revision = 'deadbeef'
404 revision = "deadbeef"
408 repo_path = 'test_group/test_repo'
405 repo_path = "test_group/test_repo"
409 with mock.patch.object(self.repo, '_remote') as remote_mock:
406 with mock.patch.object(self.repo, "_remote") as remote_mock:
410 self.repo._local_push(revision, repo_path, enable_hooks=True)
407 self.repo._local_push(revision, repo_path, enable_hooks=True)
411 remote_mock.push.assert_called_once_with(
408 remote_mock.push.assert_called_once_with([revision], repo_path, hooks=True, push_branches=False)
412 [revision], repo_path, hooks=True, push_branches=False)
413
409
414 def test_local_merge(self, vcsbackend_hg):
410 def test_local_merge(self, vcsbackend_hg):
415 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
411 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
416 source_repo = vcsbackend_hg.clone_repo(target_repo)
412 source_repo = vcsbackend_hg.clone_repo(target_repo)
417 vcsbackend_hg.add_file(target_repo, b'README_MERGE1', b'Version 1')
413 vcsbackend_hg.add_file(target_repo, b"README_MERGE1", b"Version 1")
418 target_repo = MercurialRepository(target_repo.path)
414 target_repo = MercurialRepository(target_repo.path)
419 target_rev = target_repo.branches['default']
415 target_rev = target_repo.branches["default"]
420 target_ref = Reference(
416 target_ref = Reference(type="branch", name="default", commit_id=target_rev)
421 type='branch', name='default', commit_id=target_rev)
417 vcsbackend_hg.add_file(source_repo, b"README_MERGE2", b"Version 2")
422 vcsbackend_hg.add_file(source_repo, b'README_MERGE2', b'Version 2')
423 source_repo = MercurialRepository(source_repo.path)
418 source_repo = MercurialRepository(source_repo.path)
424 source_rev = source_repo.branches['default']
419 source_rev = source_repo.branches["default"]
425 source_ref = Reference(
420 source_ref = Reference(type="branch", name="default", commit_id=source_rev)
426 type='branch', name='default', commit_id=source_rev)
427
421
428 target_repo._local_pull(source_repo.path, source_ref)
422 target_repo._local_pull(source_repo.path, source_ref)
429
423
430 merge_message = 'Merge message\n\nDescription:...'
424 merge_message = "Merge message\n\nDescription:..."
431 user_name = 'Albert Einstein'
425 user_name = "Albert Einstein"
432 user_email = 'albert@einstein.com'
426 user_email = "albert@einstein.com"
433 merge_commit_id, needs_push = target_repo._local_merge(
427 merge_commit_id, needs_push = target_repo._local_merge(
434 target_ref, merge_message, user_name, user_email, source_ref)
428 target_ref, merge_message, user_name, user_email, source_ref
429 )
435 assert needs_push
430 assert needs_push
436
431
437 target_repo = MercurialRepository(target_repo.path)
432 target_repo = MercurialRepository(target_repo.path)
@@ -439,30 +434,28 b' TODO: To be written...'
439 assert target_repo.commit_ids[-2] == source_rev
434 assert target_repo.commit_ids[-2] == source_rev
440 last_commit = target_repo.get_commit(merge_commit_id)
435 last_commit = target_repo.get_commit(merge_commit_id)
441 assert last_commit.message.strip() == merge_message
436 assert last_commit.message.strip() == merge_message
442 assert last_commit.author == '%s <%s>' % (user_name, user_email)
437 assert last_commit.author == "%s <%s>" % (user_name, user_email)
443
438
444 assert not os.path.exists(
439 assert not os.path.exists(os.path.join(target_repo.path, ".hg", "merge", "state"))
445 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
446
440
447 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
441 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
448 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
442 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
449 source_repo = vcsbackend_hg.clone_repo(target_repo)
443 source_repo = vcsbackend_hg.clone_repo(target_repo)
450 target_rev = target_repo.branches['default']
444 target_rev = target_repo.branches["default"]
451 target_ref = Reference(
445 target_ref = Reference(type="branch", name="default", commit_id=target_rev)
452 type='branch', name='default', commit_id=target_rev)
446 vcsbackend_hg.add_file(source_repo, "README_MERGE2", "Version 2")
453 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
454 source_repo = MercurialRepository(source_repo.path)
447 source_repo = MercurialRepository(source_repo.path)
455 source_rev = source_repo.branches['default']
448 source_rev = source_repo.branches["default"]
456 source_ref = Reference(
449 source_ref = Reference(type="branch", name="default", commit_id=source_rev)
457 type='branch', name='default', commit_id=source_rev)
458
450
459 target_repo._local_pull(source_repo.path, source_ref)
451 target_repo._local_pull(source_repo.path, source_ref)
460
452
461 merge_message = 'Merge message\n\nDescription:...'
453 merge_message = "Merge message\n\nDescription:..."
462 user_name = 'Albert Einstein'
454 user_name = "Albert Einstein"
463 user_email = 'albert@einstein.com'
455 user_email = "albert@einstein.com"
464 merge_commit_id, needs_push = target_repo._local_merge(
456 merge_commit_id, needs_push = target_repo._local_merge(
465 target_ref, merge_message, user_name, user_email, source_ref)
457 target_ref, merge_message, user_name, user_email, source_ref
458 )
466 assert merge_commit_id == source_rev
459 assert merge_commit_id == source_rev
467 assert needs_push
460 assert needs_push
468
461
@@ -470,70 +463,62 b' TODO: To be written...'
470 assert target_repo.commit_ids[-2] == target_rev
463 assert target_repo.commit_ids[-2] == target_rev
471 assert target_repo.commit_ids[-1] == source_rev
464 assert target_repo.commit_ids[-1] == source_rev
472
465
473 assert not os.path.exists(
466 assert not os.path.exists(os.path.join(target_repo.path, ".hg", "merge", "state"))
474 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
475
467
476 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
468 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
477 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
469 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
478 target_rev = target_repo.branches['default']
470 target_rev = target_repo.branches["default"]
479 target_ref = Reference(
471 target_ref = Reference(type="branch", name="default", commit_id=target_rev)
480 type='branch', name='default', commit_id=target_rev)
481
472
482 merge_message = 'Merge message\n\nDescription:...'
473 merge_message = "Merge message\n\nDescription:..."
483 user_name = 'Albert Einstein'
474 user_name = "Albert Einstein"
484 user_email = 'albert@einstein.com'
475 user_email = "albert@einstein.com"
485 merge_commit_id, needs_push = target_repo._local_merge(
476 merge_commit_id, needs_push = target_repo._local_merge(
486 target_ref, merge_message, user_name, user_email, target_ref)
477 target_ref, merge_message, user_name, user_email, target_ref
478 )
487 assert merge_commit_id == target_rev
479 assert merge_commit_id == target_rev
488 assert not needs_push
480 assert not needs_push
489
481
490 target_repo = MercurialRepository(target_repo.path)
482 target_repo = MercurialRepository(target_repo.path)
491 assert target_repo.commit_ids[-1] == target_rev
483 assert target_repo.commit_ids[-1] == target_rev
492
484
493 assert not os.path.exists(
485 assert not os.path.exists(os.path.join(target_repo.path, ".hg", "merge", "state"))
494 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
495
486
496 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
487 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
497 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
488 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
498 source_repo = vcsbackend_hg.clone_repo(target_repo)
489 source_repo = vcsbackend_hg.clone_repo(target_repo)
499 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
490 vcsbackend_hg.add_file(target_repo, "README_MERGE", "Version 1")
500 target_repo = MercurialRepository(target_repo.path)
491 target_repo = MercurialRepository(target_repo.path)
501 target_rev = target_repo.branches['default']
492 target_rev = target_repo.branches["default"]
502 target_ref = Reference(
493 target_ref = Reference(type="branch", name="default", commit_id=target_rev)
503 type='branch', name='default', commit_id=target_rev)
494 vcsbackend_hg.add_file(source_repo, "README_MERGE", "Version 2")
504 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
505 source_repo = MercurialRepository(source_repo.path)
495 source_repo = MercurialRepository(source_repo.path)
506 source_rev = source_repo.branches['default']
496 source_rev = source_repo.branches["default"]
507 source_ref = Reference(
497 source_ref = Reference(type="branch", name="default", commit_id=source_rev)
508 type='branch', name='default', commit_id=source_rev)
509
498
510 target_repo._local_pull(source_repo.path, source_ref)
499 target_repo._local_pull(source_repo.path, source_ref)
511 with pytest.raises(RepositoryError):
500 with pytest.raises(RepositoryError):
512 target_repo._local_merge(
501 target_repo._local_merge(target_ref, "merge_message", "user name", "user@name.com", source_ref)
513 target_ref, 'merge_message', 'user name', 'user@name.com',
514 source_ref)
515
502
516 # Check we are not left in an intermediate merge state
503 # Check we are not left in an intermediate merge state
517 assert not os.path.exists(
504 assert not os.path.exists(os.path.join(target_repo.path, ".hg", "merge", "state"))
518 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
519
505
520 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
506 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
521 commits = [
507 commits = [
522 {'message': 'a'},
508 {"message": "a"},
523 {'message': 'b', 'branch': 'b'},
509 {"message": "b", "branch": "b"},
524 ]
510 ]
525 repo = backend_hg.create_repo(commits)
511 repo = backend_hg.create_repo(commits)
526 commit_ids = backend_hg.commit_ids
512 commit_ids = backend_hg.commit_ids
527 target_ref = Reference(
513 target_ref = Reference(type="branch", name="default", commit_id=commit_ids["a"])
528 type='branch', name='default', commit_id=commit_ids['a'])
514 source_ref = Reference(type="branch", name="b", commit_id=commit_ids["b"])
529 source_ref = Reference(
515 merge_message = "Merge message\n\nDescription:..."
530 type='branch', name='b', commit_id=commit_ids['b'])
516 user_name = "Albert Einstein"
531 merge_message = 'Merge message\n\nDescription:...'
517 user_email = "albert@einstein.com"
532 user_name = 'Albert Einstein'
533 user_email = 'albert@einstein.com'
534 vcs_repo = repo.scm_instance()
518 vcs_repo = repo.scm_instance()
535 merge_commit_id, needs_push = vcs_repo._local_merge(
519 merge_commit_id, needs_push = vcs_repo._local_merge(
536 target_ref, merge_message, user_name, user_email, source_ref)
520 target_ref, merge_message, user_name, user_email, source_ref
521 )
537 assert merge_commit_id != source_ref.commit_id
522 assert merge_commit_id != source_ref.commit_id
538 assert needs_push is True
523 assert needs_push is True
539 commit = vcs_repo.get_commit(merge_commit_id)
524 commit = vcs_repo.get_commit(merge_commit_id)
@@ -541,63 +526,62 b' TODO: To be written...'
541 assert commit.message == merge_message
526 assert commit.message == merge_message
542
527
543 def test_maybe_prepare_merge_workspace(self):
528 def test_maybe_prepare_merge_workspace(self):
544 workspace = self.repo._maybe_prepare_merge_workspace(
529 workspace = self.repo._maybe_prepare_merge_workspace(1, "pr2", "unused", "unused2")
545 1, 'pr2', 'unused', 'unused2')
546
530
547 assert os.path.isdir(workspace)
531 assert os.path.isdir(workspace)
548 workspace_repo = MercurialRepository(workspace)
532 workspace_repo = MercurialRepository(workspace)
549 assert workspace_repo.branches == self.repo.branches
533 assert workspace_repo.branches == self.repo.branches
550
534
551 # Calling it a second time should also succeed
535 # Calling it a second time should also succeed
552 workspace = self.repo._maybe_prepare_merge_workspace(
536 workspace = self.repo._maybe_prepare_merge_workspace(1, "pr2", "unused", "unused2")
553 1, 'pr2', 'unused', 'unused2')
554 assert os.path.isdir(workspace)
537 assert os.path.isdir(workspace)
555
538
556 def test_cleanup_merge_workspace(self):
539 def test_cleanup_merge_workspace(self):
557 workspace = self.repo._maybe_prepare_merge_workspace(
540 workspace = self.repo._maybe_prepare_merge_workspace(1, "pr3", "unused", "unused2")
558 1, 'pr3', 'unused', 'unused2')
559
541
560 assert os.path.isdir(workspace)
542 assert os.path.isdir(workspace)
561 self.repo.cleanup_merge_workspace(1, 'pr3')
543 self.repo.cleanup_merge_workspace(1, "pr3")
562
544
563 assert not os.path.exists(workspace)
545 assert not os.path.exists(workspace)
564
546
565 def test_cleanup_merge_workspace_invalid_workspace_id(self):
547 def test_cleanup_merge_workspace_invalid_workspace_id(self):
566 # No assert: because in case of an inexistent workspace this function
548 # No assert: because in case of an inexistent workspace this function
567 # should still succeed.
549 # should still succeed.
568 self.repo.cleanup_merge_workspace(1, 'pr4')
550 self.repo.cleanup_merge_workspace(1, "pr4")
569
551
570 def test_merge_target_is_bookmark(self, vcsbackend_hg):
552 def test_merge_target_is_bookmark(self, vcsbackend_hg):
571 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
553 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
572 source_repo = vcsbackend_hg.clone_repo(target_repo)
554 source_repo = vcsbackend_hg.clone_repo(target_repo)
573 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
555 vcsbackend_hg.add_file(target_repo, "README_MERGE1", "Version 1")
574 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
556 vcsbackend_hg.add_file(source_repo, "README_MERGE2", "Version 2")
575 imc = source_repo.in_memory_commit
557 imc = source_repo.in_memory_commit
576 imc.add(FileNode(b'file_x', content=source_repo.name))
558 imc.add(FileNode(b"file_x", content=source_repo.name))
577 imc.commit(
559 imc.commit(message="Automatic commit from repo merge test", author="Automatic <automatic@rhodecode.com>")
578 message='Automatic commit from repo merge test',
579 author='Automatic <automatic@rhodecode.com>')
580 target_commit = target_repo.get_commit()
560 target_commit = target_repo.get_commit()
581 source_commit = source_repo.get_commit()
561 source_commit = source_repo.get_commit()
582 default_branch = target_repo.DEFAULT_BRANCH_NAME
562 default_branch = target_repo.DEFAULT_BRANCH_NAME
583 bookmark_name = 'bookmark'
563 bookmark_name = "bookmark"
584 target_repo._update(default_branch)
564 target_repo._update(default_branch)
585 target_repo.bookmark(bookmark_name)
565 target_repo.bookmark(bookmark_name)
586 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
566 target_ref = Reference("book", bookmark_name, target_commit.raw_id)
587 source_ref = Reference('branch', default_branch, source_commit.raw_id)
567 source_ref = Reference("branch", default_branch, source_commit.raw_id)
588 workspace_id = 'test-merge'
568 workspace_id = "test-merge"
589 repo_id = repo_id_generator(target_repo.path)
569 repo_id = repo_id_generator(target_repo.path)
590 merge_response = target_repo.merge(
570 merge_response = target_repo.merge(
591 repo_id, workspace_id, target_ref, source_repo, source_ref,
571 repo_id,
592 'test user', 'test@rhodecode.com', 'merge message 1',
572 workspace_id,
593 dry_run=False)
573 target_ref,
594 expected_merge_response = MergeResponse(
574 source_repo,
595 True, True, merge_response.merge_ref,
575 source_ref,
596 MergeFailureReason.NONE)
576 "test user",
577 "test@rhodecode.com",
578 "merge message 1",
579 dry_run=False,
580 )
581 expected_merge_response = MergeResponse(True, True, merge_response.merge_ref, MergeFailureReason.NONE)
597 assert merge_response == expected_merge_response
582 assert merge_response == expected_merge_response
598
583
599 target_repo = backends.get_backend(vcsbackend_hg.alias)(
584 target_repo = backends.get_backend(vcsbackend_hg.alias)(target_repo.path)
600 target_repo.path)
601 target_commits = list(target_repo.get_commits())
585 target_commits = list(target_repo.get_commits())
602 commit_ids = [c.raw_id for c in target_commits[:-1]]
586 commit_ids = [c.raw_id for c in target_commits[:-1]]
603 assert source_ref.commit_id in commit_ids
587 assert source_ref.commit_id in commit_ids
@@ -605,43 +589,43 b' TODO: To be written...'
605
589
606 merge_commit = target_commits[-1]
590 merge_commit = target_commits[-1]
607 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
591 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
608 assert merge_commit.message.strip() == 'merge message 1'
592 assert merge_commit.message.strip() == "merge message 1"
609 assert merge_commit.author == 'test user <test@rhodecode.com>'
593 assert merge_commit.author == "test user <test@rhodecode.com>"
610
594
611 # Check the bookmark was updated in the target repo
595 # Check the bookmark was updated in the target repo
612 assert (
596 assert target_repo.bookmarks[bookmark_name] == merge_response.merge_ref.commit_id
613 target_repo.bookmarks[bookmark_name] ==
614 merge_response.merge_ref.commit_id)
615
597
616 def test_merge_source_is_bookmark(self, vcsbackend_hg):
598 def test_merge_source_is_bookmark(self, vcsbackend_hg):
617 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
599 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
618 source_repo = vcsbackend_hg.clone_repo(target_repo)
600 source_repo = vcsbackend_hg.clone_repo(target_repo)
619 imc = source_repo.in_memory_commit
601 imc = source_repo.in_memory_commit
620 imc.add(FileNode(b'file_x', content=source_repo.name))
602 imc.add(FileNode(b"file_x", content=source_repo.name))
621 imc.commit(
603 imc.commit(message="Automatic commit from repo merge test", author="Automatic <automatic@rhodecode.com>")
622 message='Automatic commit from repo merge test',
623 author='Automatic <automatic@rhodecode.com>')
624 target_commit = target_repo.get_commit()
604 target_commit = target_repo.get_commit()
625 source_commit = source_repo.get_commit()
605 source_commit = source_repo.get_commit()
626 default_branch = target_repo.DEFAULT_BRANCH_NAME
606 default_branch = target_repo.DEFAULT_BRANCH_NAME
627 bookmark_name = 'bookmark'
607 bookmark_name = "bookmark"
628 target_ref = Reference('branch', default_branch, target_commit.raw_id)
608 target_ref = Reference("branch", default_branch, target_commit.raw_id)
629 source_repo._update(default_branch)
609 source_repo._update(default_branch)
630 source_repo.bookmark(bookmark_name)
610 source_repo.bookmark(bookmark_name)
631 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
611 source_ref = Reference("book", bookmark_name, source_commit.raw_id)
632 workspace_id = 'test-merge'
612 workspace_id = "test-merge"
633 repo_id = repo_id_generator(target_repo.path)
613 repo_id = repo_id_generator(target_repo.path)
634 merge_response = target_repo.merge(
614 merge_response = target_repo.merge(
635 repo_id, workspace_id, target_ref, source_repo, source_ref,
615 repo_id,
636 'test user', 'test@rhodecode.com', 'merge message 1',
616 workspace_id,
637 dry_run=False)
617 target_ref,
638 expected_merge_response = MergeResponse(
618 source_repo,
639 True, True, merge_response.merge_ref,
619 source_ref,
640 MergeFailureReason.NONE)
620 "test user",
621 "test@rhodecode.com",
622 "merge message 1",
623 dry_run=False,
624 )
625 expected_merge_response = MergeResponse(True, True, merge_response.merge_ref, MergeFailureReason.NONE)
641 assert merge_response == expected_merge_response
626 assert merge_response == expected_merge_response
642
627
643 target_repo = backends.get_backend(vcsbackend_hg.alias)(
628 target_repo = backends.get_backend(vcsbackend_hg.alias)(target_repo.path)
644 target_repo.path)
645 target_commits = list(target_repo.get_commits())
629 target_commits = list(target_repo.get_commits())
646 commit_ids = [c.raw_id for c in target_commits]
630 commit_ids = [c.raw_id for c in target_commits]
647 assert source_ref.commit_id == commit_ids[-1]
631 assert source_ref.commit_id == commit_ids[-1]
@@ -650,78 +634,89 b' TODO: To be written...'
650 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
634 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
651 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
635 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
652 source_repo = vcsbackend_hg.clone_repo(target_repo)
636 source_repo = vcsbackend_hg.clone_repo(target_repo)
653 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
637 vcsbackend_hg.add_file(target_repo, "README_MERGE1", "Version 1")
654 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
638 vcsbackend_hg.add_file(source_repo, "README_MERGE2", "Version 2")
655
639
656 # add an extra head to the target repo
640 # add an extra head to the target repo
657 imc = target_repo.in_memory_commit
641 imc = target_repo.in_memory_commit
658 imc.add(FileNode(b'file_x', content='foo'))
642 imc.add(FileNode(b"file_x", content="foo"))
659 commits = list(target_repo.get_commits())
643 commits = list(target_repo.get_commits())
660 imc.commit(
644 imc.commit(
661 message='Automatic commit from repo merge test',
645 message="Automatic commit from repo merge test",
662 author='Automatic <automatic@rhodecode.com>', parents=commits[0:1])
646 author="Automatic <automatic@rhodecode.com>",
647 parents=commits[0:1],
648 )
663
649
664 target_commit = target_repo.get_commit()
650 target_commit = target_repo.get_commit()
665 source_commit = source_repo.get_commit()
651 source_commit = source_repo.get_commit()
666 default_branch = target_repo.DEFAULT_BRANCH_NAME
652 default_branch = target_repo.DEFAULT_BRANCH_NAME
667 target_repo._update(default_branch)
653 target_repo._update(default_branch)
668
654
669 target_ref = Reference('branch', default_branch, target_commit.raw_id)
655 target_ref = Reference("branch", default_branch, target_commit.raw_id)
670 source_ref = Reference('branch', default_branch, source_commit.raw_id)
656 source_ref = Reference("branch", default_branch, source_commit.raw_id)
671 workspace_id = 'test-merge'
657 workspace_id = "test-merge"
672
658
673 assert len(target_repo._heads(branch='default')) == 2
659 assert len(target_repo._heads(branch="default")) == 2
674 heads = target_repo._heads(branch='default')
660 heads = target_repo._heads(branch="default")
675 expected_merge_response = MergeResponse(
661 expected_merge_response = MergeResponse(
676 False, False, None,
662 False, False, None, MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, metadata={"heads": heads}
677 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
663 )
678 metadata={'heads': heads})
679 repo_id = repo_id_generator(target_repo.path)
664 repo_id = repo_id_generator(target_repo.path)
680 merge_response = target_repo.merge(
665 merge_response = target_repo.merge(
681 repo_id, workspace_id, target_ref, source_repo, source_ref,
666 repo_id,
682 'test user', 'test@rhodecode.com', 'merge message 1',
667 workspace_id,
683 dry_run=False)
668 target_ref,
669 source_repo,
670 source_ref,
671 "test user",
672 "test@rhodecode.com",
673 "merge message 1",
674 dry_run=False,
675 )
684 assert merge_response == expected_merge_response
676 assert merge_response == expected_merge_response
685
677
686 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
678 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
687 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
679 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
688 source_repo = vcsbackend_hg.clone_repo(target_repo)
680 source_repo = vcsbackend_hg.clone_repo(target_repo)
689 vcsbackend_hg.add_file(target_repo, b'README_MERGE1', b'Version 1')
681 vcsbackend_hg.add_file(target_repo, b"README_MERGE1", b"Version 1")
690 vcsbackend_hg.add_file(source_repo, b'README_MERGE2', b'Version 2')
682 vcsbackend_hg.add_file(source_repo, b"README_MERGE2", b"Version 2")
691
683
692 imc = source_repo.in_memory_commit
684 imc = source_repo.in_memory_commit
693 imc.add(FileNode(b'file_x', content=safe_bytes(source_repo.name)))
685 imc.add(FileNode(b"file_x", content=safe_bytes(source_repo.name)))
694 imc.commit(
686 imc.commit(message="Automatic commit from repo merge test", author="Automatic <automatic@rhodecode.com>")
695 message='Automatic commit from repo merge test',
696 author='Automatic <automatic@rhodecode.com>')
697
687
698 target_commit = target_repo.get_commit()
688 target_commit = target_repo.get_commit()
699 source_commit = source_repo.get_commit()
689 source_commit = source_repo.get_commit()
700
690
701 vcsbackend_hg.add_file(source_repo, b'LICENSE', b'LICENSE Info')
691 vcsbackend_hg.add_file(source_repo, b"LICENSE", b"LICENSE Info")
702
692
703 default_branch = target_repo.DEFAULT_BRANCH_NAME
693 default_branch = target_repo.DEFAULT_BRANCH_NAME
704 bookmark_name = 'bookmark'
694 bookmark_name = "bookmark"
705 source_repo._update(default_branch)
695 source_repo._update(default_branch)
706 source_repo.bookmark(bookmark_name)
696 source_repo.bookmark(bookmark_name)
707
697
708 target_ref = Reference('branch', default_branch, target_commit.raw_id)
698 target_ref = Reference("branch", default_branch, target_commit.raw_id)
709 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
699 source_ref = Reference("book", bookmark_name, source_commit.raw_id)
710 repo_id = repo_id_generator(target_repo.path)
700 repo_id = repo_id_generator(target_repo.path)
711 workspace_id = 'test-merge'
701 workspace_id = "test-merge"
712
702
713 merge_response = target_repo.merge(
703 merge_response = target_repo.merge(
714 repo_id, workspace_id, target_ref, source_repo, source_ref,
704 repo_id,
715 'test user', 'test@rhodecode.com', 'merge message 1',
705 workspace_id,
716 dry_run=False, use_rebase=True)
706 target_ref,
707 source_repo,
708 source_ref,
709 "test user",
710 "test@rhodecode.com",
711 "merge message 1",
712 dry_run=False,
713 use_rebase=True,
714 )
717
715
718 expected_merge_response = MergeResponse(
716 expected_merge_response = MergeResponse(True, True, merge_response.merge_ref, MergeFailureReason.NONE)
719 True, True, merge_response.merge_ref,
720 MergeFailureReason.NONE)
721 assert merge_response == expected_merge_response
717 assert merge_response == expected_merge_response
722
718
723 target_repo = backends.get_backend(vcsbackend_hg.alias)(
719 target_repo = backends.get_backend(vcsbackend_hg.alias)(target_repo.path)
724 target_repo.path)
725 last_commit = target_repo.get_commit()
720 last_commit = target_repo.get_commit()
726 assert last_commit.message == source_commit.message
721 assert last_commit.message == source_commit.message
727 assert last_commit.author == source_commit.author
722 assert last_commit.author == source_commit.author
@@ -736,27 +731,28 b' TODO: To be written...'
736 class TestGetShadowInstance(object):
731 class TestGetShadowInstance(object):
737
732
738 @pytest.fixture()
733 @pytest.fixture()
739 def repo(self, vcsbackend_hg, monkeypatch):
734 def repo(self, vcsbackend_hg):
740 repo = vcsbackend_hg.repo
735 _hg_repo = vcsbackend_hg.repo
741 monkeypatch.setattr(repo, 'config', mock.Mock())
736 connection_mock = mock.Mock(unsafe=True, name="connection.Hg")
742 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
737 mock.patch("rhodecode.lib.vcs.connection.Hg", connection_mock)
743 return repo
738 return _hg_repo
744
739
745 def test_passes_config(self, repo):
740 def test_getting_shadow_instance_copies_config(self, repo):
746 shadow = repo.get_shadow_instance(repo.path)
741 shadow = repo.get_shadow_instance(repo.path)
747 assert shadow.config == repo.config.copy()
742 assert shadow.config.serialize() == repo.config.serialize()
748
743
749 def test_disables_hooks(self, repo):
744 def test_disables_hooks_section(self, repo):
745 repo.config.set('hooks', 'foo', 'val')
750 shadow = repo.get_shadow_instance(repo.path)
746 shadow = repo.get_shadow_instance(repo.path)
751 shadow.config.clear_section.assert_called_once_with('hooks')
747 assert not shadow.config.items('hooks')
752
748
753 def test_allows_to_keep_hooks(self, repo):
749 def test_allows_to_keep_hooks(self, repo):
750 repo.config.set('hooks', 'foo', 'val')
754 shadow = repo.get_shadow_instance(repo.path, enable_hooks=True)
751 shadow = repo.get_shadow_instance(repo.path, enable_hooks=True)
755 assert not shadow.config.clear_section.called
752 assert shadow.config.items('hooks')
756
753
757
754
758 class TestMercurialCommit(object):
755 class TestMercurialCommit(object):
759
760 def _test_equality(self, commit):
756 def _test_equality(self, commit):
761 idx = commit.idx
757 idx = commit.idx
762 assert commit == self.repo.get_commit(commit_idx=idx)
758 assert commit == self.repo.get_commit(commit_idx=idx)
@@ -768,15 +764,15 b' class TestMercurialCommit(object):'
768 self._test_equality(commit)
764 self._test_equality(commit)
769
765
770 def test_default_commit(self):
766 def test_default_commit(self):
771 tip = self.repo.get_commit('tip')
767 tip = self.repo.get_commit("tip")
772 assert tip == self.repo.get_commit()
768 assert tip == self.repo.get_commit()
773 assert tip == self.repo.get_commit(commit_id=None)
769 assert tip == self.repo.get_commit(commit_id=None)
774 assert tip == self.repo.get_commit(commit_idx=None)
770 assert tip == self.repo.get_commit(commit_idx=None)
775 assert tip == list(self.repo[-1:])[0]
771 assert tip == list(self.repo[-1:])[0]
776
772
777 def test_root_node(self):
773 def test_root_node(self):
778 tip = self.repo.get_commit('tip')
774 tip = self.repo.get_commit("tip")
779 assert tip.root is tip.get_node('')
775 assert tip.root is tip.get_node("")
780
776
781 def test_lazy_fetch(self):
777 def test_lazy_fetch(self):
782 """
778 """
@@ -792,44 +788,43 b' class TestMercurialCommit(object):'
792 # accessing root.nodes updates commit.nodes
788 # accessing root.nodes updates commit.nodes
793 assert len(commit.nodes) == 9
789 assert len(commit.nodes) == 9
794
790
795 docs = root.get_node('docs')
791 docs = root.get_node("docs")
796 # we haven't yet accessed anything new as docs dir was already cached
792 # we haven't yet accessed anything new as docs dir was already cached
797 assert len(commit.nodes) == 9
793 assert len(commit.nodes) == 9
798 assert len(docs.nodes) == 8
794 assert len(docs.nodes) == 8
799 # accessing docs.nodes updates commit.nodes
795 # accessing docs.nodes updates commit.nodes
800 assert len(commit.nodes) == 17
796 assert len(commit.nodes) == 17
801
797
802 assert docs is commit.get_node('docs')
798 assert docs is commit.get_node("docs")
803 assert docs is root.nodes[0]
799 assert docs is root.nodes[0]
804 assert docs is root.dirs[0]
800 assert docs is root.dirs[0]
805 assert docs is commit.get_node('docs')
801 assert docs is commit.get_node("docs")
806
802
807 def test_nodes_with_commit(self):
803 def test_nodes_with_commit(self):
808 commit = self.repo.get_commit(commit_idx=45)
804 commit = self.repo.get_commit(commit_idx=45)
809 root = commit.root
805 root = commit.root
810 docs = root.get_node('docs')
806 docs = root.get_node("docs")
811 assert docs is commit.get_node('docs')
807 assert docs is commit.get_node("docs")
812 api = docs.get_node('api')
808 api = docs.get_node("api")
813 assert api is commit.get_node('docs/api')
809 assert api is commit.get_node("docs/api")
814 index = api.get_node('index.rst')
810 index = api.get_node("index.rst")
815 assert index is commit.get_node('docs/api/index.rst')
811 assert index is commit.get_node("docs/api/index.rst")
816 assert index is commit.get_node(
812 assert index is commit.get_node("docs").get_node("api").get_node("index.rst")
817 'docs').get_node('api').get_node('index.rst')
818
813
819 def test_branch_and_tags(self):
814 def test_branch_and_tags(self):
820 commit0 = self.repo.get_commit(commit_idx=0)
815 commit0 = self.repo.get_commit(commit_idx=0)
821 assert commit0.branch == 'default'
816 assert commit0.branch == "default"
822 assert commit0.tags == []
817 assert commit0.tags == []
823
818
824 commit10 = self.repo.get_commit(commit_idx=10)
819 commit10 = self.repo.get_commit(commit_idx=10)
825 assert commit10.branch == 'default'
820 assert commit10.branch == "default"
826 assert commit10.tags == []
821 assert commit10.tags == []
827
822
828 commit44 = self.repo.get_commit(commit_idx=44)
823 commit44 = self.repo.get_commit(commit_idx=44)
829 assert commit44.branch == 'web'
824 assert commit44.branch == "web"
830
825
831 tip = self.repo.get_commit('tip')
826 tip = self.repo.get_commit("tip")
832 assert 'tip' in tip.tags
827 assert "tip" in tip.tags
833
828
834 def test_bookmarks(self):
829 def test_bookmarks(self):
835 commit0 = self.repo.get_commit(commit_idx=0)
830 commit0 = self.repo.get_commit(commit_idx=0)
@@ -842,46 +837,84 b' class TestMercurialCommit(object):'
842
837
843 def test_file_size(self):
838 def test_file_size(self):
844 to_check = (
839 to_check = (
845 (10, 'setup.py', 1068),
840 (10, "setup.py", 1068),
846 (20, 'setup.py', 1106),
841 (20, "setup.py", 1106),
847 (60, 'setup.py', 1074),
842 (60, "setup.py", 1074),
848
843 (10, "vcs/backends/base.py", 2921),
849 (10, 'vcs/backends/base.py', 2921),
844 (20, "vcs/backends/base.py", 3936),
850 (20, 'vcs/backends/base.py', 3936),
845 (60, "vcs/backends/base.py", 6189),
851 (60, 'vcs/backends/base.py', 6189),
852 )
846 )
853 for idx, path, size in to_check:
847 for idx, path, size in to_check:
854 self._test_file_size(idx, path, size)
848 self._test_file_size(idx, path, size)
855
849
856 def test_file_history_from_commits(self):
850 def test_file_history_from_commits(self):
857 node = self.repo[10].get_node('setup.py')
851 node = self.repo[10].get_node("setup.py")
858 commit_ids = [commit.raw_id for commit in node.history]
852 commit_ids = [commit.raw_id for commit in node.history]
859 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
853 assert ["3803844fdbd3b711175fc3da9bdacfcd6d29a6fb"] == commit_ids
860
854
861 node = self.repo[20].get_node('setup.py')
855 node = self.repo[20].get_node("setup.py")
862 node_ids = [commit.raw_id for commit in node.history]
856 node_ids = [commit.raw_id for commit in node.history]
863 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
857 assert ["eada5a770da98ab0dd7325e29d00e0714f228d09", "3803844fdbd3b711175fc3da9bdacfcd6d29a6fb"] == node_ids
864 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
865
858
866 # special case we check history from commit that has this particular
859 # special case we check history from commit that has this particular
867 # file changed this means we check if it's included as well
860 # file changed this means we check if it's included as well
868 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
861 node = self.repo.get_commit("eada5a770da98ab0dd7325e29d00e0714f228d09").get_node("setup.py")
869 .get_node('setup.py')
870 node_ids = [commit.raw_id for commit in node.history]
862 node_ids = [commit.raw_id for commit in node.history]
871 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
863 assert ["eada5a770da98ab0dd7325e29d00e0714f228d09", "3803844fdbd3b711175fc3da9bdacfcd6d29a6fb"] == node_ids
872 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
873
864
874 def test_file_history(self):
865 def test_file_history(self):
875 # we can only check if those commits are present in the history
866 # we can only check if those commits are present in the history
876 # as we cannot update this test every time file is changed
867 # as we cannot update this test every time file is changed
877 files = {
868 files = {
878 'setup.py': [7, 18, 45, 46, 47, 69, 77],
869 "setup.py": [7, 18, 45, 46, 47, 69, 77],
879 'vcs/nodes.py': [
870 "vcs/nodes.py": [7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
880 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
871 "vcs/backends/hg.py": [
881 'vcs/backends/hg.py': [
872 4,
882 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
873 5,
883 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
874 6,
884 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
875 11,
876 12,
877 13,
878 14,
879 15,
880 16,
881 21,
882 22,
883 23,
884 26,
885 27,
886 28,
887 30,
888 31,
889 33,
890 35,
891 36,
892 37,
893 38,
894 39,
895 40,
896 41,
897 44,
898 45,
899 47,
900 48,
901 49,
902 53,
903 54,
904 55,
905 58,
906 60,
907 61,
908 67,
909 68,
910 69,
911 70,
912 73,
913 77,
914 78,
915 79,
916 82,
917 ],
885 }
918 }
886 for path, indexes in files.items():
919 for path, indexes in files.items():
887 tip = self.repo.get_commit(commit_idx=indexes[-1])
920 tip = self.repo.get_commit(commit_idx=indexes[-1])
@@ -889,72 +922,105 b' class TestMercurialCommit(object):'
889 node_indexes = [commit.idx for commit in node.history]
922 node_indexes = [commit.idx for commit in node.history]
890 assert set(indexes).issubset(set(node_indexes)), (
923 assert set(indexes).issubset(set(node_indexes)), (
891 "We assumed that %s is subset of commits for which file %s "
924 "We assumed that %s is subset of commits for which file %s "
892 "has been changed, and history of that node returned: %s"
925 "has been changed, and history of that node returned: %s" % (indexes, path, node_indexes)
893 % (indexes, path, node_indexes))
926 )
894
927
895 def test_file_annotate(self):
928 def test_file_annotate(self):
896 files = {
929 files = {
897 'vcs/backends/__init__.py': {
930 "vcs/backends/__init__.py": {
898 89: {
931 89: {
899 'lines_no': 31,
932 "lines_no": 31,
900 'commits': [
933 "commits": [
901 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
934 32,
902 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
935 32,
903 32, 32, 32, 32, 37, 32, 37, 37, 32,
936 61,
904 32, 32
937 32,
905 ]
938 32,
939 37,
940 32,
941 32,
942 32,
943 44,
944 37,
945 37,
946 37,
947 37,
948 45,
949 37,
950 44,
951 37,
952 37,
953 37,
954 32,
955 32,
956 32,
957 32,
958 37,
959 32,
960 37,
961 37,
962 32,
963 32,
964 32,
965 ],
966 },
967 20: {"lines_no": 1, "commits": [4]},
968 55: {
969 "lines_no": 31,
970 "commits": [
971 32,
972 32,
973 45,
974 32,
975 32,
976 37,
977 32,
978 32,
979 32,
980 44,
981 37,
982 37,
983 37,
984 37,
985 45,
986 37,
987 44,
988 37,
989 37,
990 37,
991 32,
992 32,
993 32,
994 32,
995 37,
996 32,
997 37,
998 37,
999 32,
1000 32,
1001 32,
1002 ],
1003 },
1004 },
1005 "vcs/exceptions.py": {
1006 89: {
1007 "lines_no": 18,
1008 "commits": [16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 16, 16, 18, 18, 18],
906 },
1009 },
907 20: {
1010 20: {
908 'lines_no': 1,
1011 "lines_no": 18,
909 'commits': [4]
1012 "commits": [16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 16, 16, 18, 18, 18],
910 },
1013 },
911 55: {
1014 55: {
912 'lines_no': 31,
1015 "lines_no": 18,
913 'commits': [
1016 "commits": [16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 16, 16, 18, 18, 18],
914 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
915 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
916 32, 32, 32, 32, 37, 32, 37, 37, 32,
917 32, 32
918 ]
919 }
920 },
921 'vcs/exceptions.py': {
922 89: {
923 'lines_no': 18,
924 'commits': [
925 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
926 16, 16, 17, 16, 16, 18, 18, 18
927 ]
928 },
1017 },
929 20: {
930 'lines_no': 18,
931 'commits': [
932 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
933 16, 16, 17, 16, 16, 18, 18, 18
934 ]
935 },
936 55: {
937 'lines_no': 18,
938 'commits': [
939 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
940 17, 16, 16, 18, 18, 18
941 ]
942 }
943 },
1018 },
944 'MANIFEST.in': {
1019 "MANIFEST.in": {
945 89: {
1020 89: {"lines_no": 5, "commits": [7, 7, 7, 71, 71]},
946 'lines_no': 5,
1021 20: {"lines_no": 3, "commits": [7, 7, 7]},
947 'commits': [7, 7, 7, 71, 71]
1022 55: {"lines_no": 3, "commits": [7, 7, 7]},
948 },
1023 },
949 20: {
950 'lines_no': 3,
951 'commits': [7, 7, 7]
952 },
953 55: {
954 'lines_no': 3,
955 'commits': [7, 7, 7]
956 }
957 }
958 }
1024 }
959
1025
960 for fname, commit_dict in files.items():
1026 for fname, commit_dict in files.items():
@@ -963,13 +1029,13 b' class TestMercurialCommit(object):'
963 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
1029 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
964 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
1030 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
965 assert l1_1 == l1_2
1031 assert l1_1 == l1_2
966 l1 = l1_2 = [
1032 l1 = l1_2 = [x[2]().idx for x in commit.get_file_annotate(fname)]
967 x[2]().idx for x in commit.get_file_annotate(fname)]
1033 l2 = files[fname][idx]["commits"]
968 l2 = files[fname][idx]['commits']
969 assert l1 == l2, (
1034 assert l1 == l2, (
970 "The lists of commit for %s@commit_id%s"
1035 "The lists of commit for %s@commit_id%s"
971 "from annotation list should match each other,"
1036 "from annotation list should match each other,"
972 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
1037 "got \n%s \nvs \n%s " % (fname, idx, l1, l2)
1038 )
973
1039
974 def test_commit_state(self):
1040 def test_commit_state(self):
975 """
1041 """
@@ -981,28 +1047,52 b' class TestMercurialCommit(object):'
981 # changed: 13
1047 # changed: 13
982 # added: 20
1048 # added: 20
983 # removed: 1
1049 # removed: 1
984 changed = set([
1050 changed = set(
985 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
1051 [
986 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
1052 ".hgignore",
987 'vcs/__init__.py', 'vcs/backends/__init__.py',
1053 "README.rst",
988 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
1054 "docs/conf.py",
989 'vcs/utils/__init__.py'])
1055 "docs/index.rst",
1056 "setup.py",
1057 "tests/test_hg.py",
1058 "tests/test_nodes.py",
1059 "vcs/__init__.py",
1060 "vcs/backends/__init__.py",
1061 "vcs/backends/base.py",
1062 "vcs/backends/hg.py",
1063 "vcs/nodes.py",
1064 "vcs/utils/__init__.py",
1065 ]
1066 )
990
1067
991 added = set([
1068 added = set(
992 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
1069 [
993 'docs/api/index.rst', 'docs/api/nodes.rst',
1070 "docs/api/backends/hg.rst",
994 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
1071 "docs/api/backends/index.rst",
995 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
1072 "docs/api/index.rst",
996 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1073 "docs/api/nodes.rst",
997 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1074 "docs/api/web/index.rst",
998 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1075 "docs/api/web/simplevcs.rst",
999 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1076 "docs/installation.rst",
1000 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1077 "docs/quickstart.rst",
1001 'vcs/web/simplevcs/views.py'])
1078 "setup.cfg",
1079 "vcs/utils/baseui_config.py",
1080 "vcs/utils/web.py",
1081 "vcs/web/__init__.py",
1082 "vcs/web/exceptions.py",
1083 "vcs/web/simplevcs/__init__.py",
1084 "vcs/web/simplevcs/exceptions.py",
1085 "vcs/web/simplevcs/middleware.py",
1086 "vcs/web/simplevcs/models.py",
1087 "vcs/web/simplevcs/settings.py",
1088 "vcs/web/simplevcs/utils.py",
1089 "vcs/web/simplevcs/views.py",
1090 ]
1091 )
1002
1092
1003 removed = set(['docs/api.rst'])
1093 removed = set(["docs/api.rst"])
1004
1094
1005 commit64 = self.repo.get_commit('46ad32a4f974')
1095 commit64 = self.repo.get_commit("46ad32a4f974")
1006 assert set((node.path for node in commit64.added)) == added
1096 assert set((node.path for node in commit64.added)) == added
1007 assert set((node.path for node in commit64.changed)) == changed
1097 assert set((node.path for node in commit64.changed)) == changed
1008 assert set((node.path for node in commit64.removed)) == removed
1098 assert set((node.path for node in commit64.removed)) == removed
@@ -1012,10 +1102,9 b' class TestMercurialCommit(object):'
1012 # changed: 13
1102 # changed: 13
1013 # added: 20
1103 # added: 20
1014 # removed: 1
1104 # removed: 1
1015 commit88 = self.repo.get_commit('b090f22d27d6')
1105 commit88 = self.repo.get_commit("b090f22d27d6")
1016 assert set((node.path for node in commit88.added)) == set()
1106 assert set((node.path for node in commit88.added)) == set()
1017 assert set((node.path for node in commit88.changed)) == \
1107 assert set((node.path for node in commit88.changed)) == set([".hgignore"])
1018 set(['.hgignore'])
1019 assert set((node.path for node in commit88.removed)) == set()
1108 assert set((node.path for node in commit88.removed)) == set()
1020
1109
1021 #
1110 #
@@ -1025,24 +1114,25 b' class TestMercurialCommit(object):'
1025 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1114 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1026 # removed: 1 ['vcs/utils/web.py']
1115 # removed: 1 ['vcs/utils/web.py']
1027 commit85 = self.repo.get_commit(commit_idx=85)
1116 commit85 = self.repo.get_commit(commit_idx=85)
1028 assert set((node.path for node in commit85.added)) == set([
1117 assert set((node.path for node in commit85.added)) == set(
1029 'vcs/utils/diffs.py',
1118 ["vcs/utils/diffs.py", "vcs/web/simplevcs/views/diffs.py"]
1030 'vcs/web/simplevcs/views/diffs.py'])
1119 )
1031 assert set((node.path for node in commit85.changed)) == set([
1120 assert set((node.path for node in commit85.changed)) == set(
1032 'vcs/web/simplevcs/models.py',
1121 [
1033 'vcs/web/simplevcs/utils.py',
1122 "vcs/web/simplevcs/models.py",
1034 'vcs/web/simplevcs/views/__init__.py',
1123 "vcs/web/simplevcs/utils.py",
1035 'vcs/web/simplevcs/views/repository.py',
1124 "vcs/web/simplevcs/views/__init__.py",
1036 ])
1125 "vcs/web/simplevcs/views/repository.py",
1037 assert set((node.path for node in commit85.removed)) == \
1126 ]
1038 set(['vcs/utils/web.py'])
1127 )
1128 assert set((node.path for node in commit85.removed)) == set(["vcs/utils/web.py"])
1039
1129
1040 def test_files_state(self):
1130 def test_files_state(self):
1041 """
1131 """
1042 Tests state of FileNodes.
1132 Tests state of FileNodes.
1043 """
1133 """
1044 commit = self.repo.get_commit(commit_idx=85)
1134 commit = self.repo.get_commit(commit_idx=85)
1045 node = commit.get_node('vcs/utils/diffs.py')
1135 node = commit.get_node("vcs/utils/diffs.py")
1046 assert node.state, NodeState.ADDED
1136 assert node.state, NodeState.ADDED
1047 assert node.added
1137 assert node.added
1048 assert not node.changed
1138 assert not node.changed
@@ -1050,7 +1140,7 b' class TestMercurialCommit(object):'
1050 assert not node.removed
1140 assert not node.removed
1051
1141
1052 commit = self.repo.get_commit(commit_idx=88)
1142 commit = self.repo.get_commit(commit_idx=88)
1053 node = commit.get_node('.hgignore')
1143 node = commit.get_node(".hgignore")
1054 assert node.state, NodeState.CHANGED
1144 assert node.state, NodeState.CHANGED
1055 assert not node.added
1145 assert not node.added
1056 assert node.changed
1146 assert node.changed
@@ -1058,7 +1148,7 b' class TestMercurialCommit(object):'
1058 assert not node.removed
1148 assert not node.removed
1059
1149
1060 commit = self.repo.get_commit(commit_idx=85)
1150 commit = self.repo.get_commit(commit_idx=85)
1061 node = commit.get_node('setup.py')
1151 node = commit.get_node("setup.py")
1062 assert node.state, NodeState.NOT_CHANGED
1152 assert node.state, NodeState.NOT_CHANGED
1063 assert not node.added
1153 assert not node.added
1064 assert not node.changed
1154 assert not node.changed
@@ -1068,7 +1158,7 b' class TestMercurialCommit(object):'
1068 # If node has REMOVED state then trying to fetch it would raise
1158 # If node has REMOVED state then trying to fetch it would raise
1069 # CommitError exception
1159 # CommitError exception
1070 commit = self.repo.get_commit(commit_idx=2)
1160 commit = self.repo.get_commit(commit_idx=2)
1071 path = 'vcs/backends/BaseRepository.py'
1161 path = "vcs/backends/BaseRepository.py"
1072 with pytest.raises(NodeDoesNotExistError):
1162 with pytest.raises(NodeDoesNotExistError):
1073 commit.get_node(path)
1163 commit.get_node(path)
1074 # but it would be one of ``removed`` (commit's attribute)
1164 # but it would be one of ``removed`` (commit's attribute)
@@ -1084,57 +1174,53 b' class TestMercurialCommit(object):'
1084
1174
1085 def test_repo_files_content_type(self):
1175 def test_repo_files_content_type(self):
1086 test_commit = self.repo.get_commit(commit_idx=100)
1176 test_commit = self.repo.get_commit(commit_idx=100)
1087 for node in test_commit.get_node('/'):
1177 for node in test_commit.get_node("/"):
1088 if node.is_file():
1178 if node.is_file():
1089 assert type(node.content) == bytes
1179 assert type(node.content) == bytes
1090 assert type(node.str_content) == str
1180 assert type(node.str_content) == str
1091
1181
1092 def test_wrong_path(self):
1182 def test_wrong_path(self):
1093 # There is 'setup.py' in the root dir but not there:
1183 # There is 'setup.py' in the root dir but not there:
1094 path = 'foo/bar/setup.py'
1184 path = "foo/bar/setup.py"
1095 with pytest.raises(VCSError):
1185 with pytest.raises(VCSError):
1096 self.repo.get_commit().get_node(path)
1186 self.repo.get_commit().get_node(path)
1097
1187
1098 def test_author_email(self):
1188 def test_author_email(self):
1099 assert 'marcin@python-blog.com' == \
1189 assert "marcin@python-blog.com" == self.repo.get_commit("b986218ba1c9").author_email
1100 self.repo.get_commit('b986218ba1c9').author_email
1190 assert "lukasz.balcerzak@python-center.pl" == self.repo.get_commit("3803844fdbd3").author_email
1101 assert 'lukasz.balcerzak@python-center.pl' == \
1191 assert "" == self.repo.get_commit("84478366594b").author_email
1102 self.repo.get_commit('3803844fdbd3').author_email
1103 assert '' == self.repo.get_commit('84478366594b').author_email
1104
1192
1105 def test_author_username(self):
1193 def test_author_username(self):
1106 assert 'Marcin Kuzminski' == \
1194 assert "Marcin Kuzminski" == self.repo.get_commit("b986218ba1c9").author_name
1107 self.repo.get_commit('b986218ba1c9').author_name
1195 assert "Lukasz Balcerzak" == self.repo.get_commit("3803844fdbd3").author_name
1108 assert 'Lukasz Balcerzak' == \
1196 assert "marcink" == self.repo.get_commit("84478366594b").author_name
1109 self.repo.get_commit('3803844fdbd3').author_name
1110 assert 'marcink' == \
1111 self.repo.get_commit('84478366594b').author_name
1112
1197
1113
1198
1114 class TestLargeFileRepo(object):
1199 class TestLargeFileRepo(object):
1115
1116 def test_large_file(self, backend_hg):
1200 def test_large_file(self, backend_hg):
1117 repo = backend_hg.create_test_repo('largefiles', make_db_config())
1201 conf = make_db_config()
1202 hg_largefiles_store = conf.get("largefiles", "usercache")
1203 repo = backend_hg.create_test_repo("largefiles", conf)
1118
1204
1119 tip = repo.scm_instance().get_commit()
1205 tip = repo.scm_instance().get_commit()
1120 node = tip.get_node('.hglf/thisfileislarge')
1206 node = tip.get_node(".hglf/thisfileislarge")
1121
1207
1122 lf_node = node.get_largefile_node()
1208 lf_node = node.get_largefile_node()
1123
1209
1124 assert lf_node.is_largefile() is True
1210 assert lf_node.is_largefile() is True
1125 assert lf_node.size == 1024000
1211 assert lf_node.size == 1024000
1126 assert lf_node.name == '.hglf/thisfileislarge'
1212 assert lf_node.name == ".hglf/thisfileislarge"
1127
1213
1128
1214
1129 class TestGetBranchName(object):
1215 class TestGetBranchName(object):
1130 def test_returns_ref_name_when_type_is_branch(self):
1216 def test_returns_ref_name_when_type_is_branch(self):
1131 ref = self._create_ref('branch', 'fake-name')
1217 ref = self._create_ref("branch", "fake-name")
1132 result = self.repo._get_branch_name(ref)
1218 result = self.repo._get_branch_name(ref)
1133 assert result == ref.name
1219 assert result == ref.name
1134
1220
1135 @pytest.mark.parametrize("type_", ("book", "tag"))
1221 @pytest.mark.parametrize("type_", ("book", "tag"))
1136 def test_queries_remote_when_type_is_not_branch(self, type_):
1222 def test_queries_remote_when_type_is_not_branch(self, type_):
1137 ref = self._create_ref(type_, 'wrong-fake-name')
1223 ref = self._create_ref(type_, "wrong-fake-name")
1138 with mock.patch.object(self.repo, "_remote") as remote_mock:
1224 with mock.patch.object(self.repo, "_remote") as remote_mock:
1139 remote_mock.ctx_branch.return_value = "fake-name"
1225 remote_mock.ctx_branch.return_value = "fake-name"
1140 result = self.repo._get_branch_name(ref)
1226 result = self.repo._get_branch_name(ref)
@@ -1144,7 +1230,7 b' class TestGetBranchName(object):'
1144 def _create_ref(self, type_, name):
1230 def _create_ref(self, type_, name):
1145 ref = mock.Mock()
1231 ref = mock.Mock()
1146 ref.type = type_
1232 ref.type = type_
1147 ref.name = 'wrong-fake-name'
1233 ref.name = "wrong-fake-name"
1148 ref.commit_id = "deadbeef"
1234 ref.commit_id = "deadbeef"
1149 return ref
1235 return ref
1150
1236
@@ -1153,8 +1239,7 b' class TestIsTheSameBranch(object):'
1153 def test_returns_true_when_branches_are_equal(self):
1239 def test_returns_true_when_branches_are_equal(self):
1154 source_ref = mock.Mock(name="source-ref")
1240 source_ref = mock.Mock(name="source-ref")
1155 target_ref = mock.Mock(name="target-ref")
1241 target_ref = mock.Mock(name="target-ref")
1156 branch_name_patcher = mock.patch.object(
1242 branch_name_patcher = mock.patch.object(self.repo, "_get_branch_name", return_value="default")
1157 self.repo, "_get_branch_name", return_value="default")
1158 with branch_name_patcher as branch_name_mock:
1243 with branch_name_patcher as branch_name_mock:
1159 result = self.repo._is_the_same_branch(source_ref, target_ref)
1244 result = self.repo._is_the_same_branch(source_ref, target_ref)
1160
1245
@@ -1171,8 +1256,7 b' class TestIsTheSameBranch(object):'
1171 def side_effect(ref):
1256 def side_effect(ref):
1172 return ref.name
1257 return ref.name
1173
1258
1174 branch_name_patcher = mock.patch.object(
1259 branch_name_patcher = mock.patch.object(self.repo, "_get_branch_name", side_effect=side_effect)
1175 self.repo, "_get_branch_name", side_effect=side_effect)
1176 with branch_name_patcher as branch_name_mock:
1260 with branch_name_patcher as branch_name_mock:
1177 result = self.repo._is_the_same_branch(source_ref, target_ref)
1261 result = self.repo._is_the_same_branch(source_ref, target_ref)
1178
1262
@@ -1,5 +1,3 b''
1
2
3 # Copyright (C) 2016-2023 RhodeCode GmbH
1 # Copyright (C) 2016-2023 RhodeCode GmbH
4 #
2 #
5 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -35,45 +33,47 b' class TestMercurialRemoteRepoInvalidatio'
35 """
33 """
36
34
37 # Default reference used as a dummy during tests.
35 # Default reference used as a dummy during tests.
38 default_ref = Reference('branch', 'default', None)
36 default_ref = Reference("branch", "default", None)
39
37
40 # Methods of vcsserver.hg.HgRemote that are "writing" operations.
38 # Methods of vcsserver.hg.HgRemote that are "writing" operations.
41 writing_methods = [
39 writing_methods = [
42 'bookmark',
40 "bookmark",
43 'commit',
41 "commit",
44 'merge',
42 "merge",
45 'pull',
43 "pull",
46 'pull_cmd',
44 "pull_cmd",
47 'rebase',
45 "rebase",
48 'strip',
46 "strip",
49 'tag',
47 "tag",
50 ]
48 ]
51
49
52 @pytest.mark.parametrize('method_name, method_args', [
50 @pytest.mark.parametrize(
53 ('_local_merge', [default_ref, None, None, None, default_ref]),
51 "method_name, method_args",
54 ('_local_pull', ['', default_ref]),
52 [
55 ('bookmark', [None]),
53 ("_local_merge", [default_ref, None, None, None, default_ref]),
56 ('pull', ['', default_ref]),
54 ("_local_pull", ["", default_ref]),
57 ('remove_tag', ['mytag', None]),
55 ("bookmark", [None]),
58 ('strip', [None]),
56 ("pull", ["", default_ref]),
59 ('tag', ['newtag', None]),
57 ("remove_tag", ["mytag", None]),
60 ])
58 ("strip", [None]),
61 def test_method_invokes_invalidate_on_remote_repo(
59 ("tag", ["newtag", None]),
62 self, method_name, method_args, backend_hg):
60 ],
61 )
62 def test_method_invokes_invalidate_on_remote_repo(self, method_name, method_args, backend_hg):
63 """
63 """
64 Check that the listed methods are invalidating the VCSServer cache
64 Check that the listed methods are invalidating the VCSServer cache
65 after invoking a writing method of their remote repository object.
65 after invoking a writing method of their remote repository object.
66 """
66 """
67 tags = {'mytag': 'mytag-id'}
67 tags = {"mytag": "mytag-id"}
68
68
69 def add_tag(name, raw_id, *args, **kwds):
69 def add_tag(name, raw_id, *args, **kwds):
70 tags[name] = raw_id
70 tags[name] = raw_id
71
71
72 repo = backend_hg.repo.scm_instance()
72 repo = backend_hg.repo.scm_instance()
73
73
74 with patch.object(repo, '_remote') as remote:
74 with patch.object(repo, "_remote") as remote:
75 repo.tags = tags
75 repo.tags = tags
76 remote.lookup.return_value = ('commit-id', 'commit-idx')
76 remote.lookup.return_value = ("commit-id", "commit-idx")
77 remote.tags.return_value = tags
77 remote.tags.return_value = tags
78 remote._get_tags.return_value = tags
78 remote._get_tags.return_value = tags
79 remote.is_empty.return_value = False
79 remote.is_empty.return_value = False
@@ -98,6 +98,7 b' class TestMercurialRemoteRepoInvalidatio'
98 references.
98 references.
99 """
99 """
100 from rhodecode.model.pull_request import PullRequestModel
100 from rhodecode.model.pull_request import PullRequestModel
101
101 repo_id = pull_request.target_repo.repo_id
102 repo_id = pull_request.target_repo.repo_id
102 target_vcs = pull_request.target_repo.scm_instance()
103 target_vcs = pull_request.target_repo.scm_instance()
103 target_ref = pull_request.target_ref_parts
104 target_ref = pull_request.target_ref_parts
@@ -107,7 +108,8 b' class TestMercurialRemoteRepoInvalidatio'
107 pr = PullRequestModel()
108 pr = PullRequestModel()
108 workspace_id = pr._workspace_id(pull_request)
109 workspace_id = pr._workspace_id(pull_request)
109 shadow_repository_path = target_vcs._maybe_prepare_merge_workspace(
110 shadow_repository_path = target_vcs._maybe_prepare_merge_workspace(
110 repo_id, workspace_id, target_ref, source_ref)
111 repo_id, workspace_id, target_ref, source_ref
112 )
111 shadow_repo = target_vcs.get_shadow_instance(shadow_repository_path, cache=True)
113 shadow_repo = target_vcs.get_shadow_instance(shadow_repository_path, cache=True)
112
114
113 # This will populate the cache of the mercurial repository object
115 # This will populate the cache of the mercurial repository object
@@ -116,7 +118,7 b' class TestMercurialRemoteRepoInvalidatio'
116
118
117 return shadow_repo, source_ref, target_ref
119 return shadow_repo, source_ref, target_ref
118
120
119 @pytest.mark.backends('hg')
121 @pytest.mark.backends("hg")
120 def test_commit_does_not_exist_error_happens(self, pr_util, app):
122 def test_commit_does_not_exist_error_happens(self, pr_util, app):
121 """
123 """
122 This test is somewhat special. It does not really test the system
124 This test is somewhat special. It does not really test the system
@@ -132,18 +134,18 b' class TestMercurialRemoteRepoInvalidatio'
132 source_vcs = pull_request.source_repo.scm_instance()
134 source_vcs = pull_request.source_repo.scm_instance()
133 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(pull_request)
135 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(pull_request)
134
136
135 initial_cache_uid = shadow_repo._remote._wire['context']
137 initial_cache_uid = shadow_repo._remote._wire["context"]
136 initial_commit_ids = shadow_repo._remote.get_all_commit_ids('visible')
138 initial_commit_ids = shadow_repo._remote.get_all_commit_ids("visible")
137
139
138 # Pull from target and source references but without invalidation of
140 # Pull from target and source references but without invalidation of
139 # RemoteRepo objects and without VCSServer caching of mercurial repository objects.
141 # RemoteRepo objects and without VCSServer caching of mercurial repository objects.
140 with patch.object(shadow_repo._remote, 'invalidate_vcs_cache'):
142 with patch.object(shadow_repo._remote, "invalidate_vcs_cache"):
141 # NOTE: Do not use patch.dict() to disable the cache because it
143 # NOTE: Do not use patch.dict() to disable the cache because it
142 # restores the WHOLE dict and not only the patched keys.
144 # restores the WHOLE dict and not only the patched keys.
143 shadow_repo._remote._wire['cache'] = False
145 shadow_repo._remote._wire["cache"] = False
144 shadow_repo._local_pull(target_vcs.path, target_ref)
146 shadow_repo._local_pull(target_vcs.path, target_ref)
145 shadow_repo._local_pull(source_vcs.path, source_ref)
147 shadow_repo._local_pull(source_vcs.path, source_ref)
146 shadow_repo._remote._wire['cache'] = True
148 shadow_repo._remote._wire["cache"] = True
147
149
148 # Try to lookup the target_ref in shadow repo. This should work because
150 # Try to lookup the target_ref in shadow repo. This should work because
149 # test_repo_maker_uses_session_for_instance_methods
151 # test_repo_maker_uses_session_for_instance_methods
@@ -153,14 +155,14 b' class TestMercurialRemoteRepoInvalidatio'
153
155
154 # we ensure that call context has not changed, this is what
156 # we ensure that call context has not changed, this is what
155 # `invalidate_vcs_cache` does
157 # `invalidate_vcs_cache` does
156 assert initial_cache_uid == shadow_repo._remote._wire['context']
158 assert initial_cache_uid == shadow_repo._remote._wire["context"]
157
159
158 # If we try to lookup all commits.
160 # If we try to lookup all commits.
159 # repo commit cache doesn't get invalidated. (Due to patched
161 # repo commit cache doesn't get invalidated. (Due to patched
160 # invalidation and caching above).
162 # invalidation and caching above).
161 assert initial_commit_ids == shadow_repo._remote.get_all_commit_ids('visible')
163 assert initial_commit_ids == shadow_repo._remote.get_all_commit_ids("visible")
162
164
163 @pytest.mark.backends('hg')
165 @pytest.mark.backends("hg")
164 def test_commit_does_not_exist_error_does_not_happen(self, pr_util, app):
166 def test_commit_does_not_exist_error_does_not_happen(self, pr_util, app):
165 """
167 """
166 This test simulates a pull request merge in which the pull operations
168 This test simulates a pull request merge in which the pull operations
@@ -179,10 +181,10 b' class TestMercurialRemoteRepoInvalidatio'
179 # of RemoteRepo objects.
181 # of RemoteRepo objects.
180 # NOTE: Do not use patch.dict() to disable the cache because it
182 # NOTE: Do not use patch.dict() to disable the cache because it
181 # restores the WHOLE dict and not only the patched keys.
183 # restores the WHOLE dict and not only the patched keys.
182 shadow_repo._remote._wire['cache'] = False
184 shadow_repo._remote._wire["cache"] = False
183 shadow_repo._local_pull(target_vcs.path, target_ref)
185 shadow_repo._local_pull(target_vcs.path, target_ref)
184 shadow_repo._local_pull(source_vcs.path, source_ref)
186 shadow_repo._local_pull(source_vcs.path, source_ref)
185 shadow_repo._remote._wire['cache'] = True
187 shadow_repo._remote._wire["cache"] = True
186
188
187 # Try to lookup the target and source references in shadow repo. This
189 # Try to lookup the target and source references in shadow repo. This
188 # should work because the RemoteRepo object gets invalidated during the
190 # should work because the RemoteRepo object gets invalidated during the
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -20,15 +19,21 b''
20 """
19 """
21 Tests so called "in memory commits" commit API of vcs.
20 Tests so called "in memory commits" commit API of vcs.
22 """
21 """
22
23 import datetime
23 import datetime
24
24
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib.str_utils import safe_bytes, safe_str
27 from rhodecode.lib.str_utils import safe_bytes, safe_str
28 from rhodecode.lib.vcs.exceptions import (
28 from rhodecode.lib.vcs.exceptions import (
29 EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyExistsError,
29 EmptyRepositoryError,
30 NodeAlreadyRemovedError, NodeAlreadyChangedError, NodeDoesNotExistError,
30 NodeAlreadyAddedError,
31 NodeNotChangedError)
31 NodeAlreadyExistsError,
32 NodeAlreadyRemovedError,
33 NodeAlreadyChangedError,
34 NodeDoesNotExistError,
35 NodeNotChangedError,
36 )
32 from rhodecode.lib.vcs.nodes import DirNode, FileNode
37 from rhodecode.lib.vcs.nodes import DirNode, FileNode
33 from rhodecode.tests.vcs.conftest import BackendTestMixin
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
34
39
@@ -36,19 +41,21 b' from rhodecode.tests.vcs.conftest import'
36 @pytest.fixture()
41 @pytest.fixture()
37 def nodes():
42 def nodes():
38 nodes = [
43 nodes = [
39 FileNode(b'foobar', content=b'Foo & bar'),
44 FileNode(b"foobar", content=b"Foo & bar"),
40 FileNode(b'foobar2', content=b'Foo & bar, doubled!'),
45 FileNode(b"foobar2", content=b"Foo & bar, doubled!"),
41 FileNode(b'foo bar with spaces', content=b''),
46 FileNode(b"foo bar with spaces", content=b""),
42 FileNode(b'foo/bar/baz', content=b'Inside'),
47 FileNode(b"foo/bar/baz", content=b"Inside"),
43 FileNode(b'foo/bar/file.bin', content=(
48 FileNode(
44 b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00'
49 b"foo/bar/file.bin",
45 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe'
50 content=(
46 b'\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
51 b"\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00"
47 b'\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00'
52 b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe"
48 b'\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00'
53 b"\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
49 b'\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff'
54 b"\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00"
50 b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
55 b"\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00"
51 )
56 b"\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff"
57 b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
58 ),
52 ),
59 ),
53 ]
60 ]
54 return nodes
61 return nodes
@@ -80,14 +87,14 b' class TestInMemoryCommit(BackendTestMixi'
80 def test_add_on_branch_hg(self, nodes):
87 def test_add_on_branch_hg(self, nodes):
81 for node in nodes:
88 for node in nodes:
82 self.imc.add(node)
89 self.imc.add(node)
83 self.commit(branch='stable')
90 self.commit(branch="stable")
84 self.assert_successful_commit(nodes)
91 self.assert_successful_commit(nodes)
85
92
86 @pytest.mark.backends("git")
93 @pytest.mark.backends("git")
87 def test_add_on_branch_git(self, nodes):
94 def test_add_on_branch_git(self, nodes):
88 for node in nodes:
95 for node in nodes:
89 self.imc.add(node)
96 self.imc.add(node)
90 self.commit(branch='stable')
97 self.commit(branch="stable")
91 self.assert_successful_commit(nodes)
98 self.assert_successful_commit(nodes)
92
99
93 def test_add_in_bulk(self, nodes):
100 def test_add_in_bulk(self, nodes):
@@ -98,10 +105,8 b' class TestInMemoryCommit(BackendTestMixi'
98
105
99 def test_add_non_ascii_files(self):
106 def test_add_non_ascii_files(self):
100 nodes = [
107 nodes = [
101 FileNode(safe_bytes('ΕΌΓ³Ε‚wik/zwierzΔ…tko_utf8_str'),
108 FileNode(safe_bytes("ΕΌΓ³Ε‚wik/zwierzΔ…tko_utf8_str"), content=safe_bytes("Δ‡Δ‡Δ‡Δ‡")),
102 content=safe_bytes('Δ‡Δ‡Δ‡Δ‡')),
109 FileNode(safe_bytes("ΕΌΓ³Ε‚wik/zwierzΔ…tko_unicode"), content=safe_bytes("Δ‡Δ‡Δ‡Δ‡")),
103 FileNode(safe_bytes('ΕΌΓ³Ε‚wik/zwierzΔ…tko_unicode'),
104 content=safe_bytes('Δ‡Δ‡Δ‡Δ‡')),
105 ]
110 ]
106
111
107 for node in nodes:
112 for node in nodes:
@@ -112,60 +117,56 b' class TestInMemoryCommit(BackendTestMixi'
112
117
113 def commit(self, branch=None):
118 def commit(self, branch=None):
114 self.old_commit_count = len(self.repo.commit_ids)
119 self.old_commit_count = len(self.repo.commit_ids)
115 self.commit_message = 'Test commit with unicode: ΕΌΓ³Ε‚wik'
120 self.commit_message = "Test commit with unicode: ΕΌΓ³Ε‚wik"
116 self.commit_author = f'{self.__class__.__name__} <foo@email.com>'
121 self.commit_author = f"{self.__class__.__name__} <foo@email.com>"
117 self.commit = self.imc.commit(
122 self.commit = self.imc.commit(message=self.commit_message, author=self.commit_author, branch=branch)
118 message=self.commit_message, author=self.commit_author,
119 branch=branch)
120
123
121 def test_add_actually_adds_all_nodes_at_second_commit_too(self):
124 def test_add_actually_adds_all_nodes_at_second_commit_too(self):
122 to_add = [
125 to_add = [
123 FileNode(b'foo/bar/image.png', content=b'\0'),
126 FileNode(b"foo/bar/image.png", content=b"\0"),
124 FileNode(b'foo/README.txt', content=b'readme!'),
127 FileNode(b"foo/README.txt", content=b"readme!"),
125 ]
128 ]
126 self.imc.add(*to_add)
129 self.imc.add(*to_add)
127 commit = self.imc.commit('Initial', 'joe doe <joe.doe@example.com>')
130 commit = self.imc.commit("Initial", "joe doe <joe.doe@example.com>")
128 assert isinstance(commit.get_node('foo'), DirNode)
131 assert isinstance(commit.get_node("foo"), DirNode)
129 assert isinstance(commit.get_node('foo/bar'), DirNode)
132 assert isinstance(commit.get_node("foo/bar"), DirNode)
130 self.assert_nodes_in_commit(commit, to_add)
133 self.assert_nodes_in_commit(commit, to_add)
131
134
132 # commit some more files again
135 # commit some more files again
133 to_add = [
136 to_add = [
134 FileNode(b'foo/bar/foobaz/bar', content=b'foo'),
137 FileNode(b"foo/bar/foobaz/bar", content=b"foo"),
135 FileNode(b'foo/bar/another/bar', content=b'foo'),
138 FileNode(b"foo/bar/another/bar", content=b"foo"),
136 FileNode(b'foo/baz.txt', content=b'foo'),
139 FileNode(b"foo/baz.txt", content=b"foo"),
137 FileNode(b'foobar/foobaz/file', content=b'foo'),
140 FileNode(b"foobar/foobaz/file", content=b"foo"),
138 FileNode(b'foobar/barbaz', content=b'foo'),
141 FileNode(b"foobar/barbaz", content=b"foo"),
139 ]
142 ]
140 self.imc.add(*to_add)
143 self.imc.add(*to_add)
141 commit = self.imc.commit('Another', 'joe doe <joe.doe@example.com>')
144 commit = self.imc.commit("Another", "joe doe <joe.doe@example.com>")
142 self.assert_nodes_in_commit(commit, to_add)
145 self.assert_nodes_in_commit(commit, to_add)
143
146
144 def test_add_raise_already_added(self):
147 def test_add_raise_already_added(self):
145 node = FileNode(b'foobar', content=b'baz')
148 node = FileNode(b"foobar", content=b"baz")
146 self.imc.add(node)
149 self.imc.add(node)
147 with pytest.raises(NodeAlreadyAddedError):
150 with pytest.raises(NodeAlreadyAddedError):
148 self.imc.add(node)
151 self.imc.add(node)
149
152
150 def test_check_integrity_raise_already_exist(self):
153 def test_check_integrity_raise_already_exist(self):
151 node = FileNode(b'foobar', content=b'baz')
154 node = FileNode(b"foobar", content=b"baz")
152 self.imc.add(node)
155 self.imc.add(node)
153 self.imc.commit(message='Added foobar',
156 self.imc.commit(message="Added foobar", author="Some Name <foo@bar.com>")
154 author='Some Name <foo@bar.com>')
155 self.imc.add(node)
157 self.imc.add(node)
156 with pytest.raises(NodeAlreadyExistsError):
158 with pytest.raises(NodeAlreadyExistsError):
157 self.imc.commit(message='new message',
159 self.imc.commit(message="new message", author="Some Name <foo@bar.com>")
158 author='Some Name <foo@bar.com>')
159
160
160 def test_change(self):
161 def test_change(self):
161 self.imc.add(FileNode(b'foo/bar/baz', content=b'foo'))
162 self.imc.add(FileNode(b"foo/bar/baz", content=b"foo"))
162 self.imc.add(FileNode(b'foo/fbar', content=b'foobar'))
163 self.imc.add(FileNode(b"foo/fbar", content=b"foobar"))
163 tip = self.imc.commit('Initial', 'joe doe <joe.doe@example.com>')
164 tip = self.imc.commit("Initial", "joe doe <joe.doe@example.com>")
164
165
165 # Change node's content
166 # Change node's content
166 node = FileNode(b'foo/bar/baz', content=b'My **changed** content')
167 node = FileNode(b"foo/bar/baz", content=b"My **changed** content")
167 self.imc.change(node)
168 self.imc.change(node)
168 self.imc.commit('Changed %s' % node.path, 'joe doe <joe.doe@example.com>')
169 self.imc.commit("Changed %s" % node.path, "joe doe <joe.doe@example.com>")
169
170
170 newtip = self.repo.get_commit()
171 newtip = self.repo.get_commit()
171 assert tip != newtip
172 assert tip != newtip
@@ -174,28 +175,22 b' class TestInMemoryCommit(BackendTestMixi'
174
175
175 def test_change_non_ascii(self):
176 def test_change_non_ascii(self):
176 to_add = [
177 to_add = [
177 FileNode(safe_bytes('ΕΌΓ³Ε‚wik/zwierzΔ…tko'),
178 FileNode(safe_bytes("ΕΌΓ³Ε‚wik/zwierzΔ…tko"), content=safe_bytes("Δ‡Δ‡Δ‡Δ‡")),
178 content=safe_bytes('Δ‡Δ‡Δ‡Δ‡')),
179 FileNode(safe_bytes("ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni"), content=safe_bytes("Δ‡Δ‡Δ‡Δ‡")),
179 FileNode(safe_bytes('ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni'),
180 content=safe_bytes('Δ‡Δ‡Δ‡Δ‡')),
181 ]
180 ]
182 for node in to_add:
181 for node in to_add:
183 self.imc.add(node)
182 self.imc.add(node)
184
183
185 tip = self.imc.commit('Initial', 'joe doe <joe.doe@example.com>')
184 tip = self.imc.commit("Initial", "joe doe <joe.doe@example.com>")
186
185
187 # Change node's content
186 # Change node's content
188 node = FileNode(safe_bytes('ΕΌΓ³Ε‚wik/zwierzΔ…tko'),
187 node = FileNode(safe_bytes("ΕΌΓ³Ε‚wik/zwierzΔ…tko"), content=b"My **changed** content")
189 content=b'My **changed** content')
190 self.imc.change(node)
188 self.imc.change(node)
191 self.imc.commit('Changed %s' % safe_str(node.path),
189 self.imc.commit("Changed %s" % safe_str(node.path), author="joe doe <joe.doe@example.com>")
192 author='joe doe <joe.doe@example.com>')
193
190
194 node_uni = FileNode(safe_bytes('ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni'),
191 node_uni = FileNode(safe_bytes("ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni"), content=b"My **changed** content")
195 content=b'My **changed** content')
196 self.imc.change(node_uni)
192 self.imc.change(node_uni)
197 self.imc.commit('Changed %s' % safe_str(node_uni.path),
193 self.imc.commit("Changed %s" % safe_str(node_uni.path), author="joe doe <joe.doe@example.com>")
198 author='joe doe <joe.doe@example.com>')
199
194
200 newtip = self.repo.get_commit()
195 newtip = self.repo.get_commit()
201 assert tip != newtip
196 assert tip != newtip
@@ -204,24 +199,24 b' class TestInMemoryCommit(BackendTestMixi'
204 self.assert_nodes_in_commit(newtip, (node, node_uni))
199 self.assert_nodes_in_commit(newtip, (node, node_uni))
205
200
206 def test_change_raise_empty_repository(self):
201 def test_change_raise_empty_repository(self):
207 node = FileNode(b'foobar')
202 node = FileNode(b"foobar")
208 with pytest.raises(EmptyRepositoryError):
203 with pytest.raises(EmptyRepositoryError):
209 self.imc.change(node)
204 self.imc.change(node)
210
205
211 def test_check_integrity_change_raise_node_does_not_exist(self):
206 def test_check_integrity_change_raise_node_does_not_exist(self):
212 node = FileNode(b'foobar', content=b'baz')
207 node = FileNode(b"foobar", content=b"baz")
213 self.imc.add(node)
208 self.imc.add(node)
214 self.imc.commit(message='Added foobar', author='Some Name <foo@bar.com>')
209 self.imc.commit(message="Added foobar", author="Some Name <foo@bar.com>")
215 node = FileNode(b'not-foobar', content=b'')
210 node = FileNode(b"not-foobar", content=b"")
216 self.imc.change(node)
211 self.imc.change(node)
217 with pytest.raises(NodeDoesNotExistError):
212 with pytest.raises(NodeDoesNotExistError):
218 self.imc.commit(message='Changed not existing node', author='Some Name <foo@bar.com>')
213 self.imc.commit(message="Changed not existing node", author="Some Name <foo@bar.com>")
219
214
220 def test_change_raise_node_already_changed(self):
215 def test_change_raise_node_already_changed(self):
221 node = FileNode(b'foobar', content=b'baz')
216 node = FileNode(b"foobar", content=b"baz")
222 self.imc.add(node)
217 self.imc.add(node)
223 self.imc.commit(message='Added foobar', author='Some Nam <foo@bar.com>')
218 self.imc.commit(message="Added foobar", author="Some Nam <foo@bar.com>")
224 node = FileNode(b'foobar', content=b'more baz')
219 node = FileNode(b"foobar", content=b"more baz")
225 self.imc.change(node)
220 self.imc.change(node)
226 with pytest.raises(NodeAlreadyChangedError):
221 with pytest.raises(NodeAlreadyChangedError):
227 self.imc.change(node)
222 self.imc.change(node)
@@ -233,14 +228,14 b' class TestInMemoryCommit(BackendTestMixi'
233 self.imc.change(node)
228 self.imc.change(node)
234 with pytest.raises(NodeNotChangedError):
229 with pytest.raises(NodeNotChangedError):
235 self.imc.commit(
230 self.imc.commit(
236 message='Trying to mark node as changed without touching it',
231 message="Trying to mark node as changed without touching it", author="Some Name <foo@bar.com>"
237 author='Some Name <foo@bar.com>')
232 )
238
233
239 def test_change_raise_node_already_removed(self):
234 def test_change_raise_node_already_removed(self):
240 node = FileNode(b'foobar', content=b'baz')
235 node = FileNode(b"foobar", content=b"baz")
241 self.imc.add(node)
236 self.imc.add(node)
242 self.imc.commit(message='Added foobar', author='Some Name <foo@bar.com>')
237 self.imc.commit(message="Added foobar", author="Some Name <foo@bar.com>")
243 self.imc.remove(FileNode(b'foobar'))
238 self.imc.remove(FileNode(b"foobar"))
244 with pytest.raises(NodeAlreadyRemovedError):
239 with pytest.raises(NodeAlreadyRemovedError):
245 self.imc.change(node)
240 self.imc.change(node)
246
241
@@ -251,7 +246,7 b' class TestInMemoryCommit(BackendTestMixi'
251 node = nodes[0]
246 node = nodes[0]
252 assert node.content == tip.get_node(node.path).content
247 assert node.content == tip.get_node(node.path).content
253 self.imc.remove(node)
248 self.imc.remove(node)
254 self.imc.commit(message=f'Removed {node.path}', author='Some Name <foo@bar.com>')
249 self.imc.commit(message=f"Removed {node.path}", author="Some Name <foo@bar.com>")
255
250
256 newtip = self.repo.get_commit()
251 newtip = self.repo.get_commit()
257 assert tip != newtip
252 assert tip != newtip
@@ -260,31 +255,27 b' class TestInMemoryCommit(BackendTestMixi'
260 newtip.get_node(node.path)
255 newtip.get_node(node.path)
261
256
262 def test_remove_last_file_from_directory(self):
257 def test_remove_last_file_from_directory(self):
263 node = FileNode(b'omg/qwe/foo/bar', content=b'foobar')
258 node = FileNode(b"omg/qwe/foo/bar", content=b"foobar")
264 self.imc.add(node)
259 self.imc.add(node)
265 self.imc.commit('added', author='joe doe <joe@doe.com>')
260 self.imc.commit("added", author="joe doe <joe@doe.com>")
266
261
267 self.imc.remove(node)
262 self.imc.remove(node)
268 tip = self.imc.commit('removed', 'joe doe <joe@doe.com>')
263 tip = self.imc.commit("removed", "joe doe <joe@doe.com>")
269 with pytest.raises(NodeDoesNotExistError):
264 with pytest.raises(NodeDoesNotExistError):
270 tip.get_node('omg/qwe/foo/bar')
265 tip.get_node("omg/qwe/foo/bar")
271
266
272 def test_remove_raise_node_does_not_exist(self, nodes):
267 def test_remove_raise_node_does_not_exist(self, nodes):
273 self.imc.remove(nodes[0])
268 self.imc.remove(nodes[0])
274 with pytest.raises(NodeDoesNotExistError):
269 with pytest.raises(NodeDoesNotExistError):
275 self.imc.commit(
270 self.imc.commit(message="Trying to remove node at empty repository", author="Some Name <foo@bar.com>")
276 message='Trying to remove node at empty repository',
277 author='Some Name <foo@bar.com>')
278
271
279 def test_check_integrity_remove_raise_node_does_not_exist(self, nodes):
272 def test_check_integrity_remove_raise_node_does_not_exist(self, nodes):
280 self.test_add(nodes) # Performs first commit
273 self.test_add(nodes) # Performs first commit
281
274
282 node = FileNode(b'no-such-file')
275 node = FileNode(b"no-such-file")
283 self.imc.remove(node)
276 self.imc.remove(node)
284 with pytest.raises(NodeDoesNotExistError):
277 with pytest.raises(NodeDoesNotExistError):
285 self.imc.commit(
278 self.imc.commit(message="Trying to remove not existing node", author="Some Name <foo@bar.com>")
286 message='Trying to remove not existing node',
287 author='Some Name <foo@bar.com>')
288
279
289 def test_remove_raise_node_already_removed(self, nodes):
280 def test_remove_raise_node_already_removed(self, nodes):
290 self.test_add(nodes) # Performs first commit
281 self.test_add(nodes) # Performs first commit
@@ -297,13 +288,13 b' class TestInMemoryCommit(BackendTestMixi'
297 def test_remove_raise_node_already_changed(self, nodes):
288 def test_remove_raise_node_already_changed(self, nodes):
298 self.test_add(nodes) # Performs first commit
289 self.test_add(nodes) # Performs first commit
299
290
300 node = FileNode(nodes[0].bytes_path, content=b'Bending time')
291 node = FileNode(nodes[0].bytes_path, content=b"Bending time")
301 self.imc.change(node)
292 self.imc.change(node)
302 with pytest.raises(NodeAlreadyChangedError):
293 with pytest.raises(NodeAlreadyChangedError):
303 self.imc.remove(node)
294 self.imc.remove(node)
304
295
305 def test_reset(self):
296 def test_reset(self):
306 self.imc.add(FileNode(b'foo', content=b'bar'))
297 self.imc.add(FileNode(b"foo", content=b"bar"))
307 # self.imc.change(FileNode(b'baz', content='new'))
298 # self.imc.change(FileNode(b'baz', content='new'))
308 # self.imc.remove(FileNode(b'qwe'))
299 # self.imc.remove(FileNode(b'qwe'))
309 self.imc.reset()
300 self.imc.reset()
@@ -313,11 +304,11 b' class TestInMemoryCommit(BackendTestMixi'
313 N = 3 # number of commits to perform
304 N = 3 # number of commits to perform
314 last = None
305 last = None
315 for x in range(N):
306 for x in range(N):
316 fname = safe_bytes('file%s' % str(x).rjust(5, '0'))
307 fname = safe_bytes("file%s" % str(x).rjust(5, "0"))
317 content = safe_bytes('foobar\n' * x)
308 content = safe_bytes("foobar\n" * x)
318 node = FileNode(fname, content=content)
309 node = FileNode(fname, content=content)
319 self.imc.add(node)
310 self.imc.add(node)
320 commit = self.imc.commit("Commit no. %s" % (x + 1), author='Vcs User <foo@bar.com>')
311 commit = self.imc.commit("Commit no. %s" % (x + 1), author="Vcs User <foo@bar.com>")
321 assert last != commit
312 assert last != commit
322 last = commit
313 last = commit
323
314
@@ -329,12 +320,10 b' class TestInMemoryCommit(BackendTestMixi'
329 assert len(repo.commit_ids) == N
320 assert len(repo.commit_ids) == N
330
321
331 def test_date_attr(self, local_dt_to_utc):
322 def test_date_attr(self, local_dt_to_utc):
332 node = FileNode(b'foobar.txt', content=b'Foobared!')
323 node = FileNode(b"foobar.txt", content=b"Foobared!")
333 self.imc.add(node)
324 self.imc.add(node)
334 date = datetime.datetime(1985, 1, 30, 1, 45)
325 date = datetime.datetime(1985, 1, 30, 1, 45)
335 commit = self.imc.commit(
326 commit = self.imc.commit("Committed at time when I was born ;-)", author="Test User <foo@bar.com>", date=date)
336 "Committed at time when I was born ;-)",
337 author='Test User <foo@bar.com>', date=date)
338
327
339 assert commit.date == local_dt_to_utc(date)
328 assert commit.date == local_dt_to_utc(date)
340
329
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -38,34 +37,34 b' def binary_filenode():'
38 b"\x10\x08\x06\x00\x00\x00\x1f??a\x00\x00\x00\x04gAMA\x00\x00\xaf?7"
37 b"\x10\x08\x06\x00\x00\x00\x1f??a\x00\x00\x00\x04gAMA\x00\x00\xaf?7"
39 b"\x05\x8a?\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq?e<\x00"
38 b"\x05\x8a?\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq?e<\x00"
40 b"\x00\x025IDAT8?\xa5\x93?K\x94Q\x14\x87\x9f\xf7?Q\x1bs4?\x03\x9a"
39 b"\x00\x025IDAT8?\xa5\x93?K\x94Q\x14\x87\x9f\xf7?Q\x1bs4?\x03\x9a"
41 b"\xa8?B\x02\x8b$\x10[U;i\x13?6h?&h[?\"\x14j?\xa2M\x7fB\x14F\x9aQ?&"
40 b'\xa8?B\x02\x8b$\x10[U;i\x13?6h?&h[?"\x14j?\xa2M\x7fB\x14F\x9aQ?&'
42 b"\x842?\x0b\x89\"\x82??!?\x9c!\x9c2l??{N\x8bW\x9dY\xb4\t/\x1c?="
41 b'\x842?\x0b\x89"\x82??!?\x9c!\x9c2l??{N\x8bW\x9dY\xb4\t/\x1c?='
43 b"\x9b?}????\xa9*;9!?\x83\x91?[?\\v*?D\x04\'`EpNp\xa2X\'U?pVq\"Sw."
42 b"\x9b?}????\xa9*;9!?\x83\x91?[?\\v*?D\x04'`EpNp\xa2X'U?pVq\"Sw."
44 b"\x1e?\x08\x01D?jw????\xbc??7{|\x9b?\x89$\x01??W@\x15\x9c\x05q`Lt/"
43 b"\x1e?\x08\x01D?jw????\xbc??7{|\x9b?\x89$\x01??W@\x15\x9c\x05q`Lt/"
45 b"\x97?\x94\xa1d?\x18~?\x18?\x18W[%\xb0?\x83??\x14\x88\x8dB?\xa6H"
44 b"\x97?\x94\xa1d?\x18~?\x18?\x18W[%\xb0?\x83??\x14\x88\x8dB?\xa6H"
46 b"\tL\tl\x19>/\x01`\xac\xabx?\x9cl\nx\xb0\x98\x07\x95\x88D$\"q["
45 b'\tL\tl\x19>/\x01`\xac\xabx?\x9cl\nx\xb0\x98\x07\x95\x88D$"q['
47 b"\x19?d\x00(o\n\xa0??\x7f\xb9\xa4?\x1bF\x1f\x8e\xac\xa8?j??eUU}?.?"
46 b"\x19?d\x00(o\n\xa0??\x7f\xb9\xa4?\x1bF\x1f\x8e\xac\xa8?j??eUU}?.?"
48 b"\x9f\x8cE??x\x94??\r\xbdtoJU5\"0N\x10U?\x00??V\t\x02\x9f\x81?U?"
47 b'\x9f\x8cE??x\x94??\r\xbdtoJU5"0N\x10U?\x00??V\t\x02\x9f\x81?U?'
49 b"\x00\x9eM\xae2?r\x9b7\x83\x82\x8aP3????.?&\"?\xb7ZP \x0c<?O"
48 b'\x00\x9eM\xae2?r\x9b7\x83\x82\x8aP3????.?&"?\xb7ZP \x0c<?O'
50 b"\xa5\t}\xb8?\x99\xa6?\x87?\x1di|/\xa0??0\xbe\x1fp?d&\x1a\xad"
49 b"\xa5\t}\xb8?\x99\xa6?\x87?\x1di|/\xa0??0\xbe\x1fp?d&\x1a\xad"
51 b"\x95\x8a\x07?\t*\x10??b:?d?.\x13C\x8a?\x12\xbe\xbf\x8e?{???"
50 b"\x95\x8a\x07?\t*\x10??b:?d?.\x13C\x8a?\x12\xbe\xbf\x8e?{???"
52 b"\x08?\x80\xa7\x13+d\x13>J?\x80\x15T\x95\x9a\x00??S\x8c\r?\xa1"
51 b"\x08?\x80\xa7\x13+d\x13>J?\x80\x15T\x95\x9a\x00??S\x8c\r?\xa1"
53 b"\x03\x07?\x96\x9b\xa7\xab=E??\xa4\xb3?\x19q??B\x91=\x8d??k?J"
52 b"\x03\x07?\x96\x9b\xa7\xab=E??\xa4\xb3?\x19q??B\x91=\x8d??k?J"
54 b"\x0bV\"??\xf7x?\xa1\x00?\\.\x87\x87???\x02F@D\x99],??\x10#?X"
53 b'\x0bV"??\xf7x?\xa1\x00?\\.\x87\x87???\x02F@D\x99],??\x10#?X'
55 b"\xb7=\xb9\x10?Z\x1by???cI??\x1ag?\x92\xbc?T?t[\x92\x81?<_\x17~"
54 b"\xb7=\xb9\x10?Z\x1by???cI??\x1ag?\x92\xbc?T?t[\x92\x81?<_\x17~"
56 b"\x92\x88?H%?\x10Q\x02\x9f\n\x81qQ\x0bm?\x1bX?\xb1AK\xa6\x9e\xb9?u"
55 b"\x92\x88?H%?\x10Q\x02\x9f\n\x81qQ\x0bm?\x1bX?\xb1AK\xa6\x9e\xb9?u"
57 b"\xb2?1\xbe|/\x92M@\xa2!F?\xa9>\"\r<DT?>\x92\x8e?>\x9a9Qv\x127?a"
56 b'\xb2?1\xbe|/\x92M@\xa2!F?\xa9>"\r<DT?>\x92\x8e?>\x9a9Qv\x127?a'
58 b"\xac?Y?8?:??]X???9\x80\xb7?u?\x0b#BZ\x8d=\x1d?p\x00\x00\x00\x00"
57 b"\xac?Y?8?:??]X???9\x80\xb7?u?\x0b#BZ\x8d=\x1d?p\x00\x00\x00\x00"
59 b"IEND\xaeB`\x82")
58 b"IEND\xaeB`\x82"
59 )
60 return FileNode(filename, content=data)
60 return FileNode(filename, content=data)
61
61 return node_maker
62 return node_maker
62
63
63
64
64 class TestNodeBasics:
65 class TestNodeBasics:
65
66 @pytest.mark.parametrize("path", ["/foo", "/foo/bar"])
66 @pytest.mark.parametrize("path", ['/foo', '/foo/bar'])
67 @pytest.mark.parametrize("kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"])
67 @pytest.mark.parametrize(
68 "kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"])
69 def test_init_wrong_paths(self, path, kind):
68 def test_init_wrong_paths(self, path, kind):
70 """
69 """
71 Cannot initialize Node objects with path with slash at the beginning.
70 Cannot initialize Node objects with path with slash at the beginning.
@@ -74,44 +73,46 b' class TestNodeBasics:'
74 with pytest.raises(NodeError):
73 with pytest.raises(NodeError):
75 Node(path, kind)
74 Node(path, kind)
76
75
77 @pytest.mark.parametrize("path", ['path', 'some/path'])
76 @pytest.mark.parametrize("path", ["path", "some/path"])
78 @pytest.mark.parametrize(
77 @pytest.mark.parametrize("kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"])
79 "kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"])
80 def test_name(self, path, kind):
78 def test_name(self, path, kind):
81 path = safe_bytes(path)
79 path = safe_bytes(path)
82 node = Node(path, kind)
80 node = Node(path, kind)
83 assert node.name == 'path'
81 assert node.name == "path"
84
82
85 def test_name_root(self):
83 def test_name_root(self):
86 node = Node(b'', NodeKind.DIR)
84 node = Node(b"", NodeKind.DIR)
87 assert node.name == ''
85 assert node.name == ""
88
86
89 def test_root_node_cannot_be_file(self):
87 def test_root_node_cannot_be_file(self):
90 with pytest.raises(NodeError):
88 with pytest.raises(NodeError):
91 Node(b'', NodeKind.FILE)
89 Node(b"", NodeKind.FILE)
92
90
93 def test_kind_setter(self):
91 def test_kind_setter(self):
94 node = Node(b'', NodeKind.DIR)
92 node = Node(b"", NodeKind.DIR)
95 with pytest.raises(NodeError):
93 with pytest.raises(NodeError):
96 node.kind = NodeKind.FILE
94 node.kind = NodeKind.FILE
97
95
98 def test_compare_equal(self):
96 def test_compare_equal(self):
99 node1 = FileNode(b'test', content=b'')
97 node1 = FileNode(b"test", content=b"")
100 node2 = FileNode(b'test', content=b'')
98 node2 = FileNode(b"test", content=b"")
101 assert node1 == node2
99 assert node1 == node2
102 assert not node1 != node2
100 assert not node1 != node2
103
101
104 def test_compare_unequal(self):
102 def test_compare_unequal(self):
105 node1 = FileNode(b'test', content=b'a')
103 node1 = FileNode(b"test", content=b"a")
106 node2 = FileNode(b'test', content=b'b')
104 node2 = FileNode(b"test", content=b"b")
107 assert node1 != node2
105 assert node1 != node2
108 assert not node1 == node2
106 assert not node1 == node2
109
107
110 @pytest.mark.parametrize("node_path, expected_parent_path", [
108 @pytest.mark.parametrize(
111 ('', b''),
109 "node_path, expected_parent_path",
112 ('some/path/', b'some/'),
110 [
113 ('some/longer/path/', b'some/longer/'),
111 ("", b""),
114 ])
112 ("some/path/", b"some/"),
113 ("some/longer/path/", b"some/longer/"),
114 ],
115 )
115 def test_parent_path_new(self, node_path, expected_parent_path):
116 def test_parent_path_new(self, node_path, expected_parent_path):
116 """
117 """
117 Tests if node's parent path are properly computed.
118 Tests if node's parent path are properly computed.
@@ -119,11 +120,10 b' class TestNodeBasics:'
119 node_path = safe_bytes(node_path)
120 node_path = safe_bytes(node_path)
120 node = Node(node_path, NodeKind.DIR)
121 node = Node(node_path, NodeKind.DIR)
121 parent_path = node.get_parent_path()
122 parent_path = node.get_parent_path()
122 assert (parent_path.endswith(b'/') or
123 assert parent_path.endswith(b"/") or node.is_root() and parent_path == b""
123 node.is_root() and parent_path == b'')
124 assert parent_path == expected_parent_path
124 assert parent_path == expected_parent_path
125
125
126 '''
126 """
127 def _test_trailing_slash(self, path):
127 def _test_trailing_slash(self, path):
128 if not path.endswith('/'):
128 if not path.endswith('/'):
129 pytest.fail("Trailing slash tests needs paths to end with slash")
129 pytest.fail("Trailing slash tests needs paths to end with slash")
@@ -134,22 +134,22 b' class TestNodeBasics:'
134 def test_trailing_slash(self):
134 def test_trailing_slash(self):
135 for path in ('/', 'foo/', 'foo/bar/', 'foo/bar/biz/'):
135 for path in ('/', 'foo/', 'foo/bar/', 'foo/bar/biz/'):
136 self._test_trailing_slash(path)
136 self._test_trailing_slash(path)
137 '''
137 """
138
138
139 def test_is_file(self):
139 def test_is_file(self):
140 node = Node(b'any', NodeKind.FILE)
140 node = Node(b"any", NodeKind.FILE)
141 assert node.is_file()
141 assert node.is_file()
142
142
143 node = FileNode(b'any')
143 node = FileNode(b"any")
144 assert node.is_file()
144 assert node.is_file()
145 with pytest.raises(AttributeError):
145 with pytest.raises(AttributeError):
146 node.nodes # noqa
146 node.nodes # noqa
147
147
148 def test_is_dir(self):
148 def test_is_dir(self):
149 node = Node(b'any_dir', NodeKind.DIR)
149 node = Node(b"any_dir", NodeKind.DIR)
150 assert node.is_dir()
150 assert node.is_dir()
151
151
152 node = DirNode(b'any_dir')
152 node = DirNode(b"any_dir")
153
153
154 assert node.is_dir()
154 assert node.is_dir()
155 with pytest.raises(NodeError):
155 with pytest.raises(NodeError):
@@ -157,14 +157,14 b' class TestNodeBasics:'
157
157
158 def test_dir_node_iter(self):
158 def test_dir_node_iter(self):
159 nodes = [
159 nodes = [
160 DirNode(b'docs'),
160 DirNode(b"docs"),
161 DirNode(b'tests'),
161 DirNode(b"tests"),
162 FileNode(b'bar'),
162 FileNode(b"bar"),
163 FileNode(b'foo'),
163 FileNode(b"foo"),
164 FileNode(b'readme.txt'),
164 FileNode(b"readme.txt"),
165 FileNode(b'setup.py'),
165 FileNode(b"setup.py"),
166 ]
166 ]
167 dirnode = DirNode(b'', nodes=nodes)
167 dirnode = DirNode(b"", nodes=nodes)
168 for node in dirnode:
168 for node in dirnode:
169 assert node == dirnode.get_node(node.path)
169 assert node == dirnode.get_node(node.path)
170
170
@@ -172,15 +172,15 b' class TestNodeBasics:'
172 """
172 """
173 Without link to commit nodes should raise NodeError.
173 Without link to commit nodes should raise NodeError.
174 """
174 """
175 node = FileNode(b'anything')
175 node = FileNode(b"anything")
176 with pytest.raises(NodeError):
176 with pytest.raises(NodeError):
177 node.state # noqa
177 node.state # noqa
178 node = DirNode(b'anything')
178 node = DirNode(b"anything")
179 with pytest.raises(NodeError):
179 with pytest.raises(NodeError):
180 node.state # noqa
180 node.state # noqa
181
181
182 def test_file_node_stat(self):
182 def test_file_node_stat(self):
183 node = FileNode(b'foobar', b'empty... almost')
183 node = FileNode(b"foobar", b"empty... almost")
184 mode = node.mode # default should be 0100644
184 mode = node.mode # default should be 0100644
185 assert mode & stat.S_IRUSR
185 assert mode & stat.S_IRUSR
186 assert mode & stat.S_IWUSR
186 assert mode & stat.S_IWUSR
@@ -193,36 +193,36 b' class TestNodeBasics:'
193 assert not mode & stat.S_IXOTH
193 assert not mode & stat.S_IXOTH
194
194
195 def test_file_node_is_executable(self):
195 def test_file_node_is_executable(self):
196 node = FileNode(b'foobar', b'empty... almost', mode=0o100755)
196 node = FileNode(b"foobar", b"empty... almost", mode=0o100755)
197 assert node.is_executable
197 assert node.is_executable
198
198
199 node = FileNode(b'foobar', b'empty... almost', mode=0o100500)
199 node = FileNode(b"foobar", b"empty... almost", mode=0o100500)
200 assert node.is_executable
200 assert node.is_executable
201
201
202 node = FileNode(b'foobar', b'empty... almost', mode=0o100644)
202 node = FileNode(b"foobar", b"empty... almost", mode=0o100644)
203 assert not node.is_executable
203 assert not node.is_executable
204
204
205 def test_file_node_is_not_symlink(self):
205 def test_file_node_is_not_symlink(self):
206 node = FileNode(b'foobar', b'empty...')
206 node = FileNode(b"foobar", b"empty...")
207 assert not node.is_link()
207 assert not node.is_link()
208
208
209 def test_mimetype(self):
209 def test_mimetype(self):
210 py_node = FileNode(b'test.py')
210 py_node = FileNode(b"test.py")
211 tar_node = FileNode(b'test.tar.gz')
211 tar_node = FileNode(b"test.tar.gz")
212
212
213 ext = 'CustomExtension'
213 ext = "CustomExtension"
214
214
215 my_node2 = FileNode(b'myfile2')
215 my_node2 = FileNode(b"myfile2")
216 my_node2._mimetype = [ext]
216 my_node2._mimetype = [ext]
217
217
218 my_node3 = FileNode(b'myfile3')
218 my_node3 = FileNode(b"myfile3")
219 my_node3._mimetype = [ext, ext]
219 my_node3._mimetype = [ext, ext]
220
220
221 assert py_node.mimetype == 'text/x-python'
221 assert py_node.mimetype == "text/x-python"
222 assert py_node.get_mimetype() == ('text/x-python', None)
222 assert py_node.get_mimetype() == ("text/x-python", None)
223
223
224 assert tar_node.mimetype == 'application/x-tar'
224 assert tar_node.mimetype == "application/x-tar"
225 assert tar_node.get_mimetype() == ('application/x-tar', 'gzip')
225 assert tar_node.get_mimetype() == ("application/x-tar", "gzip")
226
226
227 with pytest.raises(NodeError):
227 with pytest.raises(NodeError):
228 my_node2.get_mimetype()
228 my_node2.get_mimetype()
@@ -232,47 +232,45 b' class TestNodeBasics:'
232
232
233 def test_lines_counts(self):
233 def test_lines_counts(self):
234 lines = [
234 lines = [
235 b'line1\n',
235 b"line1\n",
236 b'line2\n',
236 b"line2\n",
237 b'line3\n',
237 b"line3\n",
238 b'\n',
238 b"\n",
239 b'\n',
239 b"\n",
240 b'line4\n',
240 b"line4\n",
241 ]
241 ]
242 py_node = FileNode(b'test.py', b''.join(lines))
242 py_node = FileNode(b"test.py", b"".join(lines))
243
243
244 assert (len(lines), len(lines)) == py_node.lines()
244 assert (len(lines), len(lines)) == py_node.lines()
245 assert (len(lines), len(lines) - 2) == py_node.lines(count_empty=True)
245 assert (len(lines), len(lines) - 2) == py_node.lines(count_empty=True)
246
246
247 def test_lines_no_newline(self):
247 def test_lines_no_newline(self):
248 py_node = FileNode(b'test.py', b'oneline')
248 py_node = FileNode(b"test.py", b"oneline")
249
249
250 assert (1, 1) == py_node.lines()
250 assert (1, 1) == py_node.lines()
251 assert (1, 1) == py_node.lines(count_empty=True)
251 assert (1, 1) == py_node.lines(count_empty=True)
252
252
253
253
254 class TestNodeContent(object):
254 class TestNodeContent(object):
255
256 def test_if_binary(self, binary_filenode):
255 def test_if_binary(self, binary_filenode):
257 filenode = binary_filenode(b'calendar.jpg')
256 filenode = binary_filenode(b"calendar.jpg")
258 assert filenode.is_binary
257 assert filenode.is_binary
259
258
260 def test_binary_line_counts(self, binary_filenode):
259 def test_binary_line_counts(self, binary_filenode):
261 tar_node = binary_filenode(b'archive.tar.gz')
260 tar_node = binary_filenode(b"archive.tar.gz")
262 assert (0, 0) == tar_node.lines(count_empty=True)
261 assert (0, 0) == tar_node.lines(count_empty=True)
263
262
264 def test_binary_mimetype(self, binary_filenode):
263 def test_binary_mimetype(self, binary_filenode):
265 tar_node = binary_filenode(b'archive.tar.gz')
264 tar_node = binary_filenode(b"archive.tar.gz")
266 assert tar_node.mimetype == 'application/x-tar'
265 assert tar_node.mimetype == "application/x-tar"
267
266
268
267
269 @pytest.mark.usefixtures("vcs_repository_support")
268 @pytest.mark.usefixtures("vcs_repository_support")
270 class TestNodesCommits(BackendTestMixin):
269 class TestNodesCommits(BackendTestMixin):
271
272 def test_node_last_commit(self, generate_repo_with_commits):
270 def test_node_last_commit(self, generate_repo_with_commits):
273 repo = generate_repo_with_commits(20)
271 repo = generate_repo_with_commits(20)
274 last_commit = repo.get_commit()
272 last_commit = repo.get_commit()
275
273
276 for x in range(3):
274 for x in range(3):
277 node = last_commit.get_node(f'file_{x}.txt')
275 node = last_commit.get_node(f"file_{x}.txt")
278 assert node.last_commit == repo[x]
276 assert node.last_commit == repo[x]
@@ -114,9 +114,7 b' class TestRepositoryBase(BackendTestMixi'
114 assert len(self.repo.get_hook_location()) != 0
114 assert len(self.repo.get_hook_location()) != 0
115
115
116 def test_last_change(self, local_dt_to_utc):
116 def test_last_change(self, local_dt_to_utc):
117 assert self.repo.last_change >= local_dt_to_utc(
117 assert self.repo.last_change >= local_dt_to_utc(datetime.datetime(2010, 1, 1, 21, 0))
118 datetime.datetime(2010, 1, 1, 21, 0)
119 )
120
118
121 def test_last_change_in_empty_repository(self, vcsbackend, local_dt_to_utc):
119 def test_last_change_in_empty_repository(self, vcsbackend, local_dt_to_utc):
122 delta = datetime.timedelta(seconds=1)
120 delta = datetime.timedelta(seconds=1)
@@ -195,9 +193,7 b' class TestRepositoryCompare:'
195 @pytest.mark.parametrize("merge", [True, False])
193 @pytest.mark.parametrize("merge", [True, False])
196 def test_compare_commits_of_same_repository(self, vcsbackend, merge):
194 def test_compare_commits_of_same_repository(self, vcsbackend, merge):
197 target_repo = vcsbackend.create_repo(number_of_commits=5)
195 target_repo = vcsbackend.create_repo(number_of_commits=5)
198 target_repo.compare(
196 target_repo.compare(target_repo[1].raw_id, target_repo[3].raw_id, target_repo, merge=merge)
199 target_repo[1].raw_id, target_repo[3].raw_id, target_repo, merge=merge
200 )
201
197
202 @pytest.mark.xfail_backends("svn")
198 @pytest.mark.xfail_backends("svn")
203 @pytest.mark.parametrize("merge", [True, False])
199 @pytest.mark.parametrize("merge", [True, False])
@@ -209,9 +205,7 b' class TestRepositoryCompare:'
209 vcsbackend.add_file(source_repo, b"newfile", b"somecontent")
205 vcsbackend.add_file(source_repo, b"newfile", b"somecontent")
210 source_commit = source_repo.get_commit()
206 source_commit = source_repo.get_commit()
211
207
212 target_repo.compare(
208 target_repo.compare(target_repo[1].raw_id, source_repo[3].raw_id, source_repo, merge=merge)
213 target_repo[1].raw_id, source_repo[3].raw_id, source_repo, merge=merge
214 )
215
209
216 @pytest.mark.xfail_backends("svn")
210 @pytest.mark.xfail_backends("svn")
217 @pytest.mark.parametrize("merge", [True, False])
211 @pytest.mark.parametrize("merge", [True, False])
@@ -351,9 +345,7 b' class TestRepositoryMerge(object):'
351 "merge message 1",
345 "merge message 1",
352 dry_run=False,
346 dry_run=False,
353 )
347 )
354 expected_merge_response = MergeResponse(
348 expected_merge_response = MergeResponse(True, True, merge_response.merge_ref, MergeFailureReason.NONE)
355 True, True, merge_response.merge_ref, MergeFailureReason.NONE
356 )
357 assert merge_response == expected_merge_response
349 assert merge_response == expected_merge_response
358
350
359 target_repo = backends.get_backend(vcsbackend.alias)(self.target_repo.path)
351 target_repo = backends.get_backend(vcsbackend.alias)(self.target_repo.path)
@@ -385,9 +377,7 b' class TestRepositoryMerge(object):'
385 "merge message 2",
377 "merge message 2",
386 dry_run=False,
378 dry_run=False,
387 )
379 )
388 expected_merge_response = MergeResponse(
380 expected_merge_response = MergeResponse(True, True, merge_response.merge_ref, MergeFailureReason.NONE)
389 True, True, merge_response.merge_ref, MergeFailureReason.NONE
390 )
391 assert merge_response == expected_merge_response
381 assert merge_response == expected_merge_response
392
382
393 target_repo = backends.get_backend(vcsbackend.alias)(self.target_repo.path)
383 target_repo = backends.get_backend(vcsbackend.alias)(self.target_repo.path)
@@ -422,9 +412,9 b' class TestRepositoryMerge(object):'
422
412
423 # Multiple merges may differ in their commit id. Therefore, we set the
413 # Multiple merges may differ in their commit id. Therefore, we set the
424 # commit id to `None` before comparing the merge responses.
414 # commit id to `None` before comparing the merge responses.
425 merge_response.merge_ref.commit_id = 'abcdeabcde'
415 merge_response.merge_ref.commit_id = "abcdeabcde"
426
416
427 merge_response_update.merge_ref.commit_id = 'abcdeabcde'
417 merge_response_update.merge_ref.commit_id = "abcdeabcde"
428
418
429 assert merge_response == merge_response_update
419 assert merge_response == merge_response_update
430 assert merge_response.possible is True
420 assert merge_response.possible is True
@@ -436,9 +426,7 b' class TestRepositoryMerge(object):'
436 def test_merge_conflict(self, vcsbackend, dry_run):
426 def test_merge_conflict(self, vcsbackend, dry_run):
437 self.prepare_for_conflict(vcsbackend)
427 self.prepare_for_conflict(vcsbackend)
438
428
439 expected_merge_response = MergeResponse(
429 expected_merge_response = MergeResponse(False, False, None, MergeFailureReason.MERGE_FAILED)
440 False, False, None, MergeFailureReason.MERGE_FAILED
441 )
442
430
443 merge_response = self.target_repo.merge(
431 merge_response = self.target_repo.merge(
444 self.repo_id,
432 self.repo_id,
@@ -491,9 +479,7 b' class TestRepositoryMerge(object):'
491 def test_merge_missing_source_reference(self, vcsbackend):
479 def test_merge_missing_source_reference(self, vcsbackend):
492 self.prepare_for_success(vcsbackend)
480 self.prepare_for_success(vcsbackend)
493
481
494 source_ref = Reference(
482 source_ref = Reference(self.source_ref.type, "not_existing", self.source_ref.commit_id)
495 self.source_ref.type, "not_existing", self.source_ref.commit_id
496 )
497 expected_merge_response = MergeResponse(
483 expected_merge_response = MergeResponse(
498 False,
484 False,
499 False,
485 False,
@@ -523,9 +509,7 b' class TestRepositoryMerge(object):'
523 metadata={"exception": "ErrorForTest"},
509 metadata={"exception": "ErrorForTest"},
524 )
510 )
525
511
526 with mock.patch.object(
512 with mock.patch.object(self.target_repo, "_merge_repo", side_effect=RepositoryError()):
527 self.target_repo, "_merge_repo", side_effect=RepositoryError()
528 ):
529 merge_response = self.target_repo.merge(
513 merge_response = self.target_repo.merge(
530 self.repo_id,
514 self.repo_id,
531 self.workspace_id,
515 self.workspace_id,
@@ -559,9 +543,7 b' class TestRepositoryMerge(object):'
559 workspace_id = "test-errors-in-merge"
543 workspace_id = "test-errors-in-merge"
560 repo_id = repo_id_generator(workspace_id)
544 repo_id = repo_id_generator(workspace_id)
561 with pytest.raises(ValueError):
545 with pytest.raises(ValueError):
562 repo.merge(
546 repo.merge(repo_id, workspace_id, ref, self, ref, "user name", "user@email.com")
563 repo_id, workspace_id, ref, self, ref, "user name", "user@email.com"
564 )
565
547
566
548
567 @pytest.mark.usefixtures("vcs_repository_support")
549 @pytest.mark.usefixtures("vcs_repository_support")
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -47,13 +46,13 b' def head(repo):'
47
46
48
47
49 def test_init_fails_if_path_does_not_exist():
48 def test_init_fails_if_path_does_not_exist():
50 path = os.path.join(TEST_DIR, 'i-do-not-exist')
49 path = os.path.join(TEST_DIR, "i-do-not-exist")
51 with pytest.raises(VCSError):
50 with pytest.raises(VCSError):
52 SubversionRepository(path)
51 SubversionRepository(path)
53
52
54
53
55 def test_init_fails_if_path_is_not_a_valid_repository(tmpdir):
54 def test_init_fails_if_path_is_not_a_valid_repository(tmpdir):
56 path = str(tmpdir.mkdir('unicode Γ€'))
55 path = str(tmpdir.mkdir("unicode Γ€"))
57 with pytest.raises(VCSError):
56 with pytest.raises(VCSError):
58 SubversionRepository(path)
57 SubversionRepository(path)
59
58
@@ -61,17 +60,14 b' def test_init_fails_if_path_is_not_a_val'
61 def test_repo_clone(vcsbackend, reposerver):
60 def test_repo_clone(vcsbackend, reposerver):
62 source = vcsbackend.create_repo(number_of_commits=3)
61 source = vcsbackend.create_repo(number_of_commits=3)
63 reposerver.serve(source)
62 reposerver.serve(source)
64 repo = SubversionRepository(
63 repo = SubversionRepository(vcsbackend.new_repo_path(), create=True, src_url=reposerver.url)
65 vcsbackend.new_repo_path(),
66 create=True,
67 src_url=reposerver.url)
68
64
69 assert source.commit_ids == repo.commit_ids
65 assert source.commit_ids == repo.commit_ids
70 assert source[0].message == repo[0].message
66 assert source[0].message == repo[0].message
71
67
72
68
73 def test_latest_commit(head):
69 def test_latest_commit(head):
74 assert head.raw_id == '393'
70 assert head.raw_id == "393"
75
71
76
72
77 def test_commit_description(head):
73 def test_commit_description(head):
@@ -79,18 +75,22 b' def test_commit_description(head):'
79
75
80
76
81 def test_commit_author(head):
77 def test_commit_author(head):
82 assert head.author == 'marcin'
78 assert head.author == "marcin"
83
79
84
80
85 @pytest.mark.parametrize("filename, content, mime_type", [
81 @pytest.mark.parametrize(
86 (b'test.txt', b'Text content\n', None),
82 "filename, content, mime_type",
87 (b'test.bin', b'\0 binary \0', 'application/octet-stream'),
83 [
88 ], ids=['text', 'binary'])
84 (b"test.txt", b"Text content\n", None),
85 (b"test.bin", b"\0 binary \0", "application/octet-stream"),
86 ],
87 ids=["text", "binary"],
88 )
89 def test_sets_mime_type_correctly(vcsbackend, filename, content, mime_type):
89 def test_sets_mime_type_correctly(vcsbackend, filename, content, mime_type):
90 repo = vcsbackend.create_repo()
90 repo = vcsbackend.create_repo()
91 vcsbackend.ensure_file(filename, content)
91 vcsbackend.ensure_file(filename, content)
92 file_properties = repo._remote.node_properties(filename, 1)
92 file_properties = repo._remote.node_properties(filename, 1)
93 assert file_properties.get('svn:mime-type') == mime_type
93 assert file_properties.get("svn:mime-type") == mime_type
94
94
95
95
96 def test_slice_access(repo):
96 def test_slice_access(repo):
@@ -100,7 +100,7 b' def test_slice_access(repo):'
100 end = start + page_size - 1
100 end = start + page_size - 1
101
101
102 commits = list(repo[start:end])
102 commits = list(repo[start:end])
103 assert [commit.raw_id for commit in commits] == ['1', '2', '3', '4']
103 assert [commit.raw_id for commit in commits] == ["1", "2", "3", "4"]
104
104
105
105
106 def test_walk_changelog_page(repo):
106 def test_walk_changelog_page(repo):
@@ -110,14 +110,14 b' def test_walk_changelog_page(repo):'
110 end = start + page_size - 1
110 end = start + page_size - 1
111
111
112 commits = list(repo[start:end])
112 commits = list(repo[start:end])
113 changelog = [
113 changelog = ["r%s, %s, %s" % (c.raw_id, c.author, c.message) for c in commits]
114 'r%s, %s, %s' % (c.raw_id, c.author, c.message) for c in commits]
115
114
116 expexted_messages = [
115 expexted_messages = [
117 'r1, marcin, initial import',
116 "r1, marcin, initial import",
118 'r2, marcin, hg ignore',
117 "r2, marcin, hg ignore",
119 'r3, marcin, Pip standards refactor',
118 "r3, marcin, Pip standards refactor",
120 'r4, marcin, Base repository few new functions added']
119 "r4, marcin, Base repository few new functions added",
120 ]
121 assert changelog == expexted_messages
121 assert changelog == expexted_messages
122
122
123
123
@@ -128,68 +128,68 b' def test_read_full_file_tree(head):'
128
128
129
129
130 def test_topnode_files_attribute(head):
130 def test_topnode_files_attribute(head):
131 topnode = head.get_node('')
131 topnode = head.get_node("")
132 topnode.files
132 topnode.files
133
133
134
134
135
135 @pytest.mark.parametrize(
136
136 "filename, content, branch, mime_type",
137 @pytest.mark.parametrize("filename, content, branch, mime_type", [
137 [
138 ('branches/plain/test.txt', b'Text content\n', 'plain', None),
138 ("branches/plain/test.txt", b"Text content\n", "plain", None),
139 ('branches/uniΓ§ΓΆβˆ‚e/test.bin', b'\0 binary \0', 'uniΓ§ΓΆβˆ‚e', 'application/octet-stream'),
139 ("branches/uniΓ§ΓΆβˆ‚e/test.bin", b"\0 binary \0", "uniΓ§ΓΆβˆ‚e", "application/octet-stream"),
140 ], ids=['text', 'binary'])
140 ],
141 ids=["text", "binary"],
142 )
141 def test_unicode_refs(vcsbackend, filename, content, branch, mime_type):
143 def test_unicode_refs(vcsbackend, filename, content, branch, mime_type):
142 filename = safe_bytes(filename)
144 filename = safe_bytes(filename)
143 repo = vcsbackend.create_repo()
145 repo = vcsbackend.create_repo()
144 vcsbackend.ensure_file(filename, content)
146 vcsbackend.ensure_file(filename, content)
145 with mock.patch(("rhodecode.lib.vcs.backends.svn.repository"
147 with mock.patch(
146 ".SubversionRepository._patterns_from_section"),
148 ("rhodecode.lib.vcs.backends.svn.repository" ".SubversionRepository._patterns_from_section"),
147 return_value=['branches/*']):
149 return_value=["branches/*"],
148 assert f'branches/{branch}' in repo.branches
150 ):
151 assert f"branches/{branch}" in repo.branches
149
152
150
153
151 def test_compatible_version(monkeypatch, vcsbackend):
154 def test_compatible_version(monkeypatch, vcsbackend):
152 monkeypatch.setattr(settings, 'SVN_COMPATIBLE_VERSION', 'pre-1.8-compatible')
155 monkeypatch.setattr(settings, "SVN_COMPATIBLE_VERSION", "pre-1.8-compatible")
153 path = vcsbackend.new_repo_path()
156 path = vcsbackend.new_repo_path()
154 SubversionRepository(path, create=True)
157 SubversionRepository(path, create=True)
155 with open(f'{path}/db/format') as f:
158 with open(f"{path}/db/format") as f:
156 first_line = f.readline().strip()
159 first_line = f.readline().strip()
157 assert first_line == '4'
160 assert first_line == "4"
158
161
159
162
160 def test_invalid_compatible_version(monkeypatch, vcsbackend):
163 def test_invalid_compatible_version(monkeypatch, vcsbackend):
161 monkeypatch.setattr(settings, 'SVN_COMPATIBLE_VERSION', 'i-am-an-invalid-setting')
164 monkeypatch.setattr(settings, "SVN_COMPATIBLE_VERSION", "i-am-an-invalid-setting")
162 path = vcsbackend.new_repo_path()
165 path = vcsbackend.new_repo_path()
163 with pytest.raises(Exception):
166 with pytest.raises(Exception):
164 SubversionRepository(path, create=True)
167 SubversionRepository(path, create=True)
165
168
166
169
167 class TestSVNCommit(object):
170 class TestSVNCommit(object):
168
169 @pytest.fixture(autouse=True)
171 @pytest.fixture(autouse=True)
170 def prepare(self, repo):
172 def prepare(self, repo):
171 self.repo = repo
173 self.repo = repo
172
174
173 def test_file_history_from_commits(self):
175 def test_file_history_from_commits(self):
174 node = self.repo[10].get_node('setup.py')
176 node = self.repo[10].get_node("setup.py")
175 commit_ids = [commit.raw_id for commit in node.history]
177 commit_ids = [commit.raw_id for commit in node.history]
176 assert ['8'] == commit_ids
178 assert ["8"] == commit_ids
177
179
178 node = self.repo[20].get_node('setup.py')
180 node = self.repo[20].get_node("setup.py")
179 node_ids = [commit.raw_id for commit in node.history]
181 node_ids = [commit.raw_id for commit in node.history]
180 assert ['18',
182 assert ["18", "8"] == node_ids
181 '8'] == node_ids
182
183
183 # special case we check history from commit that has this particular
184 # special case we check history from commit that has this particular
184 # file changed this means we check if it's included as well
185 # file changed this means we check if it's included as well
185 node = self.repo.get_commit('18').get_node('setup.py')
186 node = self.repo.get_commit("18").get_node("setup.py")
186 node_ids = [commit.raw_id for commit in node.history]
187 node_ids = [commit.raw_id for commit in node.history]
187 assert ['18',
188 assert ["18", "8"] == node_ids
188 '8'] == node_ids
189
189
190 def test_repo_files_content_type(self):
190 def test_repo_files_content_type(self):
191 test_commit = self.repo.get_commit(commit_idx=100)
191 test_commit = self.repo.get_commit(commit_idx=100)
192 for node in test_commit.get_node('/'):
192 for node in test_commit.get_node("/"):
193 if node.is_file():
193 if node.is_file():
194 assert type(node.content) == bytes
194 assert type(node.content) == bytes
195 assert type(node.str_content) == str
195 assert type(node.str_content) == str
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -20,8 +19,7 b''
20 import pytest
19 import pytest
21
20
22 from rhodecode.tests.vcs.conftest import BackendTestMixin
21 from rhodecode.tests.vcs.conftest import BackendTestMixin
23 from rhodecode.lib.vcs.exceptions import (
22 from rhodecode.lib.vcs.exceptions import TagAlreadyExistError, TagDoesNotExistError
24 TagAlreadyExistError, TagDoesNotExistError)
25
23
26
24
27 pytestmark = pytest.mark.backends("git", "hg")
25 pytestmark = pytest.mark.backends("git", "hg")
@@ -29,11 +27,10 b' pytestmark = pytest.mark.backends("git",'
29
27
30 @pytest.mark.usefixtures("vcs_repository_support")
28 @pytest.mark.usefixtures("vcs_repository_support")
31 class TestTags(BackendTestMixin):
29 class TestTags(BackendTestMixin):
32
33 def test_new_tag(self):
30 def test_new_tag(self):
34 tip = self.repo.get_commit()
31 tip = self.repo.get_commit()
35 tagsize = len(self.repo.tags)
32 tagsize = len(self.repo.tags)
36 tag = self.repo.tag('last-commit', 'joe', tip.raw_id)
33 tag = self.repo.tag("last-commit", "joe", tip.raw_id)
37
34
38 assert len(self.repo.tags) == tagsize + 1
35 assert len(self.repo.tags) == tagsize + 1
39 for top, __, __ in tip.walk():
36 for top, __, __ in tip.walk():
@@ -41,29 +38,29 b' class TestTags(BackendTestMixin):'
41
38
42 def test_tag_already_exist(self):
39 def test_tag_already_exist(self):
43 tip = self.repo.get_commit()
40 tip = self.repo.get_commit()
44 self.repo.tag('last-commit', 'joe', tip.raw_id)
41 self.repo.tag("last-commit", "joe", tip.raw_id)
45
42
46 with pytest.raises(TagAlreadyExistError):
43 with pytest.raises(TagAlreadyExistError):
47 self.repo.tag('last-commit', 'joe', tip.raw_id)
44 self.repo.tag("last-commit", "joe", tip.raw_id)
48
45
49 commit = self.repo.get_commit(commit_idx=0)
46 commit = self.repo.get_commit(commit_idx=0)
50 with pytest.raises(TagAlreadyExistError):
47 with pytest.raises(TagAlreadyExistError):
51 self.repo.tag('last-commit', 'jane', commit.raw_id)
48 self.repo.tag("last-commit", "jane", commit.raw_id)
52
49
53 def test_remove_tag(self):
50 def test_remove_tag(self):
54 tip = self.repo.get_commit()
51 tip = self.repo.get_commit()
55 self.repo.tag('last-commit', 'joe', tip.raw_id)
52 self.repo.tag("last-commit", "joe", tip.raw_id)
56 tagsize = len(self.repo.tags)
53 tagsize = len(self.repo.tags)
57
54
58 self.repo.remove_tag('last-commit', user='evil joe')
55 self.repo.remove_tag("last-commit", user="evil joe")
59 assert len(self.repo.tags) == tagsize - 1
56 assert len(self.repo.tags) == tagsize - 1
60
57
61 def test_remove_tag_which_does_not_exist(self):
58 def test_remove_tag_which_does_not_exist(self):
62 with pytest.raises(TagDoesNotExistError):
59 with pytest.raises(TagDoesNotExistError):
63 self.repo.remove_tag('last-commit', user='evil joe')
60 self.repo.remove_tag("last-commit", user="evil joe")
64
61
65 def test_name_with_slash(self):
62 def test_name_with_slash(self):
66 self.repo.tag('19/10/11', 'joe')
63 self.repo.tag("19/10/11", "joe")
67 assert '19/10/11' in self.repo.tags
64 assert "19/10/11" in self.repo.tags
68 self.repo.tag('rel.11', 'joe')
65 self.repo.tag("rel.11", "joe")
69 assert 'rel.11' in self.repo.tags
66 assert "rel.11" in self.repo.tags
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -34,23 +33,20 b' from rhodecode.lib.vcs.utils.paths impor'
34
33
35 @pytest.mark.usefixtures("baseapp")
34 @pytest.mark.usefixtures("baseapp")
36 class TestPaths(object):
35 class TestPaths(object):
37
38 def _test_get_dirs_for_path(self, path, expected):
36 def _test_get_dirs_for_path(self, path, expected):
39 """
37 """
40 Tests if get_dirs_for_path returns same as expected.
38 Tests if get_dirs_for_path returns same as expected.
41 """
39 """
42 expected = sorted(expected)
40 expected = sorted(expected)
43 result = sorted(get_dirs_for_path(path))
41 result = sorted(get_dirs_for_path(path))
44 assert result == expected, (
42 assert result == expected, "%s != %s which was expected result for path %s" % (result, expected, path)
45 "%s != %s which was expected result for path %s"
46 % (result, expected, path))
47
43
48 def test_get_dirs_for_path(self):
44 def test_get_dirs_for_path(self):
49 path = 'foo/bar/baz/file'
45 path = "foo/bar/baz/file"
50 paths_and_results = (
46 paths_and_results = (
51 ('foo/bar/baz/file', ['foo', 'foo/bar', 'foo/bar/baz']),
47 ("foo/bar/baz/file", ["foo", "foo/bar", "foo/bar/baz"]),
52 ('foo/bar/', ['foo', 'foo/bar']),
48 ("foo/bar/", ["foo", "foo/bar"]),
53 ('foo/bar', ['foo']),
49 ("foo/bar", ["foo"]),
54 )
50 )
55 for path, expected in paths_and_results:
51 for path, expected in paths_and_results:
56 self._test_get_dirs_for_path(path, expected)
52 self._test_get_dirs_for_path(path, expected)
@@ -59,18 +55,17 b' class TestPaths(object):'
59 new = tmpdir.strpath
55 new = tmpdir.strpath
60 assert get_scms_for_path(new) == []
56 assert get_scms_for_path(new) == []
61
57
62 os.mkdir(os.path.join(new, '.tux'))
58 os.mkdir(os.path.join(new, ".tux"))
63 assert get_scms_for_path(new) == []
59 assert get_scms_for_path(new) == []
64
60
65 os.mkdir(os.path.join(new, '.git'))
61 os.mkdir(os.path.join(new, ".git"))
66 assert set(get_scms_for_path(new)) == set(['git'])
62 assert set(get_scms_for_path(new)) == set(["git"])
67
63
68 os.mkdir(os.path.join(new, '.hg'))
64 os.mkdir(os.path.join(new, ".hg"))
69 assert set(get_scms_for_path(new)) == set(['git', 'hg'])
65 assert set(get_scms_for_path(new)) == set(["git", "hg"])
70
66
71
67
72 class TestGetScm(object):
68 class TestGetScm(object):
73
74 def test_existing_repository(self, vcs_repository_support):
69 def test_existing_repository(self, vcs_repository_support):
75 alias, repo = vcs_repository_support
70 alias, repo = vcs_repository_support
76 assert (alias, repo.path) == get_scm(repo.path)
71 assert (alias, repo.path) == get_scm(repo.path)
@@ -81,114 +76,101 b' class TestGetScm(object):'
81
76
82 def test_get_scm_error_path(self):
77 def test_get_scm_error_path(self):
83 with pytest.raises(VCSError):
78 with pytest.raises(VCSError):
84 get_scm('err')
79 get_scm("err")
85
80
86 def test_get_two_scms_for_path(self, tmpdir):
81 def test_get_two_scms_for_path(self, tmpdir):
87 multialias_repo_path = str(tmpdir)
82 multialias_repo_path = str(tmpdir)
88 git_default_branch = GitRepository.DEFAULT_BRANCH_NAME
83 git_default_branch = GitRepository.DEFAULT_BRANCH_NAME
89
84
90 subprocess.check_call(['hg', 'init', multialias_repo_path])
85 subprocess.check_call(["hg", "init", multialias_repo_path])
91 subprocess.check_call(['git', '-c', f'init.defaultBranch={git_default_branch}', 'init', multialias_repo_path])
86 subprocess.check_call(["git", "-c", f"init.defaultBranch={git_default_branch}", "init", multialias_repo_path])
92
87
93 with pytest.raises(VCSError):
88 with pytest.raises(VCSError):
94 get_scm(multialias_repo_path)
89 get_scm(multialias_repo_path)
95
90
96 def test_ignores_svn_working_copy(self, tmpdir):
91 def test_ignores_svn_working_copy(self, tmpdir):
97 tmpdir.mkdir('.svn')
92 tmpdir.mkdir(".svn")
98 with pytest.raises(VCSError):
93 with pytest.raises(VCSError):
99 get_scm(tmpdir.strpath)
94 get_scm(tmpdir.strpath)
100
95
101
96
102 class TestParseDatetime(object):
97 class TestParseDatetime(object):
103
104 def test_datetime_text(self):
98 def test_datetime_text(self):
105 assert parse_datetime('2010-04-07 21:29:41') == \
99 assert parse_datetime("2010-04-07 21:29:41") == datetime.datetime(2010, 4, 7, 21, 29, 41)
106 datetime.datetime(2010, 4, 7, 21, 29, 41)
107
100
108 def test_no_seconds(self):
101 def test_no_seconds(self):
109 assert parse_datetime('2010-04-07 21:29') == \
102 assert parse_datetime("2010-04-07 21:29") == datetime.datetime(2010, 4, 7, 21, 29)
110 datetime.datetime(2010, 4, 7, 21, 29)
111
103
112 def test_date_only(self):
104 def test_date_only(self):
113 assert parse_datetime('2010-04-07') == \
105 assert parse_datetime("2010-04-07") == datetime.datetime(2010, 4, 7)
114 datetime.datetime(2010, 4, 7)
115
106
116 def test_another_format(self):
107 def test_another_format(self):
117 assert parse_datetime('04/07/10 21:29:41') == \
108 assert parse_datetime("04/07/10 21:29:41") == datetime.datetime(2010, 4, 7, 21, 29, 41)
118 datetime.datetime(2010, 4, 7, 21, 29, 41)
119
109
120 def test_now(self):
110 def test_now(self):
121 assert parse_datetime('now') - datetime.datetime.now() < \
111 assert parse_datetime("now") - datetime.datetime.now() < datetime.timedelta(seconds=1)
122 datetime.timedelta(seconds=1)
123
112
124 def test_today(self):
113 def test_today(self):
125 today = datetime.date.today()
114 today = datetime.date.today()
126 assert parse_datetime('today') == \
115 assert parse_datetime("today") == datetime.datetime(*today.timetuple()[:3])
127 datetime.datetime(*today.timetuple()[:3])
128
116
129 def test_yesterday(self):
117 def test_yesterday(self):
130 yesterday = datetime.date.today() - datetime.timedelta(days=1)
118 yesterday = datetime.date.today() - datetime.timedelta(days=1)
131 assert parse_datetime('yesterday') == \
119 assert parse_datetime("yesterday") == datetime.datetime(*yesterday.timetuple()[:3])
132 datetime.datetime(*yesterday.timetuple()[:3])
133
120
134 def test_tomorrow(self):
121 def test_tomorrow(self):
135 tomorrow = datetime.date.today() + datetime.timedelta(days=1)
122 tomorrow = datetime.date.today() + datetime.timedelta(days=1)
136 args = tomorrow.timetuple()[:3] + (23, 59, 59)
123 args = tomorrow.timetuple()[:3] + (23, 59, 59)
137 assert parse_datetime('tomorrow') == datetime.datetime(*args)
124 assert parse_datetime("tomorrow") == datetime.datetime(*args)
138
125
139 def test_days(self):
126 def test_days(self):
140 timestamp = datetime.datetime.today() - datetime.timedelta(days=3)
127 timestamp = datetime.datetime.today() - datetime.timedelta(days=3)
141 args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
128 args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
142 expected = datetime.datetime(*args)
129 expected = datetime.datetime(*args)
143 assert parse_datetime('3d') == expected
130 assert parse_datetime("3d") == expected
144 assert parse_datetime('3 d') == expected
131 assert parse_datetime("3 d") == expected
145 assert parse_datetime('3 day') == expected
132 assert parse_datetime("3 day") == expected
146 assert parse_datetime('3 days') == expected
133 assert parse_datetime("3 days") == expected
147
134
148 def test_weeks(self):
135 def test_weeks(self):
149 timestamp = datetime.datetime.today() - datetime.timedelta(days=3 * 7)
136 timestamp = datetime.datetime.today() - datetime.timedelta(days=3 * 7)
150 args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
137 args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
151 expected = datetime.datetime(*args)
138 expected = datetime.datetime(*args)
152 assert parse_datetime('3w') == expected
139 assert parse_datetime("3w") == expected
153 assert parse_datetime('3 w') == expected
140 assert parse_datetime("3 w") == expected
154 assert parse_datetime('3 week') == expected
141 assert parse_datetime("3 week") == expected
155 assert parse_datetime('3 weeks') == expected
142 assert parse_datetime("3 weeks") == expected
156
143
157 def test_mixed(self):
144 def test_mixed(self):
158 timestamp = (
145 timestamp = datetime.datetime.today() - datetime.timedelta(days=2 * 7 + 3)
159 datetime.datetime.today() - datetime.timedelta(days=2 * 7 + 3))
160 args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
146 args = timestamp.timetuple()[:3] + (0, 0, 0, 0)
161 expected = datetime.datetime(*args)
147 expected = datetime.datetime(*args)
162 assert parse_datetime('2w3d') == expected
148 assert parse_datetime("2w3d") == expected
163 assert parse_datetime('2w 3d') == expected
149 assert parse_datetime("2w 3d") == expected
164 assert parse_datetime('2w 3 days') == expected
150 assert parse_datetime("2w 3 days") == expected
165 assert parse_datetime('2 weeks 3 days') == expected
151 assert parse_datetime("2 weeks 3 days") == expected
166
152
167
153
168 @pytest.mark.parametrize("test_str, name, email", [
154 @pytest.mark.parametrize(
169 ('Marcin Kuzminski <marcin@python-works.com>',
155 "test_str, name, email",
170 'Marcin Kuzminski', 'marcin@python-works.com'),
156 [
171 ('Marcin Kuzminski Spaces < marcin@python-works.com >',
157 ("Marcin Kuzminski <marcin@python-works.com>", "Marcin Kuzminski", "marcin@python-works.com"),
172 'Marcin Kuzminski Spaces', 'marcin@python-works.com'),
158 ("Marcin Kuzminski Spaces < marcin@python-works.com >", "Marcin Kuzminski Spaces", "marcin@python-works.com"),
173 ('Marcin Kuzminski <marcin.kuzminski@python-works.com>',
159 (
174 'Marcin Kuzminski', 'marcin.kuzminski@python-works.com'),
160 "Marcin Kuzminski <marcin.kuzminski@python-works.com>",
175 ('mrf RFC_SPEC <marcin+kuzminski@python-works.com>',
161 "Marcin Kuzminski",
176 'mrf RFC_SPEC', 'marcin+kuzminski@python-works.com'),
162 "marcin.kuzminski@python-works.com",
177 ('username <user@email.com>',
163 ),
178 'username', 'user@email.com'),
164 ("mrf RFC_SPEC <marcin+kuzminski@python-works.com>", "mrf RFC_SPEC", "marcin+kuzminski@python-works.com"),
179 ('username <user@email.com',
165 ("username <user@email.com>", "username", "user@email.com"),
180 'username', 'user@email.com'),
166 ("username <user@email.com", "username", "user@email.com"),
181 ('broken missing@email.com',
167 ("broken missing@email.com", "broken", "missing@email.com"),
182 'broken', 'missing@email.com'),
168 ("<justemail@mail.com>", "", "justemail@mail.com"),
183 ('<justemail@mail.com>',
169 ("justname", "justname", ""),
184 '', 'justemail@mail.com'),
170 ("Mr Double Name withemail@email.com ", "Mr Double Name", "withemail@email.com"),
185 ('justname',
171 ],
186 'justname', ''),
172 )
187 ('Mr Double Name withemail@email.com ',
188 'Mr Double Name', 'withemail@email.com'),
189 ])
190 class TestAuthorExtractors(object):
173 class TestAuthorExtractors(object):
191
192 def test_author_email(self, test_str, name, email):
174 def test_author_email(self, test_str, name, email):
193 assert email == author_email(test_str)
175 assert email == author_email(test_str)
194
176
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -20,6 +19,7 b''
20 """
19 """
21 Tests for main module's methods.
20 Tests for main module's methods.
22 """
21 """
22
23 import os
23 import os
24 import tempfile
24 import tempfile
25 import shutil
25 import shutil
@@ -48,7 +48,7 b' def test_alias_detect(backend):'
48
48
49
49
50 def test_wrong_alias():
50 def test_wrong_alias():
51 alias = 'wrong_alias'
51 alias = "wrong_alias"
52 with pytest.raises(VCSError):
52 with pytest.raises(VCSError):
53 get_backend(alias)
53 get_backend(alias)
54
54
@@ -77,12 +77,13 b' def test_get_vcs_instance_by_path_multip'
77 Test that ``get_vcs_instance_by_path`` returns None if a path is passed
77 Test that ``get_vcs_instance_by_path`` returns None if a path is passed
78 to a directory with multiple repositories.
78 to a directory with multiple repositories.
79 """
79 """
80 empty_dir = tempfile.mkdtemp(prefix='pytest-empty-dir-')
80 empty_dir = tempfile.mkdtemp(prefix="pytest-empty-dir-")
81 os.mkdir(os.path.join(empty_dir, '.git'))
81 os.mkdir(os.path.join(empty_dir, ".git"))
82 os.mkdir(os.path.join(empty_dir, '.hg'))
82 os.mkdir(os.path.join(empty_dir, ".hg"))
83
83
84 def fin():
84 def fin():
85 shutil.rmtree(empty_dir)
85 shutil.rmtree(empty_dir)
86
86 request.addfinalizer(fin)
87 request.addfinalizer(fin)
87
88
88 repo = get_vcs_instance(empty_dir)
89 repo = get_vcs_instance(empty_dir)
@@ -90,39 +91,32 b' def test_get_vcs_instance_by_path_multip'
90 assert repo is None
91 assert repo is None
91
92
92
93
93 @mock.patch('rhodecode.lib.vcs.backends.get_scm')
94 @mock.patch("rhodecode.lib.vcs.backends.get_scm")
94 @mock.patch('rhodecode.lib.vcs.backends.get_backend')
95 @mock.patch("rhodecode.lib.vcs.backends.get_backend")
95 def test_get_vcs_instance_by_path_args_passed(
96 def test_get_vcs_instance_by_path_args_passed(get_backend_mock, get_scm_mock, tmpdir, vcs_repo):
96 get_backend_mock, get_scm_mock, tmpdir, vcs_repo):
97 """
97 """
98 Test that the arguments passed to ``get_vcs_instance_by_path`` are
98 Test that the arguments passed to ``get_vcs_instance_by_path`` are
99 forwarded to the vcs backend class.
99 forwarded to the vcs backend class.
100 """
100 """
101 backend = mock.MagicMock()
101 backend = mock.MagicMock()
102 get_backend_mock.return_value = backend
102 get_backend_mock.return_value = backend
103 args = ['these-are-test-args', 0, True, None]
103 args = ["these-are-test-args", 0, True, None]
104 repo = vcs_repo.path
104 repo = vcs_repo.path
105 get_vcs_instance(repo, *args)
105 get_vcs_instance(repo, *args)
106
106
107 backend.assert_called_with(*args, repo_path=repo)
107 backend.assert_called_with(*args, repo_path=repo)
108
108
109
109
110 @mock.patch('rhodecode.lib.vcs.backends.get_scm')
110 @mock.patch("rhodecode.lib.vcs.backends.get_scm")
111 @mock.patch('rhodecode.lib.vcs.backends.get_backend')
111 @mock.patch("rhodecode.lib.vcs.backends.get_backend")
112 def test_get_vcs_instance_by_path_kwargs_passed(
112 def test_get_vcs_instance_by_path_kwargs_passed(get_backend_mock, get_scm_mock, vcs_repo):
113 get_backend_mock, get_scm_mock, vcs_repo):
114 """
113 """
115 Test that the keyword arguments passed to ``get_vcs_instance_by_path`` are
114 Test that the keyword arguments passed to ``get_vcs_instance_by_path`` are
116 forwarded to the vcs backend class.
115 forwarded to the vcs backend class.
117 """
116 """
118 backend = mock.MagicMock()
117 backend = mock.MagicMock()
119 get_backend_mock.return_value = backend
118 get_backend_mock.return_value = backend
120 kwargs = {
119 kwargs = {"foo": "these-are-test-args", "bar": 0, "baz": True, "foobar": None}
121 'foo': 'these-are-test-args',
122 'bar': 0,
123 'baz': True,
124 'foobar': None
125 }
126 repo = vcs_repo.path
120 repo = vcs_repo.path
127 get_vcs_instance(repo, **kwargs)
121 get_vcs_instance(repo, **kwargs)
128
122
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -37,7 +36,7 b' def run_command(cmd, args):'
37 """
36 """
38 Runs command on the system with given ``args``.
37 Runs command on the system with given ``args``.
39 """
38 """
40 command = ' '.join((cmd, args))
39 command = " ".join((cmd, args))
41 p = Popen(command, shell=True)
40 p = Popen(command, shell=True)
42 status = os.waitpid(p.pid, 0)[1]
41 status = os.waitpid(p.pid, 0)[1]
43 return status
42 return status
@@ -51,12 +50,11 b' def eprint(msg):'
51 Appends line break.
50 Appends line break.
52 """
51 """
53 sys.stderr.write(msg)
52 sys.stderr.write(msg)
54 sys.stderr.write('\n')
53 sys.stderr.write("\n")
55
54
56
55
57 # TODO: Revisit once we have CI running, if this is not helping us, remove it
56 # TODO: Revisit once we have CI running, if this is not helping us, remove it
58 class SCMFetcher(object):
57 class SCMFetcher(object):
59
60 def __init__(self, alias, test_repo_path):
58 def __init__(self, alias, test_repo_path):
61 """
59 """
62 :param clone_cmd: command which would clone remote repository; pass
60 :param clone_cmd: command which would clone remote repository; pass
@@ -75,9 +73,8 b' class SCMFetcher(object):'
75 Tries to fetch repository from remote path.
73 Tries to fetch repository from remote path.
76 """
74 """
77 remote = self.remote_repo
75 remote = self.remote_repo
78 eprint(
76 eprint("Fetching repository %s into %s" % (remote, self.test_repo_path))
79 "Fetching repository %s into %s" % (remote, self.test_repo_path))
77 run_command(self.clone_cmd, "%s %s" % (remote, self.test_repo_path))
80 run_command(self.clone_cmd, '%s %s' % (remote, self.test_repo_path))
81
78
82
79
83 def get_normalized_path(path):
80 def get_normalized_path(path):
@@ -88,29 +85,29 b' def get_normalized_path(path):'
88 """
85 """
89 if os.path.exists(path):
86 if os.path.exists(path):
90 dir, basename = os.path.split(path)
87 dir, basename = os.path.split(path)
91 splitted_name = basename.split('.')
88 splitted_name = basename.split(".")
92 if len(splitted_name) > 1:
89 if len(splitted_name) > 1:
93 ext = splitted_name[-1]
90 ext = splitted_name[-1]
94 else:
91 else:
95 ext = None
92 ext = None
96 name = '.'.join(splitted_name[:-1])
93 name = ".".join(splitted_name[:-1])
97 matcher = re.compile(r'^.*-(\d{5})$')
94 matcher = re.compile(r"^.*-(\d{5})$")
98 start = 0
95 start = 0
99 m = matcher.match(name)
96 m = matcher.match(name)
100 if not m:
97 if not m:
101 # Haven't append number yet so return first
98 # Haven't append number yet so return first
102 newname = f'{name}-00000'
99 newname = f"{name}-00000"
103 newpath = os.path.join(dir, newname)
100 newpath = os.path.join(dir, newname)
104 if ext:
101 if ext:
105 newpath = '.'.join((newpath, ext))
102 newpath = ".".join((newpath, ext))
106 return get_normalized_path(newpath)
103 return get_normalized_path(newpath)
107 else:
104 else:
108 start = int(m.group(1)[-5:]) + 1
105 start = int(m.group(1)[-5:]) + 1
109 for x in range(start, 10000):
106 for x in range(start, 10000):
110 newname = name[:-5] + str(x).rjust(5, '0')
107 newname = name[:-5] + str(x).rjust(5, "0")
111 newpath = os.path.join(dir, newname)
108 newpath = os.path.join(dir, newname)
112 if ext:
109 if ext:
113 newpath = '.'.join((newpath, ext))
110 newpath = ".".join((newpath, ext))
114 if not os.path.exists(newpath):
111 if not os.path.exists(newpath):
115 return newpath
112 return newpath
116 raise VCSTestError("Couldn't compute new path for %s" % path)
113 raise VCSTestError("Couldn't compute new path for %s" % path)
@@ -1,5 +1,4 b''
1
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -36,7 +35,6 b' from rhodecode.lib.str_utils import safe'
36 from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO
35 from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO
37
36
38 DEBUG = True
37 DEBUG = True
39 RC_LOG = os.path.join(tempfile.gettempdir(), 'rc.log')
40 REPO_GROUP = 'a_repo_group'
38 REPO_GROUP = 'a_repo_group'
41 HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}'
39 HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}'
42 GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}'
40 GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}'
@@ -1,5 +1,4 b''
1
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -19,22 +18,17 b''
19
18
20 """
19 """
21 py.test config for test suite for making push/pull operations.
20 py.test config for test suite for making push/pull operations.
22
23 .. important::
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
27 """
21 """
28
22
29 import os
23 import os
30 import tempfile
24
31 import textwrap
25 import pyramid.paster
32 import pytest
26 import pytest
33 import logging
27 import logging
34 import requests
28 import requests
35
29
36 from rhodecode import events
30 from rhodecode import events
37 from rhodecode.lib.str_utils import safe_bytes
31 from rhodecode.lib.type_utils import AttributeDict
38 from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \
32 from rhodecode.model.db import Integration, UserRepoToPerm, Permission, \
39 UserToRepoBranchPermission, User
33 UserToRepoBranchPermission, User
40 from rhodecode.model.integration import IntegrationModel
34 from rhodecode.model.integration import IntegrationModel
@@ -42,11 +36,13 b' from rhodecode.model.db import Repositor'
42 from rhodecode.model.meta import Session
36 from rhodecode.model.meta import Session
43 from rhodecode.integrations.types.webhook import WebhookIntegrationType
37 from rhodecode.integrations.types.webhook import WebhookIntegrationType
44
38
39
45 from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO
40 from rhodecode.tests import GIT_REPO, HG_REPO, SVN_REPO
46 from rhodecode.tests.conftest import HTTPBIN_DOMAIN, HTTPBIN_POST
41 from rhodecode.tests.conftest import HTTPBIN_DOMAIN, HTTPBIN_POST
47 from rhodecode.tests.fixture import Fixture
42 from rhodecode.tests.fixtures.rc_fixture import Fixture
48 from rhodecode.tests.server_utils import RcWebServer
43 from rhodecode.tests.fixtures.fixture_utils import backend_base
49
44 from rhodecode.tests.utils import set_anonymous_access, AuthPluginManager
45 from rhodecode.tests import console_printer
50
46
51 REPO_GROUP = 'a_repo_group'
47 REPO_GROUP = 'a_repo_group'
52 HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}'
48 HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}'
@@ -62,39 +58,42 b' def check_httpbin_connection():'
62 response = requests.get(HTTPBIN_DOMAIN, timeout=5)
58 response = requests.get(HTTPBIN_DOMAIN, timeout=5)
63 return response.status_code == 200
59 return response.status_code == 200
64 except Exception as e:
60 except Exception as e:
65 print(e)
61 console_printer(e)
66
62
67 return False
63 return False
68
64
65 #overrides backend_N with init_pyramid_app instead of baseapp
66 @pytest.fixture()
67 def vcs_backend_git(request, init_pyramid_app, test_repo):
68 return backend_base(request, 'git', test_repo)
69
70
71 @pytest.fixture()
72 def vcs_backend_hg(request, init_pyramid_app, test_repo):
73 return backend_base(request, 'hg', test_repo)
74
75
76 @pytest.fixture()
77 def vcs_backend_svn(request, init_pyramid_app, test_repo):
78 return backend_base(request, 'svn', test_repo)
79
69
80
70 @pytest.fixture(scope="module")
81 @pytest.fixture(scope="module")
71 def rcextensions(request, db_connection, tmpdir_factory):
82 def tmp_storage_location(request, tmpdir_factory):
72 """
73 Installs a testing rcextensions pack to ensure they work as expected.
74 """
83 """
75 init_content = textwrap.dedent("""
84 Defines a module level storage_location, used mostly to define per-test persistent repo storage
76 # Forward import the example rcextensions to make it
85 shared across vcsserver, rhodecode and celery
77 # active for our tests.
86 """
78 from rhodecode.tests.other.example_rcextensions import *
79 """)
80
87
81 # Note: rcextensions are looked up based on the path of the ini file
88 dest = tmpdir_factory.mktemp('tmp_storage_location_', numbered=True)
82 root_path = tmpdir_factory.getbasetemp()
89 log.info("Creating test TMP directory at %s", dest)
83 rcextensions_path = root_path.join('rcextensions')
90 return dest
84 init_path = rcextensions_path.join('__init__.py')
85
86 if rcextensions_path.check():
87 pytest.fail(
88 "Path for rcextensions already exists, please clean up before "
89 "test run this path: %s" % (rcextensions_path, ))
90 else:
91 request.addfinalizer(rcextensions_path.remove)
92 init_path.write_binary(safe_bytes(init_content), ensure=True)
93
91
94
92
95 @pytest.fixture(scope="module")
93 @pytest.fixture(scope="module")
96 def repos(request, db_connection):
94 def repo_group_repos(request):
97 """Create a copy of each test repo in a repo group."""
95 """Create a copy of each test repo in a repo group."""
96
98 fixture = Fixture()
97 fixture = Fixture()
99 repo_group = fixture.create_repo_group(REPO_GROUP)
98 repo_group = fixture.create_repo_group(REPO_GROUP)
100 repo_group_id = repo_group.group_id
99 repo_group_id = repo_group.group_id
@@ -116,67 +115,117 b' def repos(request, db_connection):'
116 fixture.destroy_repo_group(repo_group_id)
115 fixture.destroy_repo_group(repo_group_id)
117
116
118
117
119 @pytest.fixture(scope="module")
118 @pytest.fixture(scope='module')
120 def rc_web_server_config_modification():
119 def rcstack_vcsserver_factory(vcsserver_factory):
121 return []
120 return vcsserver_factory
121
122
123 @pytest.fixture(scope='module')
124 def rcstack_celery_factory(celery_factory):
125 return celery_factory
126
127
128 @pytest.fixture(scope='module')
129 def rcstack_rhodecode_factory(rhodecode_factory):
130 return rhodecode_factory
122
131
123
132
124 @pytest.fixture(scope="module")
133 @pytest.fixture(scope='module')
125 def rc_web_server_config_factory(testini_factory, rc_web_server_config_modification):
134 def init_pyramid_app(request, available_port_factory, ini_config_factory, rcstack_vcsserver_factory, tmp_storage_location):
126 """
135 from rhodecode.lib.config_utils import get_app_config
127 Configuration file used for the fixture `rc_web_server`.
136 from rhodecode.config.middleware import make_pyramid_app
128 """
129
137
130 def factory(rcweb_port, vcsserver_port):
138 store_dir = tmp_storage_location
131 custom_params = [
139 port = available_port_factory()
132 {'handler_console': {'level': 'DEBUG'}},
140 rcstack_vcsserver_factory(
133 {'server:main': {'port': rcweb_port}},
141 request,
134 {'app:main': {'vcs.server': 'localhost:%s' % vcsserver_port}}
142 store_dir=store_dir,
135 ]
143 port=port,
136 custom_params.extend(rc_web_server_config_modification)
144 info_prefix='init-app-'
137 return testini_factory(custom_params)
145 )
138 return factory
146
147 app_ini_config = ini_config_factory(store_dir)
148
149 pyramid.paster.setup_logging(app_ini_config)
150
151 settings = get_app_config(app_ini_config)
152 settings['startup.import_repos'] = True
153 settings['vcs.server'] = f'localhost:{port}'
154 settings['repo_store.path'] = str(store_dir)
155 pyramid_app = make_pyramid_app({'__file__': app_ini_config}, **settings)
156
157 return pyramid_app
139
158
140
159
141 @pytest.fixture(scope="module")
160 @pytest.fixture(scope='module')
142 def rc_web_server(
161 def rcstack(request, tmp_storage_location, rcextensions, available_port_factory, rcstack_vcsserver_factory, rcstack_celery_factory, rcstack_rhodecode_factory):
143 request, vcsserver_factory, available_port_factory,
144 rc_web_server_config_factory, repos, rcextensions):
145 """
162 """
146 Run the web server as a subprocess. with its own instance of vcsserver
163 Runs minimal rcstack, i.e vcsserver, celery, rhodecode unpacks rcextensions and repos to a shared location
147 """
164 """
148 rcweb_port: int = available_port_factory()
165 rcstack_data = AttributeDict()
149 log.info('Using rcweb ops test port %s', rcweb_port)
166 store_dir = tmp_storage_location
150
167
151 vcsserver_port: int = available_port_factory()
168 vcsserver_port: int = available_port_factory()
152 log.info('Using vcsserver ops test port %s', vcsserver_port)
169 vcsserver_log = os.path.join(tmp_storage_location, 'vcsserver.log')
170
171 log.info('Using vcsserver test port %s and log %s', vcsserver_port, vcsserver_log) # start vcsserver
172 _factory = rcstack_vcsserver_factory(
173 request,
174 store_dir=store_dir,
175 port=vcsserver_port,
176 log_file=vcsserver_log,
177 overrides=(
178 {'handler_console': {'level': 'DEBUG'}},
179 ))
180 rcstack_data.vcsserver_port = vcsserver_port
181 rcstack_data.vcsserver_log = _factory.log_file
182
153
183
154 vcs_log = os.path.join(tempfile.gettempdir(), 'rc_op_vcs.log')
184 celery_log = os.path.join(tmp_storage_location, 'celery.log')
155 vcsserver_factory(
185
156 request, vcsserver_port=vcsserver_port,
186
157 log_file=vcs_log,
187 log.info('Using celery log %s', celery_log)
188 # start celery
189 _factory = rcstack_celery_factory(
190 request,
191 store_dir=store_dir,
192 port=None,
193 log_file=celery_log,
158 overrides=(
194 overrides=(
159 {'server:main': {'workers': 2}},
195 {'handler_console': {'level': 'DEBUG'}},
160 {'server:main': {'graceful_timeout': 10}},
196 {'app:main': {'vcs.server': f'localhost:{vcsserver_port}'}},
197 {'app:main': {'repo_store.path': store_dir}}
161 ))
198 ))
162
199
163 rc_log = os.path.join(tempfile.gettempdir(), 'rc_op_web.log')
200 rcstack_data.celery_log = _factory.log_file
164 rc_web_server_config = rc_web_server_config_factory(
201
165 rcweb_port=rcweb_port,
202 rhodecode_port: int = available_port_factory()
166 vcsserver_port=vcsserver_port)
203 rhodecode_log = os.path.join(tmp_storage_location, 'rhodecode.log')
167 server = RcWebServer(rc_web_server_config, log_file=rc_log)
204
168 server.start()
205
206 log.info('Using rhodecode test port %s and log %s', rhodecode_port, rhodecode_port)
169
207
170 @request.addfinalizer
208 # start rhodecode
171 def cleanup():
209 rc = rcstack_rhodecode_factory(
172 server.shutdown()
210 request,
211 store_dir=store_dir,
212 port=rhodecode_port,
213 log_file=rhodecode_log,
214 overrides=(
215 {'handler_console': {'level': 'DEBUG'}},
216 {'app:main': {'vcs.server': f'localhost:{vcsserver_port}'}},
217 {'app:main': {'repo_store.path': store_dir}}
218 ))
173
219
174 server.wait_until_ready()
220 rcstack_data.rhodecode_port = rhodecode_port
175 return server
221 rcstack_data.rhodecode_log = rc.log_file
222
223 rc.rcstack_data = rcstack_data
224 return rc
176
225
177
226
178 @pytest.fixture()
227 @pytest.fixture()
179 def disable_locking(baseapp):
228 def disable_locking(init_pyramid_app):
180 r = Repository.get_by_repo_name(GIT_REPO)
229 r = Repository.get_by_repo_name(GIT_REPO)
181 Repository.unlock(r)
230 Repository.unlock(r)
182 r.enable_locking = False
231 r.enable_locking = False
@@ -191,6 +240,28 b' def disable_locking(baseapp):'
191
240
192
241
193 @pytest.fixture()
242 @pytest.fixture()
243 def disable_anonymous_user(request, init_pyramid_app, db_connection):
244 set_anonymous_access(False)
245
246 @request.addfinalizer
247 def cleanup():
248 set_anonymous_access(True)
249
250
251 @pytest.fixture(scope='module')
252 def enable_auth_plugins(request, init_pyramid_app):
253 """
254 Return a factory object that when called, allows to control which
255 authentication plugins are enabled.
256 """
257
258 enabler = AuthPluginManager()
259 request.addfinalizer(enabler.cleanup)
260
261 return enabler
262
263
264 @pytest.fixture()
194 def fs_repo_only(request, rhodecode_fixtures):
265 def fs_repo_only(request, rhodecode_fixtures):
195 def fs_repo_fabric(repo_name, repo_type):
266 def fs_repo_fabric(repo_name, repo_type):
196 rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type)
267 rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type)
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -32,25 +31,27 b' from rhodecode.tests import (GIT_REPO, H'
32 from rhodecode.tests.vcs_operations import Command
31 from rhodecode.tests.vcs_operations import Command
33
32
34
33
35 @pytest.fixture(scope="module")
34 custom_code = [
36 def rc_web_server_config_modification():
35 {'app:main': {'auth_ret_code': '403'}},
37 return [
36 ]
38 {'app:main': {'auth_ret_code': '403'}},
39 #{'app:main': {'auth_ret_code_detection': 'true'}},
40 ]
41
37
42
38 @pytest.mark.parametrize('rcstack', custom_code, indirect=True)
43 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
39 @pytest.mark.usefixtures(
44 class TestVCSOperationsOnCustomIniConfig(object):
40 "init_pyramid_app",
41 "repo_group_repos",
42 "disable_anonymous_user",
43 "disable_locking",
44 )
45 class TestVCSOperationsAuthCode403(object):
45
46
46 def test_clone_wrong_credentials_hg_ret_code(self, rc_web_server, tmpdir):
47 def test_clone_wrong_credentials_hg_ret_code(self, rcstack, tmpdir):
47 clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
48 clone_url = rcstack.repo_clone_url(HG_REPO, passwd='bad!')
48 stdout, stderr = Command('/tmp').execute(
49 stdout, stderr = Command(tmpdir.strpath).execute(
49 'hg clone', clone_url, tmpdir.strpath)
50 'hg clone', clone_url, tmpdir.strpath)
50 assert 'abort: HTTP Error 403: Forbidden' in stderr
51 assert 'abort: HTTP Error 403: Forbidden' in stderr
51
52
52 def test_clone_wrong_credentials_git_ret_code(self, rc_web_server, tmpdir):
53 def test_clone_wrong_credentials_git_ret_code(self, rcstack, tmpdir):
53 clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
54 clone_url = rcstack.repo_clone_url(GIT_REPO, passwd='bad!')
54 stdout, stderr = Command('/tmp').execute(
55 stdout, stderr = Command(tmpdir.strpath).execute(
55 'git clone', clone_url, tmpdir.strpath)
56 'git clone', clone_url, tmpdir.strpath)
56 assert 'The requested URL returned error: 403' in stderr
57 assert 'The requested URL returned error: 403' in stderr
@@ -31,26 +31,27 b' import pytest'
31 from rhodecode.tests import (GIT_REPO, HG_REPO)
31 from rhodecode.tests import (GIT_REPO, HG_REPO)
32 from rhodecode.tests.vcs_operations import Command
32 from rhodecode.tests.vcs_operations import Command
33
33
34
34 custom_code = [
35 @pytest.fixture(scope="module")
35 {'app:main': {'auth_ret_code': '404'}},
36 def rc_web_server_config_modification():
36 ]
37 return [
38 {'app:main': {'auth_ret_code': '404'}},
39 #{'app:main': {'auth_ret_code_detection': 'false'}},
40 ]
41
37
42
38 @pytest.mark.parametrize('rcstack', custom_code, indirect=True)
43 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
39 @pytest.mark.usefixtures(
44 class TestVCSOperationsOnCustomIniConfig(object):
40 "init_pyramid_app",
41 "repo_group_repos",
42 "disable_anonymous_user",
43 "disable_locking",
44 )
45 class TestVCSOperationsOnCustomAuthCode404(object):
45
46
46 def test_clone_wrong_credentials_hg_ret_code(self, rc_web_server, tmpdir):
47 def test_clone_wrong_credentials_hg_ret_code(self, rcstack, tmpdir):
47 clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
48 clone_url = rcstack.repo_clone_url(HG_REPO, passwd='bad!')
48 stdout, stderr = Command('/tmp').execute(
49 stdout, stderr = Command(tmpdir.strpath).execute(
49 'hg clone', clone_url, tmpdir.strpath)
50 'hg clone', clone_url, tmpdir.strpath)
50 assert 'abort: HTTP Error 404: Not Found' in stderr
51 assert 'abort: HTTP Error 404: Not Found' in stderr
51
52
52 def test_clone_wrong_credentials_git_ret_code(self, rc_web_server, tmpdir):
53 def test_clone_wrong_credentials_git_ret_code(self, rcstack, tmpdir):
53 clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
54 clone_url = rcstack.repo_clone_url(GIT_REPO, passwd='bad!')
54 stdout, stderr = Command('/tmp').execute(
55 stdout, stderr = Command(tmpdir.strpath).execute(
55 'git clone', clone_url, tmpdir.strpath)
56 'git clone', clone_url, tmpdir.strpath)
56 assert 'not found' in stderr
57 assert 'not found' in stderr
@@ -32,25 +32,27 b' from rhodecode.tests import (GIT_REPO, H'
32 from rhodecode.tests.vcs_operations import Command
32 from rhodecode.tests.vcs_operations import Command
33
33
34
34
35 @pytest.fixture(scope="module")
35 custom_code = [
36 def rc_web_server_config_modification():
36 {'app:main': {'auth_ret_code': '600'}},
37 return [
37 ]
38 {'app:main': {'auth_ret_code': '600'}},
39 #{'app:main': {'auth_ret_code_detection': 'false'}},
40 ]
41
38
42
39 @pytest.mark.parametrize('rcstack', custom_code, indirect=True)
43 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
40 @pytest.mark.usefixtures(
41 "init_pyramid_app",
42 "repo_group_repos",
43 "disable_anonymous_user",
44 "disable_locking",
45 )
44 class TestVCSOperationsOnCustomIniConfig(object):
46 class TestVCSOperationsOnCustomIniConfig(object):
45
47
46 def test_clone_wrong_credentials_hg_ret_code(self, rc_web_server, tmpdir):
48 def test_clone_wrong_credentials_hg_ret_code(self, rcstack, tmpdir):
47 clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
49 clone_url = rcstack.repo_clone_url(HG_REPO, passwd='bad!')
48 stdout, stderr = Command('/tmp').execute(
50 stdout, stderr = Command(tmpdir.strpath).execute(
49 'hg clone', clone_url, tmpdir.strpath)
51 'hg clone', clone_url, tmpdir.strpath)
50 assert 'abort: authorization failed' in stderr
52 assert 'abort: authorization failed' in stderr
51
53
52 def test_clone_wrong_credentials_git_ret_code(self, rc_web_server, tmpdir):
54 def test_clone_wrong_credentials_git_ret_code(self, rcstack, tmpdir):
53 clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
55 clone_url = rcstack.repo_clone_url(GIT_REPO, passwd='bad!')
54 stdout, stderr = Command('/tmp').execute(
56 stdout, stderr = Command(tmpdir.strpath).execute(
55 'git clone', clone_url, tmpdir.strpath)
57 'git clone', clone_url, tmpdir.strpath)
56 assert 'fatal: Authentication failed' in stderr
58 assert 'fatal: Authentication failed' in stderr
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -20,13 +19,10 b''
20 """
19 """
21 Test suite for making push/pull operations, on specially modified INI files
20 Test suite for making push/pull operations, on specially modified INI files
22
21
23 .. important::
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
27 """
22 """
28
23
29 import os
24 import os
25 import pytest
30
26
31 from rhodecode.lib.vcs.backends.git.repository import GitRepository
27 from rhodecode.lib.vcs.backends.git.repository import GitRepository
32 from rhodecode.lib.vcs.nodes import FileNode
28 from rhodecode.lib.vcs.nodes import FileNode
@@ -35,31 +31,39 b' from rhodecode.tests.vcs_operations impo'
35 from .test_vcs_operations_git import _check_proper_clone, _check_proper_git_push
31 from .test_vcs_operations_git import _check_proper_clone, _check_proper_git_push
36
32
37
33
38 def test_git_clone_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
34 @pytest.mark.usefixtures(
39 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
35 "init_pyramid_app",
40 cmd = Command('/tmp')
36 "repo_group_repos",
41 stdout, stderr = cmd.execute(
37 "disable_anonymous_user",
42 'git -c http.postBuffer=1024 clone', clone_url, tmpdir.strpath)
38 "disable_locking",
43 _check_proper_clone(stdout, stderr, 'git')
39 )
44 cmd.assert_returncode_success()
40 class TestVCSOperationsOnCustomIniConfig(object):
41
42 def test_git_clone_with_small_push_buffer(self, vcs_backend_git, rcstack, tmpdir):
43 clone_url = rcstack.repo_clone_url(GIT_REPO)
44 cmd = Command(tmpdir.strpath)
45 stdout, stderr = cmd.execute(
46 'git -c http.postBuffer=1024 clone', clone_url, tmpdir.strpath)
47 _check_proper_clone(stdout, stderr, 'git')
48 cmd.assert_returncode_success()
45
49
46
50
47 def test_git_push_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
51 def test_git_push_with_small_push_buffer(self, vcs_backend_git, rcstack, tmpdir):
48 empty_repo = backend_git.create_repo()
52 empty_repo = vcs_backend_git.create_repo()
49
53
50 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
54 clone_url = rcstack.repo_clone_url(empty_repo.repo_name)
51
55
52 cmd = Command(tmpdir.strpath)
56 cmd = Command(tmpdir.strpath)
53 cmd.execute('git clone', clone_url)
57 cmd.execute('git clone', clone_url)
54
58
55 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
59 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
56 repo.in_memory_commit.add(FileNode(b'readme.md', content=b'## Hello'))
60 repo.in_memory_commit.add(FileNode(b'readme.md', content=b'## Hello'))
57 repo.in_memory_commit.commit(
61 repo.in_memory_commit.commit(
58 message='Commit on branch Master',
62 message='Commit on branch Master',
59 author='Automatic test <automatic@rhodecode.com>',
63 author='Automatic test <automatic@rhodecode.com>',
60 branch='master')
64 branch='master')
61
65
62 repo_cmd = Command(repo.path)
66 repo_cmd = Command(repo.path)
63 stdout, stderr = repo_cmd.execute(
67 stdout, stderr = repo_cmd.execute(
64 f'git -c http.postBuffer=1024 push --verbose {clone_url} master')
68 f'git -c http.postBuffer=1024 push --verbose {clone_url} master')
65 _check_proper_git_push(stdout, stderr, branch='master')
69 _check_proper_git_push(stdout, stderr, branch='master')
@@ -27,7 +27,12 b' from rhodecode.tests.vcs_operations impo'
27 Command, _check_proper_hg_push, _check_proper_git_push, _add_files_and_push)
27 Command, _check_proper_hg_push, _check_proper_git_push, _add_files_and_push)
28
28
29
29
30 @pytest.mark.usefixtures("disable_anonymous_user")
30 @pytest.mark.usefixtures(
31 "init_pyramid_app",
32 "repo_group_repos",
33 "disable_anonymous_user",
34 "disable_locking",
35 )
31 class TestVCSOperations(object):
36 class TestVCSOperations(object):
32
37
33 @pytest.mark.parametrize('username, password', [
38 @pytest.mark.parametrize('username, password', [
@@ -41,13 +46,13 b' class TestVCSOperations(object):'
41 'branch.push_force',
46 'branch.push_force',
42 ])
47 ])
43 def test_push_to_protected_branch_fails_with_message_hg(
48 def test_push_to_protected_branch_fails_with_message_hg(
44 self, rc_web_server, tmpdir, branch_perm, user_util,
49 self, rcstack, tmpdir, branch_perm, user_util,
45 branch_permission_setter, username, password):
50 branch_permission_setter, username, password):
46 repo = user_util.create_repo(repo_type='hg')
51 repo = user_util.create_repo(repo_type='hg')
47 repo_name = repo.repo_name
52 repo_name = repo.repo_name
48 branch_permission_setter(repo_name, username, permission=branch_perm)
53 branch_permission_setter(repo_name, username, permission=branch_perm)
49
54
50 clone_url = rc_web_server.repo_clone_url(
55 clone_url = rcstack.repo_clone_url(
51 repo.repo_name, user=username, passwd=password)
56 repo.repo_name, user=username, passwd=password)
52 Command(os.path.dirname(tmpdir.strpath)).execute(
57 Command(os.path.dirname(tmpdir.strpath)).execute(
53 'hg clone', clone_url, tmpdir.strpath)
58 'hg clone', clone_url, tmpdir.strpath)
@@ -58,8 +63,8 b' class TestVCSOperations(object):'
58 _check_proper_hg_push(stdout, stderr)
63 _check_proper_hg_push(stdout, stderr)
59 else:
64 else:
60 msg = f"Branch `default` changes rejected by rule `*`=>{branch_perm}"
65 msg = f"Branch `default` changes rejected by rule `*`=>{branch_perm}"
61 assert msg in stdout
66 assert msg in stderr
62 assert "transaction abort" in stdout
67 #assert "transaction abort" in stdout
63
68
64 @pytest.mark.parametrize('username, password', [
69 @pytest.mark.parametrize('username, password', [
65 (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS),
70 (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS),
@@ -72,13 +77,13 b' class TestVCSOperations(object):'
72 'branch.push_force',
77 'branch.push_force',
73 ])
78 ])
74 def test_push_to_protected_branch_fails_with_message_git(
79 def test_push_to_protected_branch_fails_with_message_git(
75 self, rc_web_server, tmpdir, branch_perm, user_util,
80 self, rcstack, tmpdir, branch_perm, user_util,
76 branch_permission_setter, username, password):
81 branch_permission_setter, username, password):
77 repo = user_util.create_repo(repo_type='git')
82 repo = user_util.create_repo(repo_type='git')
78 repo_name = repo.repo_name
83 repo_name = repo.repo_name
79 branch_permission_setter(repo_name, username, permission=branch_perm)
84 branch_permission_setter(repo_name, username, permission=branch_perm)
80
85
81 clone_url = rc_web_server.repo_clone_url(
86 clone_url = rcstack.repo_clone_url(
82 repo.repo_name, user=username, passwd=password)
87 repo.repo_name, user=username, passwd=password)
83 Command(os.path.dirname(tmpdir.strpath)).execute(
88 Command(os.path.dirname(tmpdir.strpath)).execute(
84 'git clone', clone_url, tmpdir.strpath)
89 'git clone', clone_url, tmpdir.strpath)
@@ -1,5 +1,4 b''
1
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -20,10 +19,6 b''
20 """
19 """
21 Test suite for making push/pull operations, on specially modified INI files
20 Test suite for making push/pull operations, on specially modified INI files
22
21
23 .. important::
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
27 """
22 """
28
23
29 import pytest
24 import pytest
@@ -36,10 +31,15 b' from rhodecode.tests import (GIT_REPO, H'
36 from rhodecode.tests.vcs_operations import (Command, _check_proper_clone)
31 from rhodecode.tests.vcs_operations import (Command, _check_proper_clone)
37
32
38
33
39 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
34 @pytest.mark.usefixtures(
40 class TestVCSOperations(object):
35 "init_pyramid_app",
36 "repo_group_repos",
37 "disable_anonymous_user",
38 "disable_locking",
39 )
40 class TestVCSOperationsByAuthTokens:
41 def test_clone_by_auth_token(
41 def test_clone_by_auth_token(
42 self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
42 self, rcstack, tmpdir, user_util, enable_auth_plugins):
43
43
44 enable_auth_plugins.enable([
44 enable_auth_plugins.enable([
45 'egg:rhodecode-enterprise-ce#token',
45 'egg:rhodecode-enterprise-ce#token',
@@ -49,16 +49,16 b' class TestVCSOperations(object):'
49 user = user_util.create_user()
49 user = user_util.create_user()
50 token = user.auth_tokens[1]
50 token = user.auth_tokens[1]
51
51
52 clone_url = rc_web_server.repo_clone_url(
52 clone_url = rcstack.repo_clone_url(
53 HG_REPO, user=user.username, passwd=token)
53 HG_REPO, user=user.username, passwd=token)
54
54
55 stdout, stderr = Command('/tmp').execute(
55 stdout, stderr = Command(tmpdir.strpath).execute(
56 'hg clone', clone_url, tmpdir.strpath)
56 'hg clone', clone_url, tmpdir.strpath)
57
57
58 _check_proper_clone(stdout, stderr, 'hg')
58 _check_proper_clone(stdout, stderr, 'hg')
59
59
60 def test_clone_by_auth_token_expired(
60 def test_clone_by_auth_token_expired(
61 self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
61 self, rcstack, tmpdir, user_util, enable_auth_plugins):
62 enable_auth_plugins.enable([
62 enable_auth_plugins.enable([
63 'egg:rhodecode-enterprise-ce#token',
63 'egg:rhodecode-enterprise-ce#token',
64 'egg:rhodecode-enterprise-ce#rhodecode'
64 'egg:rhodecode-enterprise-ce#rhodecode'
@@ -69,18 +69,18 b' class TestVCSOperations(object):'
69 user.user_id, 'test-token', -10, AuthTokenModel.cls.ROLE_VCS)
69 user.user_id, 'test-token', -10, AuthTokenModel.cls.ROLE_VCS)
70 token = auth_token.api_key
70 token = auth_token.api_key
71
71
72 clone_url = rc_web_server.repo_clone_url(
72 clone_url = rcstack.repo_clone_url(
73 HG_REPO, user=user.username, passwd=token)
73 HG_REPO, user=user.username, passwd=token)
74
74
75 stdout, stderr = Command('/tmp').execute(
75 stdout, stderr = Command(tmpdir.strpath).execute(
76 'hg clone', clone_url, tmpdir.strpath)
76 'hg clone', clone_url, tmpdir.strpath)
77 assert 'abort: authorization failed' in stderr
77 assert 'abort: authorization failed' in stderr
78
78
79 msg = 'reason: bad or inactive token.'
79 msg = 'reason: bad or inactive token.'
80 rc_web_server.assert_message_in_server_logs(msg)
80 rcstack.assert_message_in_server_logs(msg)
81
81
82 def test_clone_by_auth_token_bad_role(
82 def test_clone_by_auth_token_bad_role(
83 self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
83 self, rcstack, tmpdir, user_util, enable_auth_plugins):
84 enable_auth_plugins.enable([
84 enable_auth_plugins.enable([
85 'egg:rhodecode-enterprise-ce#token',
85 'egg:rhodecode-enterprise-ce#token',
86 'egg:rhodecode-enterprise-ce#rhodecode'
86 'egg:rhodecode-enterprise-ce#rhodecode'
@@ -91,15 +91,15 b' class TestVCSOperations(object):'
91 user.user_id, 'test-token', -1, AuthTokenModel.cls.ROLE_API)
91 user.user_id, 'test-token', -1, AuthTokenModel.cls.ROLE_API)
92 token = auth_token.api_key
92 token = auth_token.api_key
93
93
94 clone_url = rc_web_server.repo_clone_url(
94 clone_url = rcstack.repo_clone_url(
95 HG_REPO, user=user.username, passwd=token)
95 HG_REPO, user=user.username, passwd=token)
96
96
97 stdout, stderr = Command('/tmp').execute(
97 stdout, stderr = Command(tmpdir.strpath).execute(
98 'hg clone', clone_url, tmpdir.strpath)
98 'hg clone', clone_url, tmpdir.strpath)
99 assert 'abort: authorization failed' in stderr
99 assert 'abort: authorization failed' in stderr
100
100
101 def test_clone_by_auth_token_user_disabled(
101 def test_clone_by_auth_token_user_disabled(
102 self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
102 self, rcstack, tmpdir, user_util, enable_auth_plugins):
103 enable_auth_plugins.enable([
103 enable_auth_plugins.enable([
104 'egg:rhodecode-enterprise-ce#token',
104 'egg:rhodecode-enterprise-ce#token',
105 'egg:rhodecode-enterprise-ce#rhodecode'
105 'egg:rhodecode-enterprise-ce#rhodecode'
@@ -111,18 +111,18 b' class TestVCSOperations(object):'
111 Session().commit()
111 Session().commit()
112 token = user.auth_tokens[1]
112 token = user.auth_tokens[1]
113
113
114 clone_url = rc_web_server.repo_clone_url(
114 clone_url = rcstack.repo_clone_url(
115 HG_REPO, user=user.username, passwd=token)
115 HG_REPO, user=user.username, passwd=token)
116
116
117 stdout, stderr = Command('/tmp').execute(
117 stdout, stderr = Command(tmpdir.strpath).execute(
118 'hg clone', clone_url, tmpdir.strpath)
118 'hg clone', clone_url, tmpdir.strpath)
119 assert 'abort: authorization failed' in stderr
119 assert 'abort: authorization failed' in stderr
120
120
121 msg = 'reason: account not active.'
121 msg = 'reason: account not active.'
122 rc_web_server.assert_message_in_server_logs(msg)
122 rcstack.assert_message_in_server_logs(msg)
123
123
124 def test_clone_by_auth_token_with_scope(
124 def test_clone_by_auth_token_with_scope(
125 self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
125 self, rcstack, tmpdir, user_util, enable_auth_plugins):
126 enable_auth_plugins.enable([
126 enable_auth_plugins.enable([
127 'egg:rhodecode-enterprise-ce#token',
127 'egg:rhodecode-enterprise-ce#token',
128 'egg:rhodecode-enterprise-ce#rhodecode'
128 'egg:rhodecode-enterprise-ce#rhodecode'
@@ -138,15 +138,15 b' class TestVCSOperations(object):'
138 Session().add(auth_token)
138 Session().add(auth_token)
139 Session().commit()
139 Session().commit()
140
140
141 clone_url = rc_web_server.repo_clone_url(
141 clone_url = rcstack.repo_clone_url(
142 HG_REPO, user=user.username, passwd=token)
142 HG_REPO, user=user.username, passwd=token)
143
143
144 stdout, stderr = Command('/tmp').execute(
144 stdout, stderr = Command(tmpdir.strpath).execute(
145 'hg clone', clone_url, tmpdir.strpath)
145 'hg clone', clone_url, tmpdir.strpath)
146 _check_proper_clone(stdout, stderr, 'hg')
146 _check_proper_clone(stdout, stderr, 'hg')
147
147
148 def test_clone_by_auth_token_with_wrong_scope(
148 def test_clone_by_auth_token_with_wrong_scope(
149 self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
149 self, rcstack, tmpdir, user_util, enable_auth_plugins):
150 enable_auth_plugins.enable([
150 enable_auth_plugins.enable([
151 'egg:rhodecode-enterprise-ce#token',
151 'egg:rhodecode-enterprise-ce#token',
152 'egg:rhodecode-enterprise-ce#rhodecode'
152 'egg:rhodecode-enterprise-ce#rhodecode'
@@ -162,13 +162,13 b' class TestVCSOperations(object):'
162 Session().add(auth_token)
162 Session().add(auth_token)
163 Session().commit()
163 Session().commit()
164
164
165 clone_url = rc_web_server.repo_clone_url(
165 clone_url = rcstack.repo_clone_url(
166 HG_REPO, user=user.username, passwd=token)
166 HG_REPO, user=user.username, passwd=token)
167
167
168 stdout, stderr = Command('/tmp').execute(
168 stdout, stderr = Command(tmpdir.strpath).execute(
169 'hg clone', clone_url, tmpdir.strpath)
169 'hg clone', clone_url, tmpdir.strpath)
170
170
171 assert 'abort: authorization failed' in stderr
171 assert 'abort: authorization failed' in stderr
172
172
173 msg = 'reason: bad or inactive token.'
173 msg = 'reason: bad or inactive token.'
174 rc_web_server.assert_message_in_server_logs(msg)
174 rcstack.assert_message_in_server_logs(msg)
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -27,12 +26,17 b' from rhodecode.tests.vcs_operations impo'
27 _add_files, _add_files_and_push)
26 _add_files, _add_files_and_push)
28
27
29
28
30 @pytest.mark.usefixtures("disable_anonymous_user")
29 @pytest.mark.usefixtures(
30 "init_pyramid_app",
31 "repo_group_repos",
32 "disable_anonymous_user",
33 "disable_locking",
34 )
31 class TestVCSOperations(object):
35 class TestVCSOperations(object):
32
36
33 def test_push_force_hg(self, rc_web_server, tmpdir, user_util):
37 def test_push_force_hg(self, rcstack, tmpdir, user_util):
34 repo = user_util.create_repo(repo_type='hg')
38 repo = user_util.create_repo(repo_type='hg')
35 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
39 clone_url = rcstack.repo_clone_url(repo.repo_name)
36 Command(os.path.dirname(tmpdir.strpath)).execute(
40 Command(os.path.dirname(tmpdir.strpath)).execute(
37 'hg clone', clone_url, tmpdir.strpath)
41 'hg clone', clone_url, tmpdir.strpath)
38
42
@@ -50,9 +54,9 b' class TestVCSOperations(object):'
50
54
51 _check_proper_hg_push(stdout, stderr)
55 _check_proper_hg_push(stdout, stderr)
52
56
53 def test_push_force_git(self, rc_web_server, tmpdir, user_util):
57 def test_push_force_git(self, rcstack, tmpdir, user_util):
54 repo = user_util.create_repo(repo_type='git')
58 repo = user_util.create_repo(repo_type='git')
55 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
59 clone_url = rcstack.repo_clone_url(repo.repo_name)
56 Command(os.path.dirname(tmpdir.strpath)).execute(
60 Command(os.path.dirname(tmpdir.strpath)).execute(
57 'git clone', clone_url, tmpdir.strpath)
61 'git clone', clone_url, tmpdir.strpath)
58
62
@@ -69,13 +73,12 b' class TestVCSOperations(object):'
69 assert '(forced update)' in stderr
73 assert '(forced update)' in stderr
70
74
71 def test_push_force_hg_blocked_by_branch_permissions(
75 def test_push_force_hg_blocked_by_branch_permissions(
72 self, rc_web_server, tmpdir, user_util, branch_permission_setter):
76 self, rcstack, tmpdir, user_util, branch_permission_setter):
73 repo = user_util.create_repo(repo_type='hg')
77 repo = user_util.create_repo(repo_type='hg')
74 repo_name = repo.repo_name
78 repo_name = repo.repo_name
75 username = TEST_USER_ADMIN_LOGIN
79 username = TEST_USER_ADMIN_LOGIN
76 branch_permission_setter(repo_name, username, permission='branch.push')
77
80
78 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
81 clone_url = rcstack.repo_clone_url(repo.repo_name)
79 Command(os.path.dirname(tmpdir.strpath)).execute(
82 Command(os.path.dirname(tmpdir.strpath)).execute(
80 'hg clone', clone_url, tmpdir.strpath)
83 'hg clone', clone_url, tmpdir.strpath)
81
84
@@ -88,21 +91,21 b' class TestVCSOperations(object):'
88 'hg checkout -r 1 && hg commit -m "starting new head"')
91 'hg checkout -r 1 && hg commit -m "starting new head"')
89 _add_files('hg', tmpdir.strpath, clone_url=clone_url)
92 _add_files('hg', tmpdir.strpath, clone_url=clone_url)
90
93
94 branch_permission_setter(repo_name, username, permission='branch.push')
91 stdout, stderr = Command(tmpdir.strpath).execute(
95 stdout, stderr = Command(tmpdir.strpath).execute(
92 f'hg push --verbose -f {clone_url}')
96 f'hg push --verbose -f {clone_url}')
93
97
94 assert "Branch `default` changes rejected by rule `*`=>branch.push" in stdout
98 assert "Branch `default` changes rejected by rule `*`=>branch.push" in stderr
95 assert "FORCE PUSH FORBIDDEN" in stdout
99 assert "FORCE PUSH FORBIDDEN" in stderr
96 assert "transaction abort" in stdout
97
100
98 def test_push_force_git_blocked_by_branch_permissions(
101 def test_push_force_git_blocked_by_branch_permissions(
99 self, rc_web_server, tmpdir, user_util, branch_permission_setter):
102 self, rcstack, tmpdir, user_util, branch_permission_setter):
100 repo = user_util.create_repo(repo_type='git')
103 repo = user_util.create_repo(repo_type='git')
101 repo_name = repo.repo_name
104 repo_name = repo.repo_name
102 username = TEST_USER_ADMIN_LOGIN
105 username = TEST_USER_ADMIN_LOGIN
103 branch_permission_setter(repo_name, username, permission='branch.push')
106 branch_permission_setter(repo_name, username, permission='branch.push')
104
107
105 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
108 clone_url = rcstack.repo_clone_url(repo.repo_name)
106 Command(os.path.dirname(tmpdir.strpath)).execute(
109 Command(os.path.dirname(tmpdir.strpath)).execute(
107 'git clone', clone_url, tmpdir.strpath)
110 'git clone', clone_url, tmpdir.strpath)
108
111
@@ -25,8 +25,6 b' Test suite for making push/pull operatio'
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
26 to redirect things to stderr instead of stdout.
27 """
27 """
28
29
30 import time
28 import time
31 import pytest
29 import pytest
32
30
@@ -42,41 +40,46 b' from rhodecode.tests.vcs_operations impo'
42 _add_files_and_push, GIT_REPO_WITH_GROUP)
40 _add_files_and_push, GIT_REPO_WITH_GROUP)
43
41
44
42
45 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
43 @pytest.mark.usefixtures(
46 class TestVCSOperations(object):
44 "init_pyramid_app",
45 "repo_group_repos",
46 "disable_anonymous_user",
47 "disable_locking",
48 )
49 class TestVCSOperationsGit:
47
50
48 def test_clone_git_repo_by_admin(self, rc_web_server, tmpdir):
51 def test_clone_git_repo_by_admin(self, rcstack, tmpdir):
49 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
52 clone_url = rcstack.repo_clone_url(GIT_REPO)
50 cmd = Command('/tmp')
53 cmd = Command(tmpdir.strpath)
51 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
54 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
52 _check_proper_clone(stdout, stderr, 'git')
55 _check_proper_clone(stdout, stderr, 'git')
53 cmd.assert_returncode_success()
56 cmd.assert_returncode_success()
54
57
55 def test_clone_git_repo_by_admin_with_git_suffix(self, rc_web_server, tmpdir):
58 def test_clone_git_repo_by_admin_with_git_suffix(self, rcstack, tmpdir):
56 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
59 clone_url = rcstack.repo_clone_url(GIT_REPO)
57 cmd = Command('/tmp')
60 cmd = Command(tmpdir.strpath)
58 stdout, stderr = cmd.execute('git clone', clone_url+".git", tmpdir.strpath)
61 stdout, stderr = cmd.execute('git clone', clone_url+".git", tmpdir.strpath)
59 _check_proper_clone(stdout, stderr, 'git')
62 _check_proper_clone(stdout, stderr, 'git')
60 cmd.assert_returncode_success()
63 cmd.assert_returncode_success()
61
64
62 def test_clone_git_repo_by_id_by_admin(self, rc_web_server, tmpdir):
65 def test_clone_git_repo_by_id_by_admin(self, rcstack, tmpdir):
63 repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id
66 repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id
64 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
67 clone_url = rcstack.repo_clone_url('_%s' % repo_id)
65 cmd = Command('/tmp')
68 cmd = Command(tmpdir.strpath)
66 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
69 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
67 _check_proper_clone(stdout, stderr, 'git')
70 _check_proper_clone(stdout, stderr, 'git')
68 cmd.assert_returncode_success()
71 cmd.assert_returncode_success()
69
72
70 def test_clone_git_repo_with_group_by_admin(self, rc_web_server, tmpdir):
73 def test_clone_git_repo_with_group_by_admin(self, rcstack, tmpdir):
71 clone_url = rc_web_server.repo_clone_url(GIT_REPO_WITH_GROUP)
74 clone_url = rcstack.repo_clone_url(GIT_REPO_WITH_GROUP)
72 cmd = Command('/tmp')
75 cmd = Command(tmpdir.strpath)
73 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
76 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
74 _check_proper_clone(stdout, stderr, 'git')
77 _check_proper_clone(stdout, stderr, 'git')
75 cmd.assert_returncode_success()
78 cmd.assert_returncode_success()
76
79
77 def test_clone_git_repo_shallow_by_admin(self, rc_web_server, tmpdir):
80 def test_clone_git_repo_shallow_by_admin(self, rcstack, tmpdir):
78 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
81 clone_url = rcstack.repo_clone_url(GIT_REPO)
79 cmd = Command('/tmp')
82 cmd = Command(tmpdir.strpath)
80 stdout, stderr = cmd.execute(
83 stdout, stderr = cmd.execute(
81 'git clone --depth=1', clone_url, tmpdir.strpath)
84 'git clone --depth=1', clone_url, tmpdir.strpath)
82
85
@@ -84,65 +87,64 b' class TestVCSOperations(object):'
84 assert 'Cloning into' in stderr
87 assert 'Cloning into' in stderr
85 cmd.assert_returncode_success()
88 cmd.assert_returncode_success()
86
89
87
90 def test_clone_wrong_credentials_git(self, rcstack, tmpdir):
88 def test_clone_wrong_credentials_git(self, rc_web_server, tmpdir):
91 clone_url = rcstack.repo_clone_url(GIT_REPO, passwd='bad!')
89 clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
92 stdout, stderr = Command(tmpdir.strpath).execute(
90 stdout, stderr = Command('/tmp').execute(
91 'git clone', clone_url, tmpdir.strpath)
93 'git clone', clone_url, tmpdir.strpath)
92 assert 'fatal: Authentication failed' in stderr
94 assert 'fatal: Authentication failed' in stderr
93
95
94 def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
96 def test_clone_git_dir_as_hg(self, rcstack, tmpdir):
95 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
97 clone_url = rcstack.repo_clone_url(GIT_REPO)
96 stdout, stderr = Command('/tmp').execute(
98 stdout, stderr = Command(tmpdir.strpath).execute(
97 'hg clone', clone_url, tmpdir.strpath)
99 'hg clone', clone_url, tmpdir.strpath)
98 assert 'HTTP Error 404: Not Found' in stderr
100 assert 'HTTP Error 404: Not Found' in stderr
99
101
100 def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
102 def test_clone_non_existing_path_hg(self, rcstack, tmpdir):
101 clone_url = rc_web_server.repo_clone_url('trololo')
103 clone_url = rcstack.repo_clone_url('trololo')
102 stdout, stderr = Command('/tmp').execute(
104 stdout, stderr = Command(tmpdir.strpath).execute(
103 'hg clone', clone_url, tmpdir.strpath)
105 'hg clone', clone_url, tmpdir.strpath)
104 assert 'HTTP Error 404: Not Found' in stderr
106 assert 'HTTP Error 404: Not Found' in stderr
105
107
106 def test_clone_non_existing_path_git(self, rc_web_server, tmpdir):
108 def test_clone_non_existing_path_git(self, rcstack, tmpdir):
107 clone_url = rc_web_server.repo_clone_url('trololo')
109 clone_url = rcstack.repo_clone_url('trololo')
108 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
110 stdout, stderr = Command(tmpdir.strpath).execute('git clone', clone_url)
109 assert 'not found' in stderr
111 assert 'not found' in stderr
110
112
111 def test_clone_git_with_slashes(self, rc_web_server, tmpdir):
113 def test_clone_git_with_slashes(self, rcstack, tmpdir):
112 clone_url = rc_web_server.repo_clone_url('//' + GIT_REPO)
114 clone_url = rcstack.repo_clone_url('//' + GIT_REPO)
113 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
115 stdout, stderr = Command(tmpdir.strpath).execute('git clone', clone_url)
114 assert 'not found' in stderr
116 assert 'not found' in stderr
115
117
116 def test_clone_existing_path_git_not_in_database(
118 def test_clone_existing_path_git_not_in_database(
117 self, rc_web_server, tmpdir, fs_repo_only):
119 self, rcstack, tmpdir, fs_repo_only):
118 db_name = fs_repo_only('not-in-db-git', repo_type='git')
120 db_name = fs_repo_only('not-in-db-git', repo_type='git')
119 clone_url = rc_web_server.repo_clone_url(db_name)
121 clone_url = rcstack.repo_clone_url(db_name)
120 stdout, stderr = Command('/tmp').execute(
122 stdout, stderr = Command(tmpdir.strpath).execute(
121 'git clone', clone_url, tmpdir.strpath)
123 'git clone', clone_url, tmpdir.strpath)
122 assert 'not found' in stderr
124 assert 'not found' in stderr
123
125
124 def test_clone_existing_path_git_not_in_database_different_scm(
126 def test_clone_existing_path_git_not_in_database_different_scm(
125 self, rc_web_server, tmpdir, fs_repo_only):
127 self, rcstack, tmpdir, fs_repo_only):
126 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
128 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
127 clone_url = rc_web_server.repo_clone_url(db_name)
129 clone_url = rcstack.repo_clone_url(db_name)
128 stdout, stderr = Command('/tmp').execute(
130 stdout, stderr = Command(tmpdir.strpath).execute(
129 'git clone', clone_url, tmpdir.strpath)
131 'git clone', clone_url, tmpdir.strpath)
130 assert 'not found' in stderr
132 assert 'not found' in stderr
131
133
132 def test_clone_non_existing_store_path_git(self, rc_web_server, tmpdir, user_util):
134 def test_clone_non_existing_store_path_git(self, rcstack, tmpdir, user_util):
133 repo = user_util.create_repo(repo_type='git')
135 repo = user_util.create_repo(repo_type='git')
134 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
136 clone_url = rcstack.repo_clone_url(repo.repo_name)
135
137
136 # Damage repo by removing it's folder
138 # Damage repo by removing it's folder
137 RepoModel()._delete_filesystem_repo(repo)
139 RepoModel()._delete_filesystem_repo(repo)
138
140
139 stdout, stderr = Command('/tmp').execute(
141 stdout, stderr = Command(tmpdir.strpath).execute(
140 'git clone', clone_url, tmpdir.strpath)
142 'git clone', clone_url, tmpdir.strpath)
141 assert 'not found' in stderr
143 assert 'not found' in stderr
142
144
143 def test_push_new_file_git(self, rc_web_server, tmpdir):
145 def test_push_new_file_git(self, rcstack, tmpdir):
144 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
146 clone_url = rcstack.repo_clone_url(GIT_REPO)
145 stdout, stderr = Command('/tmp').execute(
147 stdout, stderr = Command(tmpdir.strpath).execute(
146 'git clone', clone_url, tmpdir.strpath)
148 'git clone', clone_url, tmpdir.strpath)
147
149
148 # commit some stuff into this repo
150 # commit some stuff into this repo
@@ -151,37 +153,37 b' class TestVCSOperations(object):'
151
153
152 _check_proper_git_push(stdout, stderr)
154 _check_proper_git_push(stdout, stderr)
153
155
154 def test_push_wrong_credentials_git(self, rc_web_server, tmpdir):
156 def test_push_wrong_credentials_git(self, rcstack, tmpdir):
155 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
157 clone_url = rcstack.repo_clone_url(GIT_REPO)
156 stdout, stderr = Command('/tmp').execute(
158 stdout, stderr = Command(tmpdir.strpath).execute(
157 'git clone', clone_url, tmpdir.strpath)
159 'git clone', clone_url, tmpdir.strpath)
158
160
159 push_url = rc_web_server.repo_clone_url(
161 push_url = rcstack.repo_clone_url(
160 GIT_REPO, user='bad', passwd='name')
162 GIT_REPO, user='bad', passwd='name')
161 stdout, stderr = _add_files_and_push(
163 stdout, stderr = _add_files_and_push(
162 'git', tmpdir.strpath, clone_url=push_url)
164 'git', tmpdir.strpath, clone_url=push_url)
163
165
164 assert 'fatal: Authentication failed' in stderr
166 assert 'fatal: Authentication failed' in stderr
165
167
166 def test_push_back_to_wrong_url_git(self, rc_web_server, tmpdir):
168 def test_push_back_to_wrong_url_git(self, rcstack, tmpdir):
167 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
169 clone_url = rcstack.repo_clone_url(GIT_REPO)
168 stdout, stderr = Command('/tmp').execute(
170 stdout, stderr = Command(tmpdir.strpath).execute(
169 'git clone', clone_url, tmpdir.strpath)
171 'git clone', clone_url, tmpdir.strpath)
170
172
171 stdout, stderr = _add_files_and_push(
173 stdout, stderr = _add_files_and_push(
172 'git', tmpdir.strpath,
174 'git', tmpdir.strpath,
173 clone_url=rc_web_server.repo_clone_url('not-existing'))
175 clone_url=rcstack.repo_clone_url('not-existing'))
174
176
175 assert 'not found' in stderr
177 assert 'not found' in stderr
176
178
177 def test_ip_restriction_git(self, rc_web_server, tmpdir):
179 def test_ip_restriction_git(self, rcstack, tmpdir):
178 user_model = UserModel()
180 user_model = UserModel()
179 try:
181 try:
180 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
182 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
181 Session().commit()
183 Session().commit()
182 time.sleep(2)
184 time.sleep(2)
183 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
185 clone_url = rcstack.repo_clone_url(GIT_REPO)
184 stdout, stderr = Command('/tmp').execute(
186 stdout, stderr = Command(tmpdir.strpath).execute(
185 'git clone', clone_url, tmpdir.strpath)
187 'git clone', clone_url, tmpdir.strpath)
186 msg = "The requested URL returned error: 403"
188 msg = "The requested URL returned error: 403"
187 assert msg in stderr
189 assert msg in stderr
@@ -193,7 +195,7 b' class TestVCSOperations(object):'
193
195
194 time.sleep(2)
196 time.sleep(2)
195
197
196 cmd = Command('/tmp')
198 cmd = Command(tmpdir.strpath)
197 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
199 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
198 cmd.assert_returncode_success()
200 cmd.assert_returncode_success()
199 _check_proper_clone(stdout, stderr, 'git')
201 _check_proper_clone(stdout, stderr, 'git')
@@ -1,5 +1,4 b''
1
1 # Copyright (C) 2010-2024 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
@@ -19,11 +18,6 b''
19
18
20 """
19 """
21 Test suite for making push/pull operations, on specially modified INI files
20 Test suite for making push/pull operations, on specially modified INI files
22
23 .. important::
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
27 """
21 """
28
22
29
23
@@ -42,96 +36,101 b' from rhodecode.tests.vcs_operations impo'
42 Command, _check_proper_clone, _add_files_and_push, HG_REPO_WITH_GROUP)
36 Command, _check_proper_clone, _add_files_and_push, HG_REPO_WITH_GROUP)
43
37
44
38
45 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
39 @pytest.mark.usefixtures(
46 class TestVCSOperations(object):
40 "init_pyramid_app",
41 "repo_group_repos",
42 "disable_anonymous_user",
43 "disable_locking",
44 )
45 class TestVCSOperationsHg(object):
47
46
48 def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir):
47 def test_clone_hg_repo_by_admin(self, rcstack, tmpdir):
49 clone_url = rc_web_server.repo_clone_url(HG_REPO)
48 clone_url = rcstack.repo_clone_url(HG_REPO)
50 stdout, stderr = Command('/tmp').execute(
49 stdout, stderr = Command(tmpdir.strpath).execute(
51 'hg clone', clone_url, tmpdir.strpath)
50 'hg clone', clone_url, tmpdir.strpath)
52 _check_proper_clone(stdout, stderr, 'hg')
51 _check_proper_clone(stdout, stderr, 'hg')
53
52
54 def test_clone_hg_repo_by_admin_pull_protocol(self, rc_web_server, tmpdir):
53 def test_clone_hg_repo_by_admin_pull_protocol(self, rcstack, tmpdir):
55 clone_url = rc_web_server.repo_clone_url(HG_REPO)
54 clone_url = rcstack.repo_clone_url(HG_REPO)
56 stdout, stderr = Command('/tmp').execute(
55 stdout, stderr = Command(tmpdir.strpath).execute(
57 'hg clone --pull', clone_url, tmpdir.strpath)
56 'hg clone --pull', clone_url, tmpdir.strpath)
58 _check_proper_clone(stdout, stderr, 'hg')
57 _check_proper_clone(stdout, stderr, 'hg')
59
58
60 def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rc_web_server, tmpdir):
59 def test_clone_hg_repo_by_admin_pull_stream_protocol(self, rcstack, tmpdir):
61 clone_url = rc_web_server.repo_clone_url(HG_REPO)
60 clone_url = rcstack.repo_clone_url(HG_REPO)
62 stdout, stderr = Command('/tmp').execute(
61 stdout, stderr = Command(tmpdir.strpath).execute(
63 'hg clone --pull --stream', clone_url, tmpdir.strpath)
62 'hg clone --pull --stream', clone_url, tmpdir.strpath)
64 assert 'files to transfer,' in stdout
63 assert 'files to transfer,' in stdout
65 assert 'transferred 1.' in stdout
64 assert 'transferred 1.' in stdout
66 assert '114 files updated,' in stdout
65 assert '114 files updated,' in stdout
67
66
68 def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir):
67 def test_clone_hg_repo_by_id_by_admin(self, rcstack, tmpdir):
69 repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
68 repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
70 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
69 clone_url = rcstack.repo_clone_url('_%s' % repo_id)
71 stdout, stderr = Command('/tmp').execute(
70 stdout, stderr = Command(tmpdir.strpath).execute(
72 'hg clone', clone_url, tmpdir.strpath)
71 'hg clone', clone_url, tmpdir.strpath)
73 _check_proper_clone(stdout, stderr, 'hg')
72 _check_proper_clone(stdout, stderr, 'hg')
74
73
75 def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir):
74 def test_clone_hg_repo_with_group_by_admin(self, rcstack, tmpdir):
76 clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP)
75 clone_url = rcstack.repo_clone_url(HG_REPO_WITH_GROUP)
77 stdout, stderr = Command('/tmp').execute(
76 stdout, stderr = Command(tmpdir.strpath).execute(
78 'hg clone', clone_url, tmpdir.strpath)
77 'hg clone', clone_url, tmpdir.strpath)
79 _check_proper_clone(stdout, stderr, 'hg')
78 _check_proper_clone(stdout, stderr, 'hg')
80
79
81 def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir):
80 def test_clone_wrong_credentials_hg(self, rcstack, tmpdir):
82 clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
81 clone_url = rcstack.repo_clone_url(HG_REPO, passwd='bad!')
83 stdout, stderr = Command('/tmp').execute(
82 stdout, stderr = Command(tmpdir.strpath).execute(
84 'hg clone', clone_url, tmpdir.strpath)
83 'hg clone', clone_url, tmpdir.strpath)
85 assert 'abort: authorization failed' in stderr
84 assert 'abort: authorization failed' in stderr
86
85
87 def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
86 def test_clone_git_dir_as_hg(self, rcstack, tmpdir):
88 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
87 clone_url = rcstack.repo_clone_url(GIT_REPO)
89 stdout, stderr = Command('/tmp').execute(
88 stdout, stderr = Command(tmpdir.strpath).execute(
90 'hg clone', clone_url, tmpdir.strpath)
89 'hg clone', clone_url, tmpdir.strpath)
91 assert 'HTTP Error 404: Not Found' in stderr
90 assert 'HTTP Error 404: Not Found' in stderr
92
91
93 def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
92 def test_clone_non_existing_path_hg(self, rcstack, tmpdir):
94 clone_url = rc_web_server.repo_clone_url('trololo')
93 clone_url = rcstack.repo_clone_url('trololo')
95 stdout, stderr = Command('/tmp').execute(
94 stdout, stderr = Command(tmpdir.strpath).execute(
96 'hg clone', clone_url, tmpdir.strpath)
95 'hg clone', clone_url, tmpdir.strpath)
97 assert 'HTTP Error 404: Not Found' in stderr
96 assert 'HTTP Error 404: Not Found' in stderr
98
97
99 def test_clone_hg_with_slashes(self, rc_web_server, tmpdir):
98 def test_clone_hg_with_slashes(self, rcstack, tmpdir):
100 clone_url = rc_web_server.repo_clone_url('//' + HG_REPO)
99 clone_url = rcstack.repo_clone_url('//' + HG_REPO)
101 stdout, stderr = Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath)
100 stdout, stderr = Command(tmpdir.strpath).execute('hg clone', clone_url, tmpdir.strpath)
102 assert 'HTTP Error 404: Not Found' in stderr
101 assert 'HTTP Error 404: Not Found' in stderr
103
102
104 def test_clone_existing_path_hg_not_in_database(
103 def test_clone_existing_path_hg_not_in_database(
105 self, rc_web_server, tmpdir, fs_repo_only):
104 self, rcstack, tmpdir, fs_repo_only):
106
105
107 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
106 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
108 clone_url = rc_web_server.repo_clone_url(db_name)
107 clone_url = rcstack.repo_clone_url(db_name)
109 stdout, stderr = Command('/tmp').execute(
108 stdout, stderr = Command(tmpdir.strpath).execute(
110 'hg clone', clone_url, tmpdir.strpath)
109 'hg clone', clone_url, tmpdir.strpath)
111 assert 'HTTP Error 404: Not Found' in stderr
110 assert 'HTTP Error 404: Not Found' in stderr
112
111
113 def test_clone_existing_path_hg_not_in_database_different_scm(
112 def test_clone_existing_path_hg_not_in_database_different_scm(
114 self, rc_web_server, tmpdir, fs_repo_only):
113 self, rcstack, tmpdir, fs_repo_only):
115 db_name = fs_repo_only('not-in-db-git', repo_type='git')
114 db_name = fs_repo_only('not-in-db-git', repo_type='git')
116 clone_url = rc_web_server.repo_clone_url(db_name)
115 clone_url = rcstack.repo_clone_url(db_name)
117 stdout, stderr = Command('/tmp').execute(
116 stdout, stderr = Command(tmpdir.strpath).execute(
118 'hg clone', clone_url, tmpdir.strpath)
117 'hg clone', clone_url, tmpdir.strpath)
119 assert 'HTTP Error 404: Not Found' in stderr
118 assert 'HTTP Error 404: Not Found' in stderr
120
119
121 def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util):
120 def test_clone_non_existing_store_path_hg(self, rcstack, tmpdir, user_util):
122 repo = user_util.create_repo()
121 repo = user_util.create_repo()
123 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
122 clone_url = rcstack.repo_clone_url(repo.repo_name)
124
123
125 # Damage repo by removing it's folder
124 # Damage repo by removing it's folder
126 RepoModel()._delete_filesystem_repo(repo)
125 RepoModel()._delete_filesystem_repo(repo)
127
126
128 stdout, stderr = Command('/tmp').execute(
127 stdout, stderr = Command(tmpdir.strpath).execute(
129 'hg clone', clone_url, tmpdir.strpath)
128 'hg clone', clone_url, tmpdir.strpath)
130 assert 'HTTP Error 404: Not Found' in stderr
129 assert 'HTTP Error 404: Not Found' in stderr
131
130
132 def test_push_new_file_hg(self, rc_web_server, tmpdir):
131 def test_push_new_file_hg(self, rcstack, tmpdir):
133 clone_url = rc_web_server.repo_clone_url(HG_REPO)
132 clone_url = rcstack.repo_clone_url(HG_REPO)
134 stdout, stderr = Command('/tmp').execute(
133 stdout, stderr = Command(tmpdir.strpath).execute(
135 'hg clone', clone_url, tmpdir.strpath)
134 'hg clone', clone_url, tmpdir.strpath)
136
135
137 stdout, stderr = _add_files_and_push(
136 stdout, stderr = _add_files_and_push(
@@ -140,7 +139,7 b' class TestVCSOperations(object):'
140 assert 'pushing to' in stdout
139 assert 'pushing to' in stdout
141 assert 'size summary' in stdout
140 assert 'size summary' in stdout
142
141
143 def test_push_invalidates_cache(self, rc_web_server, tmpdir):
142 def test_push_invalidates_cache(self, rcstack, tmpdir):
144 hg_repo = Repository.get_by_repo_name(HG_REPO)
143 hg_repo = Repository.get_by_repo_name(HG_REPO)
145
144
146 # init cache objects
145 # init cache objects
@@ -159,8 +158,8 b' class TestVCSOperations(object):'
159 old_ids = [x.cache_state_uid for x in cache_keys]
158 old_ids = [x.cache_state_uid for x in cache_keys]
160
159
161 # clone to init cache
160 # clone to init cache
162 clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name)
161 clone_url = rcstack.repo_clone_url(hg_repo.repo_name)
163 stdout, stderr = Command('/tmp').execute(
162 stdout, stderr = Command(tmpdir.strpath).execute(
164 'hg clone', clone_url, tmpdir.strpath)
163 'hg clone', clone_url, tmpdir.strpath)
165
164
166 cache_keys = hg_repo.cache_keys
165 cache_keys = hg_repo.cache_keys
@@ -180,37 +179,37 b' class TestVCSOperations(object):'
180 new_ids = [x.cache_state_uid for x in cache_keys]
179 new_ids = [x.cache_state_uid for x in cache_keys]
181 assert new_ids != old_ids
180 assert new_ids != old_ids
182
181
183 def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
182 def test_push_wrong_credentials_hg(self, rcstack, tmpdir):
184 clone_url = rc_web_server.repo_clone_url(HG_REPO)
183 clone_url = rcstack.repo_clone_url(HG_REPO)
185 stdout, stderr = Command('/tmp').execute(
184 stdout, stderr = Command(tmpdir.strpath).execute(
186 'hg clone', clone_url, tmpdir.strpath)
185 'hg clone', clone_url, tmpdir.strpath)
187
186
188 push_url = rc_web_server.repo_clone_url(
187 push_url = rcstack.repo_clone_url(
189 HG_REPO, user='bad', passwd='name')
188 HG_REPO, user='bad', passwd='name')
190 stdout, stderr = _add_files_and_push(
189 stdout, stderr = _add_files_and_push(
191 'hg', tmpdir.strpath, clone_url=push_url)
190 'hg', tmpdir.strpath, clone_url=push_url)
192
191
193 assert 'abort: authorization failed' in stderr
192 assert 'abort: authorization failed' in stderr
194
193
195 def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir):
194 def test_push_back_to_wrong_url_hg(self, rcstack, tmpdir):
196 clone_url = rc_web_server.repo_clone_url(HG_REPO)
195 clone_url = rcstack.repo_clone_url(HG_REPO)
197 stdout, stderr = Command('/tmp').execute(
196 stdout, stderr = Command(tmpdir.strpath).execute(
198 'hg clone', clone_url, tmpdir.strpath)
197 'hg clone', clone_url, tmpdir.strpath)
199
198
200 stdout, stderr = _add_files_and_push(
199 stdout, stderr = _add_files_and_push(
201 'hg', tmpdir.strpath,
200 'hg', tmpdir.strpath,
202 clone_url=rc_web_server.repo_clone_url('not-existing'))
201 clone_url=rcstack.repo_clone_url('not-existing'))
203
202
204 assert 'HTTP Error 404: Not Found' in stderr
203 assert 'HTTP Error 404: Not Found' in stderr
205
204
206 def test_ip_restriction_hg(self, rc_web_server, tmpdir):
205 def test_ip_restriction_hg(self, rcstack, tmpdir):
207 user_model = UserModel()
206 user_model = UserModel()
208 try:
207 try:
209 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
208 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
210 Session().commit()
209 Session().commit()
211 time.sleep(2)
210 time.sleep(2)
212 clone_url = rc_web_server.repo_clone_url(HG_REPO)
211 clone_url = rcstack.repo_clone_url(HG_REPO)
213 stdout, stderr = Command('/tmp').execute(
212 stdout, stderr = Command(tmpdir.strpath).execute(
214 'hg clone', clone_url, tmpdir.strpath)
213 'hg clone', clone_url, tmpdir.strpath)
215 assert 'abort: HTTP Error 403: Forbidden' in stderr
214 assert 'abort: HTTP Error 403: Forbidden' in stderr
216 finally:
215 finally:
@@ -221,6 +220,6 b' class TestVCSOperations(object):'
221
220
222 time.sleep(2)
221 time.sleep(2)
223
222
224 stdout, stderr = Command('/tmp').execute(
223 stdout, stderr = Command(tmpdir.strpath).execute(
225 'hg clone', clone_url, tmpdir.strpath)
224 'hg clone', clone_url, tmpdir.strpath)
226 _check_proper_clone(stdout, stderr, 'hg')
225 _check_proper_clone(stdout, stderr, 'hg')
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -19,11 +18,6 b''
19
18
20 """
19 """
21 Test suite for making push/pull operations, on specially modified INI files
20 Test suite for making push/pull operations, on specially modified INI files
22
23 .. important::
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
27 """
21 """
28
22
29 import pytest
23 import pytest
@@ -37,32 +31,41 b' from rhodecode.tests.vcs_operations.conf'
37 connection_available = pytest.mark.skipif(
31 connection_available = pytest.mark.skipif(
38 not check_httpbin_connection(), reason="No outside internet connection available")
32 not check_httpbin_connection(), reason="No outside internet connection available")
39
33
40
34 @pytest.mark.usefixtures(
41 @pytest.mark.usefixtures("baseapp", "enable_webhook_push_integration")
35 "init_pyramid_app",
42 class TestVCSOperationsOnCustomIniConfig(object):
36 "repo_group_repos",
37 "disable_anonymous_user",
38 "disable_locking",
39 "enable_webhook_push_integration"
40 )
41 class TestVCSOperationsOnIntegrationsTrigger(object):
43
42
44 def test_push_with_webhook_hg(self, rc_web_server, tmpdir):
43 def test_push_with_webhook_hg(self, rcstack, tmpdir):
45 clone_url = rc_web_server.repo_clone_url(HG_REPO)
44 clone_url = rcstack.repo_clone_url(HG_REPO)
46
45
47 Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath)
46 Command(tmpdir.strpath).execute('hg clone', clone_url, tmpdir.strpath)
48
47
49 push_url = rc_web_server.repo_clone_url(HG_REPO)
48 push_url = rcstack.repo_clone_url(HG_REPO)
50 _add_files_and_push('hg', tmpdir.strpath, clone_url=push_url)
49 _add_files_and_push('hg', tmpdir.strpath, clone_url=push_url)
51
50
52 rc_log = rc_web_server.get_rc_log()
51 celery_log = open(rcstack.rcstack_data.celery_log).read()
53 assert 'ERROR' not in rc_log
52 assert 'ERROR' not in celery_log
54 assert "executing task TASK:<@task: rhodecode.integrations.types.webhook.post_to_webhook" in rc_log
53 assert "executing task TASK:<@task: rhodecode.integrations.types.webhook.post_to_webhook" in celery_log
55 assert "handling event repo-push with integration <rhodecode.integrations.types.webhook.WebhookIntegrationType" in rc_log
54 assert "handling event repo-push with integration <rhodecode.integrations.types.webhook.WebhookIntegrationType" in celery_log
56
55
57 def test_push_with_webhook_git(self, rc_web_server, tmpdir):
56 def test_push_with_webhook_git(self, rcstack, tmpdir):
58 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
57 clone_url = rcstack.repo_clone_url(GIT_REPO)
59
58
60 Command('/tmp').execute('git clone', clone_url, tmpdir.strpath)
59 Command(tmpdir.strpath).execute('git clone', clone_url, tmpdir.strpath)
61
60
62 push_url = rc_web_server.repo_clone_url(GIT_REPO)
61 push_url = rcstack.repo_clone_url(GIT_REPO)
63 _add_files_and_push('git', tmpdir.strpath, clone_url=push_url)
62 _add_files_and_push('git', tmpdir.strpath, clone_url=push_url)
64
63
65 rc_log = rc_web_server.get_rc_log()
64 celery_log = open(rcstack.rcstack_data.celery_log).read()
66 assert 'ERROR' not in rc_log
65 assert 'ERROR' not in celery_log
67 assert "executing task TASK:<@task: rhodecode.integrations.types.webhook.post_to_webhook" in rc_log
66 assert "executing task TASK:<@task: rhodecode.integrations.types.webhook.post_to_webhook" in celery_log
68 assert "handling event repo-push with integration <rhodecode.integrations.types.webhook.WebhookIntegrationType" in rc_log
67 assert "handling event repo-push with integration <rhodecode.integrations.types.webhook.WebhookIntegrationType" in celery_log
68
69 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
70 def test_push_with_webhook_svn(self, rcstack, tmpdir):
71 raise Exception('Not implemented')
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -19,157 +18,127 b''
19
18
20 """
19 """
21 Test suite for making push/pull operations, on specially modified INI files
20 Test suite for making push/pull operations, on specially modified INI files
22
23 .. important::
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
27 """
21 """
28
22
29
30 import pytest
23 import pytest
31
24
32 from rhodecode.model.db import User, Repository
25 from rhodecode.model.db import User, Repository
33 from rhodecode.model.meta import Session
26 from rhodecode.model.meta import Session
34 from rhodecode.model.repo import RepoModel
27 from rhodecode.model.repo import RepoModel
35
28
36 from rhodecode.tests import (
29 from rhodecode.tests import GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS
37 GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN,
30 from rhodecode.tests.vcs_operations import Command, _check_proper_clone, _check_proper_git_push, _add_files_and_push
38 TEST_USER_REGULAR_PASS)
39 from rhodecode.tests.vcs_operations import (
40 Command, _check_proper_clone, _check_proper_git_push, _add_files_and_push)
41
31
42
32
43 @pytest.fixture(scope="module")
33 custom_code = [
44 def rc_web_server_config_modification():
34 {'app:main': {'auth_ret_code': '423'}},
45 return [
35 ]
46 {'app:main': {'lock_ret_code': '423'}},
47 ]
48
36
49
37 @pytest.mark.parametrize('rcstack', custom_code, indirect=True)
50 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
38 @pytest.mark.usefixtures(
51 class TestVCSOperationsOnCustomIniConfig(object):
39 "init_pyramid_app",
52
40 "repo_group_repos",
53 def test_clone_and_create_lock_hg(self, rc_web_server, tmpdir):
41 "disable_anonymous_user",
42 "disable_locking",
43 )
44 class TestVCSOperationsOnLockingRepos(object):
45 def test_clone_and_create_lock_hg(self, rcstack, tmpdir):
54 # enable locking
46 # enable locking
55 r = Repository.get_by_repo_name(HG_REPO)
47 r = Repository.get_by_repo_name(HG_REPO)
56 r.enable_locking = True
48 r.enable_locking = True
57 Session().add(r)
49 Session().add(r)
58 Session().commit()
50 Session().commit()
59 # clone
51 # clone
60 clone_url = rc_web_server.repo_clone_url(HG_REPO)
52 clone_url = rcstack.repo_clone_url(HG_REPO)
61 stdout, stderr = Command('/tmp').execute(
53 stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath)
62 'hg clone', clone_url, tmpdir.strpath)
63
54
64 # check if lock was made
55 # check if lock was made
65 r = Repository.get_by_repo_name(HG_REPO)
56 r = Repository.get_by_repo_name(HG_REPO)
66 assert r.locked[0] == User.get_by_username(
57 assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id
67 TEST_USER_ADMIN_LOGIN).user_id
68
58
69 def test_clone_and_create_lock_git(self, rc_web_server, tmpdir):
59 def test_clone_and_create_lock_git(self, rcstack, tmpdir):
70 # enable locking
60 # enable locking
71 r = Repository.get_by_repo_name(GIT_REPO)
61 r = Repository.get_by_repo_name(GIT_REPO)
72 r.enable_locking = True
62 r.enable_locking = True
73 Session().add(r)
63 Session().add(r)
74 Session().commit()
64 Session().commit()
75 # clone
65 # clone
76 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
66 clone_url = rcstack.repo_clone_url(GIT_REPO)
77 stdout, stderr = Command('/tmp').execute(
67 stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath)
78 'git clone', clone_url, tmpdir.strpath)
79
68
80 # check if lock was made
69 # check if lock was made
81 r = Repository.get_by_repo_name(GIT_REPO)
70 r = Repository.get_by_repo_name(GIT_REPO)
82 assert r.locked[0] == User.get_by_username(
71 assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id
83 TEST_USER_ADMIN_LOGIN).user_id
84
72
85 def test_clone_after_repo_was_locked_hg(self, rc_web_server, tmpdir):
73 def test_clone_after_repo_was_locked_hg(self, rcstack, tmpdir):
86 # lock repo
74 # lock repo
87 r = Repository.get_by_repo_name(HG_REPO)
75 r = Repository.get_by_repo_name(HG_REPO)
88 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
76 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
89 # pull fails since repo is locked
77 # pull fails since repo is locked
90 clone_url = rc_web_server.repo_clone_url(HG_REPO)
78 clone_url = rcstack.repo_clone_url(HG_REPO)
91 stdout, stderr = Command('/tmp').execute(
79 stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath)
92 'hg clone', clone_url, tmpdir.strpath)
80 msg = f"abort: HTTP Error 423: Repository `{HG_REPO}` locked by user `{TEST_USER_ADMIN_LOGIN}`"
93 msg = ("""abort: HTTP Error 423: Repository `%s` locked by user `%s`"""
94 % (HG_REPO, TEST_USER_ADMIN_LOGIN))
95 assert msg in stderr
81 assert msg in stderr
96
82
97 def test_clone_after_repo_was_locked_git(self, rc_web_server, tmpdir):
83 def test_clone_after_repo_was_locked_git(self, rcstack, tmpdir):
98 # lock repo
84 # lock repo
99 r = Repository.get_by_repo_name(GIT_REPO)
85 r = Repository.get_by_repo_name(GIT_REPO)
100 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
86 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
101 # pull fails since repo is locked
87 # pull fails since repo is locked
102 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
88 clone_url = rcstack.repo_clone_url(GIT_REPO)
103 stdout, stderr = Command('/tmp').execute(
89 stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath)
104 'git clone', clone_url, tmpdir.strpath)
105
90
106 lock_msg = (
91 lock_msg = "remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`"
107 'remote: ERROR: Repository `vcs_test_git` locked by user ' +
108 '`test_admin`. Reason:`lock_auto`')
109 assert lock_msg in stderr
92 assert lock_msg in stderr
110 assert 'remote: Pre pull hook failed: aborting' in stderr
93 assert "fatal: remote did not send all necessary objects" in stderr
111 assert 'fatal: remote did not send all necessary objects' in stderr
94 assert "remote: Pre pull hook failed: aborting" in stderr
112
95
113 def test_push_on_locked_repo_by_other_user_hg(self, rc_web_server, tmpdir):
96 def test_push_on_locked_repo_by_other_user_hg(self, rcstack, tmpdir):
114 clone_url = rc_web_server.repo_clone_url(HG_REPO)
97 clone_url = rcstack.repo_clone_url(HG_REPO)
115 stdout, stderr = Command('/tmp').execute(
98 stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath)
116 'hg clone', clone_url, tmpdir.strpath)
117
99
118 # lock repo
100 # lock repo
119 r = Repository.get_by_repo_name(HG_REPO)
101 r = Repository.get_by_repo_name(HG_REPO)
120 # let this user actually push !
102 # let this user actually push !
121 RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN,
103 RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, perm="repository.write")
122 perm='repository.write')
123 Session().commit()
104 Session().commit()
124 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
105 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
125
106
126 # push fails repo is locked by other user !
107 # push fails repo is locked by other user !
127 push_url = rc_web_server.repo_clone_url(
108 push_url = rcstack.repo_clone_url(HG_REPO, user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS)
128 HG_REPO,
109 stdout, stderr = _add_files_and_push("hg", tmpdir.strpath, clone_url=push_url)
129 user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS)
110 msg = f"abort: HTTP Error 423: Repository `{HG_REPO}` locked by user `{TEST_USER_ADMIN_LOGIN}`"
130 stdout, stderr = _add_files_and_push(
131 'hg', tmpdir.strpath, clone_url=push_url)
132 msg = ("""abort: HTTP Error 423: Repository `%s` locked by user `%s`"""
133 % (HG_REPO, TEST_USER_ADMIN_LOGIN))
134 assert msg in stderr
111 assert msg in stderr
135
112
136 def test_push_on_locked_repo_by_other_user_git(
113 def test_push_on_locked_repo_by_other_user_git(self, rcstack, tmpdir):
137 self, rc_web_server, tmpdir):
114 clone_url = rcstack.repo_clone_url(GIT_REPO)
138 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
115 stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath)
139 stdout, stderr = Command('/tmp').execute(
140 'git clone', clone_url, tmpdir.strpath)
141
116
142 # lock repo
117 # lock repo
143 r = Repository.get_by_repo_name(GIT_REPO)
118 r = Repository.get_by_repo_name(GIT_REPO)
144 # let this user actually push !
119 # let this user actually push !
145 RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN,
120 RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, perm="repository.write")
146 perm='repository.write')
147 Session().commit()
121 Session().commit()
148 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
122 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
149
123
150 # push fails repo is locked by other user!
124 # push fails repo is locked by other user!
151 push_url = rc_web_server.repo_clone_url(
125 push_url = rcstack.repo_clone_url(GIT_REPO, user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS)
152 GIT_REPO,
126 stdout, stderr = _add_files_and_push("git", tmpdir.strpath, clone_url=push_url)
153 user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS)
154 stdout, stderr = _add_files_and_push(
155 'git', tmpdir.strpath, clone_url=push_url)
156
127
157 err = 'Repository `%s` locked by user `%s`' % (
128 err = f"Repository `{GIT_REPO}` locked by user `{TEST_USER_ADMIN_LOGIN}`"
158 GIT_REPO, TEST_USER_ADMIN_LOGIN)
159 # err = 'RPC failed; result=22, HTTP code = 423'
129 # err = 'RPC failed; result=22, HTTP code = 423'
160 assert err in stderr
130 assert err in stderr
161
131
162 def test_push_unlocks_repository_hg(self, rc_web_server, tmpdir):
132 def test_push_unlocks_repository_hg(self, rcstack, tmpdir):
163 # enable locking
133 # enable locking
164 r = Repository.get_by_repo_name(HG_REPO)
134 r = Repository.get_by_repo_name(HG_REPO)
165 r.enable_locking = True
135 r.enable_locking = True
166 Session().add(r)
136 Session().add(r)
167 Session().commit()
137 Session().commit()
168
138
169 clone_url = rc_web_server.repo_clone_url(HG_REPO)
139 clone_url = rcstack.repo_clone_url(HG_REPO)
170 stdout, stderr = Command('/tmp').execute(
140 stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath)
171 'hg clone', clone_url, tmpdir.strpath)
141 _check_proper_clone(stdout, stderr, "hg")
172 _check_proper_clone(stdout, stderr, 'hg')
173
142
174 # check for lock repo after clone
143 # check for lock repo after clone
175 r = Repository.get_by_repo_name(HG_REPO)
144 r = Repository.get_by_repo_name(HG_REPO)
@@ -177,16 +146,14 b' class TestVCSOperationsOnCustomIniConfig'
177 assert r.locked[0] == uid
146 assert r.locked[0] == uid
178
147
179 # push is ok and repo is now unlocked
148 # push is ok and repo is now unlocked
180 stdout, stderr = _add_files_and_push(
149 stdout, stderr = _add_files_and_push("hg", tmpdir.strpath, clone_url=clone_url)
181 'hg', tmpdir.strpath, clone_url=clone_url)
150 assert f"remote: Released lock on repo `{HG_REPO}`" in stdout
182 assert ('remote: Released lock on repo `%s`' % HG_REPO) in stdout
183 # we need to cleanup the Session Here !
151 # we need to cleanup the Session Here !
184 Session.remove()
152 Session.remove()
185 r = Repository.get_by_repo_name(HG_REPO)
153 r = Repository.get_by_repo_name(HG_REPO)
186 assert r.locked == [None, None, None]
154 assert r.locked == [None, None, None]
187
155
188 def test_push_unlocks_repository_git(self, rc_web_server, tmpdir):
156 def test_push_unlocks_repository_git(self, rcstack, tmpdir):
189
190 # Note: Did a first debugging session. Seems that
157 # Note: Did a first debugging session. Seems that
191 # Repository.get_locking_state is called twice. The second call
158 # Repository.get_locking_state is called twice. The second call
192 # has the action "pull" and does not reset the lock.
159 # has the action "pull" and does not reset the lock.
@@ -197,19 +164,16 b' class TestVCSOperationsOnCustomIniConfig'
197 Session().add(r)
164 Session().add(r)
198 Session().commit()
165 Session().commit()
199
166
200 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
167 clone_url = rcstack.repo_clone_url(GIT_REPO)
201 stdout, stderr = Command('/tmp').execute(
168 stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath)
202 'git clone', clone_url, tmpdir.strpath)
169 _check_proper_clone(stdout, stderr, "git")
203 _check_proper_clone(stdout, stderr, 'git')
204
170
205 # check for lock repo after clone
171 # check for lock repo after clone
206 r = Repository.get_by_repo_name(GIT_REPO)
172 r = Repository.get_by_repo_name(GIT_REPO)
207 assert r.locked[0] == User.get_by_username(
173 assert r.locked[0] == User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id
208 TEST_USER_ADMIN_LOGIN).user_id
209
174
210 # push is ok and repo is now unlocked
175 # push is ok and repo is now unlocked
211 stdout, stderr = _add_files_and_push(
176 stdout, stderr = _add_files_and_push("git", tmpdir.strpath, clone_url=clone_url)
212 'git', tmpdir.strpath, clone_url=clone_url)
213 _check_proper_git_push(stdout, stderr)
177 _check_proper_git_push(stdout, stderr)
214
178
215 # assert ('remote: Released lock on repo `%s`' % GIT_REPO) in stdout
179 # assert ('remote: Released lock on repo `%s`' % GIT_REPO) in stdout
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -26,102 +25,83 b' Test suite for making push/pull operatio'
26 to redirect things to stderr instead of stdout.
25 to redirect things to stderr instead of stdout.
27 """
26 """
28
27
29
30 import pytest
28 import pytest
31
29
32 from rhodecode.model.db import User, Repository
30 from rhodecode.model.db import User, Repository
33 from rhodecode.model.meta import Session
31 from rhodecode.model.meta import Session
34 from rhodecode.model.repo import RepoModel
32 from rhodecode.model.repo import RepoModel
35
33
36 from rhodecode.tests import (
34 from rhodecode.tests import GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS
37 GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN,
38 TEST_USER_REGULAR_PASS)
39 from rhodecode.tests.vcs_operations import Command, _add_files_and_push
35 from rhodecode.tests.vcs_operations import Command, _add_files_and_push
40
36
37 custom_code = [
38 {'app:main': {'lock_ret_code': '400'}},
39 ]
41
40
42 @pytest.fixture(scope="module")
41 @pytest.mark.parametrize('rcstack', custom_code, indirect=True)
43 def rc_web_server_config_modification():
42 @pytest.mark.usefixtures(
44 return [
43 "init_pyramid_app",
45 {'app:main': {'lock_ret_code': '400'}},
44 "repo_group_repos",
46 ]
45 "disable_anonymous_user",
47
46 "disable_locking",
48
47 )
49 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
48 class TestVCSOperationsOnCustomLockingCode(object):
50 class TestVCSOperationsOnCustomIniConfig(object):
49 def test_clone_after_repo_was_locked_hg(self, rcstack, tmpdir):
51
52 def test_clone_after_repo_was_locked_hg(self, rc_web_server, tmpdir):
53 # lock repo
50 # lock repo
54 r = Repository.get_by_repo_name(HG_REPO)
51 r = Repository.get_by_repo_name(HG_REPO)
55 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
52 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
56 # pull fails since repo is locked
53 # pull fails since repo is locked
57 clone_url = rc_web_server.repo_clone_url(HG_REPO)
54 clone_url = rcstack.repo_clone_url(HG_REPO)
58 stdout, stderr = Command('/tmp').execute(
55 stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath)
59 'hg clone', clone_url, tmpdir.strpath)
56 msg = f"abort: HTTP Error 400: Repository `{HG_REPO}` locked by user `{TEST_USER_ADMIN_LOGIN}`"
60 msg = ("""abort: HTTP Error 400: Repository `%s` locked by user `%s`"""
61 % (HG_REPO, TEST_USER_ADMIN_LOGIN))
62 assert msg in stderr
57 assert msg in stderr
63
58
64 def test_clone_after_repo_was_locked_git(self, rc_web_server, tmpdir):
59 def test_clone_after_repo_was_locked_git(self, rcstack, tmpdir):
65 # lock repo
60 # lock repo
66 r = Repository.get_by_repo_name(GIT_REPO)
61 r = Repository.get_by_repo_name(GIT_REPO)
67 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
62 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
68 # pull fails since repo is locked
63 # pull fails since repo is locked
69 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
64 clone_url = rcstack.repo_clone_url(GIT_REPO)
70 stdout, stderr = Command('/tmp').execute(
65 stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath)
71 'git clone', clone_url, tmpdir.strpath)
72
66
73 lock_msg = (
67 lock_msg = "remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`"
74 'remote: ERROR: Repository `vcs_test_git` locked by user ' +
75 '`test_admin`. Reason:`lock_auto`')
76 assert lock_msg in stderr
68 assert lock_msg in stderr
77 assert 'remote: Pre pull hook failed: aborting' in stderr
69 assert "remote: Pre pull hook failed: aborting" in stderr
78 assert 'fatal: remote did not send all necessary objects' in stderr
70 assert "fatal: remote did not send all necessary objects" in stderr
79
71
80 def test_push_on_locked_repo_by_other_user_hg(self, rc_web_server, tmpdir):
72 def test_push_on_locked_repo_by_other_user_hg(self, rcstack, tmpdir):
81 clone_url = rc_web_server.repo_clone_url(HG_REPO)
73 clone_url = rcstack.repo_clone_url(HG_REPO)
82 stdout, stderr = Command('/tmp').execute(
74 stdout, stderr = Command(tmpdir.strpath).execute("hg clone", clone_url, tmpdir.strpath)
83 'hg clone', clone_url, tmpdir.strpath)
84
75
85 # lock repo
76 # lock repo
86 r = Repository.get_by_repo_name(HG_REPO)
77 r = Repository.get_by_repo_name(HG_REPO)
87 # let this user actually push !
78 # let this user actually push !
88 RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN,
79 RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, perm="repository.write")
89 perm='repository.write')
90 Session().commit()
80 Session().commit()
91 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
81 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
92
82
93 # push fails repo is locked by other user !
83 # push fails repo is locked by other user !
94 push_url = rc_web_server.repo_clone_url(
84 push_url = rcstack.repo_clone_url(HG_REPO, user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS)
95 HG_REPO,
85 stdout, stderr = _add_files_and_push("hg", tmpdir.strpath, clone_url=push_url)
96 user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS)
86 msg = f"abort: HTTP Error 400: Repository `{HG_REPO}` locked by user `{TEST_USER_ADMIN_LOGIN}`"
97 stdout, stderr = _add_files_and_push(
87
98 'hg', tmpdir.strpath, clone_url=push_url)
99 msg = ("""abort: HTTP Error 400: Repository `%s` locked by user `%s`"""
100 % (HG_REPO, TEST_USER_ADMIN_LOGIN))
101 assert msg in stderr
88 assert msg in stderr
102
89
103 def test_push_on_locked_repo_by_other_user_git(
90 def test_push_on_locked_repo_by_other_user_git(self, rcstack, tmpdir):
104 self, rc_web_server, tmpdir):
91 clone_url = rcstack.repo_clone_url(GIT_REPO)
105 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
92 stdout, stderr = Command(tmpdir.strpath).execute("git clone", clone_url, tmpdir.strpath)
106 stdout, stderr = Command('/tmp').execute(
107 'git clone', clone_url, tmpdir.strpath)
108
93
109 # lock repo
94 # lock repo
110 r = Repository.get_by_repo_name(GIT_REPO)
95 r = Repository.get_by_repo_name(GIT_REPO)
111 # let this user actually push !
96 # let this user actually push !
112 RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN,
97 RepoModel().grant_user_permission(repo=r, user=TEST_USER_REGULAR_LOGIN, perm="repository.write")
113 perm='repository.write')
114 Session().commit()
98 Session().commit()
115 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
99 Repository.lock(r, User.get_by_username(TEST_USER_ADMIN_LOGIN).user_id)
116
100
117 # push fails repo is locked by other user!
101 # push fails repo is locked by other user!
118 push_url = rc_web_server.repo_clone_url(
102 push_url = rcstack.repo_clone_url(GIT_REPO, user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS)
119 GIT_REPO,
103 stdout, stderr = _add_files_and_push("git", tmpdir.strpath, clone_url=push_url)
120 user=TEST_USER_REGULAR_LOGIN, passwd=TEST_USER_REGULAR_PASS)
121 stdout, stderr = _add_files_and_push(
122 'git', tmpdir.strpath, clone_url=push_url)
123
104
124 err = 'Repository `%s` locked by user `%s`' % (
105 err = f"Repository `{GIT_REPO}` locked by user `{TEST_USER_ADMIN_LOGIN}`"
125 GIT_REPO, TEST_USER_ADMIN_LOGIN)
126
106
127 assert err in stderr
107 assert err in stderr
@@ -26,12 +26,16 b' from rhodecode.tests.vcs_operations impo'
26 Command, _check_proper_hg_push, _check_proper_git_push, _add_files_and_push)
26 Command, _check_proper_hg_push, _check_proper_git_push, _add_files_and_push)
27
27
28
28
29 @pytest.mark.usefixtures("disable_anonymous_user")
29 @pytest.mark.usefixtures(
30 class TestVCSOperations(object):
30 "init_pyramid_app",
31 "repo_group_repos",
32 "disable_anonymous_user",
33 )
34 class TestVCSOperationsNewBranchPush(object):
31
35
32 def test_push_new_branch_hg(self, rc_web_server, tmpdir, user_util):
36 def test_push_new_branch_hg(self, rcstack, tmpdir, user_util):
33 repo = user_util.create_repo(repo_type='hg')
37 repo = user_util.create_repo(repo_type='hg')
34 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
38 clone_url = rcstack.repo_clone_url(repo.repo_name)
35 Command(os.path.dirname(tmpdir.strpath)).execute(
39 Command(os.path.dirname(tmpdir.strpath)).execute(
36 'hg clone', clone_url, tmpdir.strpath)
40 'hg clone', clone_url, tmpdir.strpath)
37
41
@@ -48,9 +52,9 b' class TestVCSOperations(object):'
48
52
49 _check_proper_hg_push(stdout, stderr)
53 _check_proper_hg_push(stdout, stderr)
50
54
51 def test_push_new_branch_git(self, rc_web_server, tmpdir, user_util):
55 def test_push_new_branch_git(self, rcstack, tmpdir, user_util):
52 repo = user_util.create_repo(repo_type='git')
56 repo = user_util.create_repo(repo_type='git')
53 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
57 clone_url = rcstack.repo_clone_url(repo.repo_name)
54 Command(os.path.dirname(tmpdir.strpath)).execute(
58 Command(os.path.dirname(tmpdir.strpath)).execute(
55 'git clone', clone_url, tmpdir.strpath)
59 'git clone', clone_url, tmpdir.strpath)
56
60
@@ -67,13 +71,13 b' class TestVCSOperations(object):'
67 _check_proper_git_push(stdout, stderr, branch='dev')
71 _check_proper_git_push(stdout, stderr, branch='dev')
68
72
69 def test_push_new_branch_hg_with_branch_permissions_no_force_push(
73 def test_push_new_branch_hg_with_branch_permissions_no_force_push(
70 self, rc_web_server, tmpdir, user_util, branch_permission_setter):
74 self, rcstack, tmpdir, user_util, branch_permission_setter):
71 repo = user_util.create_repo(repo_type='hg')
75 repo = user_util.create_repo(repo_type='hg')
72 repo_name = repo.repo_name
76 repo_name = repo.repo_name
73 username = TEST_USER_ADMIN_LOGIN
77 username = TEST_USER_ADMIN_LOGIN
74 branch_permission_setter(repo_name, username, permission='branch.push')
78 branch_permission_setter(repo_name, username, permission='branch.push')
75
79
76 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
80 clone_url = rcstack.repo_clone_url(repo.repo_name)
77 Command(os.path.dirname(tmpdir.strpath)).execute(
81 Command(os.path.dirname(tmpdir.strpath)).execute(
78 'hg clone', clone_url, tmpdir.strpath)
82 'hg clone', clone_url, tmpdir.strpath)
79
83
@@ -91,13 +95,13 b' class TestVCSOperations(object):'
91 _check_proper_hg_push(stdout, stderr)
95 _check_proper_hg_push(stdout, stderr)
92
96
93 def test_push_new_branch_git_with_branch_permissions_no_force_push(
97 def test_push_new_branch_git_with_branch_permissions_no_force_push(
94 self, rc_web_server, tmpdir, user_util, branch_permission_setter):
98 self, rcstack, tmpdir, user_util, branch_permission_setter):
95 repo = user_util.create_repo(repo_type='git')
99 repo = user_util.create_repo(repo_type='git')
96 repo_name = repo.repo_name
100 repo_name = repo.repo_name
97 username = TEST_USER_ADMIN_LOGIN
101 username = TEST_USER_ADMIN_LOGIN
98 branch_permission_setter(repo_name, username, permission='branch.push')
102 branch_permission_setter(repo_name, username, permission='branch.push')
99
103
100 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
104 clone_url = rcstack.repo_clone_url(repo.repo_name)
101 Command(os.path.dirname(tmpdir.strpath)).execute(
105 Command(os.path.dirname(tmpdir.strpath)).execute(
102 'git clone', clone_url, tmpdir.strpath)
106 'git clone', clone_url, tmpdir.strpath)
103
107
@@ -32,13 +32,16 b' from rhodecode.tests.vcs_operations impo'
32 _add_files_and_push)
32 _add_files_and_push)
33
33
34
34
35 @pytest.mark.usefixtures("disable_locking")
35 @pytest.mark.usefixtures(
36 "init_pyramid_app",
37 "repo_group_repos",
38 "disable_locking",
39 )
36 class TestVCSOperationsSpecial(object):
40 class TestVCSOperationsSpecial(object):
37
41
38 def test_git_sets_default_branch_if_not_master(
42 def test_git_sets_default_branch_if_not_master(self, vcs_backend_git, tmpdir, rcstack):
39 self, backend_git, tmpdir, rc_web_server):
43 empty_repo = vcs_backend_git.create_repo()
40 empty_repo = backend_git.create_repo()
44 clone_url = rcstack.repo_clone_url(empty_repo.repo_name)
41 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
42
45
43 cmd = Command(tmpdir.strpath)
46 cmd = Command(tmpdir.strpath)
44 cmd.execute('git clone', clone_url)
47 cmd.execute('git clone', clone_url)
@@ -63,25 +66,24 b' class TestVCSOperationsSpecial(object):'
63 # Doing an explicit commit in order to get latest user logs on MySQL
66 # Doing an explicit commit in order to get latest user logs on MySQL
64 Session().commit()
67 Session().commit()
65
68
66 def test_git_fetches_from_remote_repository_with_annotated_tags(
69 def test_git_fetches_from_remote_repository_with_annotated_tags(self, vcs_backend_git, rcstack):
67 self, backend_git, rc_web_server):
68 # Note: This is a test specific to the git backend. It checks the
70 # Note: This is a test specific to the git backend. It checks the
69 # integration of fetching from a remote repository which contains
71 # integration of fetching from a remote repository which contains
70 # annotated tags.
72 # annotated tags.
71
73
72 # Dulwich shows this specific behavior only when
74 # Dulwich shows this specific behavior only when
73 # operating against a remote repository.
75 # operating against a remote repository.
74 source_repo = backend_git['annotated-tag']
76 source_repo = vcs_backend_git['annotated-tag']
75 target_vcs_repo = backend_git.create_repo().scm_instance()
77 target_vcs_repo = vcs_backend_git.create_repo().scm_instance()
76 target_vcs_repo.fetch(rc_web_server.repo_clone_url(source_repo.repo_name))
78 target_vcs_repo.fetch(rcstack.repo_clone_url(source_repo.repo_name))
77
79
78 def test_git_push_shows_pull_request_refs(self, backend_git, rc_web_server, tmpdir):
80 def test_git_push_shows_pull_request_refs(self, vcs_backend_git, rcstack, tmpdir):
79 """
81 """
80 test if remote info about refs is visible
82 test if remote info about refs is visible
81 """
83 """
82 empty_repo = backend_git.create_repo()
84 empty_repo = vcs_backend_git.create_repo()
83
85
84 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
86 clone_url = rcstack.repo_clone_url(empty_repo.repo_name)
85
87
86 cmd = Command(tmpdir.strpath)
88 cmd = Command(tmpdir.strpath)
87 cmd.execute('git clone', clone_url)
89 cmd.execute('git clone', clone_url)
@@ -97,7 +99,7 b' class TestVCSOperationsSpecial(object):'
97 stdout, stderr = repo_cmd.execute('git push --verbose origin master')
99 stdout, stderr = repo_cmd.execute('git push --verbose origin master')
98 _check_proper_git_push(stdout, stderr, branch='master')
100 _check_proper_git_push(stdout, stderr, branch='master')
99
101
100 ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=master'
102 ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=master'
101 assert f'remote: RhodeCode: open pull request link: {ref}' in stderr
103 assert f'remote: RhodeCode: open pull request link: {ref}' in stderr
102 assert 'remote: RhodeCode: push completed' in stderr
104 assert 'remote: RhodeCode: push completed' in stderr
103
105
@@ -129,14 +131,14 b' class TestVCSOperationsSpecial(object):'
129 stdout, stderr = repo_cmd.execute('git push --verbose origin feature')
131 stdout, stderr = repo_cmd.execute('git push --verbose origin feature')
130 _check_proper_git_push(stdout, stderr, branch='feature')
132 _check_proper_git_push(stdout, stderr, branch='feature')
131
133
132 ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=feature'
134 ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=feature'
133 assert f'remote: RhodeCode: open pull request link: {ref}' in stderr
135 assert f'remote: RhodeCode: open pull request link: {ref}' in stderr
134 assert 'remote: RhodeCode: push completed' in stderr
136 assert 'remote: RhodeCode: push completed' in stderr
135
137
136 def test_hg_push_shows_pull_request_refs(self, backend_hg, rc_web_server, tmpdir):
138 def test_hg_push_shows_pull_request_refs(self, vcs_backend_hg, rcstack, tmpdir):
137 empty_repo = backend_hg.create_repo()
139 empty_repo = vcs_backend_hg.create_repo()
138
140
139 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
141 clone_url = rcstack.repo_clone_url(empty_repo.repo_name)
140
142
141 cmd = Command(tmpdir.strpath)
143 cmd = Command(tmpdir.strpath)
142 cmd.execute('hg clone', clone_url)
144 cmd.execute('hg clone', clone_url)
@@ -154,7 +156,7 b' class TestVCSOperationsSpecial(object):'
154 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
156 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
155 _check_proper_hg_push(stdout, stderr, branch='default')
157 _check_proper_hg_push(stdout, stderr, branch='default')
156
158
157 ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default'
159 ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default'
158 assert f'remote: RhodeCode: open pull request link: {ref}' in stdout
160 assert f'remote: RhodeCode: open pull request link: {ref}' in stdout
159 assert 'remote: RhodeCode: push completed' in stdout
161 assert 'remote: RhodeCode: push completed' in stdout
160
162
@@ -189,14 +191,14 b' class TestVCSOperationsSpecial(object):'
189 stdout, stderr = repo_cmd.execute('hg push --new-branch --verbose', clone_url)
191 stdout, stderr = repo_cmd.execute('hg push --new-branch --verbose', clone_url)
190 _check_proper_hg_push(stdout, stderr, branch='feature')
192 _check_proper_hg_push(stdout, stderr, branch='feature')
191
193
192 ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=feature'
194 ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=feature'
193 assert f'remote: RhodeCode: open pull request link: {ref}' in stdout
195 assert f'remote: RhodeCode: open pull request link: {ref}' in stdout
194 assert 'remote: RhodeCode: push completed' in stdout
196 assert 'remote: RhodeCode: push completed' in stdout
195
197
196 def test_hg_push_shows_pull_request_refs_book(self, backend_hg, rc_web_server, tmpdir):
198 def test_hg_push_shows_pull_request_refs_book(self, vcs_backend_hg, rcstack, tmpdir):
197 empty_repo = backend_hg.create_repo()
199 empty_repo = vcs_backend_hg.create_repo()
198
200
199 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
201 clone_url = rcstack.repo_clone_url(empty_repo.repo_name)
200
202
201 cmd = Command(tmpdir.strpath)
203 cmd = Command(tmpdir.strpath)
202 cmd.execute('hg clone', clone_url)
204 cmd.execute('hg clone', clone_url)
@@ -214,7 +216,7 b' class TestVCSOperationsSpecial(object):'
214 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
216 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
215 _check_proper_hg_push(stdout, stderr, branch='default')
217 _check_proper_hg_push(stdout, stderr, branch='default')
216
218
217 ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default'
219 ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default'
218 assert f'remote: RhodeCode: open pull request link: {ref}' in stdout
220 assert f'remote: RhodeCode: open pull request link: {ref}' in stdout
219 assert 'remote: RhodeCode: push completed' in stdout
221 assert 'remote: RhodeCode: push completed' in stdout
220
222
@@ -232,23 +234,23 b' class TestVCSOperationsSpecial(object):'
232 stdout, stderr = repo_cmd.execute('hg push -B feature2 --verbose', clone_url)
234 stdout, stderr = repo_cmd.execute('hg push -B feature2 --verbose', clone_url)
233 _check_proper_hg_push(stdout, stderr, branch='default')
235 _check_proper_hg_push(stdout, stderr, branch='default')
234
236
235 ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default'
237 ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default'
236 assert f'remote: RhodeCode: open pull request link: {ref}' in stdout
238 assert f'remote: RhodeCode: open pull request link: {ref}' in stdout
237 ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?bookmark=feature2'
239 ref = f'{rcstack.host_url()}/{empty_repo.repo_name}/pull-request/new?bookmark=feature2'
238 assert f'remote: RhodeCode: open pull request link: {ref}' in stdout
240 assert f'remote: RhodeCode: open pull request link: {ref}' in stdout
239 assert 'remote: RhodeCode: push completed' in stdout
241 assert 'remote: RhodeCode: push completed' in stdout
240 assert 'exporting bookmark feature2' in stdout
242 assert 'exporting bookmark feature2' in stdout
241
243
242 def test_push_is_forbidden_on_archived_repo_hg(self, backend_hg, rc_web_server, tmpdir):
244 def test_push_is_forbidden_on_archived_repo_hg(self, vcs_backend_hg, rcstack, tmpdir):
243 empty_repo = backend_hg.create_repo()
245 empty_repo = vcs_backend_hg.create_repo()
244 repo_name = empty_repo.repo_name
246 repo_name = empty_repo.repo_name
245
247
246 repo = Repository.get_by_repo_name(repo_name)
248 repo = Repository.get_by_repo_name(repo_name)
247 repo.archived = True
249 repo.archived = True
248 Session().commit()
250 Session().commit()
249
251
250 clone_url = rc_web_server.repo_clone_url(repo_name)
252 clone_url = rcstack.repo_clone_url(repo_name)
251 stdout, stderr = Command('/tmp').execute(
253 stdout, stderr = Command(tmpdir.strpath).execute(
252 'hg clone', clone_url, tmpdir.strpath)
254 'hg clone', clone_url, tmpdir.strpath)
253
255
254 stdout, stderr = _add_files_and_push(
256 stdout, stderr = _add_files_and_push(
@@ -256,16 +258,16 b' class TestVCSOperationsSpecial(object):'
256
258
257 assert 'abort: HTTP Error 403: Forbidden' in stderr
259 assert 'abort: HTTP Error 403: Forbidden' in stderr
258
260
259 def test_push_is_forbidden_on_archived_repo_git(self, backend_git, rc_web_server, tmpdir):
261 def test_push_is_forbidden_on_archived_repo_git(self, vcs_backend_git, rcstack, tmpdir):
260 empty_repo = backend_git.create_repo()
262 empty_repo = vcs_backend_git.create_repo()
261 repo_name = empty_repo.repo_name
263 repo_name = empty_repo.repo_name
262
264
263 repo = Repository.get_by_repo_name(repo_name)
265 repo = Repository.get_by_repo_name(repo_name)
264 repo.archived = True
266 repo.archived = True
265 Session().commit()
267 Session().commit()
266
268
267 clone_url = rc_web_server.repo_clone_url(repo_name)
269 clone_url = rcstack.repo_clone_url(repo_name)
268 stdout, stderr = Command('/tmp').execute(
270 stdout, stderr = Command(tmpdir.strpath).execute(
269 'git clone', clone_url, tmpdir.strpath)
271 'git clone', clone_url, tmpdir.strpath)
270
272
271 stdout, stderr = _add_files_and_push(
273 stdout, stderr = _add_files_and_push(
@@ -49,97 +49,110 b' def get_cli_flags(username, password):'
49 return flags, auth
49 return flags, auth
50
50
51
51
52 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
52 @pytest.mark.usefixtures(
53 class TestVCSOperations(object):
53 "init_pyramid_app",
54 "repo_group_repos",
55 "disable_anonymous_user",
56 "disable_locking",
57 )
58 class TestVCSOperationsSVN(object):
54
59
55 def test_clone_svn_repo_by_admin(self, rc_web_server, tmpdir):
60 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
56 clone_url = rc_web_server.repo_clone_url(SVN_REPO)
61 def test_clone_svn_repo_by_admin(self, rcstack, tmpdir):
57 username, password = rc_web_server.repo_clone_credentials()
62 clone_url = rcstack.repo_clone_url(SVN_REPO)
63 username, password = rcstack.repo_clone_credentials()
58
64
59 cmd = Command('/tmp')
65 cmd = Command(tmpdir.strpath)
60
66
61 flags, auth = get_cli_flags(username, password)
67 flags, auth = get_cli_flags(username, password)
62
68
63 stdout, stderr = Command('/tmp').execute(
69 stdout, stderr = Command(tmpdir.strpath).execute(
64 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
70 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
65
71
66 _check_proper_clone(stdout, stderr, 'svn')
72 _check_proper_clone(stdout, stderr, 'svn')
67 cmd.assert_returncode_success()
73 cmd.assert_returncode_success()
68
74
69 def test_clone_svn_repo_by_id_by_admin(self, rc_web_server, tmpdir):
75 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
76 def test_clone_svn_repo_by_id_by_admin(self, rcstack, tmpdir):
70 repo_id = Repository.get_by_repo_name(SVN_REPO).repo_id
77 repo_id = Repository.get_by_repo_name(SVN_REPO).repo_id
71 username, password = rc_web_server.repo_clone_credentials()
78 username, password = rcstack.repo_clone_credentials()
72
79
73 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
80 clone_url = rcstack.repo_clone_url('_%s' % repo_id)
74 cmd = Command('/tmp')
81 cmd = Command(tmpdir.strpath)
75
82
76 flags, auth = get_cli_flags(username, password)
83 flags, auth = get_cli_flags(username, password)
77
84
78 stdout, stderr = Command('/tmp').execute(
85 stdout, stderr = Command(tmpdir.strpath).execute(
79 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
80
81 _check_proper_clone(stdout, stderr, 'svn')
82 cmd.assert_returncode_success()
83
84 def test_clone_svn_repo_with_group_by_admin(self, rc_web_server, tmpdir):
85 clone_url = rc_web_server.repo_clone_url(SVN_REPO_WITH_GROUP)
86 username, password = rc_web_server.repo_clone_credentials()
87
88 flags, auth = get_cli_flags(username, password)
89
90 stdout, stderr = Command('/tmp').execute(
91 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
86 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
92
87
93 _check_proper_clone(stdout, stderr, 'svn')
88 _check_proper_clone(stdout, stderr, 'svn')
94 cmd.assert_returncode_success()
89 cmd.assert_returncode_success()
95
90
96 def test_clone_wrong_credentials_svn(self, rc_web_server, tmpdir):
91 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
97 clone_url = rc_web_server.repo_clone_url(SVN_REPO)
92 def test_clone_svn_repo_with_group_by_admin(self, rcstack, tmpdir):
98 username, password = rc_web_server.repo_clone_credentials()
93 clone_url = rcstack.repo_clone_url(SVN_REPO_WITH_GROUP)
94 username, password = rcstack.repo_clone_credentials()
95
96 flags, auth = get_cli_flags(username, password)
97
98 stdout, stderr = Command(tmpdir.strpath).execute(
99 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
100
101 _check_proper_clone(stdout, stderr, 'svn')
102 rcstack.assert_returncode_success()
103
104 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
105 def test_clone_wrong_credentials_svn(self, rcstack, tmpdir):
106 clone_url = rcstack.repo_clone_url(SVN_REPO)
107 username, password = rcstack.repo_clone_credentials()
99 password = 'bad-password'
108 password = 'bad-password'
100
109
101 flags, auth = get_cli_flags(username, password)
110 flags, auth = get_cli_flags(username, password)
102
111
103 stdout, stderr = Command('/tmp').execute(
112 stdout, stderr = Command(tmpdir.strpath).execute(
104 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
113 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
105 assert 'fatal: Authentication failed' in stderr
114 assert 'fatal: Authentication failed' in stderr
106
115
107 def test_clone_svn_with_slashes(self, rc_web_server, tmpdir):
116 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
108 clone_url = rc_web_server.repo_clone_url('//' + SVN_REPO)
117 def test_clone_svn_with_slashes(self, rcstack, tmpdir):
118 clone_url = rcstack.repo_clone_url('//' + SVN_REPO)
109 username, password = '', ''
119 username, password = '', ''
110 flags, auth = get_cli_flags(username, password)
120 flags, auth = get_cli_flags(username, password)
111
121
112 stdout, stderr = Command('/tmp').execute(
122 stdout, stderr = Command(tmpdir.strpath).execute(
113 f'svn checkout {flags} {auth}', clone_url)
123 f'svn checkout {flags} {auth}', clone_url)
114
124
115 assert 'not found' in stderr
125 assert 'not found' in stderr
116
126
127 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
117 def test_clone_existing_path_svn_not_in_database(
128 def test_clone_existing_path_svn_not_in_database(
118 self, rc_web_server, tmpdir, fs_repo_only):
129 self, rcstack, tmpdir, fs_repo_only):
119 db_name = fs_repo_only('not-in-db-git', repo_type='git')
130 db_name = fs_repo_only('not-in-db-git', repo_type='git')
120 clone_url = rc_web_server.repo_clone_url(db_name)
131 clone_url = rcstack.repo_clone_url(db_name)
121 username, password = '', ''
132 username, password = '', ''
122 flags, auth = get_cli_flags(username, password)
133 flags, auth = get_cli_flags(username, password)
123
134
124 stdout, stderr = Command('/tmp').execute(
135 stdout, stderr = Command(tmpdir.strpath).execute(
125 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
136 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
126 assert 'not found' in stderr
137 assert 'not found' in stderr
127
138
139 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
128 def test_clone_existing_path_svn_not_in_database_different_scm(
140 def test_clone_existing_path_svn_not_in_database_different_scm(
129 self, rc_web_server, tmpdir, fs_repo_only):
141 self, rcstack, tmpdir, fs_repo_only):
130 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
142 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
131 clone_url = rc_web_server.repo_clone_url(db_name)
143 clone_url = rcstack.repo_clone_url(db_name)
132
144
133 username, password = '', ''
145 username, password = '', ''
134 flags, auth = get_cli_flags(username, password)
146 flags, auth = get_cli_flags(username, password)
135
147
136 stdout, stderr = Command('/tmp').execute(
148 stdout, stderr = Command(tmpdir.strpath).execute(
137 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
149 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
138 assert 'not found' in stderr
150 assert 'not found' in stderr
139
151
140 def test_clone_non_existing_store_path_svn(self, rc_web_server, tmpdir, user_util):
152 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
153 def test_clone_non_existing_store_path_svn(self, rcstack, tmpdir, user_util):
141 repo = user_util.create_repo(repo_type='git')
154 repo = user_util.create_repo(repo_type='git')
142 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
155 clone_url = rcstack.repo_clone_url(repo.repo_name)
143
156
144 # Damage repo by removing it's folder
157 # Damage repo by removing it's folder
145 RepoModel()._delete_filesystem_repo(repo)
158 RepoModel()._delete_filesystem_repo(repo)
@@ -147,16 +160,17 b' class TestVCSOperations(object):'
147 username, password = '', ''
160 username, password = '', ''
148 flags, auth = get_cli_flags(username, password)
161 flags, auth = get_cli_flags(username, password)
149
162
150 stdout, stderr = Command('/tmp').execute(
163 stdout, stderr = Command(tmpdir.strpath).execute(
151 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
164 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
152 assert 'not found' in stderr
165 assert 'not found' in stderr
153
166
154 def test_push_new_file_svn(self, rc_web_server, tmpdir):
167 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
155 clone_url = rc_web_server.repo_clone_url(SVN_REPO)
168 def test_push_new_file_svn(self, rcstack, tmpdir):
169 clone_url = rcstack.repo_clone_url(SVN_REPO)
156 username, password = '', ''
170 username, password = '', ''
157 flags, auth = get_cli_flags(username, password)
171 flags, auth = get_cli_flags(username, password)
158
172
159 stdout, stderr = Command('/tmp').execute(
173 stdout, stderr = Command(tmpdir.strpath).execute(
160 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
174 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
161
175
162 # commit some stuff into this repo
176 # commit some stuff into this repo
@@ -165,37 +179,40 b' class TestVCSOperations(object):'
165
179
166 _check_proper_svn_push(stdout, stderr)
180 _check_proper_svn_push(stdout, stderr)
167
181
168 def test_push_wrong_credentials_svn(self, rc_web_server, tmpdir):
182 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
169 clone_url = rc_web_server.repo_clone_url(SVN_REPO)
183 def test_push_wrong_credentials_svn(self, rcstack, tmpdir):
184 clone_url = rcstack.repo_clone_url(SVN_REPO)
170
185
171 username, password = rc_web_server.repo_clone_credentials()
186 username, password = rcstack.repo_clone_credentials()
172 flags, auth = get_cli_flags(username, password)
187 flags, auth = get_cli_flags(username, password)
173
188
174 stdout, stderr = Command('/tmp').execute(
189 stdout, stderr = Command(tmpdir.strpath).execute(
175 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
190 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
176
191
177 push_url = rc_web_server.repo_clone_url(
192 push_url = rcstack.repo_clone_url(
178 SVN_REPO, user='bad', passwd='name')
193 SVN_REPO, user='bad', passwd='name')
179 stdout, stderr = _add_files_and_push(
194 stdout, stderr = _add_files_and_push(
180 'svn', tmpdir.strpath, clone_url=push_url, username=username, password=password)
195 'svn', tmpdir.strpath, clone_url=push_url, username=username, password=password)
181
196
182 assert 'fatal: Authentication failed' in stderr
197 assert 'fatal: Authentication failed' in stderr
183
198
184 def test_push_back_to_wrong_url_svn(self, rc_web_server, tmpdir):
199 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
185 clone_url = rc_web_server.repo_clone_url(SVN_REPO)
200 def test_push_back_to_wrong_url_svn(self, rcstack, tmpdir):
201 clone_url = rcstack.repo_clone_url(SVN_REPO)
186 username, password = '', ''
202 username, password = '', ''
187 flags, auth = get_cli_flags(username, password)
203 flags, auth = get_cli_flags(username, password)
188
204
189 stdout, stderr = Command('/tmp').execute(
205 stdout, stderr = Command(tmpdir.strpath).execute(
190 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
206 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
191
207
192 stdout, stderr = _add_files_and_push(
208 stdout, stderr = _add_files_and_push(
193 'svn', tmpdir.strpath,
209 'svn', tmpdir.strpath,
194 clone_url=rc_web_server.repo_clone_url('not-existing'), username=username, password=password)
210 clone_url=rcstack.repo_clone_url('not-existing'), username=username, password=password)
195
211
196 assert 'not found' in stderr
212 assert 'not found' in stderr
197
213
198 def test_ip_restriction_svn(self, rc_web_server, tmpdir):
214 @pytest.mark.xfail(reason='Lack of proper SVN support of cloning')
215 def test_ip_restriction_svn(self, rcstack, tmpdir):
199 user_model = UserModel()
216 user_model = UserModel()
200 username, password = '', ''
217 username, password = '', ''
201 flags, auth = get_cli_flags(username, password)
218 flags, auth = get_cli_flags(username, password)
@@ -204,9 +221,9 b' class TestVCSOperations(object):'
204 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
221 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
205 Session().commit()
222 Session().commit()
206 time.sleep(2)
223 time.sleep(2)
207 clone_url = rc_web_server.repo_clone_url(SVN_REPO)
224 clone_url = rcstack.repo_clone_url(SVN_REPO)
208
225
209 stdout, stderr = Command('/tmp').execute(
226 stdout, stderr = Command(tmpdir.strpath).execute(
210 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
227 f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
211 msg = "The requested URL returned error: 403"
228 msg = "The requested URL returned error: 403"
212 assert msg in stderr
229 assert msg in stderr
@@ -218,7 +235,7 b' class TestVCSOperations(object):'
218
235
219 time.sleep(2)
236 time.sleep(2)
220
237
221 cmd = Command('/tmp')
238 cmd = Command(tmpdir.strpath)
222 stdout, stderr = cmd.execute(f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
239 stdout, stderr = cmd.execute(f'svn checkout {flags} {auth}', clone_url, tmpdir.strpath)
223 cmd.assert_returncode_success()
240 cmd.assert_returncode_success()
224 _check_proper_clone(stdout, stderr, 'svn')
241 _check_proper_clone(stdout, stderr, 'svn')
@@ -1,4 +1,3 b''
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
2 #
4 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
@@ -19,11 +18,6 b''
19
18
20 """
19 """
21 Test suite for making push/pull operations, on specially modified INI files
20 Test suite for making push/pull operations, on specially modified INI files
22
23 .. important::
24
25 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
26 to redirect things to stderr instead of stdout.
27 """
21 """
28
22
29 import pytest
23 import pytest
@@ -37,54 +31,59 b' connection_available = pytest.mark.skipi'
37 not check_httpbin_connection(), reason="No outside internet connection available")
31 not check_httpbin_connection(), reason="No outside internet connection available")
38
32
39
33
34
40 @pytest.mark.usefixtures(
35 @pytest.mark.usefixtures(
41 "disable_locking", "disable_anonymous_user",
36 "init_pyramid_app",
42 "enable_webhook_push_integration")
37 "repo_group_repos",
43 class TestVCSOperationsOnCustomIniConfig(object):
38 "disable_anonymous_user",
39 "disable_locking",
40 "enable_webhook_push_integration"
41 )
42 class TestVCSOperationsTagPush(object):
44
43
45 @connection_available
44 @connection_available
46 def test_push_tag_with_commit_hg(self, rc_web_server, tmpdir):
45 def test_push_tag_with_commit_hg(self, rcstack, tmpdir):
47 clone_url = rc_web_server.repo_clone_url(HG_REPO)
46 clone_url = rcstack.repo_clone_url(HG_REPO)
48 stdout, stderr = Command('/tmp').execute(
47 stdout, stderr = Command(tmpdir.strpath).execute(
49 'hg clone', clone_url, tmpdir.strpath)
48 'hg clone', clone_url, tmpdir.strpath)
50
49
51 push_url = rc_web_server.repo_clone_url(HG_REPO)
50 push_url = rcstack.repo_clone_url(HG_REPO)
52 _add_files_and_push(
51 _add_files_and_push(
53 'hg', tmpdir.strpath, clone_url=push_url,
52 'hg', tmpdir.strpath, clone_url=push_url,
54 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
53 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
55
54
56 rc_log = rc_web_server.get_rc_log()
55 celery_log = open(rcstack.rcstack_data.celery_log).read()
57 assert 'ERROR' not in rc_log
56 assert 'ERROR' not in celery_log
58 assert "{'name': 'v1.0.0'," in rc_log
57 assert "{'name': 'v1.0.0'," in celery_log
59
58
60 @connection_available
59 @connection_available
61 def test_push_tag_with_commit_git(
60 def test_push_tag_with_commit_git(
62 self, rc_web_server, tmpdir):
61 self, rcstack, tmpdir):
63 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
62 clone_url = rcstack.repo_clone_url(GIT_REPO)
64 stdout, stderr = Command('/tmp').execute(
63 stdout, stderr = Command(tmpdir.strpath).execute(
65 'git clone', clone_url, tmpdir.strpath)
64 'git clone', clone_url, tmpdir.strpath)
66
65
67 push_url = rc_web_server.repo_clone_url(GIT_REPO)
66 push_url = rcstack.repo_clone_url(GIT_REPO)
68 _add_files_and_push(
67 _add_files_and_push(
69 'git', tmpdir.strpath, clone_url=push_url,
68 'git', tmpdir.strpath, clone_url=push_url,
70 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
69 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
71
70
72 rc_log = rc_web_server.get_rc_log()
71 celery_log = open(rcstack.rcstack_data.celery_log).read()
73 assert 'ERROR' not in rc_log
72 assert 'ERROR' not in celery_log
74 assert "{'name': 'v1.0.0'," in rc_log
73 assert "{'name': 'v1.0.0'," in celery_log
75
74
76 @connection_available
75 @connection_available
77 def test_push_tag_with_no_commit_git(
76 def test_push_tag_with_no_commit_git(
78 self, rc_web_server, tmpdir):
77 self, rcstack, tmpdir):
79 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
78 clone_url = rcstack.repo_clone_url(GIT_REPO)
80 stdout, stderr = Command('/tmp').execute(
79 stdout, stderr = Command(tmpdir.strpath).execute(
81 'git clone', clone_url, tmpdir.strpath)
80 'git clone', clone_url, tmpdir.strpath)
82
81
83 push_url = rc_web_server.repo_clone_url(GIT_REPO)
82 push_url = rcstack.repo_clone_url(GIT_REPO)
84 _add_files_and_push(
83 _add_files_and_push(
85 'git', tmpdir.strpath, clone_url=push_url,
84 'git', tmpdir.strpath, clone_url=push_url,
86 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
85 tags=[{'name': 'v1.0.0', 'commit': 'added tag v1.0.0'}])
87
86
88 rc_log = rc_web_server.get_rc_log()
87 celery_log = open(rcstack.rcstack_data.celery_log).read()
89 assert 'ERROR' not in rc_log
88 assert 'ERROR' not in celery_log
90 assert "{'name': 'v1.0.0'," in rc_log
89 assert "{'name': 'v1.0.0'," in celery_log
@@ -60,7 +60,7 b' vcs.svn.redis_conn = redis://redis:6379/'
60
60
61 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
61 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
62 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
62 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
63 cache_dir = %(here)s/data
63 cache_dir = %(here)s/.rc-vcs-test-data
64
64
65 ; ***************************************
65 ; ***************************************
66 ; `repo_object` cache, default file based
66 ; `repo_object` cache, default file based
@@ -141,7 +141,7 b' level = NOTSET'
141 handlers = console
141 handlers = console
142
142
143 [logger_vcsserver]
143 [logger_vcsserver]
144 level = INFO
144 level = DEBUG
145 handlers =
145 handlers =
146 qualname = vcsserver
146 qualname = vcsserver
147 propagate = 1
147 propagate = 1
@@ -1,287 +0,0 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import os
20 import logging
21 import traceback
22 import threading
23 import socket
24 import msgpack
25 import gevent
26
27 from http.server import BaseHTTPRequestHandler
28 from socketserver import TCPServer
29
30 from rhodecode.model import meta
31 from rhodecode.lib.ext_json import json
32 from rhodecode.lib import rc_cache
33 from rhodecode.lib.svn_txn_utils import get_txn_id_data_key
34 from rhodecode.lib.hook_daemon.hook_module import Hooks
35
36 log = logging.getLogger(__name__)
37
38
39 class HooksHttpHandler(BaseHTTPRequestHandler):
40
41 JSON_HOOKS_PROTO = 'json.v1'
42 MSGPACK_HOOKS_PROTO = 'msgpack.v1'
43 # starting with RhodeCode 5.0.0 MsgPack is the default, prior it used json
44 DEFAULT_HOOKS_PROTO = MSGPACK_HOOKS_PROTO
45
46 @classmethod
47 def serialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
48 if proto == cls.MSGPACK_HOOKS_PROTO:
49 return msgpack.packb(data)
50 return json.dumps(data)
51
52 @classmethod
53 def deserialize_data(cls, data, proto=DEFAULT_HOOKS_PROTO):
54 if proto == cls.MSGPACK_HOOKS_PROTO:
55 return msgpack.unpackb(data)
56 return json.loads(data)
57
58 def do_POST(self):
59 hooks_proto, method, extras = self._read_request()
60 log.debug('Handling HooksHttpHandler %s with %s proto', method, hooks_proto)
61
62 txn_id = getattr(self.server, 'txn_id', None)
63 if txn_id:
64 log.debug('Computing TXN_ID based on `%s`:`%s`',
65 extras['repository'], extras['txn_id'])
66 computed_txn_id = rc_cache.utils.compute_key_from_params(
67 extras['repository'], extras['txn_id'])
68 if txn_id != computed_txn_id:
69 raise Exception(
70 'TXN ID fail: expected {} got {} instead'.format(
71 txn_id, computed_txn_id))
72
73 request = getattr(self.server, 'request', None)
74 try:
75 hooks = Hooks(request=request, log_prefix='HOOKS: {} '.format(self.server.server_address))
76 result = self._call_hook_method(hooks, method, extras)
77
78 except Exception as e:
79 exc_tb = traceback.format_exc()
80 result = {
81 'exception': e.__class__.__name__,
82 'exception_traceback': exc_tb,
83 'exception_args': e.args
84 }
85 self._write_response(hooks_proto, result)
86
87 def _read_request(self):
88 length = int(self.headers['Content-Length'])
89 # respect sent headers, fallback to OLD proto for compatability
90 hooks_proto = self.headers.get('rc-hooks-protocol') or self.JSON_HOOKS_PROTO
91 if hooks_proto == self.MSGPACK_HOOKS_PROTO:
92 # support for new vcsserver msgpack based protocol hooks
93 body = self.rfile.read(length)
94 data = self.deserialize_data(body)
95 else:
96 body = self.rfile.read(length)
97 data = self.deserialize_data(body)
98
99 return hooks_proto, data['method'], data['extras']
100
101 def _write_response(self, hooks_proto, result):
102 self.send_response(200)
103 if hooks_proto == self.MSGPACK_HOOKS_PROTO:
104 self.send_header("Content-type", "application/msgpack")
105 self.end_headers()
106 data = self.serialize_data(result)
107 self.wfile.write(data)
108 else:
109 self.send_header("Content-type", "text/json")
110 self.end_headers()
111 data = self.serialize_data(result)
112 self.wfile.write(data)
113
114 def _call_hook_method(self, hooks, method, extras):
115 try:
116 result = getattr(hooks, method)(extras)
117 finally:
118 meta.Session.remove()
119 return result
120
121 def log_message(self, format, *args):
122 """
123 This is an overridden method of BaseHTTPRequestHandler which logs using
124 a logging library instead of writing directly to stderr.
125 """
126
127 message = format % args
128
129 log.debug(
130 "HOOKS: client=%s - - [%s] %s", self.client_address,
131 self.log_date_time_string(), message)
132
133
134 class ThreadedHookCallbackDaemon(object):
135
136 _callback_thread = None
137 _daemon = None
138 _done = False
139 use_gevent = False
140
141 def __init__(self, txn_id=None, host=None, port=None):
142 self._prepare(txn_id=txn_id, host=host, port=port)
143 if self.use_gevent:
144 self._run_func = self._run_gevent
145 self._stop_func = self._stop_gevent
146 else:
147 self._run_func = self._run
148 self._stop_func = self._stop
149
150 def __enter__(self):
151 log.debug('Running `%s` callback daemon', self.__class__.__name__)
152 self._run_func()
153 return self
154
155 def __exit__(self, exc_type, exc_val, exc_tb):
156 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
157 self._stop_func()
158
159 def _prepare(self, txn_id=None, host=None, port=None):
160 raise NotImplementedError()
161
162 def _run(self):
163 raise NotImplementedError()
164
165 def _stop(self):
166 raise NotImplementedError()
167
168 def _run_gevent(self):
169 raise NotImplementedError()
170
171 def _stop_gevent(self):
172 raise NotImplementedError()
173
174
175 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
176 """
177 Context manager which will run a callback daemon in a background thread.
178 """
179
180 hooks_uri = None
181
182 # From Python docs: Polling reduces our responsiveness to a shutdown
183 # request and wastes cpu at all other times.
184 POLL_INTERVAL = 0.01
185
186 use_gevent = False
187
188 def __repr__(self):
189 return f'HttpHooksCallbackDaemon(hooks_uri={self.hooks_uri})'
190
191 @property
192 def _hook_prefix(self):
193 return f'HOOKS: {self.hooks_uri} '
194
195 def get_hostname(self):
196 return socket.gethostname() or '127.0.0.1'
197
198 def get_available_port(self, min_port=20000, max_port=65535):
199 from rhodecode.lib.utils2 import get_available_port as _get_port
200 return _get_port(min_port, max_port)
201
202 def _prepare(self, txn_id=None, host=None, port=None):
203 from pyramid.threadlocal import get_current_request
204
205 if not host or host == "*":
206 host = self.get_hostname()
207 if not port:
208 port = self.get_available_port()
209
210 server_address = (host, port)
211 self.hooks_uri = f'{host}:{port}'
212 self.txn_id = txn_id
213 self._done = False
214
215 log.debug(
216 "%s Preparing HTTP callback daemon registering hook object: %s",
217 self._hook_prefix, HooksHttpHandler)
218
219 self._daemon = TCPServer(server_address, HooksHttpHandler)
220 # inject transaction_id for later verification
221 self._daemon.txn_id = self.txn_id
222
223 # pass the WEB app request into daemon
224 self._daemon.request = get_current_request()
225
226 def _run(self):
227 log.debug("Running thread-based loop of callback daemon in background")
228 callback_thread = threading.Thread(
229 target=self._daemon.serve_forever,
230 kwargs={'poll_interval': self.POLL_INTERVAL})
231 callback_thread.daemon = True
232 callback_thread.start()
233 self._callback_thread = callback_thread
234
235 def _run_gevent(self):
236 log.debug("Running gevent-based loop of callback daemon in background")
237 # create a new greenlet for the daemon's serve_forever method
238 callback_greenlet = gevent.spawn(
239 self._daemon.serve_forever,
240 poll_interval=self.POLL_INTERVAL)
241
242 # store reference to greenlet
243 self._callback_greenlet = callback_greenlet
244
245 # switch to this greenlet
246 gevent.sleep(0.01)
247
248 def _stop(self):
249 log.debug("Waiting for background thread to finish.")
250 self._daemon.shutdown()
251 self._callback_thread.join()
252 self._daemon = None
253 self._callback_thread = None
254 if self.txn_id:
255 #TODO: figure out the repo_path...
256 repo_path = ''
257 txn_id_file = get_txn_id_data_key(repo_path, self.txn_id)
258 log.debug('Cleaning up TXN ID %s', txn_id_file)
259 if os.path.isfile(txn_id_file):
260 os.remove(txn_id_file)
261
262 log.debug("Background thread done.")
263
264 def _stop_gevent(self):
265 log.debug("Waiting for background greenlet to finish.")
266
267 # if greenlet exists and is running
268 if self._callback_greenlet and not self._callback_greenlet.dead:
269 # shutdown daemon if it exists
270 if self._daemon:
271 self._daemon.shutdown()
272
273 # kill the greenlet
274 self._callback_greenlet.kill()
275
276 self._daemon = None
277 self._callback_greenlet = None
278
279 if self.txn_id:
280 #TODO: figure out the repo_path...
281 repo_path = ''
282 txn_id_file = get_txn_id_data_key(repo_path, self.txn_id)
283 log.debug('Cleaning up TXN ID %s', txn_id_file)
284 if os.path.isfile(txn_id_file):
285 os.remove(txn_id_file)
286
287 log.debug("Background greenlet done.")
This diff has been collapsed as it changes many lines, (1094 lines changed) Show them Hide them
@@ -1,1094 +0,0 b''
1 diff --git a/US Warszawa.jpg b/US Warszawa.jpg
2 index 5a4b530a9322aad2b13d39a423a6a93146a09a29..b3b889566812a77f52376379e96d32d3db117150
3 GIT binary patch
4 literal 85172
5 zc$~C)2S5|s+ThHjCnN+2EtG^_0tAwf03w7A0)m1FNTf(87CIILK}Aupi-NuPMpaNy
6 zR4!NS9eeKuyCO>1Gn3%OckkPM|J&UpXXZO~&iVTHO)SEF{q3C`L{y8D^G?n@S$eXH
7 zPIVG+9l4oF<EifMRDOs&BQ-CLD{vHvxKzG>`ouhW4wvsgF)>f>D<7AcEay@^Jh-`e
8 zIr79w)TwJ5V}ml2ota0UkM1FteYo-TOVF0Uy9a(yOdPg!(p|XtI8FaRvz{MQcYXZy
9 z)veR6d~f&Hk^8^oHXC**5=X{#uOb&GG|3;F<Ugl(r6eBnZ0-A8^>t+ZnSif~Me8^>
10 zZohqef5Sbf{!UGq=c*8%MTCFO)0xv=HY0x+ub(l)s&vUFuQXec(Q3-MM>}Ze?$=8m
11 z7#<3a*3z}xw9b3T;}gye!D(F=#pd`v(^ZmXIfC#^{*M&`wZNi&-t?Xi8K&hAh@x`S
12 z{Ad2F*Uvp?cbdESbN2F!D}N5{EbKJxw7y=`Q`pc9b+=u7mi@TtPiF_pq>7yU25um&
13 z$#p00aK@!VxFXLhb;k157f$hZ#XU^bE1UT)_*nBP!&ye=zSsG<`GTyr-BaRbnT@Wy
14 z+P&&hcI&E7>idNSYjr2pKJs|eo2A=ld?=W4z+H0LzxdeCC&gN=N6My!Pd`#QmzP>C
15 zoJ*5$`1JM3%NKWajP4|lxYvdhF1>D1e9gL}yma`HA6p4kGmdNT(t1kw+;V60hBr2o
16 zUkyLS+Pc?$)Y*gZR_T5(_g5`)Q)uU-z0RG5`$Y6RymM<S<6g^Bzn#_MM6?pdR+g5C
17 zDaPF~4Mj8OMUXUTBR5|gvvlVdr$>H)-8BA)QdX<J_SbcGw)@|k(7Wf?ZunY0tY#t7
18 z9tmG3y~$fI4mH?(qV>!ZXYumDgX0{07THaWT6ptt{y5@8n?=o?Ggc<-G5Dl^czbw|
19 zrt6t1*=8Xe74UKa6xB9?Q*p!MfGi$wJSQ}OdHVJ7dz&j>8_pnhCz?Qe(s-qNv@W}+
20 z_NMPY-*CC7fLnX*PMVD1!F6sjrDon;bZW%akVhkp!^QAMn*4?!llstk#`e46&)SRe
21 zmm04GRTicHsPuJvmXK;B-S=kgPeGdZ2&wOVmxWQmD^2#v0&fQ?h#3TR^Bct>v}XzP
22 zFP>PhXZO&ZeD~_7Qyxz!i}*9Q6nJ;f*SZ&wleqHQtTCgOZ(9Fn%^J$|qBLpC>@D1g
23 zQ`uLm9$wnzUB2&x@s*8pPOkE}Fz?lZJokrZdevgJqdax@Gj%dgYQ4SuaJli@vG2XZ
24 zviwt9!fg-Y3S%wD5o^zdzMtQl+H^hXUR<`3bdtWP)DRAM`7=m7!YG_FbgtArzw&0-
25 z!_Rj`&ws*W%*yr3Q+?ISYZ$?14qIKFyeFOh)^Un8uVRAEp$Cti&ReHb;9=JEW9p0~
26 z%jCd~g<21b!n^7ZJH9iUzM*$_v{flBggp9@)l}!EDQOejA02MXzPTy*-ouNzhWiDh
27 z8t%D&+V*HHi}YEm|N1KU>Ez<5yDhVRuItrMdkW318y&u$r{nN?)yr>tr1e*lU5d7D
28 zU#mF$Jmq0Q!~~(?{9v!pwdvtoq^pf5W_4J*eOo!7G4?&{(<|2EskzOQR+$bP$8LN>
29 zn&8&Eaa8_+yVp&udNWqUg%&qozEl>nF?m?t$egVkx<*}|bD%DA#oHx&8xx6RHiS;y
30 zJ&HxX%iKyx-|#X1RMV22tU0^xcb&PF>9_Jj+i|b?c4wnrwma$t*%n{eFzz$?!kQaF
31 z$NoI!^iVN=;=1ba3wt*2XiRpwsKd=|I*{nHDx7|_m0m90r*Zh|=r3KB^j1IPcAeO9
32 z>MQ0u7vSDpiI;1BG@UTb<#wm})I@&F3a#(^8%e9*<m{h%A#LQzjwr^5+I8M59OobE
33 zSr*lL>fOQz`zwtvkF3i5>AQJ|Yh<Z#+HJetr#+!#JMNfnmpnH~*}VD>CT;WR!u5M*
34 z7aZ@(t$m)KeI<6j=;sOY{I<$F-Jfu{P5bqB=H8K6H1dZ>UY@WiHfv+mz20HDlU`m#
35 zPQNR|`>&ncwtvZ{ksVLBDJ*rzA}#y9T->c|Q<fgxQG03J3Br}Uu}h{~8k@~7p<P9n
36 zH~utoXyU`D2lne9gzdeSpY;qrl=y*`e|^uc$;)>=Dv*t$jomU-&|I=)p-;YWWuAP+
37 zjKyk>_d6apsGqpeJK0yVzBD{~G*QyI)X8<T@BUp>ng200{Rfx7$9BKGI_8>V1C4kw
38 z|Gac)^vR1KSmW2OD=?0Kad_d^E9)9ud8<!u_t<HDX7Z%soOs*miC=CWGstP=OE{E^
39 zxA<K~dwduh0&n%2THLK{HEQ>9y3u`4Lce}HEk1h8K1t-z366Kwi`^BsTdBT#Zr8MR
40 zT7Q~y3_rT>=)SVsGYVYVx7i$UH(AoQF=$xO{E*RWa_5HF#GD-Vto+L<!t14>w=^b@
41 zG@yu~%$meWjtk)#p3g0ZFPt5o6iB|f;y6v+Amz5H?x@P~g+zZ@$<vKh?A@<N@6^&6
42 zQj`Lp|GYN&lfj(!*Yn$~HqFxt3R~Ld=(=sfG_T00%~QQh4K&MXp@(XUira^8jvU+f
43 z;={75Ij?p>8Pj#HO*b8J<g3x`z{#xprk#h8n{TFlcDW|38g_PP+0y3p)V%t}cMa7G
44 z#}@b>>^vK}B{ucMfqgO8mVOi-*&KP~N$;DZGdyh;-BlZ!UO9_s+j!z$neMp}BXY}X
45 zu0<_@KWtwEA8mh1dOm-;e5Peam}`dZ+ij^M?&M6>CY$JokD3;=O=D=pgIJsL{hKRZ
46 ztiL_PY5wBU5i`1eDxSF0hOBPbG$UNwb-0H00b#^VgF>y!&R5r5#v~=ZpEcd5<elB{
47 z()A-wR7FJdb{}%zQfl|Hs^aIhcN6vw`Bw4r<&VgjxM#jwR&T!;wXa-0>E=(#ALfQH
48 z7n=3$Y5(YKtutYjEVOglh1m^70o&@6>OQwC>_jaO`b{rS^YV(=fpd!a&RKoHQlBc%
49 zNLIN407BgdC&xj4gtvF*RPGqGMc@cN%Bu`@DKc{=MNZ3-bDjR3f5FGP`O#rX<K^S>
50 zxW9Z7Cl>%m<R|4}-RL@?K2n~WnV&OGp36m_Ixq)?(<g~sfNSFlwlI>*RaS*u7iAUr
51 zFkF6kPUg4>c^-ERA9zJvex!VA9+w|9DKS;<qq=*m?m;qa29+O>lbN4|E$<Tn<^}oq
52 zcqit{lex~=vNA3|0<4phn30>6m?O^^H%&DyG%+tH9pJ->i(&-3%6@mGu3R|lT<a(r
53 znbk>x7@f$@zoXY>5O)6=Z==(bQP<7M`QPJE=r|aM*x5jXcY0oKxID)vb5d4jhCCyW
54 zi(=X*b7E#rL{{QBFwj>%IenZQ*u`m}BT$~6nwG~E34TAp+v6|{LC~y?{d*?NYlBoD
55 z?z^I~=BNIC8rT2PX(B=sb0-YY?(c^S$^i4TGSTVWyqtXbU*{DF#e)Cl#Q#9re}C|}
56 zBa7nvm<t2N567LVp2s&anZ4wIl>9;a6vcO5f(bOgq)h6&cR}O)<MS8Rv=5umi8%b}
57 zc591p)U~4c+$r|oSH|x7aOddVYu9#!jUM%F>J52SRghwO!Iuk}>Jr-v#XZ^tnc9&z
58 zBRjv;KbM}0e^>mu=iH$~ytMxN0bz4xlXpGoy;lWwZt1#rWB(iSoo!}qL}r~00)I@b
59 zlHHoMaB2h_f1>uT`jI!6R}#3DYYx3Tab&IU_L3#qdT|Mv@|_jchu(!e(%4Nu@<w3E
60 z@SaoqbC34}IRE<>9ma>J-PCTu2*`djqx$=YBbnXtW2a90_*~oFZDkZq9ygm`CY!v<
61 z^8L+{)(gixNl)HODnIc(9>=pHahAC>={qpF&R?2+;&$c7_ltMb&S@}vXYj$H{>iI%
62 zyzo1|_&U6YS_qkaXYupNt1M{JdA;4Hhr`Ij>>gLiCch-7O@hg=tdVP3=xmm>H{eFi
63 z=auoHGi@H!pL5$3-By|y(D97+#%Z~*_R8?4eXetodKMh}>3CMR*r-@nZ-+(NE5+<T
64 ztv&lb{%8#=p7H!{*!wBj6JjjJ2b(n+eQsTo)-+)z!*TE456AzEKhEuOZ=FL3iEf&6
65 zVSU&)+$Q_TNh3&eXPeX{xY(`hwVydGzW7I*LE@UU@lV^md>STCeeJWdzU#h^*BRPs
66 zq}*jfwIF>Cf7`ywZzfGe@7?CUG`RAX_SR5MaOV7(>a3ePDQdfKA==zm;}Va1J|9Wn
67 z?ko{>xopc?HT8^j+sQkVHxes1MfLDrmB?LoY^sRz3jI8zta7Jo{=WB5KkHooQEES8
68 z<r=~Z)0r1D_UVLcU70!Sw1@TCGj>CscR4*y6Yr>gM*R~po33lm%-Lf*WX{u~M}~p7
69 zZnezXx%i2`Wl`UwSi9<BX|IVhGwP*Hc5dAzOkx>Qw<6Qx=+ppvoo8v+n*=fYKHQA;
70 zzMRo`AV1)J<)RHIn+#<;kT;LRHc<{6bVnxOH|%{`@u)P!T4J|6*LtV$^ftJ3CeJc<
71 zTm-Xs(T3Y3^9JU|X|7{8HRKgI-S8N8Y5SVA3a9hSpc60o`xh^=c(7sq`m-#{tLD4U
72 z&99J&^ekUIIig*(NeB9wPAfeb%Dr=WlFq23YTU{rjm}z|?;LA675e?_RenTu_>!*+
73 z^UM1$e3>`pZtH!dH0gvOgUXG&KK=d~_O{bwR*p$Ik(K6sT;e)0?rbSyqLp1xyP+g0
74 zt#jIxKaZZ&R5NtzZ7{7+Oukdh&g<y4A1&vUPW4-Pye#Sjd`<7=#s??U>MFiCC%YXg
75 zJe0Y!t+aX8vbLE&tDF)m)QY|zNcNueXPTCLbLy$p7vX0g=e!IF+#@()abkpF%COdg
76 zITJ5<Xg#Z$6VVnoGj&SmjlR3$10E?;dlwCRe>tW)h#`LB9eyyzj2bjz`|Q{;`(7_^
77 zp0%`c+1jy*`dZYom{m&*VlI?kygs{<p<iB3iBu0WEOjlQrXOBg(NT6sR`%Lu%F!2<
78 zrVq?_R~+qPS1&P~)v@ZtvLhqe?YT)e759uDe|AjXe(}t{eT~U(Aunp(b!Ux^K3jSs
79 z@j-*2${@t)jMdhICN`s!qDwo2D8rA8)%f02kmMczV}I6*Jts^1-j=MWN1m=UTITp<
80 zN!G}SoQ(mg?xQH{3KlcGId!q&$M%PGANr&8<I?jPng@^fMV~CSw|;h|+&LtWS+Hgs
81 z!FM(i!pphZd0<S(p>ZBx`WoC<@K%LC=gti5+Z#IlL)zpid1FQphuDo5ou_&@tk>+_
82 z_=GdeqHC4(4>Lg)w<CmLq2-2*dH8Uc<>&A}{=EI?!@ce%Wsk?X?m3bCLH#wUYL}s6
83 z=ji)zVg==CM=<Bhdj2>UbN?c5qpHd>%bp$Tu-jVaWt4j<(PxZbR%R>?n;?!&@je&p
84 zIo5-swer%iw!|LyDoWYoN#`XOk1x}9yGwgnA?JRcm^;I5^<LwJi=KLhP(lnHOY?4g
85 z8*0$G;}7Sl!eHX2)Pms6WP=IBP1o1GbGF}be9cf3@`c6k`VaMFW#p~3XQqfRKFB&-
86 zTH*C#Y{66!+bAsP(t>9qw@cZ2J*!+#dcBz8$j(umopjhU-tFG{IW&*6VDPJ*%Xe0-
87 zT}g>)xpFx;zN6H~)Mr0`hgYJFFtPi@+n85ZHZ1)z?Zl-w-8--7<<uwEzP}z9c<Hm(
88 zi`V;$@5yF4UijkM_4)}{ZPy#am;8B)zQ`?oCKY;gZSmL?7N52AG&SbrgD-P-Sf!rO
89 zA&;3hyCz|U(W~T7h}JXLS3CY}eacMxCVVo+@!_<?Svy<GDh<!wuHqUl;#+0qx$XEQ
90 zes{;J#fZsR_+*re=!uc|S?3R~q5Onm@|!)EUz(1dd~+7%!XEd7sqb`!53}CrAAYyg
91 z>ae<Z=8ohO52FfCe7T+d)$PPZw{_>`-WOcIt#UL!-s^H_9_7qQ`J!2WW>t8(82&Jx
92 z<)!z)T8qe;?m#ZPE%&~)iDPq$Jmd7L=dD`HNS1LmMiX30r(|u2O%$vi>WW;jx_`7g
93 z@a4<SsN<b)tlDF5*X}U;7;*pG50AuM$@j;p$qcv4maW>kRbr50QX+mQ;CWS#SwHLK
94 z+v@j`+=)gJ<KxOYKJ<Nl(PL*J$+2}g)*V;1B5$5;*ST@=G24qDc`n$NRv5rc$;lY@
95 z<)ot8>xCY#%<yCBI<4i`Yh|u0b@dfzD(t0AA9pm>CBHOToYHV-{odu0?Ip>^K`*Mv
96 zGh$<nZ-06Bwbaa!xZ@}H%xCrX+}=6s)r2!1j&Kp5dpvUU_7xM1E}zg3jVf!rAM<6V
97 zy1L|0cHz;3+>>%on@8j6FSnmO=eoU%RhrXNnD+DNgC*U0D-QW4CuTm$%KbL`Nb;(m
98 zpQlmQ>{=e?>K{sf8+8w7^X2UAJnl)yo$u#KsHa&Q)MM(kc7OPCJ?jp4i|5a2*^57X
99 zM8e$@V>(JhY>nTv$M>H-vob9Bn@a=Nn0(%ZA}m>WTrj%m;>?mMQ55aMEuHgizB({d
100 zzOcL}zj+XJ?0R;!Lr3z|GZ`6LBPZ-yHYTL&WE`h;%=p)A+oop|SMDh9UDY3Sx+XC1
101 zapjytL!v+OyT2Euc~vF%93FP_RKl_7t9Qq_jxOZA-uTYvv9+s4dk00&!|z9dg=q7h
102 zGkceBc(vk^*^bw1Hf+y{W4rK&Pw$G$-lt*ISH+hcOQ6Soz`b*OwLz3}aryPhef=b|
103 zqWtFHokH;-r!X9Kp!2wr0iW-8M}Ls3hE{#$DT(<L^F{`Eb47z3eE-Z0Wh{UK_5v>O
104 z|CPZ2R(Ik$i3Zn!V=h+J1LYGZbDf<3+gRa0k3;yrAufsF!A@9}pO*?cMO-YBQa$+}
105 z;*Nn`2PvIhez5BP&#{NnSq@kFy`l2t^u%AGnX{|2gFxuQbrHD8F#i|$(HVKUTr4Au
106 zVjZlYB8}wlq!}2*4^B_c<&ME{C;RPzx!9ulV64;NwFc*#m2)Nk@m!}t8yVE<^!F6D
107 zlat{8G28q*Mi>YVl`*fqGT^lz6!#8_e*ZDl`In$}aBwkd*b(Y%Df7)c-ZN5n-Jya*
108 z)Q8mBIxkD#wny<+l>YRL3DurEO)zy+#GSe0`ltHiWK!LGyhqxmuf%h=&#hWw?sMqn
109 za2C(>)x4g>^$Sv;BNt4)G~b+fdF)wItbEJbu`_O~6}fF3U3RGmF3L5U^mmaI3=Zmj
110 zRa6FOh>nTniX6Eju@g5Vf8s>hZ!{Vl+zn5h1du<*dO~JKx_q1sSKv4hPYs$dIJWhb
111 z=b|j=`ZpFFOyj6@`4o9hc#b?pj`BKqg3~We5tg4fF+Bq`34f0leG>B$CuXK{2lnLj
112 zE1pF8EfggMe^jn~I0y(;ZModP3xY_si+_p`RYRQr;eCqO5#`3=U{C+STNRi?eeh-K
113 z+I1^aBO|+X{=CpSrC`%J$2`4)caxx(tw9Gi3(l%JR>Eca%s=MNqx!Nm(#yGCFwWm+
114 zX|%>4-r<oo21~db+Y?wLMobePj(T+F%yfQd|H;;4jTi1cUoSf!s|zg)gQ}|FV0g)S
115 zt@l2ycVAp`s?B@DErE>_N?f6ksHl{L>d`LD&aNx#)?4Ht{oA|DAkAZ$>>v0mFCV-M
116 zTD_uE7lH^CH<q32Oq$;$ZBKWd6;fLi)qb@!(&?nN(<eU?tG1<jxkvpas|^?4n5Im2
117 zTWp{7VU_Ua%ZX0k=1(1SaEV!xxwL)G&A8!<=4H;0=7m<D*ZyFTJ<+Xf6#*(YKS*=g
118 zo2;>Bes#OLdcN)lO(<*YR=&$anod>}tv#qYHTl`c=AcgGuyOE)jEvMjH~z@Vx$J_>
119 z%ywM9;`EdHm)-g=SA~pqfHveR%=NP;z1(biyZZLc8+v1^tBXhM>~HSTwdj7ETA=@N
120 zS1NPTgDJN+e3y%$wrZ_wDbL>>EVYFWAB4EY@20G|4nyXCMXy`F`gw>xw3yTItG;gE
121 z?&|7l8OJ!|8+Y;k=VBwptfM{a=TALy|FqAy2j+q~>B!-xjJ*pQ)J!(4U(uPI{i37T
122 z@KLd0&-3Sw)Tib-J%;P8O-G94abuV5yz4F=-FEK5rRJ9Y>+QvQUr9ylCoHvI$bs{>
123 zQI;I`C!D%~gZxYo^NeF@gx-Cba8Drj=(G{zmv4BkSiZ6Uj<$ogXI)d!hDUdNPQJ7=
124 z@%-^=bx$lJ;zB`E0#&M=h73(mvn#RBf*jhkdS-H&jHEVd8`8-&dhf+Jg<B&vu_g{V
125 zZX8a3B{fSUPI{pC0<k*onxzp(=sND$r0#zlKb4qrd_oQ6#ndtNVFkh?XE&JmoI;YM
126 z!<SgGpgG=7;qI0A4_uQ<(xkcNn*=-AyUSK@%G{)N(e;HugugqdV`KV@(Jzb^w3$~^
127 z_q4gy*qkkqB^J9Fj%8RK-}AsZ3vR*fFEw~zmSt7QI7(GNx#hiZDg!2KY%+N@&*_Bp
128 z&Qh+qPLOZUse-NKT~hWE@mA8yfDF?ur_?^`%<&o#o>%E~8h$#vTv{8xC;asYi=E@I
129 zCPrHAR?pXRp1azw$andr(`h~pLvQICEgbIsd?W3W_6y5AQZG`VH+1HVg~t=p^kfU{
130 zJ4jlygO2b0qcY)Z;w`6CC!gx<vxYhqquSU>MzvnimQf}4?NNz~6D}lVCRo*EX8EpV
131 zpGTh0srchV*7qiA9Iac_P0Y|dR8n<f!sE9eyrzmhbmPl2+rK?H@X+c#4WB;C;rLSK
132 z@IaG^rjeSr$hu^8GJn4BX5ytqdfrboC7QhX0rRWojcY$-uD=ZKT*nH~TsC<LW9j*2
133 z-OI)<>r6JPqq(lM<G$lcmS0%TS$aP(+&sj*(Bg)sf%$lIH;arw_p0{b@xh0O?OzgS
134 z7Gp9k?&Inw3s_@^1)P?Z+cb{d?f<Iu>ZDz+>E#X0c}wyR=grTn_$+^EqrZ2-q6ItD
135 z#%z~9+jw@`s%)zFrL9d7(Us>a{ZA84<F$RRZR;HwvGb3snLB4+jfv5Z;oS_pd9BX5
136 zZbx!JIytoLS#a={6_*!X@=tsBB=SkzQ<t3EPj5ZRICbs}uk*;M+$R-JZa*!1df(=v
137 zw!8L7?a4NIHtvzWHf-y;*3YcFN4IQr+2T7YCp^hU+r}v3@D4SefpwYffl(ei-8Z`L
138 z*u4AEzGb^cZp#@f_g}I7)#lZkWA_)t=EpAF{A_E-DBkEHTfSC01W(zv;}Y*>(dU?a
139 zc|Iq9Q$cI*#@<<T_xqRnAN4=d;S)N`(nmO=KCXUrZd7n6FPs;)XVmNU;<0Nst=ro=
140 z`bhlru>qs4hNbQqUUj~5f6M*@-R!$jcS!}wLsMHv70?R~eRk?q>x=9w>g}Q@;3mvY
141 zp3R;;VQ$T)g(h8D8y+svxOM9Myws}+dAF;R-;7^7>CSlf6mKELu~YI=FfC)ZoIT<2
142 znTGqHx2$T6Ssk<GW>KzwuIH!1PsX3(?bYm}b)0n$*>$cQyVAh!h<$vter)#MuVe1k
143 zZjUC#W;S*=DiX#`IC&_&e$~Nibqf#eYivEYQ7}v2VnOiav>}os^vPCl@7=n6gZ5_e
144 z&HhITZ<f5e_{g!*cM2sn=<%}b0D8doajlPcYTdQD_p;>5&icc`E6vBh*H&z&-QLzI
145 zzdY)`^B-U4z0Q>gB}d1U-EKM3`ory8z45%VLjg<w+_+;wYH4cYb2C2ufcb%_12LHs
146 z-ZkD!ExPET@HkSK@nd&SUE!Ld)V_&**S~-HzWnFOpL_Z^Bt86lTmkY>daUj4O!bxH
147 z`A_ZNQW3ACUPopd6JHY#P!%3{EltCbS|81?x{LS?o(nV&3ievO$!{7nHNB^g+)7UL
148 z&D@%t9G9H?q_8p|GUjSpL;B~#FZMW<JKd@7j-EI%d1AzyJAw=H=_^CqWB%0cSZLQJ
149 zJ6iDlsdhQ<a@5*bo91QBAE&yzdDtGcPz)8$yV<rj_<iWk&}w;e{nxtnwXz#e-=r@x
150 z+p$2n>P2<*N|zN+svCx+yWMncHk~&2_4$3zjs|`R+7-Mij6Q5=<>6|b71!4<3SJ!M
151 zd3^c9Z6A9Dk;14{H90#)CuPz8y7<P_Yj*cH+^;0a?JvC23HS4iz<CdSsPi!TV8rCm
152 zuWQzxTKjm<w$Y!W)Ss}QWL#Jm_qg%d>&BgpdmCG)y_&Y>W5@O8{d?lo_k~5@KjM|^
153 zbA0anh4&29GfgKByZhPaXv~Zl#h!B`)=Z~HQpGw8GC$nA9BVz|6u+hQTH=Je;;Z8B
154 zgU8>lyuIgOv0aj`%tp-HZ#^Uagl+T372ij%h_~+!yyFx-KH_k(Rf<*S^HUDbWo2i!
155 zp1EGR@EzmM-I(EgzHM=1>eKaSw4=21r&FfSnI6Ey*&f-`uzPY;>w)3>jrV8oNk1@Z
156 z|J|`ejxBC|*D<52Z&JQ({vY}87+LCfc75CbZsNuCoi6iStBc+TjY}Ta-?iYtnl-F7
157 z<!d|)9~rTS8WuDixOuR-*{^w9gLa1Y!Qq_wLvs2I7H1y5z2Nq%+v#ur2sl3C;iVn-
158 zJBo+oob8W(m)-H<;Jb!nd){u$W956VYhG6`>y&M|>vY%0>gVv!UtK=u6=r_y+qC?Z
159 z)r8@l>uraIy2!E$KAc@G%uN+uIKMX7O>*x@-8aSe7oQIGdiN6fJ@HL>mtH^mKBG$^
160 ztTN=UYnfR5y<NLw#r{3w(L+dkNp3T06r09fOsV;_`fX$P>&acy=dM&+cl2T5<2#{?
161 z*XU+`zjxTJC2wk)dD4^fZ<}s!nQ~*w_U`R>i?aDY?w#mcsD4qs+<4f~y;HaT-1#t|
162 zk8?X<l7HC3X$$D?jOm5NYn>HucI@47FO21G;<>9ja?Ql`b=7XJZ=H+!Ue@i<x**tK
163 zf0m`N>&tzc+xq-|-=mLb+9rN}(@s-V{1G2K*0#m{^XXTk-o?GYv!(0h0o`X6_pDo7
164 zf3Ey+uwQodNavij-)?+c!TId@x$tv;Pl8iX_J{ayrJsJBNuqfD-CY;{Q*<Qwhv-No
165 z5&S2fsI2nbqFauvIp5spR9*SQy2ovJig{@}eo6G_fNe!3OZ3>gT$WO3_m0lL@FC>3
166 z-8_#s(+_wK>59p}VO%KL{btOIHCqg>HGi6vbx2{%-Z8xU^0dZ_n{Vyc*XkIZyN}dZ
167 z{dn=>+b{N$io35RHIBUbExkuPx2)h+Ltkgn$eUdk4`2LVQP3M>WaV+M?BSs!Zl1fh
168 z#U2>-`h4#03t!%EJL<OLkEZA8-Jd=lsO2m^ZgZT&o{-*q{qEeu*_*!VN>;=VzYx3c
169 z>y`Q_+Vk7}snHdut~7riRkoPi0)nBad78BseQ~08!yfMjjo@HA$HQhT+eE!cq1jQ>
170 z$v$@y^1=#n+eeM6j?+A(S7gzha`jAP+TszzufKP;+xn0fdGWoy&CZ8op858yjGH_6
171 zK<KK6JaEtKTzKts=&lEAGUvZP*KDx*sJ6yY@jmKNxoyMACza(#3E`U$oNrE<fAm(}
172 zk~Yb6eM(_&icU|gExC}Ev97J_a^QnCpHAVAdJ{Hft~M-c96sm4k)S!YdsoG;Ynu{W
173 zbo{$z(aZyhdmnzk-1WY&*_C*7?kajAJwm^SJ;Sv_afQ9Tr)}4vk?Q9r>159S#>nV4
174 z3At;;UwGq4Zr9{{t?_m~p6xD*pYWpfr04$-<_(U?N2A#R@Dqbr%~>GAf;?wu$zMev
175 z=89ZhxFV5=>nsw0HUU=x?gD`eHXf}DorPSXivWxfbA>_?S15Mmx;i>xZD>C>M*`M&
176 zktpYZb<s6lgsxm6h|B~M5jIyWmT-j<A?SDF3S7WGT!dVSBWMHhlvv_|?L`P8eP<B=
177 zq3zDju3%5jSP_bazhJbWZ(Y$K9F5wr7@r?8$~(d@k{6knmN_XAO*jPkMoi1ilTQlD
178 zNXf)f01^3FSrg@x(0qp@mp@XTnhp>*jce@%&?dLx^22hH<vHmYsa)$(J`pymL;zSO
179 zGBY5^H#9Me%MVHhlhgC2sp8IG$tPuc0Ytmwa?-Q%GIO|se~DMIEe=W!fK|~Zv=fzG
180 z-_cxWN5|jtD#G8x=yB+-?FEiv^c5+HebdKzWu#7&bJ2IeK;lzDge*{P11JKps6h1h
181 z9SqtaI9Otq>LsWIdoSyye1;R6<Dl|G({pnHDwHp7Rc#RfnNi9og2j{w_*bl>J*wP^
182 zv#Tq*V+q$uD0Bg92t~go3Iw8m2(29jE{?z6_&N>L|I%U*`dXR-IYKxnVN&AMEJ6|b
183 zVj8}RzMW2!Cnnp<u{YIgK@pFeoX0)ok5=hVAP9#~$x5B1T?{!u5NSIE5#7@!<xTzZ
184 z5F6hE%AM4S(^5hSumuD$UqH~%j5M&IhCbGhu5Ugub6g_#K3W|f4K5F|a^zW^TOo7s
185 z{LdhQuEq9-vrI9q+8-3J?*0WbAY+sFw;eV_@2z>sYClAZXlf(bA3Ftt&4AD=8CJwV
186 z5RLmo)LTGpRJH&0k&r<X{u@+<7ea2hT4)G<GBgjA66`L9hTwie^KgqGCww&Iik}G0
187 zBfN)<0al99@t{5gxe9&4F`<dLWsncD57I^+LGwrwhz;6?q2tjpL|d?a15^d(Ex_kP
188 zp<qoSu?d=oEP#v<0i=h{C5(ouke`qYpAPO`{oC<-`}+yIAa7(MG>k9_%m;fxN+1Yt
189 z4(Xxo_-T+kek>FL$|U3k6btT;K=A>k5I+_^j`m}xl|Tz1CX@`30aloh6dDCk8Afym
190 zbVh0;Aumn12n=I_s}z(apx_`+I3J3~%4ATJ!kP8f_F`!hoLQ%9AZ~)Q>v3)-OT5@{
191 zHkj!MuHm3$fwBRVW1!pzg$SvGt0yS&pcI3G?{9-6>I{v<%>Wozr>TVj!|Q}16<}@&
192 z79U>kuESApg~veeKtaIMW9#+V=+k5CwY9N(9g~U8h^ePg(S|J5_Oj6Jp&hb1Z6%OY
193 z$I?~-s^^Xz08;DtVimx!#DJ7K3LOIy2LO4UP^<#1ZB>8~7i>~q$JXa4hlY(%&GGP5
194 z0s6`ZB-U$T+fA&e(b0Q+owWe#2&;>VRRZ;{L&T0=I&c^`Wf;I>7{GQGxR!u|0Pal&
195 zbV&x#MR0w9LU3|jRx<Wr|2i#}SPAIpr~rTs6!5OoKvCNc04f7(@u<_)!+@c6GyuSk
196 zFkea&aGt+N=DfJ<$l<oYu7HlNV13s*oF2!E3J-w=fLS=O1l;N1?hFg+SRB5W8mPmt
197 z7#2bd&?*9V7PvdX!a62LdKh*A4CEr^BkGU&j}hrDQ*-g9E>F(&E;gs+8hj1hI9+g7
198 z7ks+YmUFt()#WryJLP){Y3gq~c!#1P4!aBkP-<LfBM>v|xOLLH@VczJlDds`HFX#3
199 z9@J6W>fYC(qoux$;6NN-SUrjpg@lcYABDIav*lzSgEt&Ia14<&YjgNV;FhDhoYtc{
200 zoTIJUoWso{IR~3<IER`AoF-73on1LiaIVzNQ^0WtWvJ&+j)$iS$H&u%<Lx<;;{}Qo
201 z6i;UXM<7h*2%SY7XHg_a<ZR5jD0(1zFTyqTch@&q1d5ybAJjK6&A`1|%4nddiyPR1
202 zoQoOogKX?Hlxmbov%&G2`j1N~S(<<;G683xee4X1rZ{UAEKLQ|vQkP?Hm201;19@;
203 z$+1(~CWBd-%ZryInM>fTMR4(g4GWNxjWCoHp466vlO|>*BFr(|F-ykan&3R?A(nuX
204 z#tP)rgJRFp;MlP&I6Rgs$CjnVxo`c>8o6MN-f-3|CTBg@hr?kRb4<XU%W~&zV2|Xm
205 z4FWg@Y<CVDtfLL)X|cRGnk+U)1C%TlEM<B5h#?}Z4uc;b_JlLxV)y`j488!rgZp8s
206 zIs~gje8>~Zgo>d9&@t!&^bYEWs8ouCqmHQKkPFBKTtCu}!>iF<33yi=;tE5q;RLt|
207 z%&yT4hJqu#YqVf+kMORg2zbFY$s@hzEL><*vod&ObWMp-OcNQ5iLQZb;6*Vt<lqPu
208 z2=TxtHxK-B^J;K@HAKI(#2TWRZ!TJ4pcRIhF92%PQCQu~7p`IXrPZ*^e0jON!5^@O
209 z3Q&D`=x<OLl+XL8PagJkYzI6zj|YKSSPQlidf^RRemyAfpI1`$Vh7>@J?obSF5n>8
210 zX!HS)bZF!dH&+*l*jXeLI63ki?Cp5AHr7^_7UpKACR~oOk)Z)wUr$#@o28|x!Bl6^
211 z)o4@-nM5SuaR?0A!8Lk5(Y5*{o`H#pNsPU!&i=h#gR@|MG^v3!e;Z`*FOv-arEc^u
212 zb>m<4;WZGe#^21x7hSj(@_$wXY1P228VG#~tQ88Lq?+WHJl@PNy++?BIUxbe@HJy{
213 zYy98yRa?Q<uBB0Z%zWfjdpoF>N&_GbKwxZEE$r_BV*ujs=T?hAWV*dwjV7-KvG7C7
214 z_!?<N0szc>!AhV<>#rUlYzqc=L15Ydq@{%58lq1P341O#y+)c?166Qq?b^y0G%=y1
215 z1fE*5S#n|=z+58OV=aWU@JowC@#u$^gfwmq9xRSs3_uIlFO6GnhN8nSEdgB3e8H@L
216 zYzNI0pXf4^HiH^XaQCax;MD|x5dj5H3~=Rsdg)xWT3%kpt=TvtdT_4^dX0&R(E}SU
217 z_cH?vg2nvCdxN9s@$K!DXHp>`Ibl5d%<+lnKK;gX%PZvAUKe0{z=ryz0m3BypGK9J
218 z`z4$CB_}3(D_8fakw#)SC^9x0J0L)vZ;Yx*H30PBv7Q9q7!xJ3fx!Eq8#7DvHBgfM
219 zS4)Db1vK~#baK%h1c3!=xZ}7r(8y>rFw_OT$X%fFaV{A1n8d(fR`_4DY6uogGj918
220 zs0L0jd;8}1mPAzx(SrE}LP38s|Ag{#e>1Lsc|v(&Q-4X48JB5RUK<=-p5>PSo*E9+
221 z5okYLVNm10Af|?ykOsQ}+@l=eKQh|Wz(gZvpc+0<g#b$d#-d@z0ayUPAk__E8H$Vs
222 z`nd)g6&+&$@Dz>0kpNcSQAPrc>jG%3LK})Bxyvu~_^3b=6O=nEnxs$?sMM5<h*s9Q
223 zP?A9%B<1q}JrdBKwt*h)QD{%eK+mt~31;9a4VYJ{T|>6~^;2i+X!)hN)xbLcn{K(X
224 zzedX^8fSpSC?UiEheA{ykaYJNT^@j~dF6lz7tLyzyc$Aun}K@_m#G0X07|_Gv)~c2
225 z(OkdsUtFMUQJoa!1HkHLiD~63Q9_yipIcG$p~N)Gu|T9N0LDv_#sj7Szr+Qo`Y|bI
226 z*7$!lF)%6DFw^9^^3km#K9SLbpE{tJfWq;vHG}7ksFlL=B4VSPnILeQ7a3iLz=%(R
227 zcTBB0=!<UVLXZ?|MbK8X5v_313KWcTc^yK=#v3$CA*clF#bXUveOwa^VQu7rHW(V$
228 zgecpX18oRs!z<gQSR4AW2a%aEkwth<xP&jn%W#*0g9r6L?|-P6sz_GE;MPD^fN(3J
229 z-B2^s0iFL9A+<m!u=`}F4r+tW{1!dUgesx!&{^mfF!ckUYG^&Q2dV*36@ZH16u1yt
230 zg0*gic0v20dgw5868b-VuE0hr=p@2|l{<I|sUer(TzC;!xeD?I-;O~u=0PQpEBO8w
231 zAGjY0!g(UG$XR4Ql8H!^tw;elT^sH)ZU+<szHQKT=;%L8o(X@4ze9P@Q-HVA@Cu{@
232 z+6(P~=79Yy1*hHyYMD?Ov=~|gZT#1%#BzcL{^M^=P0&7QE))md2AFOKGv-1O5Q@pA
233 z;93MxAvVMzB&f#jhBp5<r~Uu&!H-260k$fUb2x9L1*yUD5j?I1UJCfF7l(%uz&8f$
234 zKLp?*1qufE*beOkoQ35WKzv#UzKZ~Vpr35;T@Cd>vyt6kd_I(qTZ?l7eJ#*XC<&ep
235 zlfi6P=peivdIZISZx(PiAHgR9_Q6bi3zP=9tp$IJq(^!K#X}=N*$p4UA0%9ZW<Zlb
236 zK~Vif0`Vybwnz}`5D~#cXyKo`dk(ueIhtsgXjp&?3`X^q5TGCE9RdJg3P1~pphvib
237 zhDXo?X}F)dpAkZZ^<g{M4H^oEz{6Ova11NPXaYQ*H4UD^oX%2*)#!BM5CpCVlb9s7
238 z2$8C*gE~Uc6A^I`fvlmePKSY~Mni_soBBUX4b;>C#H7=~RZUHe2(IV?bUKp&0hM1*
239 z(U5@qDJhLcBMyNe6MY<u(A2MJKqnB;1!znraR>pAP6GAzgXlOyGMD)obu9Y?8bUsg
240 zr|Ig-#~e-BTgcPh^Q|VRf{T+QT*fTu(`$oyGDv1(0-I@=X_yFfbajAFNhE2BEUhdp
241 z&CE1}0<lCaCg>^p71obB;F6Mk!-tRAzBt9f(KaihegCL=jyxMAysw6E$xtX*pTG5v
242 zGrZkP&NbBSJ8#Ul%|kKi*8dP!kDm&uL1K7$EfLkM&5&CEs|Ix6;DE2B*s&p)PSxTJ
243 z#iA)A6dOz!20<tcYgu|a;Hh3(VzL(1S<HdN*wN7uVl@=OXlN!iMb9o4yGTg-YA9Cp
244 zu`YcKt@=9X`ua`%cMhSW^z~gN8k#DsoKry;+nDDdPfzAs3_~&*?=9%i0B8bUy1_vg
245 zDM~7}CT!Cf^^^cK0N6G4zpvNPmpGtz3RS`br)XqoNZ_Fy^gM=YbP0!}<q&2I+uEYX
246 zff(R8;7nBPI6cxJbbT?~nXRsl3~^?oHD@Bauydx1K9j+e=rbua2~Ub%cq9XT9dw*N
247 z7iA@VY_z^6I$A$Vq9p!Xl=M92SCkKU?(UH1TTcz4&=WMvumiv4@thpdfiN!y)*uo|
248 zM4~oJ2M}8qgUw7)@;i$F?ad|5N?tG}lFXdNVxf{pG|Z45pL3qUNfXmzmU!xhnhRu+
249 zeuaE{anks)FjQ%6Zk{Ibs)?qFPQ>S~_Z;ee1lEGZL~SkI_)!T-p~)IUHQ9zDzC&4X
250 z-WErm3EA9xgpRJdmBlf2b927Kvh+SYn$;W9{~WgnKLxUa)s%cA74w;%OoXYf>#2q}
251 zHR75$njj`do)AUd)YQhBP1Hn5Pt-w4Pc%eHPsB)11RR2qo=CK@{+;wSv|yASP5WQD
252 z#Iyl4os((0dcJi%Cv$Q%kTQnoI~j!POATU>QB<NSN=Yh10~ul}1r#yC!hRZE!jyuG
253 zI$F~Q6lXHkrRW%JizX(f%H5&8mJ}li*k-&9%r+om1SGOi0unI-64@vLiCNYv0?JV7
254 zMF}Sq^1+2CRBqbIQ5Hk^i#ixNw9PD46jIVqBGksHi1_BV`T2!}d3*nn9k<7a#`3hc
255 z8t=5Mc29NOb}7v;($q0TKOivhcIEQxK|x_6(<>Twx-8?nr#tS3xL=`LP#7TA2lfrZ
256 zeI>FW9hj@40iDJqdu!ntFifVCfduq4qL_w&;67+T@ehC=Ott~=xdH71z!NDIB>)WT
257 z=`z&@=BQz6kcN_rPQ+ASQ~zm<P;|PUu8L51d2ItEJI4be>IujrK&hjJ)7H_Rtc_#R
258 zRokJH5mc7wC^``~q{C!l5~^rGag8Y&8YZ+vbO2V@K=BV{>M?0#w2j6F8~7^KM<>z|
259 zOwzHjbOh7c8M>H2p%SGe5)dO)r+Q-gR2E}mrtwQufHoCsVC2!peQi<K)*Tt(zapTe
260 zC35SSZHJM<&^gw&Ho@*cj}lpZ=Yqp;o(GsN0eZ3<FN5@9!GM;8R0z~Z@uZ1DdIU8B
261 zOx8tM-UO;H!0R)m5r7dCiLlZ-{Eqnt1DH2pGiXiyy-Ix3Fr7;qz&DL%z*b@T+%ExX
262 z!?+J192g6r1)CuQR5hc>p^%Hn(<pg5_$C6Z_xpx=4x$)?*#N)y8vaIsUs_YjRHEL1
263 zfnlD(rv1V^4O4tHJVp^BI*gXVR$~4wPi5OwlHuv;4g^&Ol$eQ`^DngpFwa7j6e<cD
264 zLR@#t=(Pz!GaVhBTj;vFVZ;2^dbJdWggJ^t>$8z-eWfw^c0B9ht~l=jZpRn?1Gf|Z
265 zJ-7G%&$*raf6MIz!0qraZioNBa=YL^;&$!-fZOr1LX6pgS1_di86E<dHimexOoeM5
266 zo;!m9SqvrOxq7gkp1uaqo%$RUZ5XOCvx}jxN##AzQ7T}bQ&azSCC=$|dpjNj#f&jJ
267 zl!0TSQ*oG0z-gkBaK`8i+<-fP!`a*YV);Sn=bh_NYF`=V61+u;z_rI50Sk3|wn*Ji
268 zJ;rnuem9<Ep$E*nzN4DDgox5wohbq5z!JZjT7b2cCsG|JRSnkH=db}&Ji(+x4HaP#
269 zihQ+fdry0q$6zYu0f!M3huH}nW*cyroxov6AD3mPqz)<yG89Bj`+&7jS$LjOGzeTs
270 zg9)kZwzi4VM2LS`39SL+0bmj`N%2K-6E=RHw$a^P`m?uvpT`tOp}#K8B*{t=Q;l$q
271 zO;SUG(|NXbL^f;>Yr`6_kGoXbJU8X!c|#ptp4K_K6-}Lq{1K92X=`WCvzrj0lq$R>
272 zupu<qO+{%24k6R<1S)|9!vr#r{q933nJ82kLNTNA410ln#av$k88;wBIKnR-Mj)y@
273 zU!@cg2z0eUQiK}JbJtNJYV-8e)OAJar*DnifNxW96qo}WAso6r90*T?CsWWJwABLw
274 z(okMQkxfQr)f4l;Xnr&h#^ZpD(I7IV0ijWGR5J2w??^Z`fr?P6S>T|+zlZ2#941~k
275 zHrUyFsTN8`0w!JrJW4u3h8iYMfN&Z@8D9qI=IV+G7I4)ux4f;b;ENu<4M2V5Hb@41
276 zG@dLQ$Q!J*DzFg7TYgi-4{K?G55H14BL;yAdm`&+Hhe>24>sWhbXW?oBNS-7R6m^r
277 z<49z31A=3LGazISm`uWX5C{~M@8~GnD0m7EPsSl6(9(!DkVvTWN?}PzG+<A=8_+g{
278 z&(jd{_&k6|A)gO$`qtI6&$W!;z$+_i2b%^Fgfnd7Y@!XD;8K2`K?3_O;xzjjk*Ers
279 zWX;^49jL^+_CLgPfW*@vUD#kiyJ~yl$m*UHyc(oQ1QdUd2|57YV1kadsF>jeCg=dT
280 zEafV>LXU{aHj#z-dzfq!^)RpRFMm&8@0UDlsP4>jsH+bgeUu|a`cll5mZGE?!Vob<
281 ztRXtV3}2=%D_95H8Q39J%>W`}`qfmWTptV=vR<hW)2~W%maWgxhc$tBhYlX_@i4zm
282 z>EmItOU%+!$?jX(0HE3rEaL%xoC%qkDn%B;iT-ly@EpbaM=up$V6BIbVCISS%a(80
283 zuzcwT#6j^)aSk2|GhsbgtT>~1e)Gzeo0o51LvcP2;M@sFo*HBX4^`n@lc<ODHo)t^
284 z+GLUzRm!A7^Z~@vQ8cSG?5zRC`hGywURFk;bUMdakEGSq{}U5PE$v@sO&c?7+5-Zq
285 zt<5q1OCSd+)&ai)T%8<M9=KZnXO|cQsUruY2M42v8!S{4GlF4nH9tm_VTxfsiSoC-
286 zqncg|BY^=OLpWVW6<~jCz^oZ+8Mv5r%jSS>W3s8m*2WwQZA>n;GmKF>pn;Og`R4t$
287 zL!d+fRYXclr*tb&5?NZ22An)iTxLO8=sIaYhWC=KClp^#jhX4GR?`x&J-zi7A~<y+
288 zP|{aoW_0$oqFYgCZz%#!-&JX(;u0F;7z6Eg8Az)doUNqKL7W~{19Qz@G@t;`L@%T!
289 z7(~P2h`|4&5QqeXOdvyQG$M(Ci7a`LIiu4C*q=tD<D2^XG5S#ffO-T#;4#;aj3P({
290 zT*gL|$qf2$!h#m&UEVq7-N7_Ehj}HQ8kkDuVSXVMIQ%F&Q5Q%$8S~g+utyS+LZKPJ
291 z+C*)VE=8Ya2iqbRcuRsgnMV`BLkPZbFkv(tLl{j<hf?72NHRW+kW3y=Nv2Mq6(YI#
292 zsf5Yo0!kir8qEsAF>zKnoEjS-jKn4qh-3;4LI5&|1Ok~t1!zGNCQ>FvTLM80bq0sw
293 z$w*+}iF_=I0y_nJ*3eaI*FNxrWJ*bc&Cs7v3#e2r!ezzN{fc*r5Bn64+W&wlYhdPK
294 zyzIvoob2ZoyzJ)&Tq0V4OcevNjUwDe^GYyJB~uiFFA;|TM;PGyr_>TfCmtdoSbm0z
295 z*(zn0hKNcGY6rj-LMfoqa({W1pQXB(TS>+odTbV~rgAHlbb?_D`7c@G@j8@Rtpjrx
296 zF{L(mH@ywO91lSOsU^z#_zeUEZ6RR(H6a1Q1Oj4*!8LFjd=bXOSn5e#0`5OC7X^(f
297 zP-hB2n7F{0mk49V4Bbl@1qRlnt&XNH;mLk&j3;}``;3axz_w6_Q7AEU7Q%roE%fWx
298 z@v<W)(d>{S9={gL4h@4YHbV}7<xiw)$^?oO#Q~N^;s0wIWtf-@G0-%MRO;U9E0wz1
299 z1_W@RVQgm%=gz>r#gv#c8U+ba+&O24{8KuGfu)SFNvdR#rYh{ZgC*7Uvw{aS^`OLx
300 zySso7>JYzAz{m8(pahFD!}4FIR)~WGc`yMT{Xa{$fEB%?O}w-y<|9m}Z>LfIr`Z>_
301 z*kDAs7HvJ1R#|Q0&O!ZkMGWo<u-<Br5cX7Yy_qcmw!up+@erHpWkeDRwWNHy4aCOG
302 zG8+p?*{Jq1LvhGvJBtJ)F4o7zOg5K^qMFM>QOyMg2jgxo*ID!zcenpa&!HMwiDk5)
303 zT<+nF83_-fi0rQBZV*EBBL}G!!9`9KsSxJtG1MhNM({uFP+b6nP5ug=*lasC%I9|I
304 zUpri|Y#zqpc9^xc%XAh9NCsTYq;oL~!^KQG7c=QxY!Ej~G!P>R`M*L`ciHcomhn`<
305 zDy%UmTLbG1#HwPkGZw@m8oyFI5}^hYL0QHbXYD^_$l|R<3xd}4b!ZLai=suNr<rou
306 zA<Ht3JsR%qxi@Zfv6NQZ(?4?0Fqn_XO^q(MA3oux7)drzqca_BY$p08d3(Z!Ftws9
307 zIB>X)rQ=Ui#S_I>woW^s?kAwr9sw(a=E}{bbT|qj5TgiW5@b&V;{OQq0bxGRz0ZBX
308 zQ$bB1DDb0q;DeLGMnAT|MnFAJR3sxEz~j({f~3wQDV-^S(F=(Qq#8yq);McIATE$F
309 zg+4`t2wZ42vLlp*{trr>!^Cs(2%axPr3gI8Q|S=#c(9ozqQTTuMJ$LmOA}yB7wLGq
310 z|Ia^ao{m`V=^h;9;l4Ulk$eF@4-bJw@U;unpku|;6*so-Q9LP`{-+<<nNR;y+ytO$
311 z#BjBWe+`{9*w%O>$i&3L!w8SZ(>x#wSA&6arG^t4W1_t3ge6%}es#inoKS}X<7g)*
312 z7YWXqjmNkh%Qv$z<-x}EHG9Acv)P05&Fz1M_w57h!Pij5Ck4txvkS)9R6Qb}t*6bm
313 zB-xmWNv>wY5I;@~5~CN%3FoIHa*o`79Df=znN!LsHPg{&>Fe6CY;=dPhUgMmy1ETK
314 z2NsX#kk6~&RXE^y4!SHnWVFNt9(+_B7j4IJiEIr6Crs<>NSu^;4Fo;rXDpTAgo*|B
315 zlEMjlbpcLoZNSE|+6-+8TN53_#<I_B9ZZ}w(NS#Wao=jF@|JJG3HkhA2bQ5HmX$Fa
316 zc#I-uJCv<*ve3+?vv9CLU<y*HFb7hdO3BgH8F*JgB*F)?PD-hIUSh{w5Bn5-Gft4q
317 zJkO~l?@6nyMM@2<jg^&zMB1jh78bcm+^Y=SO|NJ<=<AqjGj&BGA75RH9>ZcyM%7UV
318 zvrbp<;bH!MIy7wtXGvk;Ob3Ao_0hEZUm?2*>mdWENorvv#f%~s*bXw8j`yIF2zq)f
319 z5ZZeh!FmvlN#oLRw1Hd@?N|PdMl+()iKsTjUTzTSnBAqbN$NxnkqbN+oFx|U4`gs~
320 z18;f&@KMQsILvWFJ20b!t2R_A9iBku{uV@MOm__3GF0)PHVKHUkdMB?fO!D3&Kkh@
321 z0;x68Hu)<*hKdw?YxUmZ;ubhov7N}$2=Q`=(xN#}(mBwIOjrwhDUPk}dpj!5+RV&=
322 zvP)e9QpWSh4WKPFq4iP&h|Z*Q>5g=~8buv~it&qq2>Vji2^2E+5{s%S!F)4yWz5D>
323 zBVh*zF=p>U6cHpg1r3umu(TrPUr;d5f}(Ukh$x^?o}Yrr6{w%a&QHnEQW<4b+wzoW
324 z2U^3uPbL5og(j(BvwzJ`=^{6%+I*9cl|C&ksjC8wj5Kk}i8ybspU?5K?PG#*IP`pM
325 z2qGB~E&-pMb`Zx@dE@T~yz%$a^(aKZY2r+A65JTv1Y9AAvGF9DKB9x?A?A2XstY2f
326 z1|oqJf2tZCrXhGdLco*NXl%SS&I)fsv81{qBD{neh!3C!(V~$wB$YfBUyjVh-@tbf
327 zx~R_y&&l85Z!}{X8BI!1X^5H@*cX`sp27lNuMK1cTN5ma!=PZ;m*7VvlW{a00iqCb
328 zFablmltR&vY=mlnUTx~XI)Hr4FOFy6(D)cr=|nyc^N?j&X3yOnQ$w%}c*t#JEdPMs
329 zc(QB=Rl@_`GK~I6MvC(Lic5+cw-mF_z`(WdgzbT`z0}LcU;SZ6xb(vofBlCs{t?*q
330 zFklh3ViqB;8PWnQ0n7mo-6IbGL>y5|8>g*h30vYUwJfy*-~jEZl&N$zO&G7H>4`4}
331 z+=Qobz$wlv<p}$?w=USUjQ6{lfSCy9MM=b(&LRZW8bFh1B3nBSJPlpz<`Fb(=+M<6
332 zz+xcoAV#*kK=tdKHWTLT*alk`7IyeiFDM+)cv+E*cLH7xb5OjjK_lW=-V8j2U?9a&
333 z$Z8sc^As=smZx|ykf*pe@Zy$ZOw%O|`1mC9uW*@6R>fbZRq@xq4VQt<MYDRCjj$ga
334 zAv?oEG@Xom;9yOu(P;G)b@IRN^_Q2UVF2l<k{7oIk{6$)dYJRiP$n-pV4IldLt<lW
335 zj%9|)m`swfb}|+;la=8zmKhcd<mCtNQ5i2QL*!qQr->UJK;xYhU(QC903r0>BX8?E
336 zQ7}Jzh0m~b-({PT5XDPHqouXNj_^I-bClwu;`OmB0WN)I=IraryzxWa6TnUSkR@!b
337 zq7aL1j*xYAS$dGMHwzB~f0m3U2EIx$*AmiY!<g3z6ICxs2E7|GRYp|SRwfK=3<L8_
338 z8Q4SyX3ZG`ku`&1ZS}YJ#(+?0iV;1#r-o1f)T<CBlC8Befq@&!6`8tOhH-sOrx}vi
339 z2-yJJrPshN!Pb5yFkm)>Ig=1om9g)Znq!2@v^FtdV8ml!0~l;&iWu!=WLc?5@Y~B?
340 z<@;U@RrW&pcI5Zq15<_?Lfl|i6!)aXPRg^6avifqEO8I<8M-e%vd|CEDAQ}{))^JU
341 zR_0hZvb3~Af`;8(xV$SoD8j-De$x9BDK%x^Jom>XA5=Ec|4l5%bf=+IH;M|oAT9(k
342 zH517sBv6ZyVnP;`f?708{oqm4=88h2C{ff{8g4QLCQuO^1;K0LZSc-`UwkBf0-l7&
343 z@;7*(VJSG61Ok4b><|{3C6sDlxWf1nLR4)1Vn8@w4G3qi^8W&mno35WNewX3lrz*B
344 z;S7X;VH~{zwXQ`Pc;7%8Fu|zsN;S~6NCZrVap*dr^%Q1^NXhg8Lj<Be*)O96ydPaG
345 zv+IuXicF<kb09D#2L!`Fm!ki<fMFBD@(xAeR7G(!%z*RYR9K6E`&orc@9iU8`gsyJ
346 zREbsmRe)6*^g<e^Pl<x#C^&113uPE3jWVBdgF>Xj#;`tO3!BqiVONSXEeH;x_|f8E
347 zIjs<$PEkh?Trv#9^;im<OJPVT^;jM|j|QWuY!VL3WK*fi|D7-;zbQ2IT^*{&=<0MS
348 z9U=ezFO*&kRRk16%A~xSDk*<T^>-Atffp45nbH{)@?8h8f+-6u;oZkmX1ojfWO~5n
349 z$g*=xOa|q<uPXZc9**wlK$1T6RU?f_eftQP`XZ2heW@tVEdZB5pc8P=aivCRKwy~s
350 z-{)5d_`&%V%)%H*b+9%6DO+-oqAm#|z`rL<4E|lhgn%auWJ`WMMTTZeu;*Y|5;Q{s
351 zjss2>+W!>qO?U%h?hvUzh^>K7?~1t6#fHvSARM3$Q}Z(nGWWHLMWU!N>alvG49A+w
352 ziQ}~<YNzPQ4O6TW?9v=38yXfc@~jbSo(2Pi!N#cN)ke?BG3FZ=8{>?PO+{=Zn*gKR
353 zl9~XwmLOHWP9cjRDmKeO*VtI~!dTk@`xgX-C2_!_0Z_e)v4X>ZEp$NN8K4LSz%z`8
354 z0B}+1f;c!ht2>7|1CwrnQo<4Kwb0(ETceBUVi|N@H5664YUql(40JhNj5xa3({+I=
355 z#uSGx_PRzxqRSr;HGn@MUp5eJpn|B3s<4OybYnpW4~seg&ke*GK<E96Ffhev{_lef
356 zM7;TNG9#%cck9mR1izf9&;q`L5N6*lx*x9|aIc_zcXGVvLHE^*0;GYB20l(baq{_D
357 zF<AyIwhqff;527+;}?O$Td%~K>G4@?4W1_dXvOH=L%jS^DiamSxUayUCPAm9HabLC
358 zVjOWI5wDN4gb^Gb&2l0nsyc8vT}d&dbP|q)ra_4W)WEUPW-%d(kV3!_*mxXX57Nck
359 zK$duvF<(h34B%=pzy*)ibpID~Zvq}wkv)o6)!lmU+nwIK(@Q$tSvr`Ig%F6OSpo=z
360 zAP^v!NRWLe1Q8G<sDR>zj=PS43nHTuLckCiiHbNnGT<^Uj0!4lGlMcCKgNZ0zf*N@
361 zCqbS0&wKCt{@+WdZk>B?-L2}>+3M6O0FZ+o5)nsuffzV~IbP}W_EIJ(x|Oc3HEXe1
362 z8_?!!r)igHg>QKbYI(g<zlu3kp=PY=K~AiXbt+p8l^E+$JxGews*1VuT~EIDhs69p
363 z;=Z_mum4fPZ}C|dUf~9+dW6^?q^(QvDV3AE{2^VYLmHCh7!u0jL%dH!8nec$0TjBx
364 z)P+lsDFvF$o)qZuQnIhOLLiOW=M###8|9|Pjh<7!E#FLW&qB+ZS^qb)K_fVNoRu(}
365 z^SzY7(E?spl-|(G1{=ME7KNj4u%cdXLH^IjmwJYo)PKcZULl{6K^rczG9j*{fGPSy
366 zSje@9?YYr#G`B1u=`yq2e#tQ;3qD*xX0N6XBR73K-J;tCbx_~XJ*dyLZFHY5)06U~
367 zFjIaPqJ+rIE;BPCWM-nC+3(RPHZQNj<p}+lL_Cic^fJc_FDu|>EY*7-GRx+md5gKl
368 zESSv&`N4v91%z2R>(8l*SD~!*H%nPr_uxr+c?DBGdr2TY&H~KD`D?xd&K?t}u*Q&P
369 z8&xr2!wAeBWZ<P6D-IqUb?4+64`kOoI&I{Nygq%&#;O}4+0hY0E&0K&kh(W!AagSq
370 zb@|je)2`}MSag3=SBT<TItY*Nh<@ZkU#hqka@m2aR@jl=G{~Y6EP_#^)gqJTUOh4+
371 zvqkSQ(H_R|#FWXTLPm;AwBndN$lCc#XHijpzWh0@6WY?CGgX`c{gX9wxyDvzBi&EY
372 zT=Wccz?^U9*rQZ`3}li`K@(?Uwnh`P-J4Z_$$AtkSsUl8B(I#hC*;eg)rUisQ@x)`
373 z8Nf+Y<=1Yf3V)a?|Edv%yrXkp8M$T_E=?RexNhA``-*4g2n|}xf?J~xU#Q?-d^qy@
374 zTY3rV%msDkqvQ7I-gx$9=rW-e)e7^_EO9kz5<wNkI$mD^j6*NzISuACNI*JFWjZZm
375 zt{y?+DQ&UXN?EQo(EO}YrF@W<;e)hF`P7&{PsIqP=JB$zz)0#-<h-beDjU>*L`TW<
376 z^A%yLo=FH&NhwGPrNsBd!N(pu_yjlk`mPwM+PL+4F0ZR@BT6O}%b$2F>4&U<3Qkm#
377 zT!RqO@5iWre^1{Cj+LmAE8ht+mKK-B$Lg8<zCywV$tz__#Zsj-PMSxiORGpD5M!Xf
378 z0c;VYxL6z}P7oL125}8u3Va^(BAJS7M1n=5g^Ct>Wjq?Fh#vNdgf*a~^BNJ;0rv}S
379 zsx9OkRnkC`OhdtLeNq)-&#N-7qNF)g88?yAG81h-mudB8czHtD_zvB|Q`7j=4f&Ml
380 z#&^rKJfms6L2ZB|Om5?6>V93O?V=Q}m;6Rqz_Lc5_|VHkszhrk;yXX?T6_Q`(#Zz|
381 z{>KZ#n?KFsAA&lr2aMXv<k-j5UTDzrg(9cp3PBKMiUD)i=_b3J%$z=as^#mQ)EK9s
382 zw|Wd+uB(&ww|+f0)9AP9Yy#P~h1<rpa2&^~k7JS;#|R?52^DC^8M)M`RsEU5t}}%o
383 zG8Do;mBm6OMbK|Tb|6t?Yu5`eyg-J$@WTE4_WSSuX`13A&JR_6#CON@2G|CqkG73Y
384 zA7`3qo@HL?)3}R~Bumx6u_djmSO*HZg$;RF?7jTE&$X$v{M}TOz7e;vmSpAughJ*J
385 zc1t#Gs^zrIAjnK(j3;RoJYs;-c-oW?c#qJX($Oj9W{w7!2{qv!w{PiUmv4S(<)u}}
386 zx9&WGSKQb!V#1YwD(RQMw*K)=EAQ@`m(9!5pSg6**sebdZ|3za+r7SOS!Te~^-NY_
387 zpZU<h$sLJW+#&o0*+ENxCtm7hyiSk3IEFdrFc+6L$R<dVzJfE_0#d$2B&SFzo+{+A
388 z5pCyI2jzB*7QnmNI4kukrT4MY&!zQXWyFGhxYe`Uj_s7~F*Bmw#76a*HQG+KKsMCR
389 zJmP4r(LsOarJz`9064*F&A!B&vZpfQ8jCrL6hA7Uv5G;J4SfKj3pS=ot0E+|Q&Tiy
390 zgPRAAwgsx9dPj+C^mLcQ{5sx<-+ecD=ntZ}Q_x1TGZ$BLcR&36%LoHqLOiH3AocB1
391 z`+{D?B^-w|f<#bC2}RxJA{12d<LgwHe@B&QdW{Dk`RE8Vv(g)U>g~+h3M~<?kZj}%
392 z83oZ6v<>l!EWt33$??qGU}VGvZns3cPD{Fb#p$lo_4KB!OIWCvPsoD5OBncP69ysF
393 z((>a;=nF%Vp9ph=v&e~Z&@1tfPAk|wTDyI!w#F{{i|r(<SgUg}e0AAZa+piC3zu@H
394 z;i%J(>Y4ZVHnqQhsB{HxPRtw&#42LzV!SzqGil2g(;}JnYs}cph@zRbx0zW;%xpl{
395 z%oGT-jmdZpRmMBT+S)ujw}+%bTi2&39Mi*~nSp^~ht+N)DXR%7DYhzBleFR<hdH}!
396 zEh@C~Z8cZcO}^s0yYBnv<Vkh)lO}z4@BQb>;sXXgId|dqK?5t!A3OFxLiDP{#~u3;
397 zAI_LNWA>~WIODm4*f)E|ocUc>-F<p=Rn_R~F<;!7_)Fq7T*_>wx8rj-9W>gKcqA9c
398 zNC_$<m1rcXLA7KiT8?y}(NE&@1&jdn5JH$E8IsQ_?4OC>=MrDt)q>Nz8p$2087+MP
399 z-%KD2JsuA!2E_i>e%_JR;odn!a1?7eXR)L)+vlQdnBn+TiZq?;neCwBVE62e_f_Na
400 zoPaqH4-luVx7DQEj-na|B;(HvC;rs1X;1y5?p}tK+7BrN%K9?`CG>gXmd3C5C7!|)
401 z-uiapmPh`yY-x4d#{2HO@yPf&bIF%)B--mn6bNrtluk>$^4`<m4J*j~>4x00;a{_{
402 z>}kyR6i7XKGj1Jjo`R?J<I1|-I4yJ*hu+BCIpYOpxilvya%mr91@%rXqrRL0w~M3R
403 zoCPvmGRJ^(b2D-a3JjdvT|`|w6((j#8K4YQBc>m2=pH&WG!CvXc{o=u-&DpKIA6*)
404 zRT<)08c$hI%2hL85mPvrHVZZ7RNXz#Q1V32Q{^`LAN@V0U-2%$7x{ozoKO7o*Ihte
405 zrFLHI<yatLb8nYWml5Y;?XUWTiU!V7<m~n-aOacvd%1`zvL+t@>@=dg;-)+<S4VVu
406 z!f6Cv%Tqg_M{6$S%Q#M_<Fy=h;1oGS3D)R1X0nPKbihlIS);cJ9HG*r87h{f(3GT1
407 zz5twM6-yp4DfL-csLYd61}7EalF2oo&c-%%cFLWdopgSbDrHiS*AO4Luze>m#u14F
408 z<N*@cynPdA>$(;jwkN(%U@C7iTByMwZ)Pd-Cb|J%jht~485hH(Acj#aX68WdtS>?E
409 z4_B1K*N<UW^i!`ua79{NkH_gTMtVK)4l1TXX~5qJ$xnE_@GVGDVf0emUW^CoEAc4(
410 zYKt(yUJ@E*uMF{QP>uRPa58M{2ec$~fYX^nGFqpM38i}U*&gvj<pbRV7^hNw{6aiW
411 zVKm;*HnCGAA{!1CnWwVI-0eihx<!NPeW^GMh*ZciT1v(IZbF-~&#2uzrg%h97Y91P
412 z52O^Q#(Y)!ni{C&^<RH?{tKFtDBb1q`{zxZupr=fqy^I#Oq#HOe4SX5xEWuEZ^FO9
413 zS0}E!uz%F24?ghe=rPsRW5;}T*TZj3sG3j>Shfi2nFlTvMXTdQo{WeSxJsy8=;tg8
414 zj}eADFAGm}P7Pg=G0WNDUGBWbdu?!CMw-oL>TnYh>A)H~6FVNz%caq%CRE|8_7NYm
415 z#rqr)hmXrAxRYSQMm>$CBtKT)=%n7V?6au=L5)!Gy&zWh>lxfud!6b4q}ZD1e~9NV
416 z%$w0Nao3uxjBs6P$)dvCu`d0fk7gYIdroF#?tm-5tRx@4dF9jf&wsLV(3Jtd-)plJ
417 zSl$T?{OI?S@2(i!JZScZblUJ#(wE#$z5q^=9@pX_WE~wpBdYxj-jQ+*>fBDfcggKt
418 zZ;)crm&Q-}6d|r%co!Maweg6sUlg^Ro@>XNe1vzyX<$5BRc#I8AlJ-M;r*KwBTp!z
419 z1!qt>{J9ikKNn{{6!ZiY<9j(A#_EgC#L5cu;e8sRuF5Nl=vzTmQHg^RD@%xL@5;rd
420 z#6KUnQ~MCs+|PY+)8th@z69tzH2E2?h5lee1JIFpobRja@96944@8?>swvS|k<pq;
421 z{Y2-K$khH<lr8ALuxvG1nvquMH6JJ`$r2BE35a+dS-L_i3dtcHqJ4pWNGR1iLaE*X
422 zz^Q%o146mfE*_y^ZYWn9T54~$Z?kvWdAp;R?CDSi&(q3Cpq-I;dulo+=<YE^k?$%j
423 zROL%b%9WAp6mE3)06>^X_rOhW(L|9#t);2q9WLI#Z{E~?*WOjoFWOKVzq=H3UEJuw
424 zgZ9?VdGgY#nhBRr`p2H^saZPr{L;gx%;WB?nQ&9(xbfVg`wtZtWVSqc<?6hEy-&p4
425 z_mQR-Du;}!96Ua8{*MO|`xY%<tu-7nc`!NKuQ0Rt(&s7FUX%QU`wpmfAM|j%Fjq4`
426 z)6Z7s9;F#-8|9v=nP{8pp08Q0F$9C@2ePxp%#IMyZ%c=$3x}Pd3PgSV&;HDybuLAP
427 z&MC5OBp6KLRgj4h%;^dR8x*I*y;@_b7J3-1%h;Lx6p>N=OrSQIIV7d*{JM7uGeS|B
428 zHf4U|zN;Km@%-`y$HthPlZppkb!qvsoJdwIC$_fg^2hsfeY>_~4o_e9$YrCd@ri})
429 zLx+vckG^e*rP-aag1i;us%M9y0k@tccPEzfe6+OxPK5D`f`gkQ-U_`i7`KkVlhACm
430 z0`VNzfayA6B%gW_))#e)Zrq&2xA*K7Z%u5*E1;Sfcc7LDv?880fGg3A;wm*$xyhRO
431 z+#HRD`uC@${QFC&e}9R<Y3Y;-rVDC0N%iQID9-8?7k@p^YltF)Gw*(=K&B`b75!)L
432 zetyhPXSwt)iF^OT32s0;Kd)u)Pj~f95FZ3cLFZ7t1hVFEgv1tvG{vA%Jc5vZGeU;%
433 z5i)K8M$IE6??A{p1)(&^?PC#g=0RG4kV{6$GYWXkRD^s35K4#p`QaXccR>|~^BHd-
434 z6#gqhnTHXI?m{T*QG{|$2*rjVl&6DKfKZ>i5xV3mgz{G-RIm-9zKanmJcv*c+`HI=
435 zP|34O>IdihJq_suLj7$Bm5Gqx8_MAR17{*sek-KcX)L>6S>dmD_`S--GnUMxkq^5g
436 z9QGOcAA>7i6xC1(PdIb>;^p)}Q}Bwy!tQm}x+5OaD++!YH*w;Un#J>2tkS~1zg#7t
437 z<X7Ku^1Q`^7dOr8KG6F`>1rRdmd~5<i<i9UMf!HX2wNSbcT~b(3HV>!|G$d#Uw`+D
438 zi$5)pH-1~b;f{}n+RKNI+#*{yWHi3NsAj)ew!S=k>Ro@hky~=;=H$sm&y~q*Z@u(S
439 zrRV#9@b~9_+VM@*KmYodf1X<1wCkbQpI>n8cTW_JxaL`J(VnYzb^JZNaq_C10Y&aF
440 z1K+&&asBzDPnG=U^S3gW4E_GZPtUKKGWpD#T?Mb*visrP5A9vG@}y53^kK(0yPt30
441 zQ95Do)CXSw?U9KUw|q0Xu`#Os&cA8T9dGCFIr*<YxnB%8PLAJy&+{uPzh3j^!ymO=
442 zSiW-7>;v9M?s=en$B%<Dj`sa-Wp2`zXcDi)i#i{WBd0#J+P@$3Fy4Ao&e9PNOK%ij
443 z)$h@&&rX$}Djx2fa?S9%aLuSSLw3173U-7Ruh)NCH16hsZ`5;r)=bb%{c%}He=;v)
444 z&4eATXMelL<9Okv`BT2%@$};xmOTCV(L2j_T>tIE!|#3J>5Ut9A9?X?jc)4q^XoJ(
445 zv_@W<{AGOo?k|Sjvis5NJFj_lL+1A<ro=u;6z=VuJj3>aXTmJkm}%SdE5^4UditK@
446 zr8_=Nj30OEp;sne;j=H^+&Q^6wtmU$J71do)$iV)HtV<c7hd|!WA9J<q(gV|?g#Gq
447 zl0Ngn;ks#`%zN*`p+RrGl4ZKh<z4*u*Op$rrtR8Q-(A<Pd&!vQS-bEb9gTtK#=W;<
448 zx_?~)&t4%{{Yt3Se!muKwO_wrNr{>Y3RqqWU4p@o7!1b>OKG^&LY8`;OI=TMnrJze
449 zr^=P|`$Z}$D*EMdDBB;7)9ZU5FYWcr-q+FNseO74h0BV2_G$XJVOIw)n>T%7)x`f_
450 z<F2Lxp7xHrTKaz*ceV6a<F5AY`=8^k{+BUUdxdB1Ua;gpL{;snR5dtjSwZS9YK+x>
451 z{fdKqiVBO_sx&lCYf1gD2XF25h5dge?rLe@g8seZt}6TgKJKcxi?U063aN2dpMRh6
452 z&+fRZbGAcCl(rOjXaSA8+A<QwfOituUrgh!itn*I(zvT%(YUJxXYr*7xq;VJPoZ&F
453 zZGUI?>yEpcyMyk|rRD!l<F0=E7`-3d4(XCg+*O3mop?4x<F0DdxT}wTmX%BM1AlHi
454 z|KskiIWp21*jM15xU15j#8@;`MnCROL}m1gxT^z1sw+%F_wr3>6r@eS_-}%InD8nR
455 z0j_{F4&X=N*l1FQGV!aRE58b5;5ejgIJX_He-zH&0C}A-DcJ?(IskqI%5m%-O=viz
456 zA&@3Ps-cPOLW3c%f;0gVqR*uFgL>Zt*B^lWlaMyjcY<d;j9*PY3g<&`-3Iktr_*P{
457 z{u+QiaLq0XOSd66y$1FXOS{m=ke*{-J^(fqAlOMP25UZ=1kW`>{jwOW=D``96MYLQ
458 z35i1i*ycm3hBOV*9gwy{5|LRwvINpPNQWSuWoP1?v-Pf`cspdbux#)Ag@tT)y0TkW
459 z&vy4tu2u4?%awfCWlFIDO0mAh%Hd0fD0x<%l3OE%%{12;3p)opI0rfoiJ+yh#pEzD
460 zgRmMv+c-OFfkZe_Jrd`vdo!bjTMt2VV9*3Hnu!9*P7b#kEro-1BuTzSRumxLkTc2|
461 za%Qi|Qn+>SDDo-V4e1ahj(iHgPsk@|9XUl~)xx#{(pE@^ARUABEhLegg5OE_eM~+^
462 zX7Ulrhg1P+8l<g|4ng`Bl0-g&txP^-D({Evo*zQV4-p|B!d51KfzSF2Y|Z2ZKqtrv
463 zc(u1%OUnumFd&w%0)dDMIK3)hbriOfw_1P53AB^X_6B2tZG#KQJE#Q`p(D+3^LJ1X
464 z(l|&Bkd{Ic0eTmpcTqE>Es(ZBYJmiDCv0U%LGn7JHz2)>3LwQHjf12i$6Mhs?c`W%
465 zbVy*ZgZzoSikwiXN68;p{suY1@;{PSSpFL1e#l=ZM_T;>G+0mQEZkS&Ax9v+3bpnM
466 z63B~tGp&K-U<)|})f0eiKBNjr)sUt^x&x9(4v~!3nE@-@<azWuP=^3%MPIXg2YOV4
467 z;`0OX=up71Al*a<TnZ3uwg$IGNj!S*1F%as(cA9=h;E`c-U<-iL|3l|h;E__R{%se
468 z(V6oBqMPW{X#mkpw0a^yuxTfcJeQdjD6L+AgM-cF8mOgfpq8$ITDk`D<Qn=zKk)S3
469 z9%{|ag&KP>9?QuMG<V?UgSdG-ZhjOu&%({u;pX+Yxg0lNiJN1%*@v6`xH*oSpU3@a
470 zFb*7VyJ)v8?!wKl<K|~@a}#ck;^qi$&cw|@TpDjDq1MZaSXRlhy@P4Lg8b4!g=Toi
471 z5Y%W0&^iR&{t#@BK}xc{I1~zID23gAnrH0Itx)za8Bn-n@JRA9T=+6H!^`L-Bpw>*
472 zWk92s;U+J`4a~5ufHVzKC!}v7B_WAWIs;z(4z@MJHXl+2q-l`WLHZVw$X@4LNCYiW
473 zUvW2kv3&KVtLYwj8Gae?3z1MfUG~W_c_ep-51akC+Mo23Qsi&|`LkLymUe91|1abB
474 z|1u)&U@f_w+=0@eA-1Ub9j!m42io!dt<mQLgYEb}<mUnFa2bl?2;}`y6WcFAJ`K%_
475 zk&pZi^1@c%B)H7n8qEuIU=zJ+f8Yn->A=^%b^_p6zUKq)1>1St8u&9D`Q84&JHDF(
476 zujRLE;K;#fJBF+y$ci5D^$$GrIxD&!PCVEexQ=G~18aRF0t<ZX#4P2+l})f0HwVT?
477 zrv^sC{f7Cj3dEb>p8Eq8zAFReN~sch&Hg|Eyh2O?bKzxkeC(lNKf7T~X*-@9&y((z
478 zrbyLNKdDg4lR{EJN|(HnO=H!_8k5GL(P=aqQ3GnZ25D?mRgY1AY7-e-6<I(ep20E=
479 z8qIh&rE6G2&?wZB#*HSUCk(-(TRLZ;(N_gq&QA!pW8LLbTZHhC?!deKW24)p<oK4-
480 z*yt8%+!a&yU>an%f2@Vv)Q-`_DeX8(4{cy^NDm;4EgNq0(mZ>^ZMC(?<ycYSs;~~S
481 zlno#DE1L#&8|(RVT?Er0zZ0gk>`Jd~DWpI$y>@iVZzcrmrX0Zk#Ahpq9l-yfS?!bq
482 z+#vkV%JK9#H)vRG?dWzqiIqb^{0}Gx81oNSPUA;GS`Gy@ex=-lO1TJJ59Kmx2IaI`
483 z6k+8eS}iNbV_I%cQ)cC`J(-!Tj5COuSeYhguy>i)BTyz1VPzc6=yg`+bw@KT(=vz^
484 z@%f;L-^Yqz5Av}hKI~ydCiN7_SBu=-UF2r=Bo6nK@hN4Dr&48%r=U#izx|svBo@Pa
485 z2iDH0tDF_CYzSA*g4ED*%Zj<KmgcL1!96o->4{(q7j3v|#$1|DpVd+uo;9pxMtE3o
486 z&%nB0IbBCj4-606gX$_LPT5lzpEay?V0<7A-CDbM#JJ+pi=K6J_p^$}{mRY8(VG?1
487 zXN@TRmD8p4^a%Q_Qu?e?`m7Q05$sur;rO^Ido*ZB?a(?Ur@qX9uMOT%?GT5&bPz+{
488 zfg#s*-VP8*b|QVOw#5)0(qe=}PxTqxXD~eleTtqk!C|v{%5~kqkhcTxR8Prp*b*Ls
489 zV#}{?x*EAE=M7W-Y1n8uq{MetqdYhLyFWO`0^&9;M`(0Q?u5}T6_-z)vPY7XV7PE>
490 zK<b!YU)i4QR1RJO2M5rD9M@ft9xJEEv|6>`FWWyAb9yN41<mC7y*Td2%TZG;*Ww>N
491 zkpL-}I2FFNZt9c{5SJ)>XsU$|Y{D_z9le^pwi2K^hUoXE((<cSNUewEYOY)bmo&wG
492 z9`u^l640vn(3**Z;}wH0EoY&v{|_TuSB8f-1Y4pFEqpXQa%3O6AD#{eruRC~&=Q10
493 z!!It=5^P|_f)|&JL&@1cD;ZZx#=A>mIarR$X;|NIu;u8maIhUuy?hG5w+#!|23yWB
494 zxQfAB7;FTXhUyIlD_wJk1zT`Ku(D<Nin*IBX-wLG6WrQKgIhZXVFnOqcW~><7EgFs
495 zWs94JiDrR?E2qzF8FzVigyR-GbVm597DTZ)#)_b!>@oivv8-9dX=iv?i*xm9SI;hu
496 zteuKSt)5NAqjp{#k2-ZWM7Jy)H4UO08Q!pYI6V5+ku(x75V#hyp>|3O-jIsoOTS9_
497 zK7|ZK8s-OEwBaG)xtr%VK+||Ox1jN>LaiQ8{6O*)@>B*lPn;4CwN!Y+wbO_B_Sn$o
498 z@vHW_<3abuXZqyrku6Ha?lGBFz+mhR%<4YJAXa>q{{J}QbjwU=k$ElJp$(hm0o@VO
499 z|F>dKQ#|QLH8`JwTVk=6+}z$_qzAG6l0JDW+DSOPR0i3GR-Y1_x^_Um>;JWI(7zJW
500 zdE&nh@C@fv=-0wXkDjPduT&A(nPO0aJc|seXSPe4%hj;YI5@mTZWyU-YjvT}e|b@R
501 z@+`fIWj&Xvuh}vncJcnei}o*iJ;P=WUYL&(pt_qk>n=L?pQC{T?y0fAS^V!_vB3fV
502 zltb$7MFoEuF#O-dG(SLIA}{^NsOJCg2fO~afx3kb<c8!Co<wdwO1(>xUqbqd=81X9
503 zujqN2lfNPwjaf+u?L^PudFUB*2)%^Q!d1J`0n~=5f;|j9jMkvL(I!xuroxe%(Rla?
504 zaQtrUPPU<ZL@jpP(NQQn8C{1ukORAtU!!$s1NSyuwE-DX1{#dUp(W@xJSKTHszWFF
505 z8&D}4gBGKuxH&l`d3*A%<m2cGbb$LqvJ2^v2hD)r(d0M6`^gVcAGqQk^Z+`E@6zr=
506 zad=8Il>9APh92bVd7PY+{1M(Dgsy>i;87JiiaSXR?mi2BiCuUNHx%yjSh6K~7)tq2
507 zJ(`OiL>;&Uk02qTE?Jd4iX8Bam2lSwP%GLGzjpL{bOIZMv&qMkXOSD_q08WV+R&eH
508 zCzt42pQwNu6QGuIP#K(Af?hzcqT@J>UnENegHR~Mh1JP-kPY=kHSo4i!j)g(f066p
509 zw~jl)4^Iw3Ca9M?X>Frd&?ne~^KmtvL~_Uy@(8yKY2b-{;Wrb_gZjQ7?*1_dDf@|m
510 z9OEA2f5-nMrYBA%P0%7y^bq<jdJ!ApOM|!xUyt9#pOK+t8hMC(%H7RB#lIy@hp)a8
511 zEkd`U-=TkDEAEdk$5-IFcn#i!@5B$_qxd-fiVP+b$pZ2%H<xSVe$Nkq-vqvizd_h2
512 z+#-IJn36c0cr)>@WMOh6x*YIfJ-q!r=n?p`1Lzp~0DdRYr&z#xY=U19hj0zP7Jk>^
513 z+wh}!Cw>aI!BdXoPx05F@ca+{i2z~~k$6G1rV4hLEF;&DyUD|J85MGz{GI&3Ik^lj
514 z#+7j8TrIZ*-f9!K1%CUuPk0Z1j88&67Yg?ZTZNs%?}V3xv!X$|9yG`|E<Dzi+x2k*
515 zC2mUGn`lk6B|l;QxS*S$04j&KpANtI(BAg~#_dLLV*}Kk2j}8Jcns9cG&~<S;+0U7
516 zH{u8J6YSle#Rs95-oxL*I~$3Qz4IlcgbX3o@Vk=CB8{Nx-$mNUyW~es;`E%EvvaxJ
517 z2(F%+#VzMnarbg9+#B3S+^5`m?gIRhyp9j>8GMwF@gw+Y{MGy;{FnTfLY?qO;d4<Z
518 zE)qA2?czT`XC5StlP;I)r8}hk(mR?4z|NP^KJ;8~V>3R*t>-Gaedu;l#JkC#$e#e8
519 zr=gi#6#+~oJMm3qEp8*3!b))<8HmTCvp|;bCP&Cta-Iz2s_<w$0nI0UmAi>Hp4#Zk
520 z`Ipfd{vdqYpWx0bMFYN$d@CAID`xgz{0djV$GAVD6WmEG@!Qc~cpY})GvrBb9ALrk
521 z`9Z=I6yhF6&vK1;E!sya5z_snxfSquEZzllVInTX-*ZXCk+Fb9rQB!e2DE^@kIq0p
522 zz6srjXYzB<?WhQ^L0_UB&?|C;#bT~#$FGrj{AQAd+YsTOqPA$9i8;ZBZp8K6gW|X3
523 z19UYy#_Q0>+|%&($H=o>6@OM3kLN-USc^8I#^idmN|?gGh36m+PeKv?6i~M{Tp=HV
524 zd>v4gI-qI$p?7xx9T?12!4Vf=&KSUp8lVsl!tZ{d6FgwhJm}+-fr|YJwTTl+JDMYy
525 zFi;(Y_&+Abqp8Uq=z-)Mv^aSe>I3v(Q*sU5aVPp5-GO%E4T)>fQsjr8^D!PH3@68g
526 z;mJN^Gx>l_AopI}zEIl{>_UHq-?P-$KzJT)=HEjTP(||A<eve%vw^}sfUZKL&}sO}
527 zZ{P_dxlUA+7)$mfhjUBedrzXvlTRiCScm2&7ouu(5IrFYXu1?r;>8Q_V=-`0Pi%M|
528 zT?lfW7veNNe30*?=kJ9t`x-ci1*M~0oOS^5$#d}$dc9b~^TWfF!n4Co+8edvJkQm_
529 zQf-s|2H^(1nB~xNuB=?YBVDUav-)#$b8?U`-4C2J;P+dQ#uXI}vP^13I+LTwYzBx1
530 zRjCo_l!Y2mL+{S~>uN;m&|D)<j6@ASdWAtp%NeM16`j{vWANmq`-3<LY&1yE(>Ww`
531 zB9Q{nvUEl?b!<ujVh!`e(b4;&`q;oaS1jcnU0>c+K9=oNok=;z(?IfF<uqB#^2_C}
532 z^0ItOnZ;U$)fvukiz4c?%beNh)<nWMRH!)IM43Nt=^&!O_mb$&KQ_&tv*C`(%`e`X
533 z_zk{veg9FThhP6l;xBm7mC>P72TZ)@*2FVHN9}=GSMDgvI@mmCPeWgByu~rQ>ary{
534 zKW&o?{TB=$zp5`C%o>&aiudt@01rw@pLm|usLgd7J-Io?+}tu_KYOWnK<;I^^~U<#
535 z`Nny<4F#Kx8*?6XJmh)GXwRmzS83&C(RqY)5@gK|_pa>y?&q@)yN_kRW&bE!Gt7bg
536 zY+eejFe?oZRwJ+mVd2GUC=_r8T(P{|;xfK0?=pU5-Xu+JY_?`zY=vQy;Wfh##vfvq
537 z(qa?l<^0TIXJN?ZnwGO9hvfM3O%<j)Oj}J!ld#pa+w`r8GZ|=$nUo+z41m^^WwFLY
538 zJuPcYG?YP&$>igl?PS+}*F83$PhwLAJghk@vvh?%PM<Sfo{mJ;<dIM&^{b~hqX2We
539 zs>!62gy{Zh>UKo|HnoicAF|03HJR)|nd%f10LT?_Q&t>B<!CTk5ZxUW%0Sp`Gnq(D
540 zv_1Lmeg^cV$KpnRxVWIKvy5yj!(~qTZiDF^oe@_?e&!+Zm`DO*g-ApbLv{m0b{9i-
541 z>Nd!>faWHK>@q|AzWuwM_UjwZoI4Yf>l<U|smoXQyp8g%*yo>9-_p~uiZflOduCWS
542 zo@t~J{jFtXSc$fTl+aR*kfFv%$`iU@Df^WaXEDF9tU;uxka~>SsUKcA%88PRsP5DK
543 zfF;j7Fn{;K5ltgY7Mz%aiz;tgw<^8GwfOkWH|-iHYn>Sfea@>6FR3eBG;i*s(exW?
544 zhW~EE*!5#=CZi`aqFdbO(%ME><1M4((??yh^6XC=F71y$%J#|GRrw<uuBg8B8tA~A
545 zfZTim812Ao<7omXrjea;yZjmVW!hQpe45Cn)0OLu#j9j|zkJ+v%9V8SL5<C1b69<X
546 zghhu@XEYg1nJyZ%sK!NI{%iEv6xQ2lQ|Va@1zPni-gb>XgOy-&PH6S(ZM4Z?{|7di
547 zwnk55<khfP-K+!X<3;_7lX?vQ`mrt+^SHQQaf|D$i!60*bG5iSUA&7UMRrF7GxB4x
548 zs8mO#Oh3k*^gR%JI?~anBN`r2IY*~})gZ!9nrGCBKEevmJWCbMAUcjt0f8Rtl+Obu
549 z#CnjR{9HN1jowEo<tmrUl^E6*pjLncLs!KeqD8CI=p-E{%2A7G@?x{j>cuo@QY?2p
550 zu5UzuJVkVls)`A0@+x!9v}}6x>W><>kCSz6xeG=%J;_J!+g-V|s&H*r6WO?U(crt@
551 z=sKv*bLPGWexWDR<6h<}T_cH;#Hm`&Z2V3*FLK%x>1UI{YIG?;n*umWmZ<w^O$~RA
552 zj#$NDTBw)?Y1wPdDyF{PZID|9b|AzK#BYQnBF_uFSgIYt3lXu8Zi?<2?rPl$?lV!^
553 zA>yzYl_HulvA?#$SZ%E3YsD#2t#&QHN_aqfM0|^XS3E6#E&WUUL1VY-bOOimL=+{h
554 zRs%a)jV2<AHc1jWo{tDRn;__PTG-KGf_OocG{CTsj&H~2xR!>R1X`J)p?jgAR3Q<G
555 zb!f@6#fXji2qF=Tw;)`Bs+n_oBHouFh0G|I+zU0V3^f?)vFUL&$Zarw5*jhv6^o5c
556 z5rO)~@+vypdaT@dK2}fp7Uftn{OGJji<5aa@@wV8kh@p_GD+5yYsxvcO+_Xit;GTD
557 zjU3UsjFw`G1GT{0hSp7q>$G|4Wm-*odO0-M$F1pQkiF9yWZ9lj8GD7=dem5t0d*8B
558 zteEU<4VBTLN39N;ecURSiAu)y3@qEDS1yj#V#RzLw|>N9jm-g1v)Rho2Ch2a>Y^9_
559 zeUDcuj_Yd`S_V*KtVoHNfoZhpo7(WMuM_j}p^p>W*9jdL4&s)?imsU?usU%C<(I>N
560 zUuFSyF(Nl!u;1>Y6`DplAqC77<(equW+$wY&TSYWj?_#NYc+Gkc^XZzJit1@QR1qU
561 zM_WfbDqVF#op!ujZ>@KXcP$bYX=lodtcx5oUDsf{Ruqg^a1(`zx+@F|xmm(2-9m%T
562 z>Ek7fPp`LSdgHXEy^K6bEL1(^36hH*m1OnkS;m?qH8P0;Q2HP#ZrTj!jHhKriVGx6
563 zCte06PU?Hoi@o&tWt1BMY|2Ci6BUM-bjAFRYmkp2mx&=4V>pN|B2$AL4AJ6n%K)l?
564 z3hV1BE~T<jeGf&E8)Nn7>w6CL#By!~9H5P;oghrmUL{<m<!MXNB586d5EBIA5pW%p
565 zW>e!(07)74_|32U1v{?&+bt&(XAZP(+Ss~x!=_dc^RsSWk@%$R=-;l#er$Z>k8iyB
566 z${$~+!-62QKF3Fql~3P8lr<x)R77DFr!aoD)zKLr1^{>#CxJqipg}nS+*@WRI^+2i
567 zvc%cG&cb125G4_dW*yQQ4NMR<STF&d%|avQ)L4|2J^-Mx){n~X9+lsTT@)W!9YUb^
568 z?2S9jHk`}n=*TF`6_(pAoFz!BTBlAlp>Lo7boy0X8weH4zI27U#h(jg7W1M(n<jd-
569 zZmYl}Ues&#CXH1_X`D^+X}tP$XxWIAtBIM4QHeA_Gte}Q8zIJ}D$Qv9Q1b}ODC-sG
570 z@zw>>OwAnYDsi>6TysF|Fz>hikNA@|TW`rm*~TnWwmHk1Z|jdrt=DKaYVPOmGdziR
571 zlAZb;hJ9$i*kSquP{j|lU-4g=zqFncf7JTuVih2Av#l&Dq8aNwY-?4CiC1Sb^HyZh
572 zNI*8t5tB87kV)c<*brgysY@y58G#DsQUDvVElt$vEm2*}GLavztFtV$tg&pi=qx&(
573 zLzp&)(l9+$U5_nnrKfZ7Pxs~1^ruKX@b8V=I6)wyBneubPNO#%bh5<)U3~Oj0a=6X
574 z$;;xib!JoWWs9T<N*1d%CP+ZjB@?uB#ApJI!=!;e8PjQOa5)kPyeqOX-*qfmd5zg(
575 zFd5nFS&asRMk7gtV9{zen@mV&J1-k?gK?>`*~l5&@sn{~uv&+g=+^0ouAS7xwbd59
576 z#InvpEObvV3%Eg8Dl`k60EPGA^J(XSypNU7xvS3A*Sosv8{yyM?gIFWZn`e3KlpT3
577 z6Yy_g+f7x>i+vMN`d?&#m77dJ{Y`Q?O%$eSbW32uls03~5F`harx1o@N*-@R1?Hd?
578 za1{4f|7u6K6i<L2r%4{)BNbqFAT(igOVQ<1+0j#bq@Z%t3P=6yDBNhjIY{pf-1~T|
579 zR6y_8iu#id<w@N)@4n8-uCpXh?bQYOAfhK!PDJna&VFkd%7a9iPa0(iwJHx>Mu+Yo
580 z0UxTb4dJ3RXTQ=k*un;fxhxKkPCVc7R0Uu3)Pb!fm+s%4XnX#tocH)>*F&c*uam`H
581 z_y6%Inf=oVvS#0fW631w*k5sbgqH}jrD6&7M^A&o9Em&dvEq}(B)@oCF{v)b#db6@
582 zhH>!$l7Vul3{_vR%y1hA*Pmh6o7C%HhwBUWVvPN%!b)%1hx~rMelQRWBl2(alXZUV
583 z*TY>Y3oZd7{0PvzsUS_6LHqtZo*xY0p&CV#w#a@n(m10*VBL(PXqf`6)lqJuW!yx|
584 z6e&v9wVpt_{9mid|4OOJ->1}M|IgLrUNcx#^_a<*{W!0r5jBE_*YKj-<#7>FuLGSO
585 z<W0N7=16mJqL*`qu+;<`m&O;u4xJ^07}3myKM+BwHtkd={eUhV3H8ig35OoRKm2a$
586 zb+yZz#;(5e=!V1|Tz2OZeJiW(TR8Ta#2Z3~J$=koiDQSKOeCI~UieJEzLj6^_~Ku=
587 zeyGRM!2SKe{p~0nPl!7M$Y%#ZvtFpz*63$(3xp-wS$d6K4b4G`^6B_^3Z>Iv3^iHS
588 z4}>3W=RJI1>i~CO-(YK%XRz;bYn^+%Z@P7nXS#2txYB-}oOj8fyqJwn=Qu}$W2u94
589 z_{>}6Z8DK%-s{szsDtbRyzgZC09*TwhT*G`0WI!H^YMCT+(?5y(dIC!(`ajq>O5o$
590 zbjGz=xy3CYMtW$7@V${}F$)zwn6mo-4mgVBOevn3TO0tflSX0)GHHV8nUasONyhdi
591 zACn*)OtNtMDVragiUv?0tLi!p!elIVo~aX6l*M-e%RjBeQz-9hELVw+YW|>l<udGK
592 z>|I&N!D5p_Y}yeHMVV^JUD=WM&4I5I-(uT`f5s+!;VWJ1h8eeZogkMR`cJxf%~N=i
593 z^RYG@z#KN<?8L{3ALQWfj=A`rjYH?|0G!QC{*&Yi4<ILcp9bLX+#8NkRtaAYK(iYo
594 zY%uCDha9ptX4Z)gAE!6V87KoAtq}uGN}5V-WrMU-YL>Q0JOWv1o75t8O2;KpqV-Ih
595 zLsCNkQ{bOW#g|eHUs9tnP=G~%V^SEK<XuB!NV0uJj+8pceB{FY_RQ|-E{s5(J1w7~
596 z-RAT;7P1gP3)G;cs7QWIiRc+|Ql(stTU!bmSlD8tbx35-nDVO@=G}PX-hKPhV%h%f
597 zTjfEs9wjqw#nQsWZMSy)rYg@v^{yLWQwo%VL!a#z`q74`G0ygu_Ah36afy-_^i}eV
598 z2+QLUyR+CV1ca@^NrA71%~^p9FpiWIcsAjlW7;gc6VpiTMJ2^s5$;51sasY`Ev0qT
599 zt>&ixLd~@?aZ|$tO*M`S1(GRBbAbi7s6k`-5#xSVb<1K(C~g+|lu>&4qmus(Z9>P7
600 z!`aFo@a^U7+a+`~ex;UuezkUscAK_E+o?UNJ*$<FHlSUqZPsp851rB`wYq>7a7W?^
601 z2kP8)G|rgF>qIFcAbu;qjc?&Q`BS{u$)Dvx(F^j&VUOoiU&?nY^gR1go_#6LzLZx3
602 zYS-|okLA0+ksqtM_#5G)ln*SgIK#S@MF}=jzpOEqR#If=;9GBMYir~GcI?<sc0T&k
603 z3EJL_#{fn1;Hio<94|JlGh?$J$I&>n6zYPv`t*_u=n6L3B@IJqW_XgA9-yikN_g&f
604 zjxa%9K3rc&6Ah>_LTkVQ-_W$7&IxG~oDFFW&WFfD+=Ir)<;Oh+jnS=}Pv&v+g{uuz
605 z)B%xEyI;4@U~m{V8a^YODPx*>iFuuwGvju$E551#(U)p~*P;PAP66s@k=d+AJ@4p)
606 zSI;ytHP@8krF~u>3n+_X#+jTLr`3r^(i+4bS`1(2vu7TYaDeI93SrZ+-pN#Esjs*A
607 zuv*RajY`DOSp70}`1Am!!~JW|ECYRc8T)oCHSFfg^{3&V8q692*=kc{t=P(@OG>@t
608 zhnDY2|MuAviGMBo`sQao4D5EVn|jl($8Vf}JKo@Y?ifzTx~DN&zk9oP!NQl{e)py8
609 zX`5vu#JvL=j0x|K8?Ei+H4U-iLaUQnH~tjY0)PkknXTic_$UB!NVYa#F2iNI%kXeA
610 zTyvSWTCT$r$wbXm?KpWMo<U}4=4-FT%Qe?(Z^0WhH*0^u=ZM#>iQ*hhOk1XTLh~M$
611 zWazce$@XFbqLY@!y$f5*Fd3lL5sgk4!30EGf<fjL$#fwmiK1@05wYdlv`kWunRKKb
612 zo7+Ip6~yPs6?CatIO$*&o?+Z(!pIaiHJF-BXH5dL-Dc8LrsYU?9j5cAtI-lf=LNVJ
613 z4RD*~<)Jl)6&r?Xz38IA>6m<uinU$P9I<lwb0CtRGgG@dH&ZqpjwvCSK-r8bMuUAh
614 zII1ByReKs*m$3KJbF>y|RU*aiS&QqL45>+e+-jzkpk`k^=PlD}9NtT*;??S;hd}0W
615 zlo1;wkE1NrMQe*nuoz~uLZyC1A$vA?ylG0Jnw#16;*wSK@!#&^G~!*?bX|F^_904-
616 zwu9964A3JNu%HX^kX3KO)_%UJf!Ufx0ba{YTN<{N*fx{urL<2PQzkG&3ecwj5S>5W
617 zYxNXcso5(dtJp&KL1UEF+^pts{{6k_QRO_8m(`q}k6#8r#5Bq`DmX!3=Ue1krd?@T
618 zW!|8>$$X#jDRaB|E7O-|*<>&TEoK{NOct|2YxR<l$DtFg7TIVJTw1Ne>2dp=Yz0u-
619 zo=ztUWl(t(xm=(DY5Y;s!=e%%lNtFG=~86jhnc;b>55`~Fmq{Ub0(LW;rjm+OW8c=
620 z8XEUj{B`LxJaA_(2?pfTD&VyBti!ucyL#+lYz&7+7b!2xXS$QKj7H@b*2;%dt1oo_
621 zQN*cd7BEfRTxOOBSgZq>wW?87Mm~;v++~&wcbOHEDefziGi;CoknCmMXwx$^1Tt+}
622 zn7f2z(WowPn8~!^&~~!<@EfaNf4eHXW=!(jOErrp_X&;u1aIGP@7VhuOB4tl)qhy^
623 z@Vn`e%&}J|8gbtnZ|$#_x~}GmN>`1TyOGkAM}de^4-Y-MCvF!7Kh;MO$I~X!X+f9J
624 zFiTOoY%P{1a-)K}ppoc2Mqc}WF=%EOctvlCkyiIu`FwrVxzn+qkCyc<P-5^a=}|uO
625 z!XsSl!k@Vtg^p(u6;CIO&oJ2qc-blHxq_<?AP<#uK`J4^GzUf7v+*LUtvHs3Gc{=r
626 z15R`3MWnNUh=+<C5tox1<vq;e?_{=qr<HO#XZNs=lQCy!x0T<?to%+kzn*@MlP+dK
627 z_s_GibTvsQ?sQ`3Shnheof`N(XFX)8XPc+Rll1T&Lxf45GPTQt^q>uDkAo~hV~{9f
628 z0V5$6`VLz5WGxNrMUS43)7UpPT4v#=(f?sP)_svEP~R9k^Gj2HdDm$c=DM5>=O~7J
629 z4=<aHW+M?5b&C^a-rz+>jm3+oA_|%mqLImCp%NxmW+@AWD!~ByQJAY(^XDrctCsa`
630 zdduR=FTZ_Y+rw=m7gd)uk-NI~-qv@-<rD6>iIjn;fl*!ZOa5=d+o*up<5@Gf8GIAB
631 zoaZB1C0v<rD0i7OCcQE+EOU6)1g=)9OP`#5bDAlv#06rbYzh!b0nrqYl>)*H>y*L@
632 z5J>^i6p%&hZ#adrjnPb!$z?_QnTx~2B9-}5gOkEFk%jvC#s#L?wpp%K`qjqO=C$(G
633 znN5+6+-CjF#?9v2<PDiOMD8-)Yrfa+SM5}NLQ$(X>d{7lhai;WvGRp|qi7b8Z)2ZT
634 z-kZI|8*v!>__HE7A~@I*<ZJ*`@b}UB{SHov=>qLhuUMUER?mjp^3NzgZ@f<=(`3{O
635 zA<)>p8cF0iBH~D91{{Nq>h0s9vEpj(@L<mw2kOHVLMuHY;~*Y~8}L%R1&g>Hx5U%>
636 z&_~io!yAv%Mo|vVp&}+lvm7;|B0!!(_$C~+(!o!9%9`rWEPR}q@sWwN+q?VDP`Yq^
637 z)oEtfJ;TPm>G*+S(xvfjPty%EumPcDgNXpvGTEpHIYCNFEA^A2LRCl4RAR0x<3}lZ
638 z$?4#g(87R>nbErEjMM(GcFC>@<LU+`7G6GY&UOF1`>`K33LWNWo@&`%)*pW`rFr$n
639 zpMLvl;=2d%d-CGjCJ$*ERyil^oE|HEY}S$&XU=<Lz3G<Q*I!XxRJ0&_;Jy`CA8T6v
640 zHI2pB1$?&_XrTlW)|~y=WCna8EHbtNu%((Qa4ufUdZD3bU^`zfkmqRTY8&L6xGnN)
641 z!V$4kJ}c`rLM@&|#>sQ_E%JAU?~LD>pj-1s-o)v3S^*?~DmY1!0bq@2kU(SVu4)vN
642 z3^q7LIJ%%GHN=n_F31~faFy0C2pYf0iS1-*T#Gb@uj4?lNe9-0#H$B12%=dMH+~#{
643 zj6cb9TX@X3Q$cu~p;J0(;I<gBf$qy@>6k>;fnH0b-<aQh4@k+m`bIY-_;*38dff7v
644 zGssm@?m1I&x*Swc`3#k&Vst#{5*N!Ajian=lYIEF>G0uAf|3J69NnT<O?hp+nbSxe
645 z$+HM0zh~w;ysS~>mvoVgFc;#|LR>UUlsHoKCYkcl@46n^{sI2S1H&_XMN~_}2NT1{
646 zRDAD&Yi_%RQv43sY(hstDvZQkq#V@D@@XI@u0y*KzYWfB<G0_>1|L}PB;X>T2X!1h
647 zdK4)(F}@30*$DDgYdmk3Wr2;1mPgyJkgu@udV`<ZVVy1}lW43_4f7b$(6H_`Y&i-q
648 zZEcMw=)v&saT)(x`NNPJsr>~%boaK7sa_<F_2*UVNs1d%W=IVt%)sR*b{h$Wm>`g{
649 zZIPV2sutc=`%U7t#7+3xgOAjY>3d`1W}(Amowa|_^NFsmr#XD<y1E<eMp}b(@s>l3
650 z@4p-$8DIwIPUCT7(kLRV;q@G^Cz41cL8rwUK_`K#@fz0Ybr|y+jgCi>rc%%mtc&v}
651 zLB$$t?5B=}D@;t^t;MTD<xIELGu>9NQHRMD#TI~{6gLs159*KWIsI4zV`^PJ0@WAw
652 z<!7h}MvWhCMbQGBq{EhG_@mX5rUtgPOP274>#yItY13XZXz?S9$;VwCO}}mG%7HIt
653 zR$0(^3pi)|T0X;;F+e*?J1lcj#;lAr+S|1^X6{J)UEWKaQS0=$oCTxv-gOFI==MY|
654 z#5z}<rcPU@tJBvR>WuS2X_~K_ub*$2Z)}UUWtl12%FOAPIaODypBbH*y*#`egwx;X
655 z9yZ*SeP7-^1&`~VGCY>`c=q1tD^W+b5+YEn$w&d=6p)z$veht9N*M}-Q$S`4NT;i&
656 zSp8*FHCYjZj`swkc3yu;x`!&v8SXq<;Q@DryV^a?z1w}vEt=f{_Y(I>Hy?1{;U@0i
657 z14`Q2oNUl!Y2--CH{zJc_&6XwmN8+A8`>PjilxzHDaQDcy7YzVB;9AH!HQY<A!?ZT
658 zB4w)hBA!M?6yA4<J^+0q(;ZK96&JF>GTJNLE@ewQhMPuxs&NPDm2Mixx5mwex!IC}
659 zHSTtDMXQvVOBdJml^xH;x%4UYirkcLkgKk8OM$=crx)dV*rP&Oxy22IorR>Lu(^;F
660 zQtN0Yaw((ZtiJ`-7>zYSHRvJ(;%@p9!Avu=Yp?*BHD<b&Mh%M*9;DASo9F}0%qhW~
661 zaWXXs@9wLbK<gW;RH2qeLX{y~Hdb}J0j;x_S6EE38bDrgrjfbbP|Fl$m^4!a0@d{f
662 zgtd59AAeY|<wY&BRZf#RF~b=2B5k(h#X=w0_-(KoGKIY;BWyHia&%stou$=@G2V*;
663 zays?Th|%t&Y*-xn+}Qf{>rpQ&8MPMGr<JN<{<ET4myqtIPkJ+A_)*%!T!1QC%{O1W
664 zW@Sm_H%A_*9^60o&IxOOKh@G=XqvZXzQd94z46d}lja>+d+Y;zsc*ruS;H<3yCQ{`
665 ztsgsLRdygY^4dAB@pa=%!@l%1U1rhXHFZ<BPJWuw=i$&#PbwaRxF-I%j_}5avDi4w
666 zD3sVre3QvU-FVvs-yAYin5CUzYw&dj-Vy$s_L2MZw9jqdI{)VWJpEK48E^ywF;BUp
667 z+%wv<G_WNgT|zRAmpBHH660u6X&i35%r{v#$vDUOx%j2yM|{pCV>@Tk%Vy;D=_L!&
668 z*+D~a6=4*ym?N@$+=69G+|poaw(u+%XpJ&xVzE+kXHi2i10+(SXJP1OVX=oP$+OT9
669 zt8||&_P~aM<3rd$!E$TnA>hF$rKH5CtnU6^R(HQ*aAUR-#TSP8tWm4`xZXNcW7V0i
670 zSg+=!#IdWK$^GRtF+&Iyz1T>*D%F>$W-00g(Q90t>p$!8x<6k%|D78e?#<uZ6@2>Y
671 z6;JHEcIEbskKFpxV_Pw|`SQWU^y6@1edF~PkDPepFm1CTiOada0&Vr9T)ZUSpw|Ps
672 z>La!>`bwLqO?Ri~>7%y1aGAcJZIphvZIU!aKUe>w?tkp2OTu|sgTjNd#$;{D+m<Kw
673 z3-!yX$Q!O79;(cl7@C+fPnr>$k<*aZoOdGYtI#*$Z?h~;hiGpnd)l&nX%e#z%0X1X
674 zY=h0H6CDTPt(~lm7YaU~Sy!3iGw2-lqDYZ0;&L5#V%ZsYHaMG|d>(LRQj^CT(a9Ro
675 z+1-dv)`(6AJI6vHGsVxUDt<~MI;lZ+6w?5gn{fnX1TqhqkC{)JlV(0(t}s`dITl~H
676 z#_VCFB!iI>AH9=GKv+wf-LbsoA=;STJ~^~8&rze+MeW&j`aCuLpQfWJr)kc_fkqIf
677 zowQdnC%Y`VAg0o)&JxhqSTF7EGBJC%zHsRBwKus;cty)!&MtoQwu7s8%=*i=7ykOd
678 zj<sucKC^n|&MBVDBZV`kmbTo2%Rjmw<6G}<zA*p$V=I5h<-XZ@=#7_;yiBEsNy#sG
679 zhtLVSVy$XD*1KJ-GF?8z7)FeGOyHcb&S*B6{W@Ka-S6Z5IX)rB2t>-|#>g6E-bz6!
680 z%1AjBkLJgY(tr9x)-q)s2pZ<^h<wCaCLfL!(nPJx*@DqwtTb*k@|Bj!mK9!Zykns}
681 z-!{{6wQ-egqj9tCX73Y5oe<=dFm51a^Ad*V0>V7Lmrj~_9@D89MqC1oY3E%X<Z<LC
682 zbK_a?mIAz{(b_aExFkq|%$+CLEHy>rp>T0Q6jN_b5~VTs=$Yu2KCX7$zt#OV?!f&)
683 zyzPwZp<EErNjO{b+VNd`Zc&_IXp^328v40ZL{G)#lIlvQd+0sx-=D1s(pcL~jiKeF
684 z(w;_?vJ|SE@))LThfQh=+_PZa?nl=ajj>tvP3;@!&%4#u7W(V6D_>tQd*=095?{Uh
685 zViMordSFw_^=r1<9w95&&ba=@8-x2^ozpsV+QXOle}8*t;(xw?m#zVA-3={Ed-Ai2
686 zs{5Qi;G%@krHHp;jVmRmxTqAHP4U=eVMuLZOx)tK<i+&ae!94HwTUyCY-k+D3@wbZ
687 zMXbTR-)GG5+XPk(hhz1HEYy0T>X*SHJ<3PlIVykj3N*HYV7*@AuS@`-Ks=YBoP~`@
688 z{F|p<{H&inD!=!G;>80zV;u4D6^_Z_+1x_MBF~)gYR_8#t)5%_4?3Rm9Q6Fv@kQ`_
689 zFzr&uBaUYr+<=^!BFUo82w}i7S12e3v;EbkX(nPa`RF6@+v8~9Ml!dJzC@q{mmxjw
690 zAGf#|F2U!xWEXL5$pam|e;e8tX|m8NGEoEqW<xAnVz2hb9*R85aO&xv;Ph0TVxtYU
691 zD#B22@hnlH1T?a-)glv02b4wGmBELl&p6ghpRjgZKiu#6Mf)#c>Bt>tu3i0)N1r}H
692 z{`kc5m90;$S-TxikXJ7rv+n(+2G^toSo8i#EI*j|Eb-68mx;a49^#50+JE@rTX*k9
693 zD$ZB{XXr|RH7Y&OdHlR!1ODaIrbIR^^A~5H1s247wvj_ojhJ*sTm5LhvgAEmn_D%s
694 zTy&>%3<0(3a!C~UMlmGAEfw*OVIX4qTK6q`?%5^=-Lj8b>q$`m0yLcik`<B}n(!)K
695 z+(d5D-(-HxBxohQi&UnKv5#^O^-fHyv)8%DdlyIx^fS^H+84MRysOAH;tKt0^Ct0r
696 z>0bFY*9r2j_^$phW>5F)HfdQqQ!AgWCE6_kOB3pDV1+{XyBk<(-jqKaCG3cKwqWMO
697 zk|r}&?XcQeV`fFuWZI4vnbHQaX2ILrR<tf3GXL%E@2tA>z*B41JoVIdYev<Rw=s_|
698 zeR|s7MDj!;k$Cx;`=7(VP2BhGSv(id|7ISu{|*L{IG-#6eK0TXUP_j7qzYFNg2RY-
699 zgr(Hh!!Ny!iW;Zu<u6cv6}%k+Y)dN%*$0yx+`exg#egcn06Sm+o#RohVnD!*19%$d
700 zus7QuH)5mFCiuMq^b4cT4?S6CLN{%B+3%DY-(~!OEofEaRGW@=&R0?cZ1r@6k*({v
701 zz>SAVar-d$u;5hd#NYyMrZiJC-#Rn6TywQ=gJz@eUClcVi^QDfvQoZsVa5z76bh<d
702 zk*C_Sg5e+yRU5a^w;X3A@Sa}$_B5sA#<i(;rhPPSrJYn}ttw+AgV+FHarQZiaq^Zt
703 z9c2N2To!j!IHx(6IM+FOC-aJQGNs1ZPBQnZJ({|{bn^q18!#@=)2S8f7^<(u67zzf
704 zagN#Ikg6-Bg*iq#u+7m+bo*(qEAO%elLpsZMFt<7)7Ev(@f$x$oc`_2Up@0tS84U_
705 zW0yVt=(VeN@e@q*3#tkRee>arhQz<#+I;3ZJQ}aTPrbPFr3)X`@2YKo<bLSb9Gal=
706 z0w)Ti0)muwZ8IRRpRH`>OiLp*mt0~E`9&eyZ#DY0hLj`re%8xlX2tK6=}=!Wb&&$>
707 zq}jzO>#Q(`ZKV{~jS^g@olR`8vpd=uO4xgP^TnisDo|(Gf}bkC@+*cp^+iPGMNTuy
708 zK>>D5eLg+LQBf{~9;A<@SK1jfv$HSl`M%V{;8{3deSuV>SXo-)z&Vb~9G6AEFnnDg
709 zXbbRKv=*=7munjJ%M4c=S37S(oAIsuM$LNtjfRcJ+njG$j-**Ls8^dWNY}t_4F>aR
710 z)+ZRHDCy4$8j#<G4DcG;F2TKOy6F&&=}YFs<yezB9t3<eW4inlF}LG8_ZPaFTIjM7
711 zWKL^llf9ed*yDC$-_o~-r$B!?$1qS8NUUY`?Bi4if|PR|)eTW&Z7ohME!|7<KyakD
712 zgvs?rM?8P&!Y>YW{&m5kO}8b^fAB%#{GC^ATrhXT&9mp+G~lu=6V~s1=K6I{a^9T#
713 z=Wjc4a@*|ta`Fz}bTEl9?!4ngJaO)gH%yzc>BbAmsx8$!ny=rrQ;nI&$C&kQZ`@8z
714 zRYs0CUSz6z9-EK`5o~J3dR;p<#WM{?jcT_7qL1|x&64ILX1$ul<9Z{T7-m!!KvV(M
715 zh3dyPoieI=KIKBCNPR(|X6>YPuZbD%QiiH8QbsD7E&<YGr|RlusY>e=8s8qe^Ugb8
716 z!ebI`<oye?Ny)WOt|1?F{Q<k5Onlk()-%bZ;&aaY00q?#@Em-;H*O-Q($A)obRLb2
717 z5uD!RKghfXnU?^&%B)`X6kO%shpVmFDzzu(wNbA_YX`jw&)lG1$mY+8QPAPS_|*f*
718 zs7iR6l&Ghr%U4wCjl~iEG=EzAiSzTI@MqzCkT^BLu-4@bYB?_K_lb6@;z(Ewd)%__
719 zcm!{WY>SYH)9Eoqwpg%*XZkWT1X<V^8`GC<s)zJfaVPEd7Q(b<=2vcEld3E!yQw8*
720 zg}1ch`nbUr+2X}sb~|tP?Y!)EUMd<`=<U30l8l!*cX(54-PL&6z@0ZWTgyxD=Rl+=
721 z9KpvCrhX=bI<i*-9i-Q!|6-N}BT&epnswO%+%;@knT^fFQ5=#K4|#WHq#du^8yeAD
722 z&x<LpJ5^n$<z7d6*@$BGU1KX}4f~>T8C@lgE)&P5YFHGvU3DbSWU!@0Z3c@MTa9*g
723 zD$aV<v973RwK&>qdriin5CC*q&i29`^H<y#xbF2wcI^$<4O)74+mxAO)(_yL_l%u(
724 z)s&9i`@6EpZx>D*aL?mi_mS3>E5|)}XV(Xm=N|y<+Q>(l*Hr&_kQb0BX^B|QbLCj%
725 zb)-C>&I6#v@$DLBjR#so?b+}NDYs+j5Y#~l(t)E#k8-s~k6w84=ux=hBs}0J;5<6C
726 zJ09fXMoaMmejT}kJfPv9=CKxu0^zg*HW2)}vZ#fQ@?^wJ@sz8p)_8St1oE*4GpQE&
727 zvx==RB`LBN@)(4;(X9BiQDRvj;Ghr}h~U<D;BveHDI!o~?4r?DI&=+RL>naK&KL??
728 zfKk&$3yR23ZG+#Qc;Bb_%lT^utqD9k;`M2?-Znz74gdwC(sFbBAuJfonL>$BDF_vT
729 zmH-I^GJHk8AwFug5(lJF6V@2V7*D;X-Z;fv@3_)4U$f9S*Sy%V*wY#Kz;MEO!u@I5
730 z-<^MVf5z-y?x2uw&bJi^73R1w#yn1#Eu2XIAO1(#Alps6NRZbD-BV}xne?vA<9aOX
731 z<N5}Dvz}KBT6!jz>zM&duMj>4C=*KbDFIxsE`6a)f6Hfv{N>nEq&n>>^UsR72qB#q
732 zZ^7Ge3qFha0RCU?y?J~TMfxya)yEt?_mxa0$MobTfy~u&K_Cq|AY2JI0Rx%L!KEZX
733 zKtWva0tI$;b-i!b)vqTpD1vz4vAVjt%6hH3qPy$Y6O>(D*RL?0-&5T)NjUa+_x-)^
734 z=lx?RsqX6T>guYep0lo}0k?rdsCbN&=dhko2n)*+9?OJ<iJL~aBTw5>Mif*^Qh5Lz
735 zJLNfC^1@zRP%?Ss?9(BdGH2k<$HRRnGUh3+o`-tEAR$^qT*z90I!Y)9al&0i+w(`r
736 zuQB&Lvi9Nb-D^bYFHb(PiW27ibi)I`Sij)`_VLVLuV}mC<#p26(%ZM-YoDIC^VMI!
737 z{=%!!y4$4{j0d==6*R$a@h5uo81s1ZG&9q{_w$sSAEhrV4mbkElZt!!D|yY>{IP{o
738 z^QRVeXcp_w%0H`crDm0Wh54NPRfPxnH|!rcKXAQK^0EEnl8^X7KCg_aHP<>q%vf^^
739 zGu6Dv{NK7SizTzpVx;qmNTaIqpiz;rp!{_Xn>mr|;<j=OFZWjRVs>TACuH6!#GKjd
740 zA99Q6zLFc5%QY}Zm~k0_G}ni1K`Ka=VVt=wb4_R-(S+ucn((_*n^0D=fMLl^=!Pc5
741 zC$(X&2~TZ6xdkn;k?m*8u{cmiWHYzQLXS{Kw%;}Os^yoxzH<GC=P$aV&T{vL^X|W=
742 zZ{5Sv3iinzvu5oYy#D9X@txDhW{%T$z54vG-}?2-?+_j1W`tNmp;~;w!&aTlUTh9Q
743 z<J9M6R~LFnOheH$uh!VrY7M2PX^oblD6Lw<(7cK`oIN6|6Rg|`Od!L`WAkwRH_=7l
744 z!ghi#!t2_B`g(!>Se-`8%QIy~1AOdX8&hQd?wlrWT*t^UMKT4}+_^>uF_~AB^GS%w
745 z$Sot!k5hE#{fHj?=rO%T(C~V)l&`kK?<3Re8_!z!h(>IY*X8YPiD|@u5)H)Er3Eo+
746 z-{9fB1rQA=QF6CTj*7annA&KEU?bPx?zKU*Scw)x)Ip-(KRh($2`eXgXrcq;7w{BI
747 z8MfT~B27L1;t7d;{P=~;mLFP}t;e^L5&Ju(S$M2GbJvRA5}O&ef*r>M*=;PFuVGm=
748 z!%z%sL)f6BXuF=VusSuFrwa;7k;QbS9os=K(d!Kr9CxJ-yLAn^HXSXGT}I@wOBthe
749 zGDhoUX|F4hxuC3=5RWb-F7v(uoBhF4rMo;ik&NZaG8{F`kg=Bt?x9(_mSAwZS);6d
750 zFlx-EN{yK-#9E_TnTA6IIT4Up>HQ;9aMXa~+a8gYmxkSu@FT&A*R?XAA3F3u=igv#
751 zy^1;O`0dZnND>V8NVDj#KsLMZ-?G+tF=sc?I=ZO9WL4=@HqmP0b)ud($-0xNz+3D3
752 zz~y`uls0lAYo(6L-I0ZROhwp48uFZ?Shan=X*UPrSOi0Zul5OMa!~8F);xpLTBWPf
753 zR~f?kupwl;!J@0SR@<iJbyz!W9gY>&6}A<Qjj9cXjh0RJO^!<qJ1o1byKI-+ujl?t
754 z_k{U?<#GEL+^6=x8Zzepu@4rN<Q9YF*>pvPjH$_ViHSB93>9B4Uv|PwEC!aB$)q=1
755 ztX2-u1$LXQ!phm<!lXA@^c6bJuG4WgvI&7sMKVA|<|4{l^mGwbw2x|d)C8~)?fa;?
756 zqOQRzTB)Vhr>&HAAD*<=giBF#Ax9p`&=PsQPv54e+x3HbN)IXacufEss^O7BeskJc
757 z3&6{enG-IjJaNKl{`TVnqB9(IIn77q7;=)q7-A)mX%w1~%W4ssscJOW*K3}i)^D6M
758 zt=~Co(ZT~rKlmBa4St44u4J+Ug+*dqX(VO@X7s2dX30(&>>&CHTGL6Ex!S6fF#-6)
759 zL0g`@pb0CUUAEZAjjo@PZ>eN;(mBt5P+RJ*{nI1T*%Qlso97GCvU|<d<%O$E#Y}bP
760 zhV>V2-axH7{;S=SI_40(x)g}+1(0qQ^pZHvYr|#+moWm<$jo8VOdq4tS~OaX)?l+}
761 z4T#oYoh+6Rr>(wHgEghR4cn+vi?Sd<nF2R*Y5IsY0NE*US#?4sYuYF2*s>f!su7Eu
762 zXIdvefAX|=GV{&c`R&?|6?BoOsmKU}%r9*>5^pW2b1hjhq==Awwc=mOVY&3q2`d^F
763 zFPSiD(zqq|5~lKwHB-jkQ#HAvYi;IDVqr~{dYEsS31DGSvV|3+wxa@M6+s=1R-q|X
764 zaN=Zx3~Cn#do_=}2kg~e(fGUpGw@Go7%go;`=|q=kHU;rZ&*ju6zNuKqn1)V6ivBI
765 znswSQQ5#;0mr`^CoO<vU%-{lJc1S@UZLXj3Etz;l^iXqlrU=wgndFDp$bEDwM61+g
766 zVXHL~ryt!V9i0|7?WZsL=5pqT2X|c~S*7Fq-hU8(fnU6twC){)pR%REP3`Dik*hQ<
767 zWEN^((lB|XvGPDB3CuVRXdapkrn}kCOlmzMix3YyqSD$cshqr{hU6V3^G%BMW5}ak
768 zR41Q@`MmZ#imYwgnn&juDtQibIq}NL;*QISJ1!^gxFK=JF<BKw5qI2>xZ^q<MBL3B
769 z>5_DYxwCVPj12M;BxSg)7Ssk{q7+kBT29IQRPL>?FkR0krQ>h@M*5-m*~t%H@b+Hz
770 z@e>b!Af5R6Pq5*0y6wck(~ovPi|w)(CutVd#l8XB`*@M7GGS!4sx@ZwJ{;VGZZm4&
771 zELzmJ8J8g1O!G8NKVZ3Km)zfy`R=IsyQ8w;m0cHAQWgP-GYA+5IA_NA!)tysW6={A
772 zZmb$#2B;&=dIEosjbFW&IsSUbj%yD*DY+#c@XZDIWQI24kO5bUVe3Nua{Z0^d-X5r
773 z+3ECj!_OGniYX1!t7tXL>1Z_q`DS>TX6!W00M1c*1EZ!7PzU5LlG{ZNF$^R@FLTU3
774 zD*YJCa$>Pt;Bqz;r*yPuHAH0zGLDD_wOCpvsJD7T>MKpeK^io<T|ktX;wj=&Q1((v
775 z+Ft2vqaFpc-6fBN9U;mHA=0cW@`<dBG?>4w|E@kqp4*ij2b8HjGRHFv9v-m93}o?v
776 zE*PUr$HZu+xVT>7c}SjTZP$ysn0{+}OfOc(^rb~`9+TZ)fOtj2L3vFSZNb#FnM<f!
777 zu72T#N2Cy5`U`sRiK)Mk?w}azn#?L_-}|J)_+s=bz<1`O9GJHEtM=h`u@cku6oolV
778 zrkc@kfmBiT*fyk0I=daQ=yqM!#+8?9k*zbxkqpBhRS1$yL9@$)cagoXwl4_8!t|@J
779 zuGv{RqaeQcB)m$A`f94$OldhYMpiAsEbcZ?v~lnebQ`_InCmt*5`LN8`Q~F8;6~B~
780 zV&XV5Q^jqhj1O3|xJSTn3jW#3kcG_isH_T-jXKO!=0Z?7O2=1iy6}lbGhPF|>LdJz
781 zC-z^vW6^JqXWsit`irC?g8>MsH;|PH_<gZ*6sok0vQ|1{C=5z@*x8CETUxD?oeR+d
782 z%R=h{r}=u#^(HDS$%AI>DyVe`te|gVoAlEhbJ@B2#f~JK)UR^%v3>gU9VXVHrxCVl
783 zkj6x5$o#ugmTVV_B{U;1&DLlDx%GOj!3dJy4#M4$m+y2s_6^qWVUd$3XT8-z&f+2m
784 zDAr^ZHm|J0cCs2xiNk4kIGk3!R$Jn*Ley#jyO%dx>}F6Sw0e!x!I~^);BZhRmUfy=
785 zCaqSZ0Sus=R;$H=G%nB_%@ehF7Dz8W932ou#Nt_dc^+e2P_Pg0d{|jl(CI3ek#RXQ
786 z8COBZDR=0caV2(6CCpBz{O3!t$@=U1=X0@<2RyCc1R8_|G>8E!M}}?5l*?)T6>zgg
787 zUcH3K>LsMi&@{rGJ$jK9vpUAwPES^2fRj~G9Lg}6oQd(R()llbSni5)IRA^^wv`o)
788 z`Q$li^#SSERqA}Z^b&B*hU>2RvYbAgaY;wM+4%_l5UA>%yZF@P<3G>t&_mW5*l~P6
789 z${Rem$B{3{BlL1FSp^fKo9V|5jC^Zseu0p$vFI&!n#IUe#H#H&PG6ywH__4JgIb&?
790 zTef-f+<mP)cV8>-)1w{Qrzbmu-bK>MGw9`s99nq>zjkN_zn04mDSo$C9yORL3%q<X
791 zbwBS|9@U$7dtQIuU>=i4+5f)g@^?1r(VVhLPu_@4dMHJ0ah$rEFKesSj_A`+ruHLw
792 zE{`QPydRHIWmH!fRr*3~(3o<ae+W1%mRC3`cJav0zDIs^aKl5>9$CMt{m1p}<C(u)
793 z)p^&=nWfYn+s~i#lM6BjNSy{hrDDK`Xf%h|f`<p}iXF$>W7pA`qHm*j(-ge{$<mM_
794 z^GG@RGems`{OR6DK{oB#<RtUVzCCJIcnwi>JBL=tILLM@_g*P2EMSlPAj{Lk%zacF
795 z`z+*>B8%#NwFlZ=cDV^>VquDv5ZQvaQXgWvhu(s=&~y*#!BiV=r$GAxDNE7YK{MJ%
796 zbq!G4=zY|jM^ORu?maT;&B%P4K@FKsnd5cJJCE3GLHh8eM}E)Thn>=Aqy&#i^Qm)?
797 z6FBcYQTr~1jl97o$o^Ohir`G$CK~l7#A$e~PfKa%S>;k0;Tipa2|s|MrrRuc?8n;>
798 zX+=_P!f=U9J6cQj^uV1U;^k5INX~`uN1Ll=PnfY-*fb__p0i_G)7-jq3ro*S&ChP!
799 zR|AE579jaEgj9<V()<M>?h%AcB@mWC_%DRa#}KkUgplpG2-$ywkmFg!A7BAIFMJuH
800 zqMsmCG8*vnZG=j}P%eD}p)yEQ4*67{1)ne@1dq{}_Ym?K5%R-30k{|ZMDg<%NkagY
801 zF!ClsV;Klt2#u>pXgp{i6AqItY!RAxJ3@`$Bh(}z)C^^AhHtg}fY4-ke+uN=IvWB!
802 zpB_eN#(PSbITgYy#P8A>d;|vn4IhESPOJWjlfdx5<Rma~vXj8jX-)zIu`_t=iv|8Q
803 zk9{+)c#nAOJHfdh+dl8uuHp?9mps3%`H!!guK7-V$@8yIsknSn^w`&hC!R_#c=f{1
804 z;%C2Bf9vl$9)ADGduid_EAF`eySVV;-4AZ>{rsUu$19$jZ+!ce2d;no-WM+U^SoPr
805 zefX8HzWLi@zjprSz<0m9;oY?>25y`4eZUy`=8=aEG=ASb@O5MHw!6pQaL<Ro{LvpX
806 zkA6M4ZQb(8!N~pB-22?0pL+A1fx-E=Ui{J@pZIjek#A1SIr7efrj}#do5##suzlLn
807 z`SI%a=Uo5W*X~)Cy7v9gJ8tM${^j80k8j=>oxkPfKO|I3Eax=c)?WI_;yYW;irz`r
808 zT~>1O(#vD7+wz^GpU-c4WA^UL9=d#0(}$P$Tz2<0x~2~=E4;npsYLJW-<2*xV>ahM
809 z@ba^fdoH?kcjYC!KWX~D#@6wDjbrrYYc`JAe9cpNPyI0IlHE0yr*6FOqTSQh{oza7
810 zv+w-knPcnkt@(cJ>RS%Jvu5`tnbeXCcPF;6PyM8YTKd}En$5)r_(Sj9{RCV*c=czi
811 z-0;%uCEx$>v&)`3^3#Lw{9^yHzmD==u>Wd!{6YKo++Q6}t!}(zcX@Kl{()x~>??fv
812 z>l;F~n?HS^;}5kDbR7CHbYS<fzn)lYf9lyA9Zy~Lmpu2f-m#B<-gns4`^2)6a~sze
813 zZ~Of4W$(;-!tvGLCcd%l#cSR=_kx>Wylu;qKREw<I&KGjXL8*aitu57gctlFJ{pbj
814 zL7$%w#e%#L4Dw+>v=HE5Sn%<o2%LQZcovoKlXT=hiHC#Xe;%U5-5#9bJK?CnM<VcU
815 zj64g<@dznLARv353`L{7-xr9;4?$Cfm*B$@#eZLzBo0Fc!6+Yu`%$^De)%II;`fid
816 z2Ol8M0zeeuh`<XWCHx&veu|IJNj+yL|F1gp^PTR@FX;c@bmk{#aJn-;U-%z8^BeZk
817 z7Yv+Uf`91X?+l*za=!brSEo7oI|<$=I|Ix)`HMwJsUy5U4EX2wg^82D5w#4Q&H(*k
818 zf{w!u{j%48-=UvcEpJeJB<IlYj*m}v==axm$fapBaX?NS`rYP1r634Fn5Zry4*jNe
819 z3_0}smN@jQyAu5wgdqsJ(U%d2e%9wtcIf9BAeSQX)c3vY&@YHU_n`EU-xrEQzsmI7
820 zpFXsUIP^nVhkg(J_M`L2`N_Uzi+@-pEi@p_Hb|=l6MV>_p9V`E2pN|Du<D07hSMGT
821 zjVBfpI|<#$976&<2RYOw$V9C~o0)@feIDAzYEhU8As_2T+nAdMGt6({{4xr_{cVtL
822 z8{EHuq+y+CJ%nxu&p{XiVJ3v@AT&T&2H`yR0(2w$8hq;roHq^rpsIoMRS+zwRW%AV
823 zs!Z_gI11B;(Gq4avT9<eggj#xpho5sS>3piAzN5<4jxq%!+W2jOR25s3Uvj#kzR*Z
824 z!#A3kThRt4iUg8BwRP|~wGbgTJ$Q`16Rn2O#w@|}WGIYByNDr6`WWCWqIr;iIlx+W
825 z?zePeJ|%la#2MnC363I!D<ROR0sj>(mBV-FbqLJhL0mhaH-z^?Wb{DAsBFaUQ6hV^
826 z=CHUAR}Z*c@{Q^}1_QZKv8Sa)J`cEg`9Z}%VNo{X$je4dmTZL6%aPK7s;X?Hq(q7A
827 z;W&~_=^nkFoIRkGk~2M!pD!n%2MWlCpT&-W5_dMj+2x3B0N&j{_zcb)m@_XMnc0?&
828 zG&g4>jg8sJs8LE}PkANzLf$|@f&7KMfxNtI#A3-tv<f^52K;^{vS;*Y@>0Qo+mn4#
829 zT#}7s3u@g1*+3HOfK$oWIxur)Hqtyf8>y(wMzUYB<{<AL(CM-fL+(yCjeEdm%SN&V
830 zb<1GHRTvKh-0(@&fYmBLpa!ay_QQLsMv=l$d!Te8@&1)9W%+>9nT?pt;U}>Xv&e$n
831 z02g+TL2hYg4}6!L2eh1=lF0#z8K@thjZB@YMD{G`AZfh=TAd7B&4AV=N3;WCV>Tkc
832 zL?U&*Y^16t8!0W%MqIhK><&3%A1E)+Mk=e6$R52dY?^4qK|tXkpl^_Pt;80Du^F3?
833 z0CCd*)9xn4LZVye3=e+pc7O4u%kBFTe{OfVzdmAie+2=3Cw>R!&sNd-olYPA4!hh(
834 zkLcXyV^<ub#3Q{&o<2gXk3Bz_?O|etjQt<0J^bk>F83!Pms@-f3f%BJeCVA9_uKEd
835 z+*{wmZ^5bSo!)n-mtU%Jzx+}x`ZCu2s_9o$|L@6U?7hDS>goNhJTLpw-*UEa`L4OU
836 z`gUEi>#kk>yPn^r7N5hB{TBBM2+u%x8p2Z$o`moOgah*}?#K5Rx*vu3-u*83V-WU1
837 z*bil?Z?L$>Lzn=e2|^=;NevG7L<l0p8$uR$AZT|7Lw0vau)75~-yV`n;tA=XrPqud
838 z8~$(&7T0KP;VXLkd#Mk5u-F5&eSNi@DsMF@d-}?Bf0`DTX-(nVQn)`UKN^=LOyPFS
839 zU(<gL)o>MFdc~G2DE`L>e@yYK#8nhpj^+RM<z36^EphByEH2)%c<W*&db35|4(pqn
840 z_3-{LvA7363@y`dcer=kE$)XPJP6?dyUu;T-RQm#LT!!3-CKi4k1@K(*bVMm`9`<f
841 zUgGw^nYY)wA90ntZ*!&Gg|2{mi|Yy(<+7K$U$jkeJM3P!&Ca`hR?*sSz0%6`TDMwX
842 zx6)R-(`|u(?6}?DW$(4qJ|pmCY{KyG#SQW(z;66Behq((2Qg<tfLvsnjk5IhY4jTU
843 z8Vw?j(?;AT%0v^-2=pK|NHgS)R#W3<*cvxQSGx6v7|X<HDuz+4odx54aLDhS4NoJU
844 z!yeWKYNz!lXHUBH$3G^%=w>hM-&)+UPXlQd_Tzry3r|D*(#g@GrDW^+;D23TKi%A~
845 zYF-}iSCuubBNs+;VI&vE=6(~oFqJjoetYxsemmUl%XtPH_J_4@IJQo4velP$5k~$Y
846 z;d*jFBhsDzTel9w^L0qhp;kV~1I+S`+FXEd|D#O)jUOp%EodC#+$m!f7hd6i(}x%C
847 zcQ!73xIkSy<R*z+yH39Drz^~V@$@6>Nr?(id8z1>>*7<cONOpz_9J_LOIc$R$@pQ^
848 z@<~6k^<#TKB45F_8Stg-OU=oZWz8%43mTJMUGPRznVIizIp)n4Mb7)o4!l~31Fx0|
849 zSdLIFLk_(9t+oABMKcL2`^BAI5GiYtJ@fS2hMxeL?i%?B!E3pgO^N-lc=UQ0(G|_h
850 zW#>S$&#<oLd_M!3%Ln2kh;Lrb?<gbSXkOk${9H8=cRFXf54W|A3%7d?77lv@n+z!?
851 z4|@Zfe6lyN+*1!ZcF69)_#Mq<kRfE&ymAs$#pxwJsz{x(07!PNB;Q;aC*^Ej$?w>i
852 zlFPbFE`yx3c{wyp{GX-Tv7<Rz)|`wdCn>o%_KS1n6Pmkdp<F@YjISemE1O^u@x;U{
853 ze)bfvNyKqcBPm!}Jm(H<=+2Pas(;T{So`o>|7#w?{*EW7ho?>Zhkl%7uW>E27d8}n
854 zEFHPa_T1&LTd{xak7}d`s+{Yo)16Hb7^Q~&TQz2*|8Kb?JCj?ney$cMy}m!c7UI>l
855 zJD?F>FY7ng_OlBQ7S?z0W(#ltd`4GhCpj;Z{o0iQ)-2zVB_)FNXTD2hjRI+cu|U#0
856 z0peS`R|3+&e|#5VKb{@t{+91Ng`ORjGAkeRb|_eFe!c7^5K}f_Yvz7jFINl8JPp3o
857 z+E3?4;o-mig_EoFZ}|c1_&?zYjQIQl3%5jNXOFRmm~BjgehjQJH2BBC52W*?q|`y*
858 zByUhVA9vxEcmuv@$aTyTyiAVVh2wY?KKCT6b~<_#6pi=LpNP$!{S9IU^xOi7{WrFu
859 z3*@)oK!?$X=v#Cgv)F=NxD5TX-;J`X8uEJ^@1x{C0>q(R)NSZREXhuE8c`#Z<_l^w
860 zeJM?zZ$lS=ZY})NoX_y14b&C*Eb3fzJHCTzq!v;iQ1^{|jx}gH)O{_!>hyPU9(IE|
861 zHWs(w*|;0;z+Y2AJQ014zCjs)gblmV<LLM3V{`;lScC0&D!!bWK^?~uUa8u_TA06_
862 zl-mkC1!{5@UWb?C<>)(z&yk_=GdQkBdgMaxq3_nBXP~Y8SWkCT1N3xy6a5#KqX)na
863 zKZIN=GxaT%21RfSx*GmE5O7e`g)T-HqE`X_$M6X>N`~VtkYW}5eaIx3jr5Cn0Hx7<
864 zl!o&gXc4{=CD7$izZtlI`ZcnnJ=A}rJJ7rMEP5ikn%;=Pwgq*x2g>s^c<c9Q54wUm
865 z^zXF6e}4ae{+RdFMd~ByL39~}`|xATUiK~YCAu5Et2k^@lO6EMWc^~%q@oxELD^T6
866 zM&8a>KX?^+;o$cTdvj8uAGQ)tPH;vLK@?yPV(oxwHdKLTihPI-l{K@?W$W1urK+M3
867 z$LR}023oIKfR-W3?vTeyEm}(ip(<0ASVa|bc73wa{9Pu{X$g93k9Lw#GP0n`@9V^N
868 zGC7URPb;$!uSa?Ld3h|^*@o<QC(mRf|Ezt|gv!N{$c63COFO7Z7ao{C_bh2^eN8An
869 zC*%q(DJyBTRu<C7mp{=^H7P&eB^9uTyus*!9Sw^u`D$qrrDBV%741+HVekXy31%D8
870 zp)yn<I;^6WDG_O?09sSgP=TF=s@`(*cSk$T-wv(GG$RjBb|{7;RtiRDi;=NCWnVNX
871 z9`l5BNcvJbD!m3IY6HUeNjG?k-7^E;>3m6fX<^aaV9f%To2UGc>KU--9N3A+OV3E3
872 zB`>Y6@f44$x@_69EtOT}<>j>-fulPHzo4hGccDVGP;}^U5vDX61Jl5%S*O$9fV56|
873 zax$+qM|8U4hT^tjO2rvmYLm*X;%Qn%Mn`wkH0|vSb_O#6uMG7dYJg}1TnD`XceXUU
874 zX|S@%-cV(N4wZ@$-hOV|HjoChq}`OsIJv24v9%<&HLw3yl;K-EQF{7YY3;a$Wo4tB
875 z+}}(VD6a{y%)}&sXFV>ukQQsSIz-VbR>iWKsi;{+X*p6XQfNIC8X+TZ6pfThDH)mP
876 z0T^L{Q+9WAn%CMRWUav-IBUqtI5-nqlPtHJro&pAVL1(}E5vqoG%jPSG!YzS$7({l
877 zCOnOus%g?KqUJI4xCOdZRFYZFE>mxzdYSc_joLnLlaANBpv-C)ln<JByEy~@xU&Oe
878 z4^4Zp$7X|nwMp71?fT^l(k|%$?tS1b9D3<qCUN{0>g`MolgJcMpOQdmV>9r>uT*xK
879 z!p4bqm1r<WRFhbiVzGv~ja`ada_@vuYm8v5)%dicwp~ki%3hb`HJ}YaFBvlSKtn_b
880 z{e3LmiI;HFPw-kMaXYpk8EgN@Tm@KL0`#{`EF;^ZOr+YFiH14l>!?i*jjKj)j#Sj^
881 zbf|QKin-m1iJPH4Msq}VH&me2MVwBHh_nR-ZWp%(h!R)mTy(EOl7Q+#FCcr6Y)6x$
882 z%i2yhhx8VQCvO<lBSDKyt~P}Zs9#FYN)O_Cf}@zpU^ZGe=Df+{H~4E?^DAm6Pl#<S
883 zrsK)<I#mhw;R2wIDbg2G=0`JExZQ<?dA89O=|fAg$z-8E>gn6Gg7l?t17>v|vkW;<
884 z;!(ZU3j8gHHcXS1C2n;_@Rxj3zB`}Nt6kdFc|-h#OdrbB2D|{`+~jus;K2c{4NwRW
885 zi0r}%2&IcaEjw_TQV*F7!r6^)^6B%6?bGWb8)Kv?ob$>mQ)4VO7TT#+^LZIFlei<V
886 z(Vp)i^AIQ$9{dAyF|>akaFj-I%s9ii;^~Iz#l1GRjI;_(4h~c}(ZXPjXOf1q=VKS@
887 zsVFJb$(+WV$piw~H7aDveW%A0QutvhWy$TtlMiTN8L^2lbFp-1=>kv9?AXDNrZ<j%
888 zD86vbG`vK*(>1r`M_W>B>Xxi863uoyo`AWlf7jkRuc8VcK3+;y87%#`-u<(3fU*zZ
889 zssf7Dq6K0Nt=4EBp=dir(GAoJs!P3*>Q%FpW+K+8>4_|>Wq{x`G^LjJ1l0oM)w(S@
890 zOcC;}C0n8fy##?`4Vf5lmh8T}P!CLiiam_#L=EMX5WOe!DOLU{wH~Ygl-ZSSYvxYo
891 zB7j{?ET`-?yTCPby$;rF&@|Z?Ba00h0xV~j6PtA|!|HqmVU_7@1Xba4KsQ5NIopb4
892 z(HnE8bi2-EZHm<{43Jiw|MSEHd#N!^+xbclUv}bSCb7A7_Pf7<QVeFKBy%gkLXDmk
893 zx2YM9rB(Tii?ylJD)m+P7Mj|QchZ!GRxwUGkEy_wR1}4Agq=c9VV2=dl%a#hG(*@O
894 zr6!z3#kv@p;SkHXkb|i~6-*3;m}zJdvk1*)R-z<xKH5O{F&4t9DTc8#0yB?EF)YJU
895 zdYXc~l<fj!PN_9%z1lW_{}8VP^zO`h5d_9{kOU3<DGOsdfs5G)4`Xi4yd+(ApOnPC
896 zM?PQ@KVVP{res9wU=HqI0M6>ry`o1$0ad9nW;NwHLD!_4txM}R=u|qYi|gemPFZJX
897 zlM7=o8QccZKx=3hyIMa)a6xZpjIeNM6KiaU<}`%LvNR{w0B+%G$OPI`hcqrN)>doF
898 zBicu3iqml_on5DVe4b84l!EQ%gUmtpueEELgM>lXY7T<wjOF&VU=L=#kh-MT!=zpD
899 zbbO0+F8;h#>QdQHT#p}<iU?H(fMd>JE(VD-25lDYRE@1>5xb~hCA+eqPqVgkeT|m5
900 z;(HWm6W14!3P$vXNKuik+FkA~-%@@>Ia6NFH&ioYG+cv?D@J2nqkFt2FU~Uq<1kgY
901 zE2xSJwJWc;4v-P5U)xE5k)>m}R=7K1qgbJ4g>Lh+lwB$Bu~i<)ZtPNK0i5L-hs8tv
902 zxnp&xE;w&aT~}3k%;)WBzvh?8B}=j9`pYk$aCcjY@Yx1jinY>RxZ(?~!DgNqZW~u~
903 zS&7e?@BG#E7u-0euAEbKP99T+O{P)LypUnYvO~b`H-N^>s2I%`tsxvLZo*B)^GpfT
904 z1?&X{nmi(A0S<byY_bF*Iu&i7WMs5#;Y3=i(OW<@a_jQdF3su^^S4k*nf&C6$}2g6
905 zWnr>}uCi)~`mD;HBWjB*G?+J}uYWana|>y$8Mm)^?rrJD)GBZ1n4-qIt(&Qd(l^rH
906 z%4%seJGg$*Z0Tj`tGj+yQj+<#p1UWDsrX}Ax>oI_Fc@ber76q!SdndKbJA4a?B6pa
907 zLsjxbvkQPf-vHE|2hzDhrz%!mpr&m+)RS_sT9DC}CYH13xC{q^BwyD5pgHB%RO+a3
908 zWzuk#5xSDa!`vX<x%m1O_Z+}oT8p`{z9LnNmu#Ni_Qspk`<XWutgb98FL%>2Muw#h
909 z=3;0E6N;f;aaO%*no6`amiJN{i#L_@hBc#zpm6H*oe^gZs1PP{8Dp7=0%Mp|t2VoM
910 zm(SJfdfmmkT%L(}h6t*5nR~{G1-PI<U0ByURzbuZ8bXw)ud+xXEi@!Ol%litEFxA}
911 zf+K``D0GV4iFoJ=*YwICZLzruW;J*h<L!%P&V29E-j}A1a!oF;on6~;Zb^xA+)cCJ
912 zJK8#NLjThFmqiVD4%o&Sz4_-?wWg{ntN7<`esBDQNU;qUa86EVtSBv8I>jM0fW}aI
913 z$#17lo-w*2K-y_N@T4ce#!#W8SV|*|Z&f4`D3beG!hSJIB1|m=9zs!sLmu0s1t=)W
914 zfex6H#-wp;D55RwA<BZcR_0B$1T#9x;tx;?$ZFlJcmh62#1r#GW}DPS-H;i}OlIyq
915 zzJS?Bx-%%$4OsIgdlZ$TvFLoUUavP66zkmuH9D_;bipFsTJ>6ibJ3&sp<;6}Ra{J)
916 zY_|OU9VTp|qK}2?7Mh~D`3PIBl^(f-CanodSZ}I#*Hf;5hinDCM`w*_y|p=;2pSAE
917 zbRKQ!%*cw1w^NaB*p58VN|j`LFW?xmE|67Olora%!`Ur#<(u~5HfG+G-j+^W^vdLU
918 zi{>s}j4NMiy}Gc_b>58KPvx~<x3vAokr|7nnZ@pMk7us8YEC&drqtC~QPP5se<>ZB
919 z);b@XpL`zs*7sasQ%Ua|Jo_H-Mr%inJ1AXVK5zb%B}GLJyNRnS+j?^~UsOWc?>T6{
920 zo51okp%$^0-K$dRX(PQ4YrZCvA+Q_s$ct&DH}aUL+iA*Vaa$;wwwUG0b_S2e0-atN
921 z2$=xUh!sM%ctVQSNt)`W6V-UE^g{DxqkJI-x8NXVXxledyK`3kaWAyln86dwSAanr
922 zGN27&SZ^>y2tZV7y-KY$?8gmE8?%(z!oZJKgLR~CP%l=mQlGD8ky>v6@y>(kBJZZG
923 z)dP1inM`gIWl)Khyod^foFeLiWF0;f)7wDG4b~Kyw4L>p2qL*GE21nxI5J<oBwd$T
924 zLtTv5zm)lnv>h*&?!ZfN9^G}~T6|n$q0BP@&t7J~mbY4&A{G?xA8Vaz-Dst)lxKfP
925 z)vVf}qE%d(C#w(uzM~3R`jmL|1cwaPh@KeKG72g9LrhjXm8I@1-YA%2Wy!WdDvP{@
926 z5cBecrl$99z2&{;rU~OGxBUM4JKkw-8ZTYG`0Q0@owaJ!S=8sxNQdL`L?Y3R-3MR5
927 z&Qv0vN_I;h9l-VvKLjQI*q{CcEqfib`UmV+h(|vW^U4fk3tNp-3)|Imjq{z0i&rq4
928 z!1UiY_&Fg+)4stw#Y(VAkkwddwiXw93zrq1Td1~Lb&uy!UgG3=Kh|`)d)<^g9GkB*
929 zN36YoExg9#wo(NJrKVDMDMe-Jn^Nc-2%C@&To47cQ)YZQH9DO20HxULM9P*EDXVli
930 zc$g24Kfd#Wx#x6m?un&@;1p|d1J-ZGT5Q>T<)Zs4sdK+s{rtjp51qBVrzkJqr^n4D
931 z4M*O-B=ggboyBCr%HUz<UG^KW#{|@ZZWmLm*Ws-;dZT`Rd}I<oBQlp+YV3$Ctv}zg
932 zzS!ui69QsgQ=p?@Zr#$z{E1z4D<ge%TOymlT8J1*19d9ZsK=H8E2kztrs5WrrqK%h
933 z3R~LER`TxAyv<~DmooYgKSC{>Vw&QfLQUXhEHID6g6vM>1Rj74#I_#lWsxyL);&VX
934 zDaS$xOdP;Tgkd3(9}e4Ou2Pj#UPwR3yKU2_zyITF|2SoG(~q}ZynM=(rb8F)cy&_K
935 zl=HXzY?suVoIg7?UM!j+R+YOaq>Im~sTzOD*~Qa}`6_%{*Dvbpo0`XsyS-!UeKD1|
936 zw|DlHvGBxkKL5^HEAJnxs(+5Ep1yc?eSNFjSmF#W&0Nqr({HS?R;_DZzS(BapFnub
937 zGN6`y>@n1cc8i6h4du0!;{)}L^$W+%o|uXzCasHdqXna^%Dhdi-vFSV*alRi$p@fK
938 z7+<)rs|Xhr392?9_W4FVE>I$eInMOBit~z)Ue^MLGS9rRcx<89XZQFD$B$*S2vjGq
939 z^qNi0?q*6Svu@5zShvg(h$?;*7#A??+S;7p&1!(18AU-N0{A3>8yt2{Hr&k>8qv9=
940 zAnVAsff%!I*37oIZg}W_W|q(XU~za$ZRx17KHs*WIId}Jb@iAUcX?N7WNo<StUUJ&
941 z%wBfsbIsGIUwvLE<r^~&KX=X}4GoQB%W<PH-NqNRHcpys28&dsx7wP<j)|GAdY9cC
942 zG~fo$_`1>FpDy0=bdgb0SzC2JK>a*``T^jpb!ekFgVT&Q1(<-niD|NT8t0AC80G@d
943 zGz9=)kEeM5s8Onl{iU=FR|~+ktioL?>Wq=nd{0@a3`(ACLG3g7+&)U1rDd%`%S1{a
944 z1q}A)zz84&X{e}}3j8WhRak~#RyJ(=5l|gyYn%DbEq8xCv$Uus7CNVK?6y%QrL|?j
945 zD}~uN#d!L?%=VHw`K$J}%v*xL?|Wg&<e4~9ikr>Vd5*&3%95!QglYMuHkXNRl0N;O
946 zqHD)Q_LDwT(hoWjAmKpeD2SGeW9B<jj`M7^)iOHJRf0=O)Ww2^9wVqVDbofMm1nn(
947 zCOd*sCX>3tk+q{pw?)Vl;zMGHE*WiCCxzjTQGzqpm_obC#%HnEF!koTM~A6(B>LD+
948 zzGNrPojmuYn=%hj#=GvF+TM0f*N(d-PeoNlD(Uie%@{SNbV)Q?dTz4WepT<-pcntD
949 z=f5J8qU@o9>e?%poO55brg#s2rF^=@OiRCHY>ug!H=AeL4YahADaf0b>k~c>urMPx
950 zXh@Bq0ht7IlbPYL3wv=r9&c&|>+Br7(zXS26t+S_v)UyzP;C_aSf`+(5v#E{h)GEq
951 z3}!Z~$`SZ@3TQi<<OyJ8qc=uCAuod&(u{~%c#!P80(RkW&oKcFeF`9sOHnDlP1;2*
952 z*t~CM`;r?kOw|U;=8G4snt4uTc^OrjIm{k%`-8V^y#K8Rd}VZ2o=GG1*xW@k3CDq2
953 zGM7Uwb!fWi*Mx|@K^$)~EQ^>BrnMoYK`K#2!5`9@b(BtL;K4$sSgjo;hb-jIpsd*(
954 z9ooK8?sm=o-AI3(xs+<nJVu?*9+EzmjzhRi`j~8U@B_ejD|}rII?eTB^l9yDTIy*n
955 zuEe9LDQGg?!OmAL<Tf$u)mp8V)3A&U^iD0OQd^<uoA7$jo=G1u*%?R7gNRD-0bOc2
956 zOw-6olnITzro{n|Xj*U5yY&tFHvJYoYtmx`N}1A`R$Jc?%kEDHgh;@CQ0{a%sFC5{
957 zSw=Kz+CxLGD*QKmJ^q`trBfQ+i9fvldiKx{UhI=zrzYSpC5Hk}_FAZs7TqhBQEIGF
958 z(U{SwFr^lu6lx1f=|r+Q4X^V-gdx)YF(U0POp*f&r?!(=IJFj{&MP}v<a!&_*@Pq;
959 zq;wP)m@raN8tlZiSimBlLQPk-sL#h*l}w4iwnk%60s$eXsCQGht4a6P*2jotNQ8mH
960 zc8Ehwz%bHKaA2>MuFpIx-G_gT-^cSA<MF?+hfds2&&y&8yGO>9OT?NEjE6crETqF@
961 zwH9lAV6<v2a)$@$b5YZn)#&yOJ|`Lo59A6sN_Sd!3$Jvy05y#4Zi&S@YjXgQNrCj1
962 zWNJHydP{KU$nFw$k5n(MY?XH44fyx?;mlF0Gc9S^Lzz|7HJKSu_SV7wW{$8&fTz8-
963 zAGrt55@!Pd*t^KV!a2BFP;M+PhpX1%4XGN;<E$}f*gcjH)&y(Afo4^kd78D=+2Wqc
964 zH`X-PwgeXG7n>L7&8?a1JKL5XbH1gow$E2yM3Ar!kdETDT!bgvEO|>PZ9dMg_8S@G
965 zDk&)yTvRWMSym$;uB621@XKrNv_%oWKg2NAjw|!~^QnAU?KMa8L1uuI$l6ZBf|sls
966 zUDMf_ZM|Byw46u`W~8`~bs*#kFhizPZZt>cAc#ueFJ*d4d$Dl~zk;s{FC0Jp!D$a(
967 z3s$Z6QmkI?ne+9J{4+t8%<7xAv+btqq@Nc~^5gEMX`{(9J`im&+sj?!e{<6zOo!{F
968 z2PStL?dFE6SYx4uFN!{S(_!f^gnKp){(=1jG#N4uwNmyfN32;kr{?QKIH<|Nvrl;>
969 zJLnz-5fi!VAcqa~MlEFxX{nMB&FLLz8JGrQ1?6<8(#jatcgh%6r=o+9-@h`L#eO3F
970 zNa~k%OIz_)JPWtui=-b#>U<j~Hunm3v9Q;-Y2u{KV|)VjnRK1hjem}lcp1J!ic8mD
971 zdZ2!M$loyb;H8&8I(|YZRzLB<oTPXKD8q<~QG;09VVz%?rdAjj6>ZQ{c_9s*A5v>H
972 zU^_d`B|4s4bho>ye2|PK%1B&iRv;>hESNToED9N7(VW8TU)j5}Px|^ssSf|<7HmE5
973 z>ieXvsTI@j=~JtJ^x(|1x~NZImmXU<t(HAhJ!6UV%v)C<8dsw^ahBFU_7(VY7<l~~
974 zGH>b;ZS}OEnNBxpm>@k)(@0O#Fu)xi5IqEg=fZ*XnZKy?P*9o>=_n7ev{p;YUE~HZ
975 zAk}KZ$rwgMc?tJdjK^FzdA1WmcK+Q-h?SU(<iNafJV8B}IhUE0nM=Ki^XXf0k@WG2
976 zWrV{l0TpDc%wa^)u4RHMnxlhQqtPwm>@-U)q7{09vH@UC1|P7FUIU}_jeQ$1XTA-{
977 z#&sqj+txt~5t{%xdh3b9Z@*2KzWp}X(=TC0I&mTbUn3(gx5C#9s9W?_Fr&Bzs)1{x
978 z+PFDP2X_v24!4P3quIh4C>=|-wSYgYqV)g?Ev+n;m01`V(O9d|*;$%^ArnwIm^@rS
979 zy4Jj2xhI<dfb5Xyu~ax%u=H;|QF7mXxQ`4qQmgU1sbdM6%RG1R*pt$FC|L<YnnEa<
980 z9#@N24XvYDq*W1Sib{ZEsffqABBISBMpcOHs#;X8sz*W9bkwNgu9P>5JGfUFg;dG>
981 zgRBpgxhYwXog0A^wKg&W3AAS;BaqqQBSo+*1EH_mP17GzuQTs!-{crAWzr(9O)GL&
982 zP+OTRv|Bmy=tJ%6oI$Hm>0Fe9acLb~4OPihs%tgnx@!GcN?^umg8<P<%na=$ZXvZo
983 zvrM-UZ(y&WE@JN0-o+Jj`vzYZYoR9E8d4ojN9k=)e2o@x(WYWm8mwl^aV;z0acnc5
984 z%4%{$0vcHikdwTp7CL2YQ&T8ym(pK<CZ(nA_>xzD!0V3wjtFY{h7-#`o)yral0Y^*
985 zk>!F0Q(@!*1k6YE=svt26^Lfs9>$mtw}&Yd4(0nR<gFy(ABxvF_$SuEeuc)MJ4Fw}
986 zaW!_Xux7l|Uo^uh7A?$MP_mKf(`_DQC|eGdvg{kYgji}I=l6<QKwV}k9Eu74jD|=)
987 zVRK#sUq#7>9(gz6&?XgDQC(HIs)`R0eBcb#O0SJ6QeEvec}4G1FYR*B={obbicwro
988 zx<-hAkdK{13^6yzDeI21G&mzi%#C@{Mbg942D}rub^K&v@Vtu3!r6jwLDQ`5<D-+O
989 z){nhn^3-j0f$2r1qt1@CY%0dr;2w|}zpz_Pf=#-`*~s%_f(@}}FWLFz*l56C;uZ^}
990 zJ8gc8BQLwX=_j(L=NC&<I0PD+#-w#?DR!~iPA%R_l4v!ec8iu$Ov{fmWVca;n6ru`
991 z-X~3AK7sHk;TgxL<@DFhkS~iy?WJf?Sv4a%5!5C%X6!hrB79a+_Kr5>I^LMgCra=J
992 z>TLFql9Ouib9xix<U}=MUY?LobDS}x(&puHATp6lbYV{Bazw>drW(jSQD-oFN$&Kf
993 zNsZH9xis<6q|!0nGZL%v@>%JA>eu+u`2F=wqREKstZo3(g>BS4Yy|Om6l6#*d|iX~
994 zh+R+`#*EDjSn-UAMP>o+!f~b-uVD;G%>Wl;X<B3Ad|VGl0~cWJsE{p3wQLBDV}T2>
995 z^HB$T4(etvKx<hu*<#hsni-z)G5w@(kkeYSU3*~E6Bt{ECML*g=J23aflu9{o~Q0s
996 zpQ~ooDz(N+rWL4cU>#)b69}4vGWjWr7-oAjlC&f9S9~8X#|1!aw`8uO7SkItM<{z%
997 zhrC$k@bzK^9r9^JO_!!u!;oz+WOY#s$x@&ef`u_cDO3h4WOY#1(>=q+g@?UZ+9A=h
998 z>X(7${8<JXH1i?lA)7jZ4w8hgSx~cR*nqH+A@&Ssup8x-7WVvz(P$HG*aruSwG)?O
999 zCv~qM?acK%9&M-xnj*v`50A*3mU)1%sBfh=r6=)*^_N@-oK5<r8<&Ded+fy0o36j=
1000 z4*HH0Q>CNQyHJuVpbh?vFDK4k_fpRRj`qt=h_t#0R`Smd>-<@|6)5U*1yZV=L<XS$
1001 zA+Eh266=YwdLCgV<iK*o5Mo@6muX-`ri<xi)C{B6X=$w1XgLrW1dv9PQ8cz2X*O$R
1002 zuvT@_AX^!5u;j;vU<w!j+#)L)I8k9(@&pSud*&3eZy*K;=yBwbT^k`!Sus&27I8F)
1003 zPnIHwwcKuMH+ws$noXa@EY)?<J<Lk3N7qYlVK#AFbUGcU)fI9Dy3slxJ)Vhi<8>n4
1004 z#<Xc$IfJ~V0hnFHF0tS;#WcYl)lO-b^yfk8&(iI9H=c@H@oxJ4%*E6NCn})9J=BMU
1005 zhF4Q6pp7DcbFrw!&rofIt;kj3Or%GZSgEAzohTzTn^{3!N2zcg(czi~|HSNNzed$a
1006 zL~FzrGi|nbD4ymm^Qm=|+FmwppF9&IKVRj2ELPT9MwNjnW86OvST)yCjJ5Vr9&K^9
1007 zCvB2xl6w*r@(?z^$7GEZXZtQYtuZoL3CUw1r0;UXD2S}}oy0~WBl|OldbrQ1(+Xi`
1008 z@50V5=})h<TwhRJv}|$fW&U83zWws_j31Akv8b(e%5N{c=;bMM7f3%DRpFXg>1il-
1009 zl~?fm>_AONAx+mmB|X1p-T78Et}yadHKQ*{1VW=~>z}-~@0YDpX17k8Cw;qZ<1M54
1010 zqC!t`Z{w8CqQbm<y^gpZ+aNtbe<F`wHi-E#u#geP(X7%)W3a(xMEdMenHy7CO<!&Y
1011 zqLFxq2dl`iKBhm(?3VsO%~aXH|1b4I(w<~qkdwIx)u5D^Kgw3_8D|T6n&>v|w1VbB
1012 zO%)kOG!nbZ1Q9bqTeCS|)HCJs&P{MxUS2FBtx37o@^C%cRjBH%1-Y4>GDc<v$p_h>
1013 zVow41u*4kdBF@Q5>cO3#&YQa=y<qO2&hLDD$r#7@^2)Ap+pfOlqDjf}vVe6=N!zGk
1014 zM@dU->j&4~@yFJd##*oRrrl?E6d$<t-n&a2jxkp0O+%^9Xd?X}(o*(Qp!P!0{rW{;
1015 zuK#0t{=$NVB^wLYyR^l{B}D~J(*IFh;wn5t|Hl}J*m=;)U4oDF)D=6SF^2j<j7IK&
1016 z)d0Gx$)v7zI7W1f@Fw2PH}Gxz7M?ZnL){`V@6qgFm6vpokWP2Pd27$mMMC;V%7!}8
1017 zgN~sI097OVOg^Hh&KVW+O0?dMWp@p@xG5ugQ1YttCeE}Qe`V0!b>p~M#a|5frwksK
1018 zenQpBoU@K>+j2x!qNrce&yf+*4@Es(5hCByK#}P>M>}f~v@Pk$!MMyu)k$~0^-Fwt
1019 zhNX@}UdKTCY>-D_ZxP2Mma3sD0G?ppRO2eD5l^QW3ey~7X%}+PpjFf3kU)bxrO!fh
1020 z=(ACR-i+4K=B$RDo#>}AX_jgzTCslsbn-lrhRzWkH|$7>>HuNQiYyHxR7Dz5>C!q)
1021 zntEhex<NwHCJEt7a1(AKZ~}R1qW?_zoRp+z1Kky(CedfaSU+EVu7fi0fby6eOvTt}
1022 zz_i+g6sPwLLXHQ`YF_PA(`v7*Bod)tOGcF%2s^AC9!iUN90b8Bhuun)MtZjN(|J1r
1023 z-cV$;Z|4HsGwbSx`f05b>aS~;lBu`PUx%I7{Q~QIHomjGxOM;LbEU&~-7kHydBZc4
1024 z3H`P~IfHUJTOM&>wO*c!q1Z4ViP?glXT-(_RgOHRw6n9Ny;6<UO1B~ze^4x#hTO=X
1025 zS{eJYh6IXqcZB5mdsaFa@&<OylYZKM{kRFO)9M?po`riB?DUO}Q1#bIpZA^rcB**t
1026 z!E-m`y!-FMrRQ$m-&(x<osB)xw@^|%4^*li)h@uZQi5j(_ZkRs$SwV-QRZG=qCa|N
1027 zPC;;s?vdX7>NV9a>0H20<<|lknIY_j)M>C~yCJ*)`YD;k4sn$e!mXf{mIK=4DeL|@
1028 z5EtA*gmiG$?LbI>A3}yGSZ*sJ90Bw4c91Du2w8v~T7QF(y$N756QR6C2<3|qo<hiZ
1029 zB|-&|-c^ZE;q?d=U4l^YXb7($R5BLAn+Wj}Ago2m^IwQ;Ie<#1LiiOzWe*`#UIgK8
1030 z2){$9(g<M+LRCM4a2TO#DF3LB5ULrS4c{SD3w0Phh3rXihRtrm|HfuFCvD#FkN3Ng
1031 zHyrur``!3X-tQ)M+I}~|k(=@~LJj&>^sH`*_oevirm+E^Kj8QK0=`%%77C1lyxQ0H
1032 zB-bZWYx(Nd?$mjFds7Q9ztppq7wUp_k-CsK6bwfB#dR~A=kkF%U#E9Ad2dEf^6zFk
1033 zT$uCLrqY~`4+@-b=*Ndcp<swl^VxghAlacpezH1uFBA!fPQ4fNM}<@Gg@thVw0j|+
1034 z|Frx9k?3jn!qM2N_X0xfw08q!e~nY``32u;`9+1((m~2o(}iL}?9_WfUnF`e41&Ix
1035 zaBAHmk?`r?2t@;@!73bxoeryjaC$m_EO;861HRK?9|@gWKGNW)rt|yA<}#<$!w+9M
1036 z?H+vi)O!J6^wj)<fuR4?cSAA2pHuFIeSyHK<qP{FzEj~B_Qisy!!+t2kze21_=>Yr
1037 zYh~)0vtm;UaF^@`L$<e(DNEk%Wp+<bAFu2_1IW&;>x-}LlTkeqiG{*kMMZO43uoX)
1038 zZZ9)`Me5wt+IC2kTANy(NUh`jkz96d>-)}Lu^Mg#Pu@<ZG2RzHyJuOJ0l*v4-1GMM
1039 zveXR10KD_orDpc5PG#?{<A-*nnV4AH)6%o{98v+eWf;jK1f^gGD4BumgOxfv2_FwB
1040 z>7d%nvb){DMbkvS+B+q6_6B%3Hw}kn{3vpJHm|%(xS1T2(*z^gJQl^9y7^!<_vK-R
1041 zDW?T4)6|m~x)%)R(vhE1NCPLAM~*Fqi<pvx?;Ls@KB*E4#By1ZyoM4+PD*$ZbM+f?
1042 z%#{@1Fp`6l8>zk(iMU}T=QfPs{lmo``c(8^`4oZ8NuOF2?@c#Bj@|LTb*Fr7MBcHJ
1043 z8*MI68j2+6t0A&F-nV{j%0Fu8{fVnr_w<b{#DW#cWhru}h4%*oq!svV1-!FUi9Q~n
1044 z>>Kgl>7CCDzEf*BQV6!k*OHBkhR}F+YF*FzwFI?g(bIEw&)PY?@kEMOwpU8W*Pq=t
1045 zd-5b+P+lNgKjrX;%WdZ7&ys+0?dSc%$ZH?(4`;7sbn}NsWCfBeF((D}Am|0xb5rN_
1046 z!Kw8epo_*_WKu5D3X;~lu&sL~sX8gZigSdhKfq@Ns&X0RvlQoTnjVeA*A<%g_AP^l
1047 z0`K<?e_q+hYVz8i^}R~<=j7_?ZRPvcu7?JgmOambvaOA;Ue`<Dm)MwnRcSRs#6-HL
1048 zk}Kl!?oK`)a^XR+)&&5&_-G(1s|bw%jug=w{5gUjCU3!aDi-E;x{D{fQ+2(o66+#$
1049 z$y8rFu{@PD4DW*{Z_!oP-ac){oFObyP;BUH^+8`W;EQwz2rCW5qhTQ$4W}c%B(VO7
1050 zFA$fX#)C?#SQ<ox-{%Jj&>aLB0`G(Z-M$ovwtzqG3lSOT>po*nemSR5FdkISVfifh
1051 zQ*rWAKo&j$zb_>CqbcCv@klx@_(87v)7|MrEbJ#=i$zKP$ur~+FDhS;`V)Z>r3;2a
1052 zJ^`c^d?5rn3do*F(B}^gJ&D7U6zJ!n2<QY*s2FJKsXzh>77M4qs!qWhX?P<!^hP&S
1053 zK7866Q0_<|5{d*P0=%1orvga!G^FVc`h!rKU~FU#!2p<?B!^@Kgk(4!4g+xE?QjUL
1054 z{3MTX6mk$EzDO__52OR>i2MtrBH1rTa$km0!uv@`5di7v2OvhF+%foK6uz8*bA0H_
1055 za3dHzz3f6ToPe(eNm+scxK2XiZXuS1f&gC&Ls}sc3j4ze(s&RHKna2*pKyTWM{*I!
1056 z*OL%~i~wdSKeP>e65_#-Tpr)>Unrh^8kC><Ap}U_f-w?83YPc-!ry62pOAo-5#TF<
1057 zp(jAUzV2`mY7z&?N1$XOfK4|!Lt1#vmmW$RfwWOjN&Nt|Af)$0djDxT#z~GsAOva_
1058 z)G7eAPldZl{RMxvUcM2nDu9MYTGc1#DnRNalprNz1c751phci8V@kLi&H%Sa4$5H}
1059 zTS)1JNGwds1XvV~2mJz2iXfbXJ;{Fs8|0Ft$rC6`TK-a62m+Y|bJ#84^#uR}!g4u6
1060 z0fN|Yoe&a245%*<NC}CMAP8BUB7c6lErTO*3eqQ`MhW;%YzRt;VTj4CK}tftOdv#H
1061 zlL+T(=_e^qhB4@-SttWi5VmkS#RF#ml~W+O6BxsL08s$bP)WkMlB56*i7Y+^{x1I@
1062 z90V8$Nd<V~Q_}n5Ku$8p^Z637L?Y=AMf_nQn21Jv-Dx2eOZn3NSg1Q0_jmgf{#ZN`
1063 zOoqYy3HzXAk#r*MPYIAtNC37&L4(mmItc;b0NDYS$KtV6GMx-Y!og4=8H)KL>5xAW
1064 zi-*#X1z?Xq<PXF{DKKK<fs{WOjYNF0BrpXb8cB9Xl8`MlIW!0~fDi|?2*l%wbS#-n
1065 zhr+RhF9FEn7n1%!G8XUd_QxWLWH<_?izS1}1aNL46(w~_`NFAS66)g@68=ysk`!X`
1066 zKz9N%jYm?ka5NDip8$jaR0#RILxg%#z%`?hP&@!+7zl)jj!alYDh}-h34mi05>JKV
1067 za3cvUJqn);BvR=}G8Kg{#^Q-^6j)+11WZZ@b_;1=JQ4{(GJvZu4V(`cXAH^&aw(mN
1068 z0|ElFr9<ghK#2NMX-E=`#e$K9Kb!_&3aMlWzzfne<WEBd0B!vMrertZQ`#3xMZr`|
1069 zN5b7HfqW(2o$mHWl3>RHatPgi_-4o#O9S<$Lx}*CI3@Vg5uux8>yL-Sz=1;k7!(Vb
1070 zBf$fZWI<WV0K@4{5A!B}5?GAjBN*lzrdU7lLtu?bpqZQ$=nne`?)m(HeNiDqq$BV`
1071 zcrO54F8~b$s7GX|BGV#7HUg$6{#}WXmB?~g0J8-`v&7Jglj_Knby9g0Y|o{N5Q!@!
1072 zhH41!3qbz@P^AD8J(z}&1m@74E1gm{nI;v^6OyGw;vbb$fYhOEYN#;C7KJ;&6C#%g
1073 z#{jMgyel9i0!boW$h#!hf2<8;so75&0C;&MLPT+dBmf+WML{5?pd}-OPll2qvdgAF
1074 zN`#-_2V90$PN(8&z(F6>ygQjr_>-Y@Bn}z_;gG;=5|AntCLj%Tr()?yB9TY|<wlb!
1075 zAsOoihy(&aUVufha4eln0Wn3AfmA#g2Xp{zO?C&8gwCRX6G9y9-4rB(yU`F}OAL@d
1076 z<x7$hrX$gA5W=8?1%Q#HgQ0l056VvDF>oBff1nZ2%Yx}>*q;uhLa~TH9tHXX3=E~f
1077 zHjPITX((nO5rfiocSmBNhy@c+mRL8C3kV}2C8U7=#}i2)6<`U#heD||Xk5THBB|~O
1078 zkVYWjPsF>S0P%DJbm&Af0J1(Bi*%=wP>pb$P!!-%3~~=dL%{YxuL}bcAWC5@5l#T}
1079 z2e|n|QK%=86+r>04md<G74wBaLxQ#r0?8**Q0q_(*tVZkD?(UkD3pwa5+ErO!6a1C
1080 z7a}MJstQP;Xb4E&51bF$2)HkBO5jP!L=a${1}H}pJ^~9r6glVzOb7@HR}TaeSuGYQ
1081 zm=xnbI^+KvI^(~n|DVC?Ae<;jSm>Gb1kfS<fOdWXge1^acPf>NMMJ=&K)nnQ-A5oF
1082 z0ShG@iUmNz5ZyI;#+-r#OU`WF{1d%GkU5br;g6?4EXO0sDDeFR@Ej;oBo>S&fsF<t
1083 z-Eks!(!da70nlNBF<>HqN<ttBbOF2-#C0-BI1@Atu)}050F)xcK;jYbCcBe??f`%l
1084 zNLYyYqutS92)K(tV3SVzeL!p=ML?LRfkps<1i+*)G3;WYPzuxzkh95H8Z-frM}Cm{
1085 z=_C{`0l1P%K?6bweR0B)5(z)>w0L(4co^`T6bSbSGyurRXb^N5A&~?H0W^DmpxaOC
1086 zD#U@=LbZ~D5Qzc<6o8|FFefAnN<|cyTLM@!acP_e;R`hbHV^6)Fwz*vC18!9Ndb}r
1087 ztARH`lS+e-f=ttqbaynF^aIz5kT1rfiFh28cz|azL0C#63{Rt=i~xNAH;P3A!4z;F
1088 zP{$&mdH|ONwJiYpSFjtHbrLvx$_ENqifBOHK_N+07(mQ4ksgqBDggRg3ivK4R7oKk
1089 z0)-)%i~|uRp)q68RCh8eBmhppqtX#TEKs&X2_X$)1(>i;0EH|NOecK*`TghjpWpxH
1090 zZvyFR-Po6!9bdgHMf%esK$~Gc3c^p|!B*|$|Nbx6Wm`RC{n>phy5p-?k^a>gCk=W5
1091 zb%%nBMvd$wB|`xQ&c_D~XBhU%-it&d!$TINU)7&`5EF*SX$+*VJ~H$Qi0t7f+0?$_
1092 zk*MK&5&voT{Qkgc_X5MCFhlo7jJOQli-t#(C_6%87#<=K{Qr1}#M{0$wSkw%MhKV;
1093 T!viJ;E(+{1YEXtu4BY<*Qm7~*
1094
@@ -1,17 +0,0 b''
1 #!/bin/sh
2 set -e
3
4 ## MIGRATIONS AND DB TESTS ##
5 echo "DATABASE CREATION TESTS"
6 rhodecode/tests/database/test_creation.sh
7
8 echo "DATABASE MIGRATIONS TESTS"
9 rhodecode/tests/database/test_migration.sh
10
11 ## TEST VCS OPERATIONS ##
12 echo "VCS FUNCTIONAL TESTS"
13 rhodecode/tests/test_vcs_operations.sh
14
15 ## TOX TESTS ##
16 echo "TOX TESTS"
17 tox -r --develop
@@ -1,27 +0,0 b''
1 #!/bin/sh
2 set -e
3
4 SQLITE_DB_PATH=/mnt/hgfs/marcink-shared/workspace-python/rhodecode
5 RC_LOG=/tmp/rc.log
6 INI_FILE=test.ini
7 TEST_DB_NAME=rhodecode_test
8
9
10 for databaseName in p m s; do
11 # set the different DBs
12 if [ "$databaseName" = "s" ]; then
13 echo "sqlite"
14 rhodecode-config --filename=$INI_FILE --update '[app:main]sqlalchemy.db1.url=sqlite:///'$SQLITE_DB_PATH/$TEST_DB_NAME'.sqlite'
15 elif [ "$databaseName" = "p" ]; then
16 echo "postgres"
17 rhodecode-config --filename=$INI_FILE --update '[app:main]sqlalchemy.db1.url=postgresql://postgres:qweqwe@localhost/'$TEST_DB_NAME''
18 elif [ "$databaseName" = "m" ]; then
19 echo "mysql"
20 rhodecode-config --filename=$INI_FILE --update '[app:main]sqlalchemy.db1.url=mysql://root:qweqwe@localhost/'$TEST_DB_NAME''
21 fi
22
23 # running just VCS tests
24 RC_NO_TMP_PATH=1 py.test \
25 rhodecode/tests/other/test_vcs_operations.py
26
27 done
General Comments 0
You need to be logged in to leave comments. Login now