@@ -0,0 +1,61 @@
+# Copyright (C) 2010-2024 RhodeCode GmbH
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License, version 3
+# (only), as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This program is dual-licensed. If you wish to learn more about the
+# RhodeCode Enterprise Edition, including its added features, Support services,
+# and proprietary license terms, please see https://rhodecode.com/licenses/
+
+import time
+import logging
+
+from rhodecode.lib.config_utils import get_app_config_lightweight
+
+from rhodecode.lib.hook_daemon.base import Hooks
+from rhodecode.lib.hook_daemon.hook_module import HooksModuleCallbackDaemon
+from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon
+from rhodecode.lib.type_utils import str2bool
+
+log = logging.getLogger(__name__)
+
+
+
+def prepare_callback_daemon(extras, protocol: str, txn_id=None):
+    hooks_config = {}
+    match protocol:
+        case 'celery':
+            config = get_app_config_lightweight(extras['config'])
+
+            broker_url = config.get('celery.broker_url')
+            result_backend = config.get('celery.result_backend')
+
+            hooks_config = {
+                'broker_url': broker_url,
+                'result_backend': result_backend,
+            }
+
+            callback_daemon = CeleryHooksCallbackDaemon(broker_url, result_backend)
+        case 'local':
+            callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__)
+        case _:
+            log.error('Unsupported callback daemon protocol "%s"', protocol)
+            raise Exception('Unsupported callback daemon protocol.')
+
+    extras['hooks_config'] = hooks_config
+    extras['hooks_protocol'] = protocol
+    extras['time'] = time.time()
+
+    # register txn_id
+    extras['txn_id'] = txn_id
+    log.debug('Prepared a callback daemon: %s', callback_daemon.__class__.__name__)
+    return callback_daemon, extras
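Note: a rough sketch of how this helper is consumed, based on the call sites visible later in this changeset (the ini path and run_vcs_operation() are illustrative placeholders, not part of this commit):

# Illustrative usage only; the returned daemon is used as a context manager
# at the call sites shown further down (e.g. SshVcsServer).
extras = {'config': '/path/to/rhodecode.ini'}  # path is an assumption

callback_daemon, extras = prepare_callback_daemon(extras, protocol='celery', txn_id=None)
with callback_daemon:
    run_vcs_operation(extras)  # placeholder for the hook-triggering VCS work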
@@ -0,0 +1,17 @@
+# Copyright (C) 2010-2023 RhodeCode GmbH
+# [... remainder of the standard RhodeCode AGPLv3 / dual-license header, identical to the file above ...]
@@ -0,0 +1,52 @@
+# Copyright (C) 2010-2024 RhodeCode GmbH
+# [... standard RhodeCode AGPLv3 / dual-license header ...]
+
+import pytest
+from rhodecode.tests.utils import CustomTestApp
+from rhodecode.tests.fixtures.fixture_utils import plain_http_environ, plain_config_stub, plain_request_stub
+
+
+@pytest.fixture(scope='function')
+def request_stub():
+    return plain_request_stub()
+
+
+@pytest.fixture(scope='function')
+def config_stub(request, request_stub):
+    return plain_config_stub(request, request_stub)
+
+
+@pytest.fixture(scope='function')
+def http_environ():
+    """
+    HTTP extra environ keys.
+
+    Used by the test application, as well as for setting up the pylons
+    environment. In the case of the fixture "app" it should be possible
+    to override this for a specific test case.
+    """
+    return plain_http_environ()
+
+
+@pytest.fixture(scope='function')
+def app(request, config_stub, http_environ, baseapp):
+    app = CustomTestApp(baseapp, extra_environ=http_environ)
+    if request.cls:
+        # inject app into a class that uses this fixture
+        request.cls.app = app
+    return app
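Note: a minimal sketch of a class-based test consuming the function-scoped app fixture above (class, test name, and route are hypothetical; the .get()/status API is standard WebTest):

import pytest


@pytest.mark.usefixtures('app')
class TestSomeView:
    def test_index(self):
        # the fixture injected itself as request.cls.app, so self.app works here
        response = self.app.get('/', status=200)
        assert response.status_int == 200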
@@ -0,0 +1,49 @@
+# Copyright (C) 2010-2024 RhodeCode GmbH
+# [... standard RhodeCode AGPLv3 / dual-license header ...]
+
+import pytest
+from rhodecode.tests.utils import CustomTestApp
+from rhodecode.tests.fixtures.fixture_utils import plain_http_environ, plain_config_stub, plain_request_stub
+
+
+@pytest.fixture(scope='module')
+def module_request_stub():
+    return plain_request_stub()
+
+
+@pytest.fixture(scope='module')
+def module_config_stub(request, module_request_stub):
+    return plain_config_stub(request, module_request_stub)
+
+
+@pytest.fixture(scope='module')
+def module_http_environ():
+    """
+    HTTP extra environ keys.
+
+    Used by the test application, as well as for setting up the pylons
+    environment. In the case of the fixture "app" it should be possible
+    to override this for a specific test case.
+    """
+    return plain_http_environ()
+
+
+@pytest.fixture(scope='module')
+def module_app(request, module_config_stub, module_http_environ, baseapp):
+    app = CustomTestApp(baseapp, extra_environ=module_http_environ)
+    return app
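Note: the module-scoped variants trade per-test isolation for speed, building the CustomTestApp once per test module instead of once per test function. A sketch (test names hypothetical):

def test_first(module_app):
    module_app.get('/', status=200)


def test_second(module_app):
    # same app instance as in test_first; no per-test rebuild cost
    module_app.get('/', status=200)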
@@ -0,0 +1,157 @@
+# Copyright (C) 2010-2024 RhodeCode GmbH
+# [... standard RhodeCode AGPLv3 / dual-license header ...]
+
+import os
+import shutil
+import logging
+import textwrap
+
+import pytest
+
+import rhodecode
+import rhodecode.lib
+
+from rhodecode.tests import console_printer
+
+log = logging.getLogger(__name__)
+
+
+def store_rcextensions(destination, force=False):
+    from rhodecode.config import rcextensions
+    package_path = rcextensions.__path__[0]
+
+    # Note: rcextensions are looked up based on the path of the ini file
+    rcextensions_path = os.path.join(destination, 'rcextensions')
+
+    if force:
+        shutil.rmtree(rcextensions_path, ignore_errors=True)
+    shutil.copytree(package_path, rcextensions_path)
+
+
+@pytest.fixture(scope="module")
+def rcextensions(request, tmp_storage_location):
+    """
+    Installs a testing rcextensions pack to ensure they work as expected.
+    """
+
+    # Note: rcextensions are looked up based on the path of the ini file
+    rcextensions_path = os.path.join(tmp_storage_location, 'rcextensions')
+
+    if os.path.exists(rcextensions_path):
+        pytest.fail(
+            f"Path for rcextensions already exists, please clean up this "
+            f"path before the test run: {rcextensions_path}")
+    else:
+        store_rcextensions(tmp_storage_location)
+
+
+@pytest.fixture(scope='function')
+def rcextensions_present(request):
+
+    class RcExtensionsPresent:
+        def __init__(self, rcextensions_location):
+            self.rcextensions_location = rcextensions_location
+
+        def __enter__(self):
+            self.store()
+
+        def __exit__(self, exc_type, exc_val, exc_tb):
+            self.cleanup()
+
+        def store(self):
+            store_rcextensions(self.rcextensions_location)
+
+        def cleanup(self):
+            shutil.rmtree(os.path.join(self.rcextensions_location, 'rcextensions'))
+
+    return RcExtensionsPresent
+
+
+@pytest.fixture(scope='function')
+def rcextensions_modification(request):
+    """
+    Example usage::
+
+        hook_name = '_pre_push_hook'
+        code = '''
+        raise OSError('failed')
+        return HookResponse(1, 'FAILED')
+        '''
+        mods = [
+            (hook_name, code),
+        ]
+        # rhodecode.ini file location, where rcextensions needs to live
+        rcstack_location = os.path.dirname(rcstack.config_file)
+        with rcextensions_modification(rcstack_location, mods):
+            # do some stuff
+    """
+
+    class RcextensionsModification:
+        def __init__(self, rcextensions_location, mods, create_if_missing=False, force_create=False):
+            self.force_create = force_create
+            self.create_if_missing = create_if_missing
+            self.rcextensions_location = rcextensions_location
+            self.mods = mods
+            if not isinstance(mods, list):
+                raise ValueError('mods must be a list of modifications')
+
+        def __enter__(self):
+            if self.create_if_missing:
+                store_rcextensions(self.rcextensions_location, force=self.force_create)
+
+            for hook_name, method_body in self.mods:
+                self.modification(hook_name, method_body)
+
+        def __exit__(self, exc_type, exc_val, exc_tb):
+            self.cleanup()
+
+        def cleanup(self):
+            # reset rcextensions to "bare" state from the package
+            store_rcextensions(self.rcextensions_location, force=True)
+
+        def modification(self, hook_name, method_body):
+            import ast
+
+            rcextensions_path = os.path.join(self.rcextensions_location, 'rcextensions')
+
+            # Load the code from hooks.py
+            hooks_filename = os.path.join(rcextensions_path, 'hooks.py')
+            with open(hooks_filename, "r") as file:
+                tree = ast.parse(file.read())
+
+            # Define new content for the function as a string
+            new_code = textwrap.dedent(method_body)
+
+            # Parse the new code so it can be spliced into the function
+            new_body = ast.parse(new_code).body
+
+            # Walk through the AST to find and modify the function
+            for node in tree.body:
+                if isinstance(node, ast.FunctionDef) and node.name == hook_name:
+                    node.body = new_body  # Replace the function body with the new body
+
+            # Compile the modified AST, to catch syntax errors early
+            compile(tree, hooks_filename, "exec")
+
+            # Write the updated code back to hooks.py (ast.unparse requires Python 3.9+)
+            with open(hooks_filename, "w") as file:
+                file.write(ast.unparse(tree))
+
+            console_printer(f" [green]rcextensions[/green] Updated the body of '{hooks_filename}' function '{hook_name}'")
+
+    return RcextensionsModification
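Note: the modification() method above swaps a function body in place via the ast module. A self-contained sketch of the same technique, with hypothetical source and hook names:

import ast
import textwrap

source = textwrap.dedent('''
    def _pre_push_hook(*args, **kwargs):
        return None
''')
tree = ast.parse(source)

# ast.parse() accepts a bare 'return' statement; checks such as
# "'return' outside function" only run when bytecode is generated.
new_body = ast.parse("return ('forbidden', 1)").body

for node in tree.body:
    if isinstance(node, ast.FunctionDef) and node.name == '_pre_push_hook':
        node.body = new_body  # splice the new statements into the function

print(ast.unparse(tree))  # ast.unparse() requires Python 3.9+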
@@ -0,0 +1,17 @@
+# Copyright (C) 2010-2024 RhodeCode GmbH
+# [... standard RhodeCode AGPLv3 / dual-license header ...]
@@ -0,0 +1,17 @@
+# Copyright (C) 2010-2024 RhodeCode GmbH
+# [... standard RhodeCode AGPLv3 / dual-license header ...]
(three new files, mode 100644; no content shown by the diff viewer)
@@ -27,8 +27,11 @@ from rhodecode.tests.conftest_common imp

 pytest_plugins = [
-    "rhodecode.tests.fixture
-    "rhodecode.tests.fixture
+    "rhodecode.tests.fixtures.fixture_pyramid",
+    "rhodecode.tests.fixtures.fixture_utils",
+    "rhodecode.tests.fixtures.function_scoped_baseapp",
+    "rhodecode.tests.fixtures.module_scoped_baseapp",
+    "rhodecode.tests.fixtures.rcextensions_fixtures",
 ]

@@ -65,8 +65,7 @@ dependencies = {file = ["requirements.tx
 optional-dependencies.tests = {file = ["requirements_test.txt"]}

 [tool.ruff]
-
-select = [
+lint.select = [
     # Pyflakes
     "F",
     # Pycodestyle
@@ -75,16 +74,13 @@ select = [
     # isort
     "I001"
 ]
-
-ignore = [
+lint.ignore = [
     "E501",  # line too long, handled by black
 ]
-
 # Same as Black.
 line-length = 120

-[tool.ruff.isort]
-
+[tool.ruff.lint.isort]
 known-first-party = ["rhodecode"]

 [tool.ruff.format]
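Note: this hunk tracks Ruff's configuration migration (introduced around Ruff 0.2), which moved the linter settings under a lint namespace: the top-level select/ignore keys and the [tool.ruff.isort] table become lint.select, lint.ignore, and [tool.ruff.lint.isort], while shared options such as line-length stay at the top level of [tool.ruff].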
@@ -4,8 +4,10 @@ norecursedirs = rhodecode/public rhodeco
 cache_dir = /tmp/.pytest_cache

 pyramid_config = rhodecode/tests/rhodecode.ini
-vcsserver_protocol = http
-vcsserver_config
+
+vcsserver_config = rhodecode/tests/vcsserver_http.ini
+rhodecode_config = rhodecode/tests/rhodecode.ini
+celery_config = rhodecode/tests/rhodecode.ini

 addopts =
     --pdbcls=IPython.terminal.debugger:TerminalPdb
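Note: custom ini keys like these only become readable once the suite registers them; a sketch of how a conftest would declare and read them (assuming the suite uses pytest's standard addini mechanism):

# conftest.py (sketch)
def pytest_addoption(parser):
    parser.addini('vcsserver_config', 'vcsserver ini file used by the tests')
    parser.addini('rhodecode_config', 'rhodecode ini file used by the tests')
    parser.addini('celery_config', 'ini file providing celery broker settings')


def pytest_configure(config):
    # values come from the pytest.ini shown above
    vcsserver_ini = config.getini('vcsserver_config')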
@@ -1,5 +1,4 @@
-
-# Copyright (C) 2010-2023 RhodeCode GmbH
+# Copyright (C) 2010-2024 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
@@ -24,7 +24,7 @@ from rhodecode.model.db import Gist
 from rhodecode.model.gist import GistModel
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_error, assert_ok, crash)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture


 @pytest.mark.usefixtures("testuser_api", "app")
@@ -27,7 +27,7 @@ from rhodecode.model.user import UserMod
 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_ok, assert_error, crash)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.lib.ext_json import json
 from rhodecode.lib.str_utils import safe_str

@@ -26,7 +26,7 @@ from rhodecode.model.user import UserMod
 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_ok, assert_error, crash)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture


 fixture = Fixture()
@@ -26,7 +26,7 @@ from rhodecode.tests import (
     TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_EMAIL)
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_ok, assert_error, jsonify, crash)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.model.db import RepoGroup


@@ -25,7 +25,7 @@ from rhodecode.model.user import UserMod
 from rhodecode.model.user_group import UserGroupModel
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_error, assert_ok, crash, jsonify)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture


 @pytest.mark.usefixtures("testuser_api", "app")
@@ -28,7 +28,7 @@ from rhodecode.model.user import UserMod
 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_error, assert_ok, crash)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture


 fixture = Fixture()
@@ -25,8 +25,8 @@ from rhodecode.model.scm import ScmModel
 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
 from rhodecode.api.tests.utils import (
     build_data, api_call, assert_error, assert_ok, crash, jsonify)
-from rhodecode.tests.fixture import Fixture
-from rhodecode.tests.fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
+from rhodecode.tests.fixtures.fixture_utils import plain_http_host_only_stub

 fixture = Fixture()

@@ -26,7 +26,7 @@ import pytest
 from rhodecode.lib.str_utils import safe_str
 from rhodecode.tests import *
 from rhodecode.tests.routes import route_path
-from rhodecode.tests.fixture import FIXTURES
+from rhodecode.tests.fixtures.rc_fixture import FIXTURES
 from rhodecode.model.db import UserLog
 from rhodecode.model.meta import Session

@@ -20,7 +20,7 @@
 import pytest

 from rhodecode.tests import TestController
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -37,7 +37,7 @@ from rhodecode.model.user import UserMod
 from rhodecode.tests import (
     login_user_session, assert_session_flash, TEST_USER_ADMIN_LOGIN,
     TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
-from rhodecode.tests.fixture import Fixture, error_function
+from rhodecode.tests.fixtures.rc_fixture import Fixture, error_function
 from rhodecode.tests.utils import repo_on_filesystem
 from rhodecode.tests.routes import route_path

@@ -27,7 +27,7 @@ from rhodecode.model.meta import Session
 from rhodecode.model.repo_group import RepoGroupModel
 from rhodecode.tests import (
     assert_session_flash, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path


@@ -24,7 +24,7 @@ from rhodecode.model.meta import Session

 from rhodecode.tests import (
     TestController, assert_session_flash)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -28,7 +28,7 @@ from rhodecode.model.user import UserMod

 from rhodecode.tests import (
     TestController, TEST_USER_REGULAR_LOGIN, assert_session_flash)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -22,7 +22,7 @@ import pytest
 from rhodecode.model.db import User, UserSshKeys

 from rhodecode.tests import TestController, assert_session_flash
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -27,7 +27,7 @@ from rhodecode.model.repo_group import R
 from rhodecode.model.db import Session, Repository, RepoGroup

 from rhodecode.tests import TestController, TEST_USER_ADMIN_LOGIN
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -22,7 +22,7 @@ from rhodecode.model.db import Repositor
 from rhodecode.lib.ext_json import json

 from rhodecode.tests import TestController
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -20,7 +20,7 @@ import pytest
 from rhodecode.lib.ext_json import json

 from rhodecode.tests import TestController
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -40,7 +40,7 @@ import pytest
 from rhodecode.lib.ext_json import json

 from rhodecode.tests import TestController
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -24,7 +24,7 @@ from rhodecode.model.db import Repositor
 from rhodecode.model.meta import Session
 from rhodecode.model.settings import SettingsModel
 from rhodecode.tests import TestController
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path


@@ -3,7 +3,7 @@ import mock

 from rhodecode.lib.type_utils import AttributeDict
 from rhodecode.model.meta import Session
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path
 from rhodecode.model.settings import SettingsModel

@@ -31,7 +31,7 @@ from rhodecode.model.meta import Session
 from rhodecode.tests import (
     assert_session_flash, HG_REPO, TEST_USER_ADMIN_LOGIN,
     no_newline_id_generator)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -22,7 +22,7 @@ from rhodecode.lib import helpers as h
 from rhodecode.tests import (
     TestController, clear_cache_regions,
     TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.utils import AssertResponse
 from rhodecode.tests.routes import route_path

@@ -22,7 +22,7 @@ from rhodecode.apps._base import ADMIN_P
 from rhodecode.model.db import User
 from rhodecode.tests import (
     TestController, assert_session_flash)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path


@@ -23,7 +23,7 @@ from rhodecode.model.db import User, Use
 from rhodecode.tests import (
     TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_EMAIL,
     assert_session_flash, TEST_USER_REGULAR_PASS)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path


@@ -21,7 +21,7 @@ import pytest
 from rhodecode.tests import (
     TestController, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
     TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 from rhodecode.model.db import Notification, User
@@ -24,7 +24,7 @@ from rhodecode.lib.auth import check_pas
 from rhodecode.model.meta import Session
 from rhodecode.model.user import UserModel
 from rhodecode.tests import assert_session_flash, TestController
-from rhodecode.tests.fixture import Fixture, error_function
+from rhodecode.tests.fixtures.rc_fixture import Fixture, error_function
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -20,7 +20,7 @@
 from rhodecode.tests import (
     TestController, TEST_USER_ADMIN_LOGIN,
     TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -19,7 +19,7 @@

 from rhodecode.model.db import User, Repository, UserFollowing
 from rhodecode.tests import TestController, TEST_USER_ADMIN_LOGIN
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -21,7 +21,7 @@
 from rhodecode.model.db import User, UserSshKeys

 from rhodecode.tests import TestController, assert_session_flash
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -22,7 +22,7 @@ import pytest
 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
 from rhodecode.lib.vcs import nodes
 from rhodecode.lib.vcs.backends.base import EmptyCommit
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.utils import commit_change
 from rhodecode.tests.routes import route_path

@@ -166,14 +166,15 @@ class TestSideBySideDiff(object):
         response.mustcontain('Collapse 2 commits')
         response.mustcontain('123 file changed')

-        response.mustcontain(
-            'r%s:%s...r%s:%s' % (
-                commit1.idx, commit1.short_id, commit2.idx, commit2.short_id))
+        response.mustcontain(f'r{commit1.idx}:{commit1.short_id}...r{commit2.idx}:{commit2.short_id}')

         response.mustcontain(f_path)

-    @pytest.mark.xfail(reason='GIT does not handle empty commit compare correct (missing 1 commit)')
+    #@pytest.mark.xfail(reason='GIT does not handle empty commit compare correct (missing 1 commit)')
     def test_diff_side_by_side_from_0_commit_with_file_filter(self, app, backend, backend_stub):
+        if backend.alias == 'git':
+            pytest.skip('GIT does not handle empty commit compare correct (missing 1 commit)')
+
         f_path = b'test_sidebyside_file.py'
         commit1_content = b'content-25d7e49c18b159446c\n'
         commit2_content = b'content-603d6c72c46d953420\n'
@@ -200,9 +201,7 @@ class TestSideBySideDiff(object):
         response.mustcontain('Collapse 2 commits')
         response.mustcontain('1 file changed')

-        response.mustcontain(
-            'r%s:%s...r%s:%s' % (
-                commit1.idx, commit1.short_id, commit2.idx, commit2.short_id))
+        response.mustcontain(f'r{commit1.idx}:{commit1.short_id}...r{commit2.idx}:{commit2.short_id}')

         response.mustcontain(f_path)

@@ -33,7 +33,7 @@ from rhodecode.lib.vcs.conf import setti
 from rhodecode.model.db import Session, Repository

 from rhodecode.tests import assert_session_flash
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path


@@ -21,7 +21,7 @@ import pytest

 from rhodecode.tests import TestController, assert_session_flash, HG_FORK, GIT_FORK

-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.lib import helpers as h

 from rhodecode.model.db import Repository
@@ -21,7 +21,7 @@ import pytest

 from rhodecode.model.db import Repository, UserRepoToPerm, Permission, User

-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -15,6 +15,9 @@
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
+import logging
+import os
+
 import mock
 import pytest

@@ -41,7 +44,7 @@ from rhodecode.tests import (
     TEST_USER_ADMIN_LOGIN,
     TEST_USER_REGULAR_LOGIN,
 )
-from rhodecode.tests.fixture
+from rhodecode.tests.fixtures.fixture_utils import PRTestUtility
 from rhodecode.tests.routes import route_path


@@ -1050,7 +1053,6 @@ class TestPullrequestsView(object):
         )
         assert len(notifications.all()) == 2

-    @pytest.mark.xfail(reason="unable to fix this test after python3 migration")
     def test_create_pull_request_stores_ancestor_commit_id(self, backend, csrf_token):
         commits = [
             {
@@ -1125,20 +1127,38 @@ class TestPullrequestsView(object):
         response.mustcontain(no=["content_of_ancestor-child"])
         response.mustcontain("content_of_change")

-    def test_merge_pull_request_enabled(self, pr_util, csrf_token):
-        # Clear any previous calls to rcextensions
-        rhodecode.EXTENSIONS.calls.clear()
+    def test_merge_pull_request_enabled(self, pr_util, csrf_token, rcextensions_modification):

         pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
         pull_request_id = pull_request.pull_request_id
-        repo_name =
+        repo_name = pull_request.target_repo.scm_instance().name

         url = route_path(
             "pullrequest_merge",
-            repo_name=
+            repo_name=repo_name,
             pull_request_id=pull_request_id,
         )
-        response = self.app.post(url, params={"csrf_token": csrf_token}).follow()
+
+        rcstack_location = os.path.dirname(self.app._pyramid_registry.settings['__file__'])
+        rc_ext_location = os.path.join(rcstack_location, 'rcextension-output.txt')
+
+
+        mods = [
+            ('_push_hook',
+             f"""
+             import os
+             action = kwargs['action']
+             commit_ids = kwargs['commit_ids']
+             with open('{rc_ext_location}', 'w') as f:
+                 f.write('test-execution'+os.linesep)
+                 f.write(f'{{action}}'+os.linesep)
+                 f.write(f'{{commit_ids}}'+os.linesep)
+             return HookResponse(0, 'HOOK_TEST')
+             """)
+        ]
+        # Add the hook
+        with rcextensions_modification(rcstack_location, mods, create_if_missing=True, force_create=True):
+            response = self.app.post(url, params={"csrf_token": csrf_token}).follow()

         pull_request = PullRequest.get(pull_request_id)

@@ -1162,12 +1182,39 @@ class TestPullrequestsView(object):
         assert actions[-1].action == "user.push"
         assert actions[-1].action_data["commit_ids"] == pr_commit_ids

-        # Check post_push rcextension was really executed
-        push_calls = rhodecode.EXTENSIONS.calls["_push_hook"]
-        assert len(push_calls) == 1
-        unused_last_call_args, last_call_kwargs = push_calls[0]
-        assert last_call_kwargs["action"] == "push"
-        assert last_call_kwargs["commit_ids"] == pr_commit_ids
+        with open(rc_ext_location) as f:
+            f_data = f.read()
+        assert 'test-execution' in f_data
+        for commit_id in pr_commit_ids:
+            assert f'{commit_id}' in f_data
+
+    def test_merge_pull_request_forbidden_by_pre_push_hook(self, pr_util, csrf_token, rcextensions_modification, caplog):
+        caplog.set_level(logging.WARNING, logger="rhodecode.model.pull_request")
+
+        pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
+        pull_request_id = pull_request.pull_request_id
+        repo_name = pull_request.target_repo.scm_instance().name
+
+        url = route_path(
+            "pullrequest_merge",
+            repo_name=repo_name,
+            pull_request_id=pull_request_id,
+        )
+
+        rcstack_location = os.path.dirname(self.app._pyramid_registry.settings['__file__'])
+
+        mods = [
+            ('_pre_push_hook',
+             f"""
+             return HookResponse(1, 'HOOK_TEST_FORBIDDEN')
+             """)
+        ]
+        # Add the hook
+        with rcextensions_modification(rcstack_location, mods, create_if_missing=True, force_create=True):
+            self.app.post(url, params={"csrf_token": csrf_token})
+
+        assert 'Merge failed, not updating the pull request.' in [r[2] for r in caplog.record_tuples]

     def test_merge_pull_request_disabled(self, pr_util, csrf_token):
         pull_request = pr_util.create_pull_request(mergeable=False)
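Note: once the fixture splices the string above into rcextensions/hooks.py, the resulting hook looks roughly like this (output path is hypothetical; the signature and the in-scope HookResponse name follow the rcextensions hooks module):

def _push_hook(*args, **kwargs):
    import os
    action = kwargs['action']
    commit_ids = kwargs['commit_ids']
    # record the invocation on disk, so the test can assert on it afterwards
    with open('/path/to/rcextension-output.txt', 'w') as f:
        f.write('test-execution' + os.linesep)
        f.write(f'{action}' + os.linesep)
        f.write(f'{commit_ids}' + os.linesep)
    return HookResponse(0, 'HOOK_TEST')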
@@ -1523,7 +1570,6 @@ class TestPullrequestsView(object):

         assert pull_request.revisions == [commit_ids["change-rebased"]]

-
     def test_remove_pull_request_branch(self, backend_git, csrf_token):
         branch_name = "development"
         commits = [
@@ -26,7 +26,7 @@ from rhodecode.model.db import Repositor
 from rhodecode.model.meta import Session
 from rhodecode.tests import (
     TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, assert_session_flash)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path

 fixture = Fixture()
@@ -24,7 +24,7 @@ from rhodecode.model.db import Repositor
 from rhodecode.model.repo import RepoModel
 from rhodecode.tests import (
     HG_REPO, GIT_REPO, assert_session_flash, no_newline_id_generator)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.utils import repo_on_filesystem
 from rhodecode.tests.routes import route_path

@@ -31,7 +31,7 @@ from rhodecode.model.meta import Session
 from rhodecode.model.repo import RepoModel
 from rhodecode.model.scm import ScmModel
 from rhodecode.tests import assert_session_flash
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.utils import AssertResponse, repo_on_filesystem
 from rhodecode.tests.routes import route_path

@@ -30,7 +30,7 @@ from rhodecode.model.user import UserMod
 from rhodecode.tests import (
     login_user_session, logout_user_session,
     TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.utils import AssertResponse
 from rhodecode.tests.routes import route_path

@@ -32,16 +32,13 @@ class TestAdminRepoVcsSettings(object):
     @pytest.mark.parametrize('setting_name, setting_backends', [
         ('hg_use_rebase_for_merging', ['hg']),
     ])
-    def test_labs_settings_visible_if_enabled(
-            self, setting_name, setting_backends, backend):
+    def test_labs_settings_visible_if_enabled(self, setting_name, setting_backends, backend):
         if backend.alias not in setting_backends:
             pytest.skip('Setting not available for backend {}'.format(backend))

-        vcs_settings_url = route_path(
-            'edit_repo_vcs', repo_name=backend.repo.repo_name)
+        vcs_settings_url = route_path('edit_repo_vcs', repo_name=backend.repo.repo_name)

-        with mock.patch.dict(
-                rhodecode.CONFIG, {'labs_settings_active': 'true'}):
+        with mock.patch.dict(rhodecode.CONFIG, {'labs_settings_active': 'true'}):
             response = self.app.get(vcs_settings_url)

         assertr = response.assert_response()
@@ -20,7 +20,7 @@ import os
 import sys
 import logging

-from rhodecode.lib.hook_daemon.
+from rhodecode.lib.hook_daemon.utils import prepare_callback_daemon
 from rhodecode.lib.ext_json import sjson as json
 from rhodecode.lib.vcs.conf import settings as vcs_settings
 from rhodecode.lib.api_utils import call_service_api
@@ -162,9 +162,7 @@ class SshVcsServer(object):
         extras = {}
         extras.update(tunnel_extras)
 
-        callback_daemon, extras = prepare_callback_daemon(
-            extras, protocol=self.hooks_protocol,
-            host=vcs_settings.HOOKS_HOST)
+        callback_daemon, extras = prepare_callback_daemon(extras, protocol=self.hooks_protocol)
 
         with callback_daemon:
             try:
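Worth noting about the hunk above: `prepare_callback_daemon` loses its `host` keyword because the surviving protocols ('celery' and 'local') never bind a port on the RhodeCode side. A minimal sketch of the calling pattern that results, assuming only what the hunk shows (the surrounding method is abbreviated, `self.tunnel` stands for the VCS-specific wrapper):

    def _serve(self, tunnel_extras):
        extras = {}
        extras.update(tunnel_extras)  # must include 'config' so celery settings can be read
        callback_daemon, extras = prepare_callback_daemon(extras, protocol=self.hooks_protocol)
        with callback_daemon:  # enter/exit are no-ops for the celery and local daemons
            return self.tunnel.run(extras)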
@@ -33,19 +33,24 @@ class GitServerCreator(object):
         'app:main': {
             'ssh.executable.git': git_path,
             'vcs.hooks.protocol.v2': 'celery',
+            'app.service_api.host': 'http://localhost',
+            'app.service_api.token': 'secret4',
+            'rhodecode.api.url': '/_admin/api',
         }
     }
     repo_name = 'test_git'
     repo_mode = 'receive-pack'
     user = plain_dummy_user()
 
-    def __init__(self):
-        pass
+    def __init__(self, service_api_url, ini_file):
+        self.service_api_url = service_api_url
+        self.ini_file = ini_file
 
     def create(self, **kwargs):
+        self.config_data['app:main']['app.service_api.host'] = self.service_api_url
         parameters = {
             'store': self.root,
-            'ini_path': '',
+            'ini_path': self.ini_file,
             'user': self.user,
             'repo_name': self.repo_name,
             'repo_mode': self.repo_mode,
@@ -60,12 +65,30 @@ class GitServerCreator(object):
         return server
 
 
-@pytest.fixture()
-def git_server(app):
-    return GitServerCreator()
+@pytest.fixture(scope='module')
+def git_server(request, module_app, rhodecode_factory, available_port_factory):
+    ini_file = module_app._pyramid_settings['__file__']
+    vcsserver_host = module_app._pyramid_settings['vcs.server']
+
+    store_dir = os.path.dirname(ini_file)
+
+    # start rhodecode for service API
+    rc = rhodecode_factory(
+        request,
+        store_dir=store_dir,
+        port=available_port_factory(),
+        overrides=(
+            {'handler_console': {'level': 'DEBUG'}},
+            {'app:main': {'vcs.server': vcsserver_host}},
+            {'app:main': {'repo_store.path': store_dir}}
+        ))
+
+    service_api_url = f'http://{rc.bind_addr}'
+
+    return GitServerCreator(service_api_url, ini_file)
 
 
-class TestGitServer(object):
+class TestGitServer:
 
     def test_command(self, git_server):
         server = git_server.create()
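The fixture change above swaps a per-test creator for a module-scoped fixture that boots one live RhodeCode instance (via `rhodecode_factory`) and shares it across all tests in the module. Stripped of RhodeCode specifics, the pytest pattern in play looks like this (`start_server` is an illustrative name, not part of the changeset):

    import pytest

    @pytest.fixture(scope='module')
    def shared_server(request):
        server = start_server()            # expensive setup, runs once per module
        request.addfinalizer(server.stop)  # teardown after the module's last test
        return server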
@@ -102,14 +125,14 @@ class TestGitServer(object):
         assert result is value
 
     def test_run_returns_executes_command(self, git_server):
+        from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper
         server = git_server.create()
-        from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper
 
         os.environ['SSH_CLIENT'] = '127.0.0.1'
         with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch:
             _patch.return_value = 0
             with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'):
-                exit_code = server.run()
+                exit_code = server.run(tunnel_extras={'config': server.ini_path})
 
         assert exit_code == (0, False)
 
@@ -135,7 +158,7 @@ class TestGitServer(object):
             'action': action,
             'ip': '10.10.10.10',
             'locked_by': [None, None],
-            'config': '',
+            'config': git_server.ini_file,
             'repo_store': store,
             'server_url': None,
             'hooks': ['push', 'pull'],
@@ -17,6 +17,7 @@
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 import os
+
 import mock
 import pytest
 
@@ -32,22 +33,27 @@ class MercurialServerCreator(object):
         'app:main': {
             'ssh.executable.hg': hg_path,
             'vcs.hooks.protocol.v2': 'celery',
+            'app.service_api.host': 'http://localhost',
+            'app.service_api.token': 'secret4',
+            'rhodecode.api.url': '/_admin/api',
         }
     }
     repo_name = 'test_hg'
     user = plain_dummy_user()
 
-    def __init__(self):
-        pass
+    def __init__(self, service_api_url, ini_file):
+        self.service_api_url = service_api_url
+        self.ini_file = ini_file
 
     def create(self, **kwargs):
+        self.config_data['app:main']['app.service_api.host'] = self.service_api_url
         parameters = {
             'store': self.root,
-            'ini_path': '',
+            'ini_path': self.ini_file,
             'user': self.user,
             'repo_name': self.repo_name,
             'user_permissions': {
                 self.repo_name: 'repository.admin'
             },
             'settings': self.config_data['app:main'],
             'env': plain_dummy_env()
@@ -57,12 +63,30 @@ class MercurialServerCreator(object):
         return server
 
 
-@pytest.fixture()
-def hg_server(app):
-    return MercurialServerCreator()
+@pytest.fixture(scope='module')
+def hg_server(request, module_app, rhodecode_factory, available_port_factory):
+    ini_file = module_app._pyramid_settings['__file__']
+    vcsserver_host = module_app._pyramid_settings['vcs.server']
+
+    store_dir = os.path.dirname(ini_file)
+
+    # start rhodecode for service API
+    rc = rhodecode_factory(
+        request,
+        store_dir=store_dir,
+        port=available_port_factory(),
+        overrides=(
+            {'handler_console': {'level': 'DEBUG'}},
+            {'app:main': {'vcs.server': vcsserver_host}},
+            {'app:main': {'repo_store.path': store_dir}}
+        ))
+
+    service_api_url = f'http://{rc.bind_addr}'
+
+    return MercurialServerCreator(service_api_url, ini_file)
 
 
-class TestMercurialServer(object):
+class TestMercurialServer:
 
     def test_command(self, hg_server, tmpdir):
         server = hg_server.create()
@@ -107,7 +131,7 @@ class TestMercurialServer(object):
         with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch:
             _patch.return_value = 0
             with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'):
-                exit_code = server.run()
+                exit_code = server.run(tunnel_extras={'config': server.ini_path})
 
         assert exit_code == (0, False)
 
@@ -15,7 +15,9 @@
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
+
 import os
+
 import mock
 import pytest
 
@@ -26,39 +28,62 @@ from rhodecode.apps.ssh_support.tests.co
 class SubversionServerCreator(object):
     root = '/tmp/repo/path/'
     svn_path = '/usr/local/bin/svnserve'
+
     config_data = {
         'app:main': {
             'ssh.executable.svn': svn_path,
             'vcs.hooks.protocol.v2': 'celery',
+            'app.service_api.host': 'http://localhost',
+            'app.service_api.token': 'secret4',
+            'rhodecode.api.url': '/_admin/api',
         }
     }
     repo_name = 'test-svn'
     user = plain_dummy_user()
 
-    def __init__(self):
-        pass
+    def __init__(self, service_api_url, ini_file):
+        self.service_api_url = service_api_url
+        self.ini_file = ini_file
 
     def create(self, **kwargs):
+        self.config_data['app:main']['app.service_api.host'] = self.service_api_url
         parameters = {
             'store': self.root,
+            'ini_path': self.ini_file,
+            'user': self.user,
             'repo_name': self.repo_name,
-            'ini_path': '',
-            'user': self.user,
             'user_permissions': {
                 self.repo_name: 'repository.admin'
             },
             'settings': self.config_data['app:main'],
             'env': plain_dummy_env()
         }
-
         parameters.update(kwargs)
         server = SubversionServer(**parameters)
         return server
 
 
-@pytest.fixture()
-def svn_server(app):
-    return SubversionServerCreator()
+@pytest.fixture(scope='module')
+def svn_server(request, module_app, rhodecode_factory, available_port_factory):
+    ini_file = module_app._pyramid_settings['__file__']
+    vcsserver_host = module_app._pyramid_settings['vcs.server']
+
+    store_dir = os.path.dirname(ini_file)
+
+    # start rhodecode for service API
+    rc = rhodecode_factory(
+        request,
+        store_dir=store_dir,
+        port=available_port_factory(),
+        overrides=(
+            {'handler_console': {'level': 'DEBUG'}},
+            {'app:main': {'vcs.server': vcsserver_host}},
+            {'app:main': {'repo_store.path': store_dir}}
+        ))
+
+    service_api_url = f'http://{rc.bind_addr}'
+
+    return SubversionServerCreator(service_api_url, ini_file)
 
 
 class TestSubversionServer(object):
@@ -168,8 +193,9 @@ class TestSubversionServer(object):
         assert repo_name == expected_match
 
     def test_run_returns_executes_command(self, svn_server):
+        from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
+
         server = svn_server.create()
-        from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
         os.environ['SSH_CLIENT'] = '127.0.0.1'
         with mock.patch.object(
             SubversionTunnelWrapper, 'get_first_client_response',
@@ -184,20 +210,18 @@ class TestSubversionServer(object):
             SubversionTunnelWrapper, 'command',
             return_value=['date']):
 
-            exit_code = server.run()
+            exit_code = server.run(tunnel_extras={'config': server.ini_path})
         # SVN has this differently configured, and we get in our mock env
         # None as return code
         assert exit_code == (None, False)
 
     def test_run_returns_executes_command_that_cannot_extract_repo_name(self, svn_server):
+        from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
+
         server = svn_server.create()
-        from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
-        with mock.patch.object(
-            SubversionTunnelWrapper, 'command',
-            return_value=['date']):
-            with mock.patch.object(
-                SubversionTunnelWrapper, 'get_first_client_response',
-                return_value=None):
-            exit_code = server.run()
+        with mock.patch.object(SubversionTunnelWrapper, 'command', return_value=['date']):
+            with mock.patch.object(SubversionTunnelWrapper, 'get_first_client_response',
+                return_value=None):
+                exit_code = server.run(tunnel_extras={'config': server.ini_path})
 
         assert exit_code == (1, False)
@@ -22,7 +22,7 @@ from rhodecode.tests import (
     TestController, assert_session_flash, TEST_USER_ADMIN_LOGIN)
 from rhodecode.model.db import UserGroup
 from rhodecode.model.meta import Session
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path
 
 fixture = Fixture()
@@ -18,7 +18,7 @@
 from rhodecode.model.user_group import UserGroupModel
 from rhodecode.tests import (
     TestController, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path
 
 fixture = Fixture()
@@ -22,7 +22,7 @@ from rhodecode.model.db import User
 from rhodecode.tests import (
     TestController, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
     TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.utils import AssertResponse
 from rhodecode.tests.routes import route_path
 
@@ -30,7 +30,7 @@ from rhodecode.lib.vcs import connect_vc
 log = logging.getLogger(__name__)
 
 
-def propagate_rhodecode_config(global_config, settings, config):
+def propagate_rhodecode_config(global_config, settings, config, full=True):
     # Store the settings to make them available to other modules.
     settings_merged = global_config.copy()
     settings_merged.update(settings)
@@ -40,7 +40,7 @@ def propagate_rhodecode_config(global_co
     rhodecode.PYRAMID_SETTINGS = settings_merged
     rhodecode.CONFIG = settings_merged
 
-    if 'default_user_id' not in rhodecode.CONFIG:
+    if full and 'default_user_id' not in rhodecode.CONFIG:
         rhodecode.CONFIG['default_user_id'] = utils.get_default_user_id()
     log.debug('set rhodecode.CONFIG data')
 
@@ -93,6 +93,7 @@ def load_pyramid_environment(global_conf
     # first run, to store data...
     propagate_rhodecode_config(global_config, settings, {})
 
+
     if vcs_server_enabled:
         connect_vcs(vcs_server_uri, utils.get_vcs_server_protocol(settings))
     else:
@@ -101,6 +101,9 @@ def make_pyramid_app(global_config, **se
     patches.inspect_getargspec()
     patches.repoze_sendmail_lf_fix()
 
+    # first init, so load_pyramid_environment can access some critical data, like __file__
+    propagate_rhodecode_config(global_config, {}, {}, full=False)
+
     load_pyramid_environment(global_config, settings)
 
     # Static file view comes first
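Together with the `full=True` hunks just above, this makes `propagate_rhodecode_config` run in two phases: an early pass with `full=False`, before any database is connected, so that `load_pyramid_environment` can already read keys such as `__file__`; and a full pass later, when `default_user_id` can be resolved. A toy model of the two-phase bootstrap (`connect_database` and `lookup_default_user_id` are hypothetical helpers):

    CONFIG = {}

    def propagate(global_config, settings, full=True):
        CONFIG.update(global_config)
        CONFIG.update(settings)
        if full and 'default_user_id' not in CONFIG:
            CONFIG['default_user_id'] = lookup_default_user_id()  # needs a live DB

    def make_app(global_config, settings):
        propagate(global_config, {}, full=False)  # phase 1: config only, no DB access
        connect_database(settings)                # hypothetical
        propagate(global_config, settings)        # phase 2: DB-backed keys filled in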
@@ -17,7 +17,7 @@
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 """
-rcextensions module, please edit `hooks.py` to over write hooks logic
+rcextensions module, please edit `hooks.py` to over-write hooks logic
 """
 
 from .hooks import (
@@ -85,7 +85,7 @@ def _pre_push_hook(*args, **kwargs):
 
     # check files names
     if forbidden_files:
-        reason = 'File {} is forbidden to be pushed'
+        reason = f'File {file_name} is forbidden to be pushed'
         for forbidden_pattern in forbid_files:
             # here we can also filter for operation, e.g if check for only ADDED files
             # if operation == 'A':
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
-# Copyright (C) 2016-2023 RhodeCode GmbH
+# Copyright (C) 2016-2023 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
@@ -55,7 +54,7 @@ def run(*args, **kwargs):
     return fields
 
 
-class _Undefined(object):
+class _Undefined:
     pass
 
 
@@ -67,7 +66,7 @@ def get_field(extra_fields_data, key, de
 
     if key not in extra_fields_data:
         if isinstance(default, _Undefined):
-            raise ValueError('key {} not present in extra_fields'.format(key))
+            raise ValueError(f'key {key} not present in extra_fields')
         return default
 
     # NOTE(dan): from metadata we get field_label, field_value, field_desc, field_type
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
-# Copyright (C) 2016-2023 RhodeCode GmbH
+# Copyright (C) 2016-2023 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
-# Copyright (C) 2016-2023 RhodeCode GmbH
+# Copyright (C) 2016-2023 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
@@ -52,7 +51,7 @@ def get_git_commits(repo, refs):
     cmd = [
         'log',
         '--pretty=format:{"commit_id": "%H", "author": "%aN <%aE>", "date": "%ad", "message": "%s"}',
-        '{}...{}'.format(old_rev, new_rev)
+        f'{old_rev}...{new_rev}'
     ]
 
     stdout, stderr = repo.run_git_command(cmd, extra_env=git_env)
@@ -80,12 +79,12 @@ def run(*args, **kwargs):
 
     if vcs_type == 'git':
         for rev_data in kwargs['commit_ids']:
-            new_environ = dict((k, v) for k, v in rev_data['git_env'])
+            new_environ = {k: v for k, v in rev_data['git_env']}
         commits = get_git_commits(vcs_repo, kwargs['commit_ids'])
 
     if vcs_type == 'hg':
         for rev_data in kwargs['commit_ids']:
-            new_environ = dict((k, v) for k, v in rev_data['hg_env'])
+            new_environ = {k: v for k, v in rev_data['hg_env']}
         commits = get_hg_commits(vcs_repo, kwargs['commit_ids'])
 
     return commits
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
-# Copyright (C) 2016-2023 RhodeCode GmbH
+# Copyright (C) 2016-2023 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
@@ -133,12 +132,12 @@ def run(*args, **kwargs):
 
     if vcs_type == 'git':
         for rev_data in kwargs['commit_ids']:
-            new_environ = dict((k, v) for k, v in rev_data['git_env'])
+            new_environ = {k: v for k, v in rev_data['git_env']}
         files = get_git_files(repo, vcs_repo, kwargs['commit_ids'])
 
     if vcs_type == 'hg':
         for rev_data in kwargs['commit_ids']:
-            new_environ = dict((k, v) for k, v in rev_data['hg_env'])
+            new_environ = {k: v for k, v in rev_data['hg_env']}
         files = get_hg_files(repo, vcs_repo, kwargs['commit_ids'])
 
     if vcs_type == 'svn':
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
-# Copyright (C) 2016-2023 RhodeCode GmbH
+# Copyright (C) 2016-2023 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
@@ -28,7 +28,7 @@ import urllib.error
 log = logging.getLogger('rhodecode.' + __name__)
 
 
-class HookResponse(object):
+class HookResponse:
     def __init__(self, status, output):
         self.status = status
         self.output = output
@@ -44,6 +44,11 @@ class HookResponse(object):
     def __bool__(self):
         return self.status == 0
 
+    def to_json(self):
+        return {'status': self.status, 'output': self.output}
+
+    def __repr__(self):
+        return self.to_json().__repr__()
 
 class DotDict(dict):
 
@@ -91,8 +96,8 @@ class DotDict(dict):
     def __repr__(self):
         keys = list(self.keys())
         keys.sort()
-        args = ', '.join(['%s=%r' % (key, self[key]) for key in keys])
-        return '%s(%s)' % (self.__class__.__name__, args)
+        args = ', '.join(['{}={!r}'.format(key, self[key]) for key in keys])
+        return '{}({})'.format(self.__class__.__name__, args)
 
     @staticmethod
     def fromDict(d):
@@ -110,7 +115,7 @@ def serialize(x):
 
 def unserialize(x):
     if isinstance(x, dict):
-        return dict((k, unserialize(v)) for k, v in x.items())
+        return {k: unserialize(v) for k, v in x.items()}
     elif isinstance(x, (list, tuple)):
         return type(x)(unserialize(v) for v in x)
     else:
@@ -161,7 +166,8 @@ def str2bool(_str) -> bool:
     string into boolean
 
     :param _str: string value to translate into boolean
-    :returns: bool from given string
+    :rtype: boolean
+    :returns: boolean from given string
     """
     if _str is None:
         return False
@@ -49,22 +49,22 @@ link_config = [
     {
         "name": "enterprise_docs",
        "target": "https://rhodecode.com/r1/enterprise/docs/",
-        "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/index.html",
+        "external_target": "https://docs.rhodecode.com/4.x/rce/index.html",
     },
     {
         "name": "enterprise_log_file_locations",
         "target": "https://rhodecode.com/r1/enterprise/docs/admin-system-overview/",
-        "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/admin/system-overview.html#log-files",
+        "external_target": "https://docs.rhodecode.com/4.x/rce/admin/system-overview.html#log-files",
     },
     {
         "name": "enterprise_issue_tracker_settings",
         "target": "https://rhodecode.com/r1/enterprise/docs/issue-trackers-overview/",
-        "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/issue-trackers/issue-trackers.html",
+        "external_target": "https://docs.rhodecode.com/4.x/rce/issue-trackers/issue-trackers.html",
     },
     {
         "name": "enterprise_svn_setup",
         "target": "https://rhodecode.com/r1/enterprise/docs/svn-setup/",
-        "external_target": "https://docs.rhodecode.com/RhodeCode-Enterprise/admin/svn-http.html",
+        "external_target": "https://docs.rhodecode.com/4.x/rce/admin/svn-http.html",
     },
     {
         "name": "enterprise_license_convert_from_old",
@@ -19,6 +19,8 @@
 import os
 import platform
 
+from rhodecode.lib.type_utils import str2bool
+
 DEFAULT_USER = 'default'
 
 
@@ -48,28 +50,23 @@ def initialize_database(config):
     engine = engine_from_config(config, 'sqlalchemy.db1.')
     init_model(engine, encryption_key=get_encryption_key(config))
 
+def initialize_test_environment(settings):
+    skip_test_env = str2bool(os.environ.get('RC_NO_TEST_ENV'))
+    if skip_test_env:
+        return
 
-def initialize_test_environment(settings, test_env=None):
-    if test_env is None:
-        test_env = not int(os.environ.get('RC_NO_TMP_PATH', 0))
+    repo_store_path = os.environ.get('RC_TEST_ENV_REPO_STORE') or settings['repo_store.path']
 
     from rhodecode.lib.utils import (
         create_test_directory, create_test_database, create_test_repositories,
         create_test_index)
-    from rhodecode.tests import TESTS_TMP_PATH
-    from rhodecode.lib.vcs.backends.hg import largefiles_store
-    from rhodecode.lib.vcs.backends.git import lfs_store
 
+    create_test_directory(repo_store_path)
+
+    create_test_database(repo_store_path, settings)
     # test repos
-    if test_env:
-        create_test_directory(TESTS_TMP_PATH)
-        # large object stores
-        create_test_directory(largefiles_store(TESTS_TMP_PATH))
-        create_test_directory(lfs_store(TESTS_TMP_PATH))
-
-        create_test_database(TESTS_TMP_PATH, settings)
-        create_test_repositories(TESTS_TMP_PATH, settings)
-        create_test_index(TESTS_TMP_PATH, settings)
+    create_test_repositories(repo_store_path, settings)
+    create_test_index(repo_store_path, settings)
 
 
 def get_vcs_server_protocol(config):
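The rewritten `initialize_test_environment` is driven entirely by environment variables rather than a `test_env` argument. Assuming only the two variable names visible in the hunk, a test runner would control it like this:

    import os

    # skip the whole test-environment bootstrap (directories, database, repos, index)
    os.environ['RC_NO_TEST_ENV'] = '1'

    # or keep the bootstrap but point the repo store somewhere else
    os.environ['RC_TEST_ENV_REPO_STORE'] = '/tmp/rc-test-store'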
@@ -20,8 +20,7 @@
 Set of custom exceptions used in RhodeCode
 """
 
-from pyramid.httpexceptions import HTTPClientError
-from pyramid.httpexceptions import HTTPBadGateway
+from pyramid.httpexceptions import HTTPBadGateway, HTTPClientError
 
 
 class LdapUsernameError(Exception):
@@ -102,12 +101,7 @@ class HTTPRequirementError(HTTPClientErr
         self.args = (message, )
 
 
-class ClientNotSupportedError(HTTPRequirementError):
-    title = explanation = 'Client Not Supported'
-    reason = None
-
-
-class HTTPLockedRC(HTTPClientError):
+class HTTPLockedRepo(HTTPClientError):
     """
     Special Exception For locked Repos in RhodeCode, the return code can
     be overwritten by _code keyword argument passed into constructors
@@ -131,14 +125,13 @@ class HTTPBranchProtected(HTTPClientErro
     Special Exception For Indicating that branch is protected in RhodeCode, the
     return code can be overwritten by _code keyword argument passed into constructors
     """
-    code = 403
     title = explanation = 'Branch Protected'
     reason = None
 
-    def __init__(self, message, *args, **kwargs):
-        self.title = self.explanation = message
-        super().__init__(*args, **kwargs)
-        self.args = (message, )
+
+class ClientNotSupported(HTTPRequirementError):
+    title = explanation = 'Client Not Supported'
+    reason = None
 
 
 class IMCCommitError(Exception):
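`HTTPLockedRepo` takes over the contract of the removed `HTTPLockedRC`: the status code can be overridden via `_code`, and a 2xx code means "report but do not block" (see `check_locked_repo` later in this diff). A toy illustration of that contract; the 423 default is an assumption here, not taken from the changeset:

    from pyramid.httpexceptions import HTTPClientError

    class HTTPLockedRepo(HTTPClientError):
        code = 423  # assumed default ('Locked'); RhodeCode can override via _code

    err = HTTPLockedRepo('Repository `foo` locked by user `bar`')
    if str(err.code).startswith('2'):
        output = err.title  # 2xx: surfaced as output, operation continues
    else:
        raise err           # any other code aborts the push/pull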
@@ -1,4 +1,4 @@
-# Copyright (C) 2010-2023 RhodeCode GmbH
+# Copyright (C) 2010-2024 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
@@ -16,13 +16,14 @@
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
-import os
-import time
-import logging
+import logging
+import traceback
 
-from rhodecode.lib.config_utils import get_app_config_lightweight
+from rhodecode.model import meta
+from rhodecode.lib import hooks_base
+from rhodecode.lib.utils2 import AttributeDict
+from rhodecode.lib.exceptions import HTTPLockedRepo, HTTPBranchProtected
 
-from rhodecode.lib.svn_txn_utils import get_txn_id_from_store
 
 log = logging.getLogger(__name__)
 
@@ -42,53 +43,82 @@ class BaseHooksCallbackDaemon:
         log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
 
 
-class HooksModuleCallbackDaemon(BaseHooksCallbackDaemon):
-
-    def __init__(self, module):
-        super().__init__()
-        self.hooks_module = module
-
-    def __repr__(self):
-        return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})'
-
-
-def prepare_callback_daemon(extras, protocol, host, txn_id=None):
-
-    match protocol:
-        case 'http':
-            from rhodecode.lib.hook_daemon.http_hooks_deamon import HttpHooksCallbackDaemon
-            port = 0
-            if txn_id:
-                # read txn-id to re-use the PORT for callback daemon
-                repo_path = os.path.join(extras['repo_store'], extras['repository'])
-                txn_details = get_txn_id_from_store(repo_path, txn_id)
-                port = txn_details.get('port', 0)
-
-            callback_daemon = HttpHooksCallbackDaemon(
-                txn_id=txn_id, host=host, port=port)
-        case 'celery':
-            from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon
-
-            config = get_app_config_lightweight(extras['config'])
-            task_queue = config.get('celery.broker_url')
-            task_backend = config.get('celery.result_backend')
-
-            callback_daemon = CeleryHooksCallbackDaemon(task_queue, task_backend)
-        case 'local':
-            from rhodecode.lib.hook_daemon.hook_module import Hooks
-            callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__)
-        case _:
-            log.error('Unsupported callback daemon protocol "%s"', protocol)
-            raise Exception('Unsupported callback daemon protocol.')
-
-    extras['hooks_uri'] = getattr(callback_daemon, 'hooks_uri', '')
-    extras['task_queue'] = getattr(callback_daemon, 'task_queue', '')
-    extras['task_backend'] = getattr(callback_daemon, 'task_backend', '')
-    extras['hooks_protocol'] = protocol
-    extras['time'] = time.time()
-
-    # register txn_id
-    extras['txn_id'] = txn_id
-    log.debug('Prepared a callback daemon: %s',
-              callback_daemon.__class__.__name__)
-    return callback_daemon, extras
+class Hooks(object):
+    """
+    Exposes the hooks module for calling them using the local HooksModuleCallbackDaemon
+    """
+    def __init__(self, request=None, log_prefix=''):
+        self.log_prefix = log_prefix
+        self.request = request
+
+    def repo_size(self, extras):
+        log.debug("%sCalled repo_size of %s object", self.log_prefix, self)
+        return self._call_hook(hooks_base.repo_size, extras)
+
+    def pre_pull(self, extras):
+        log.debug("%sCalled pre_pull of %s object", self.log_prefix, self)
+        return self._call_hook(hooks_base.pre_pull, extras)
+
+    def post_pull(self, extras):
+        log.debug("%sCalled post_pull of %s object", self.log_prefix, self)
+        return self._call_hook(hooks_base.post_pull, extras)
+
+    def pre_push(self, extras):
+        log.debug("%sCalled pre_push of %s object", self.log_prefix, self)
+        return self._call_hook(hooks_base.pre_push, extras)
+
+    def post_push(self, extras):
+        log.debug("%sCalled post_push of %s object", self.log_prefix, self)
+        return self._call_hook(hooks_base.post_push, extras)
+
+    def _call_hook(self, hook, extras):
+        extras = AttributeDict(extras)
+        _server_url = extras['server_url']
+
+        extras.request = self.request
+        try:
+            result = hook(extras)
+            if result is None:
+                raise Exception(f'Failed to obtain hook result from func: {hook}')
+        except HTTPBranchProtected as error:
+            # Those special cases don't need error reporting. It's a case of
+            # locked repo or protected branch
+            result = AttributeDict({
+                'status': error.code,
+                'output': error.explanation
+            })
+        except HTTPLockedRepo as error:
+            # Those special cases don't need error reporting. It's a case of
+            # locked repo or protected branch
+            result = AttributeDict({
+                'status': error.code,
+                'output': error.explanation
+            })
+        except Exception as error:
+            # locked needs different handling since we need to also
+            # handle PULL operations
+            log.exception('%sException when handling hook %s', self.log_prefix, hook)
+            exc_tb = traceback.format_exc()
+            error_args = error.args
+            return {
+                'status': 128,
+                'output': '',
+                'exception': type(error).__name__,
+                'exception_traceback': exc_tb,
+                'exception_args': error_args,
+            }
+        finally:
+            meta.Session.remove()
+
+        log.debug('%sGot hook call response %s', self.log_prefix, result)
+        return {
+            'status': result.status,
+            'output': result.output,
+        }
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
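With `Hooks` moved next to the daemon classes, the 'local' protocol stays entirely in-process: `HooksModuleCallbackDaemon` merely records the module path, and callers invoke the hook methods directly. A sketch of the round trip, with the `extras` payload reduced to a placeholder (a real payload also carries user, repository and permission data):

    from rhodecode.lib.hook_daemon.base import Hooks

    extras = {'server_url': 'http://localhost'}  # placeholder; see hooks_base for the full shape
    with Hooks(log_prefix='ssh:') as hooks:
        result = hooks.pre_pull(extras)
    # success: {'status': 0, 'output': '...'}
    # failure: {'status': 128, 'exception': ..., 'exception_traceback': ..., 'exception_args': ...}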
@@ -22,14 +22,16 @@ from rhodecode.lib.hook_daemon.base impo
 class CeleryHooksCallbackDaemon(BaseHooksCallbackDaemon):
     """
     Context manager for achieving compatibility with the celery backend.
+    It makes a call to vcsserver, which uses HooksCeleryClient to invoke a task from
+
+    f'rhodecode.lib.celerylib.tasks.{method}'
+
     """
 
-    def __init__(self, task_queue, task_backend):
-        self.task_queue = task_queue
-        self.task_backend = task_backend
+    def __init__(self, broker_url, result_backend):
+        super().__init__()
+        self.broker_url = broker_url
+        self.result_backend = result_backend
 
     def __repr__(self):
-        return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})'
-
-    def __repr__(self):
-        return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})'
+        return f'CeleryHooksCallbackDaemon(broker_url={self.broker_url}, result_backend={self.result_backend})'
@@ -17,88 +17,18 @@
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
 import logging
-import traceback
 
-from rhodecode.model import meta
-
-from rhodecode.lib import hooks_base
-from rhodecode.lib.exceptions import HTTPLockedRC, HTTPBranchProtected
-from rhodecode.lib.utils2 import AttributeDict
+from rhodecode.lib.hook_daemon.base import BaseHooksCallbackDaemon
 
 log = logging.getLogger(__name__)
 
 
-class Hooks(object):
-    """
-    Exposes the hooks for remote callbacks
-    """
-    def __init__(self, request=None, log_prefix=''):
-        self.log_prefix = log_prefix
-        self.request = request
-
-    def repo_size(self, extras):
-        log.debug("%sCalled repo_size of %s object", self.log_prefix, self)
-        return self._call_hook(hooks_base.repo_size, extras)
-
-    def pre_pull(self, extras):
-        log.debug("%sCalled pre_pull of %s object", self.log_prefix, self)
-        return self._call_hook(hooks_base.pre_pull, extras)
-
-    def post_pull(self, extras):
-        log.debug("%sCalled post_pull of %s object", self.log_prefix, self)
-        return self._call_hook(hooks_base.post_pull, extras)
-
-    def pre_push(self, extras):
-        log.debug("%sCalled pre_push of %s object", self.log_prefix, self)
-        return self._call_hook(hooks_base.pre_push, extras)
-
-    def post_push(self, extras):
-        log.debug("%sCalled post_push of %s object", self.log_prefix, self)
-        return self._call_hook(hooks_base.post_push, extras)
-
-    def _call_hook(self, hook, extras):
-        extras = AttributeDict(extras)
-        _server_url = extras['server_url']
-
-        extras.request = self.request
-
-        try:
-            result = hook(extras)
-            if result is None:
-                raise Exception(f'Failed to obtain hook result from func: {hook}')
-        except HTTPBranchProtected as error:
-            # Those special cases don't need error reporting. It's a case of
-            # locked repo or protected branch
-            result = AttributeDict({
-                'status': error.code,
-                'output': error.explanation
-            })
-        except (HTTPLockedRC, Exception) as error:
-            # locked needs different handling since we need to also
-            # handle PULL operations
-            exc_tb = ''
-            if not isinstance(error, HTTPLockedRC):
-                exc_tb = traceback.format_exc()
-                log.exception('%sException when handling hook %s', self.log_prefix, hook)
-            error_args = error.args
-            return {
-                'status': 128,
-                'output': '',
-                'exception': type(error).__name__,
-                'exception_traceback': exc_tb,
-                'exception_args': error_args,
-            }
-        finally:
-            meta.Session.remove()
-
-        log.debug('%sGot hook call response %s', self.log_prefix, result)
-        return {
-            'status': result.status,
-            'output': result.output,
-        }
+class HooksModuleCallbackDaemon(BaseHooksCallbackDaemon):
+
+    def __init__(self, module):
+        super().__init__()
+        self.hooks_module = module
+
+    def __repr__(self):
+        return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})'
 
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        pass
@@ -30,14 +30,14 @@ from rhodecode.lib import helpers as h
 from rhodecode.lib import audit_logger
 from rhodecode.lib.utils2 import safe_str, user_agent_normalizer
 from rhodecode.lib.exceptions import (
-    HTTPLockedRC, HTTPBranchProtected, UserCreationError, ClientNotSupportedError)
+    HTTPLockedRepo, HTTPBranchProtected, UserCreationError, ClientNotSupported)
 from rhodecode.model.db import Repository, User
 from rhodecode.lib.statsd_client import StatsdClient
 
 log = logging.getLogger(__name__)
 
 
-class HookResponse(object):
+class HookResponse:
     def __init__(self, status, output):
         self.status = status
         self.output = output
@@ -56,6 +56,8 @@ class HookResponse(object):
     def to_json(self):
         return {'status': self.status, 'output': self.output}
 
+    def __repr__(self):
+        return self.to_json().__repr__()
 
 def is_shadow_repo(extras):
     """
@@ -73,8 +75,69 @@ def check_vcs_client(extras):
     except ModuleNotFoundError:
         is_vcs_client_whitelisted = lambda *x: True
     backend = extras.get('scm')
-    if not is_vcs_client_whitelisted(extras.get('user_agent'), backend):
-        raise ClientNotSupportedError(f"Your {backend} client is forbidden")
+    user_agent = extras.get('user_agent')
+    if not is_vcs_client_whitelisted(user_agent, backend):
+        raise ClientNotSupported(f"Your {backend} client (version={user_agent}) is forbidden by security rules")
+
+
+def check_locked_repo(extras, check_same_user=True):
+    user = User.get_by_username(extras.username)
+    output = ''
+    if extras.locked_by[0] and (not check_same_user or user.user_id != extras.locked_by[0]):
+
+        locked_by = User.get(extras.locked_by[0]).username
+        reason = extras.locked_by[2]
+        # this exception is interpreted in git/hg middlewares and based
+        # on that proper return code is server to client
+        _http_ret = HTTPLockedRepo(_locked_by_explanation(extras.repository, locked_by, reason))
+        if str(_http_ret.code).startswith('2'):
+            # 2xx Codes don't raise exceptions
+            output = _http_ret.title
+        else:
+            raise _http_ret
+
+    return output
+
+
+def check_branch_protected(extras):
+    if extras.commit_ids and extras.check_branch_perms:
+        user = User.get_by_username(extras.username)
+        auth_user = user.AuthUser()
+        repo = Repository.get_by_repo_name(extras.repository)
+        if not repo:
+            raise ValueError(f'Repo for {extras.repository} not found')
+        affected_branches = []
+        if repo.repo_type == 'hg':
+            for entry in extras.commit_ids:
+                if entry['type'] == 'branch':
+                    is_forced = bool(entry['multiple_heads'])
+                    affected_branches.append([entry['name'], is_forced])
+        elif repo.repo_type == 'git':
+            for entry in extras.commit_ids:
+                if entry['type'] == 'heads':
+                    is_forced = bool(entry['pruned_sha'])
+                    affected_branches.append([entry['name'], is_forced])
+
+        for branch_name, is_forced in affected_branches:
+
+            rule, branch_perm = auth_user.get_rule_and_branch_permission(extras.repository, branch_name)
+            if not branch_perm:
+                # no branch permission found for this branch, just keep checking
+                continue
+
+            if branch_perm == 'branch.push_force':
+                continue
+            elif branch_perm == 'branch.push' and is_forced is False:
+                continue
+            elif branch_perm == 'branch.push' and is_forced is True:
+                halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}. ' \
+                               f'FORCE PUSH FORBIDDEN.'
+            else:
+                halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}.'
+
+            if halt_message:
+                _http_ret = HTTPBranchProtected(halt_message)
+                raise _http_ret
+
 
 def _get_scm_size(alias, root_path):
 
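The helpers added above replace logic that the next hunk deletes from `pre_push`. The new `pre_push` body itself falls outside this truncated diff, but recomposed from the pieces shown here it plausibly reads:

    def pre_push(extras):
        check_vcs_client(extras)            # raises ClientNotSupported for non-whitelisted clients
        output = check_locked_repo(extras)  # raises HTTPLockedRepo, or returns a 2xx title
        hook_response = ''
        if not is_shadow_repo(extras):
            check_branch_protected(extras)  # raises HTTPBranchProtected when a rule blocks the push
            hook_response = pre_push_extension(repo_store_path=Repository.base_path(), **extras)
            events.trigger(events.RepoPrePushEvent(repo_name=extras.repository, extras=extras))
        return HookResponse(0, output) + hook_response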
@@ -109,116 +172,30 b' def repo_size(extras):' | |||||
109 | repo = Repository.get_by_repo_name(extras.repository) |
|
172 | repo = Repository.get_by_repo_name(extras.repository) | |
110 | vcs_part = f'.{repo.repo_type}' |
|
173 | vcs_part = f'.{repo.repo_type}' | |
111 | size_vcs, size_root, size_total = _get_scm_size(vcs_part, repo.repo_full_path) |
|
174 | size_vcs, size_root, size_total = _get_scm_size(vcs_part, repo.repo_full_path) | |
112 |
msg = |
|
175 | msg = f'RhodeCode: `{repo.repo_name}` size summary {vcs_part}:{size_vcs} repo:{size_root} total:{size_total}\n' | |
113 | return HookResponse(0, msg) |
|
176 | return HookResponse(0, msg) | |
114 |
|
177 | |||
115 |
|
178 | |||
116 | def pre_push(extras): |
|
|||
117 | """ |
|
|||
118 | Hook executed before pushing code. |
|
|||
119 |
|
||||
120 | It bans pushing when the repository is locked. |
|
|||
121 | """ |
|
|||
122 |
|
||||
123 | check_vcs_client(extras) |
|
|||
124 | user = User.get_by_username(extras.username) |
|
|||
125 | output = '' |
|
|||
126 | if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]): |
|
|||
127 | locked_by = User.get(extras.locked_by[0]).username |
|
|||
128 | reason = extras.locked_by[2] |
|
|||
129 | # this exception is interpreted in git/hg middlewares and based |
|
|||
130 | # on that proper return code is server to client |
|
|||
131 | _http_ret = HTTPLockedRC( |
|
|||
132 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
|||
133 | if str(_http_ret.code).startswith('2'): |
|
|||
134 | # 2xx Codes don't raise exceptions |
|
|||
135 | output = _http_ret.title |
|
|||
136 | else: |
|
|||
137 | raise _http_ret |
|
|||
138 |
|
||||
139 | hook_response = '' |
|
|||
140 | if not is_shadow_repo(extras): |
|
|||
141 |
|
||||
142 | if extras.commit_ids and extras.check_branch_perms: |
|
|||
143 | auth_user = user.AuthUser() |
|
|||
144 | repo = Repository.get_by_repo_name(extras.repository) |
|
|||
145 | if not repo: |
|
|||
146 | raise ValueError(f'Repo for {extras.repository} not found') |
|
|||
147 | affected_branches = [] |
|
|||
148 | if repo.repo_type == 'hg': |
|
|||
149 | for entry in extras.commit_ids: |
|
|||
150 | if entry['type'] == 'branch': |
|
|||
151 | is_forced = bool(entry['multiple_heads']) |
|
|||
152 | affected_branches.append([entry['name'], is_forced]) |
|
|||
153 | elif repo.repo_type == 'git': |
|
|||
154 | for entry in extras.commit_ids: |
|
|||
155 | if entry['type'] == 'heads': |
|
|||
156 | is_forced = bool(entry['pruned_sha']) |
|
|||
157 | affected_branches.append([entry['name'], is_forced]) |
|
|||
158 |
|
||||
159 | for branch_name, is_forced in affected_branches: |
|
|||
160 |
|
||||
161 | rule, branch_perm = auth_user.get_rule_and_branch_permission( |
|
|||
162 | extras.repository, branch_name) |
|
|||
163 | if not branch_perm: |
|
|||
164 | # no branch permission found for this branch, just keep checking |
|
|||
165 | continue |
|
|||
166 |
|
||||
167 | if branch_perm == 'branch.push_force': |
|
|||
168 | continue |
|
|||
169 | elif branch_perm == 'branch.push' and is_forced is False: |
|
|||
170 | continue |
|
|||
171 | elif branch_perm == 'branch.push' and is_forced is True: |
|
|||
172 | halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}. ' \ |
|
|||
173 | f'FORCE PUSH FORBIDDEN.' |
|
|||
174 | else: |
|
|||
175 | halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}.' |
|
|||
176 |
|
||||
177 | if halt_message: |
|
|||
178 | _http_ret = HTTPBranchProtected(halt_message) |
|
|||
179 | raise _http_ret |
|
|||
180 |
|
||||
181 | # Propagate to external components. This is done after checking the |
|
|||
182 | # lock, for consistent behavior. |
|
|||
183 | hook_response = pre_push_extension( |
|
|||
184 | repo_store_path=Repository.base_path(), **extras) |
|
|||
185 | events.trigger(events.RepoPrePushEvent( |
|
|||
186 | repo_name=extras.repository, extras=extras)) |
|
|||
187 |
|
||||
188 | return HookResponse(0, output) + hook_response |
|
|||
189 |
|
||||
190 |
|
||||
191 | def pre_pull(extras): |
|
179 | def pre_pull(extras): | |
192 | """ |
|
180 | """ | |
193 | Hook executed before pulling the code. |
|
181 | Hook executed before pulling the code. | |
194 |
|
182 | |||
195 | It bans pulling when the repository is locked. |
|
183 | It bans pulling when the repository is locked. | |
|
184 | It bans pulling when an incorrect client is used. | |||
196 | """ |
|
185 | """ | |
197 |
|
||||
198 | check_vcs_client(extras) |
|
|||
199 | output = '' |
|
186 | output = '' | |
200 | if extras.locked_by[0]: |
|
187 | check_vcs_client(extras) | |
201 | locked_by = User.get(extras.locked_by[0]).username |
|
188 | ||
202 | reason = extras.locked_by[2] |
|
189 | # locking a repo can, but does not have to, stop the operation; it can also just produce output | |
203 | # this exception is interpreted in git/hg middlewares and based |
|
190 | output += check_locked_repo(extras, check_same_user=False) | |
204 | # on that the proper return code is served to the client |
|
|||
205 | _http_ret = HTTPLockedRC( |
|
|||
206 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
|||
207 | if str(_http_ret.code).startswith('2'): |
|
|||
208 | # 2xx Codes don't raise exceptions |
|
|||
209 | output = _http_ret.title |
|
|||
210 | else: |
|
|||
211 | raise _http_ret |
|
|||
212 |
|
191 | |||
213 | # Propagate to external components. This is done after checking the |
|
192 | # Propagate to external components. This is done after checking the | |
214 | # lock, for consistent behavior. |
|
193 | # lock, for consistent behavior. | |
215 | hook_response = '' |
|
194 | hook_response = '' | |
216 | if not is_shadow_repo(extras): |
|
195 | if not is_shadow_repo(extras): | |
217 | extras.hook_type = extras.hook_type or 'pre_pull' |
|
196 | extras.hook_type = extras.hook_type or 'pre_pull' | |
218 | hook_response = pre_pull_extension( |
|
197 | hook_response = pre_pull_extension(repo_store_path=Repository.base_path(), **extras) | |
219 | repo_store_path=Repository.base_path(), **extras) |
|
198 | events.trigger(events.RepoPrePullEvent(repo_name=extras.repository, extras=extras)) | |
220 | events.trigger(events.RepoPrePullEvent( |
|
|||
221 | repo_name=extras.repository, extras=extras)) |
|
|||
222 |
|
199 | |||
223 | return HookResponse(0, output) + hook_response |
|
200 | return HookResponse(0, output) + hook_response | |
224 |
|
201 | |||
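Note: `check_locked_repo` is called above but not defined anywhere in this diff. A minimal sketch of the factored-out logic, reconstructed from the inline lock-handling block the old hooks carried; the name, signature, and exception type are assumptions (the utils changes below import `HTTPLockedRepo`, so the real implementation may differ):

    def check_locked_repo(extras, check_same_user=True):
        # sketch only: turn a repo lock into warning output (2xx) or a raised error
        output = ''
        locked_by_user_id = extras.locked_by[0]
        if not locked_by_user_id:
            return output
        if check_same_user:
            user = User.get_by_username(extras.username)
            if user.user_id == int(locked_by_user_id):
                return output  # the lock owner may proceed
        locked_by = User.get(locked_by_user_id).username
        reason = extras.locked_by[2]
        _http_ret = HTTPLockedRC(
            _locked_by_explanation(extras.repository, locked_by, reason))
        if str(_http_ret.code).startswith('2'):
            # 2xx codes don't raise exceptions, they only add output
            return output + _http_ret.title
        raise _http_ret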
@@ -239,6 +216,7 b' def post_pull(extras):' | |||||
239 | statsd.incr('rhodecode_pull_total', tags=[ |
|
216 | statsd.incr('rhodecode_pull_total', tags=[ | |
240 | f'user-agent:{user_agent_normalizer(extras.user_agent)}', |
|
217 | f'user-agent:{user_agent_normalizer(extras.user_agent)}', | |
241 | ]) |
|
218 | ]) | |
|
219 | ||||
242 | output = '' |
|
220 | output = '' | |
243 | # make_lock is tri-state: False, True, None. We only make a lock on True |

221 | # make_lock is tri-state: False, True, None. We only make a lock on True | |
244 | if extras.make_lock is True and not is_shadow_repo(extras): |
|
222 | if extras.make_lock is True and not is_shadow_repo(extras): | |
@@ -246,18 +224,9 b' def post_pull(extras):' | |||||
246 | Repository.lock(Repository.get_by_repo_name(extras.repository), |
|
224 | Repository.lock(Repository.get_by_repo_name(extras.repository), | |
247 | user.user_id, |
|
225 | user.user_id, | |
248 | lock_reason=Repository.LOCK_PULL) |
|
226 | lock_reason=Repository.LOCK_PULL) | |
249 |
msg = 'Made lock on repo `{ |
|
227 | msg = f'Made lock on repo `{extras.repository}`' | |
250 | output += msg |
|
228 | output += msg | |
251 |
|
229 | |||
252 | if extras.locked_by[0]: |
|
|||
253 | locked_by = User.get(extras.locked_by[0]).username |
|
|||
254 | reason = extras.locked_by[2] |
|
|||
255 | _http_ret = HTTPLockedRC( |
|
|||
256 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
|||
257 | if str(_http_ret.code).startswith('2'): |
|
|||
258 | # 2xx Codes don't raise exceptions |
|
|||
259 | output += _http_ret.title |
|
|||
260 |
|
||||
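Note: a tiny illustrative helper (not part of the diff) summarising how the pull/push hook pair divides the tri-state `make_lock` flag described above:

    def lock_action(make_lock) -> str:
        # illustration only: how post_pull/post_push interpret the flag
        if make_lock is True:
            return 'acquire'   # post_pull takes the lock
        if make_lock is False:
            return 'release'   # post_push releases it
        return 'noop'          # None leaves the lock state untouched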
261 | # Propagate to external components. |
|
230 | # Propagate to external components. | |
262 | hook_response = '' |
|
231 | hook_response = '' | |
263 | if not is_shadow_repo(extras): |
|
232 | if not is_shadow_repo(extras): | |
@@ -270,6 +239,33 b' def post_pull(extras):' | |||||
270 | return HookResponse(0, output) + hook_response |
|
239 | return HookResponse(0, output) + hook_response | |
271 |
|
240 | |||
272 |
|
241 | |||
|
242 | def pre_push(extras): | |||
|
243 | """ | |||
|
244 | Hook executed before pushing code. | |||
|
245 | ||||
|
246 | It bans pushing when the repository is locked. | |||
|
247 | It bans pushing when an incorrect client is used. | |||
|
248 | It also checks for branch protection. | |||
|
249 | """ | |||
|
250 | output = '' | |||
|
251 | check_vcs_client(extras) | |||
|
252 | ||||
|
253 | # locking a repo can, but does not have to, stop the operation; it can also just produce output | |||
|
254 | output += check_locked_repo(extras) | |||
|
255 | ||||
|
256 | hook_response = '' | |||
|
257 | if not is_shadow_repo(extras): | |||
|
258 | ||||
|
259 | check_branch_protected(extras) | |||
|
260 | ||||
|
261 | # Propagate to external components. This is done after checking the | |||
|
262 | # lock, for consistent behavior. | |||
|
263 | hook_response = pre_push_extension(repo_store_path=Repository.base_path(), **extras) | |||
|
264 | events.trigger(events.RepoPrePushEvent(repo_name=extras.repository, extras=extras)) | |||
|
265 | ||||
|
266 | return HookResponse(0, output) + hook_response | |||
|
267 | ||||
|
268 | ||||
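Note: `check_branch_protected` is likewise not defined in this diff. A sketch of what it presumably factors out, reconstructed from the inline branch-permission block deleted from the old pre_push above (all names are assumptions; this version raises per offending branch, which also avoids the unbound `halt_message` in the removed code):

    def check_branch_protected(extras):
        # sketch: raise HTTPBranchProtected when a branch rule forbids the push
        if not (extras.commit_ids and extras.check_branch_perms):
            return
        user = User.get_by_username(extras.username)
        auth_user = user.AuthUser()
        repo = Repository.get_by_repo_name(extras.repository)
        if not repo:
            raise ValueError(f'Repo for {extras.repository} not found')

        affected_branches = []
        if repo.repo_type == 'hg':
            affected_branches = [
                (entry['name'], bool(entry['multiple_heads']))
                for entry in extras.commit_ids if entry['type'] == 'branch']
        elif repo.repo_type == 'git':
            affected_branches = [
                (entry['name'], bool(entry['pruned_sha']))
                for entry in extras.commit_ids if entry['type'] == 'heads']

        for branch_name, is_forced in affected_branches:
            rule, branch_perm = auth_user.get_rule_and_branch_permission(
                extras.repository, branch_name)
            if not branch_perm or branch_perm == 'branch.push_force':
                continue
            if branch_perm == 'branch.push' and not is_forced:
                continue
            halt_message = f'Branch `{branch_name}` changes rejected by rule {rule}.'
            if branch_perm == 'branch.push':
                halt_message += ' FORCE PUSH FORBIDDEN.'
            raise HTTPBranchProtected(halt_message)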
273 | def post_push(extras): |
|
269 | def post_push(extras): | |
274 | """Hook executed after user pushes to the repository.""" |
|
270 | """Hook executed after user pushes to the repository.""" | |
275 | commit_ids = extras.commit_ids |
|
271 | commit_ids = extras.commit_ids | |
@@ -292,22 +288,13 b' def post_push(extras):' | |||||
292 |
|
288 | |||
293 | # Propagate to external components. |
|
289 | # Propagate to external components. | |
294 | output = '' |
|
290 | output = '' | |
|
291 | ||||
295 | # make_lock is tri-state: False, True, None. We only release the lock on False |

292 | # make_lock is tri-state: False, True, None. We only release the lock on False | |
296 | if extras.make_lock is False and not is_shadow_repo(extras): |
|
293 | if extras.make_lock is False and not is_shadow_repo(extras): | |
297 | Repository.unlock(Repository.get_by_repo_name(extras.repository)) |
|
294 | Repository.unlock(Repository.get_by_repo_name(extras.repository)) | |
298 | msg = f'Released lock on repo `{extras.repository}`\n' |
|
295 | msg = f'Released lock on repo `{extras.repository}`\n' | |
299 | output += msg |
|
296 | output += msg | |
300 |
|
297 | |||
301 | if extras.locked_by[0]: |
|
|||
302 | locked_by = User.get(extras.locked_by[0]).username |
|
|||
303 | reason = extras.locked_by[2] |
|
|||
304 | _http_ret = HTTPLockedRC( |
|
|||
305 | _locked_by_explanation(extras.repository, locked_by, reason)) |
|
|||
306 | # TODO: johbo: if not? |
|
|||
307 | if str(_http_ret.code).startswith('2'): |
|
|||
308 | # 2xx Codes don't raise exceptions |
|
|||
309 | output += _http_ret.title |
|
|||
310 |
|
||||
311 | if extras.new_refs: |
|
298 | if extras.new_refs: | |
312 | tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format( |
|
299 | tmpl = '{}/{}/pull-request/new?{{ref_type}}={{ref_name}}'.format( | |
313 | safe_str(extras.server_url), safe_str(extras.repository)) |
|
300 | safe_str(extras.server_url), safe_str(extras.repository)) | |
@@ -322,11 +309,8 b' def post_push(extras):' | |||||
322 |
|
309 | |||
323 | hook_response = '' |
|
310 | hook_response = '' | |
324 | if not is_shadow_repo(extras): |
|
311 | if not is_shadow_repo(extras): | |
325 | hook_response = post_push_extension( |
|
312 | hook_response = post_push_extension(repo_store_path=Repository.base_path(), **extras) | |
326 | repo_store_path=Repository.base_path(), |
|
313 | events.trigger(events.RepoPushEvent(repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras)) | |
327 | **extras) |
|
|||
328 | events.trigger(events.RepoPushEvent( |
|
|||
329 | repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras)) |
|
|||
330 |
|
314 | |||
331 | output += 'RhodeCode: push completed\n' |
|
315 | output += 'RhodeCode: push completed\n' | |
332 | return HookResponse(0, output) + hook_response |
|
316 | return HookResponse(0, output) + hook_response | |
@@ -380,12 +364,20 b' class ExtensionCallback(object):' | |||||
380 | # with older rcextensions that require api_key present |
|
364 | # with older rcextensions that require api_key present | |
381 | if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']: |
|
365 | if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']: | |
382 | kwargs_to_pass['api_key'] = '_DEPRECATED_' |
|
366 | kwargs_to_pass['api_key'] = '_DEPRECATED_' | |
383 | return callback(**kwargs_to_pass) |
|
367 | result = callback(**kwargs_to_pass) | |
|
368 | log.debug('got rcextensions result: %s', result) | |||
|
369 | return result | |||
384 |
|
370 | |||
385 | def is_active(self): |
|
371 | def is_active(self): | |
386 | return hasattr(rhodecode.EXTENSIONS, self._hook_name) |
|
372 | return hasattr(rhodecode.EXTENSIONS, self._hook_name) | |
387 |
|
373 | |||
388 | def _get_callback(self): |
|
374 | def _get_callback(self): | |
|
375 | if rhodecode.is_test: | |||
|
376 | log.debug('In test mode, reloading rcextensions...') | |||
|
377 | # NOTE: for test re-load rcextensions always so we can dynamically change them for testing purposes | |||
|
378 | from rhodecode.lib.utils import load_rcextensions | |||
|
379 | load_rcextensions(root_path=os.path.dirname(rhodecode.CONFIG['__file__'])) | |||
|
380 | return getattr(rhodecode.EXTENSIONS, self._hook_name, None) | |||
389 | return getattr(rhodecode.EXTENSIONS, self._hook_name, None) |
|
381 | return getattr(rhodecode.EXTENSIONS, self._hook_name, None) | |
390 |
|
382 | |||
391 |
|
383 |
@@ -40,16 +40,6 b' GIT_PROTO_PAT = re.compile(' | |||||
40 | GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))') |
|
40 | GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))') | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | def default_lfs_store(): |
|
|||
44 | """ |
|
|||
45 | Default LFS store location; it's consistent with Mercurial's largefile |
|
|||
46 | store which is in .cache/largefiles |
|
|||
47 | """ |
|
|||
48 | from rhodecode.lib.vcs.backends.git import lfs_store |
|
|||
49 | user_home = os.path.expanduser("~") |
|
|||
50 | return lfs_store(user_home) |
|
|||
51 |
|
||||
52 |
|
||||
53 | class SimpleGit(simplevcs.SimpleVCS): |
|
43 | class SimpleGit(simplevcs.SimpleVCS): | |
54 |
|
44 | |||
55 | SCM = 'git' |
|
45 | SCM = 'git' | |
@@ -151,6 +141,6 b' class SimpleGit(simplevcs.SimpleVCS):' | |||||
151 |
|
141 | |||
152 | extras['git_lfs_enabled'] = utils2.str2bool( |
|
142 | extras['git_lfs_enabled'] = utils2.str2bool( | |
153 | config.get('vcs_git_lfs', 'enabled')) |
|
143 | config.get('vcs_git_lfs', 'enabled')) | |
154 | extras['git_lfs_store_path'] = custom_store or default_lfs_store() |
|
144 | extras['git_lfs_store_path'] = custom_store | |
155 | extras['git_lfs_http_scheme'] = scheme |
|
145 | extras['git_lfs_http_scheme'] = scheme | |
156 | return extras |
|
146 | return extras |
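Note: with `default_lfs_store()` removed, `git_lfs_store_path` now comes solely from configuration. For reference, the deleted fallback (reassembled from the lines removed above) resolved to ~/.cache/lfs_store:

    import os

    def default_lfs_store():
        # removed fallback: LFS store next to Mercurial's largefiles cache
        from rhodecode.lib.vcs.backends.git import lfs_store
        return lfs_store(os.path.expanduser('~'))  # -> ~/.cache/lfs_store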
@@ -1,5 +1,3 b'' | |||||
1 |
|
||||
2 |
|
||||
3 |
|
|
1 | # Copyright (C) 2014-2023 RhodeCode GmbH | |
4 | # |
|
2 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
@@ -32,8 +30,7 b' from functools import wraps' | |||||
32 | import time |
|
30 | import time | |
33 | from paste.httpheaders import REMOTE_USER, AUTH_TYPE |
|
31 | from paste.httpheaders import REMOTE_USER, AUTH_TYPE | |
34 |
|
32 | |||
35 | from pyramid.httpexceptions import ( |
|
33 | from pyramid.httpexceptions import HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError | |
36 | HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError) |
|
|||
37 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
34 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
38 |
|
35 | |||
39 | import rhodecode |
|
36 | import rhodecode | |
@@ -41,10 +38,9 b' from rhodecode.authentication.base impor' | |||||
41 | from rhodecode.lib import rc_cache |
|
38 | from rhodecode.lib import rc_cache | |
42 | from rhodecode.lib.svn_txn_utils import store_txn_id_data |
|
39 | from rhodecode.lib.svn_txn_utils import store_txn_id_data | |
43 | from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware |
|
40 | from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware | |
44 | from rhodecode.lib.base import ( |
|
41 | from rhodecode.lib.base import BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context | |
45 | BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context) |
|
42 | from rhodecode.lib.exceptions import UserCreationError, NotAllowedToCreateUserError | |
46 | from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError) |
|
43 | from rhodecode.lib.hook_daemon.utils import prepare_callback_daemon | |
47 | from rhodecode.lib.hook_daemon.base import prepare_callback_daemon |
|
|||
48 | from rhodecode.lib.middleware import appenlight |
|
44 | from rhodecode.lib.middleware import appenlight | |
49 | from rhodecode.lib.middleware.utils import scm_app_http |
|
45 | from rhodecode.lib.middleware.utils import scm_app_http | |
50 | from rhodecode.lib.str_utils import safe_bytes, safe_int |
|
46 | from rhodecode.lib.str_utils import safe_bytes, safe_int | |
@@ -78,17 +74,18 b' def initialize_generator(factory):' | |||||
78 | try: |
|
74 | try: | |
79 | init = next(gen) |
|
75 | init = next(gen) | |
80 | except StopIteration: |
|
76 | except StopIteration: | |
81 | raise ValueError('Generator must yield at least one element.') |
|
77 | raise ValueError("Generator must yield at least one element.") | |
82 | if init != "__init__": |
|
78 | if init != "__init__": | |
83 | raise ValueError('First yielded element must be "__init__".') |
|
79 | raise ValueError('First yielded element must be "__init__".') | |
84 | return gen |
|
80 | return gen | |
|
81 | ||||
85 | return wrapper |
|
82 | return wrapper | |
86 |
|
83 | |||
87 |
|
84 | |||
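Note: a minimal usage sketch of the `initialize_generator` decorator above; it primes the generator and requires the first yielded value to be the "__init__" handshake (illustrative only):

    @initialize_generator
    def _stream_response():
        yield '__init__'        # consumed by the decorator while priming
        yield b'first chunk'    # what callers actually iterate over
        yield b'second chunk'

    gen = _stream_response()    # raises ValueError without the handshake
    assert list(gen) == [b'first chunk', b'second chunk']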
88 | class SimpleVCS(object): |
|
85 | class SimpleVCS(object): | |
89 | """Common functionality for SCM HTTP handlers.""" |
|
86 | """Common functionality for SCM HTTP handlers.""" | |
90 |
|
87 | |||
91 | SCM = 'unknown' |
|
88 | SCM = "unknown" | |
92 |
|
89 | |||
93 | acl_repo_name = None |
|
90 | acl_repo_name = None | |
94 | url_repo_name = None |
|
91 | url_repo_name = None | |
@@ -100,11 +97,11 b' class SimpleVCS(object):' | |||||
100 | # we use this regex which will match only on URLs pointing to shadow |
|
97 | # we use this regex which will match only on URLs pointing to shadow | |
101 | # repositories. |
|
98 | # repositories. | |
102 | shadow_repo_re = re.compile( |
|
99 | shadow_repo_re = re.compile( | |
103 | '(?P<groups>(?:{slug_pat}/)*)' # repo groups |
|
100 | "(?P<groups>(?:{slug_pat}/)*)" # repo groups | |
104 | '(?P<target>{slug_pat})/' # target repo |
|
101 | "(?P<target>{slug_pat})/" # target repo | |
105 | 'pull-request/(?P<pr_id>\\d+)/' # pull request |
|
102 | "pull-request/(?P<pr_id>\\d+)/" # pull request | |
106 | 'repository$' # shadow repo |
|
103 | "repository$".format(slug_pat=SLUG_RE.pattern) # shadow repo | |
107 | .format(slug_pat=SLUG_RE.pattern)) |
|
104 | ) | |
108 |
|
105 | |||
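Note: an illustrative match for the shadow-repository pattern compiled above, assuming SLUG_RE matches ordinary repo slugs (all names are made up):

    m = SimpleVCS.shadow_repo_re.match(
        'my-group/sub/target-repo/pull-request/123/repository')
    assert m.groupdict() == {
        'groups': 'my-group/sub/', 'target': 'target-repo', 'pr_id': '123'}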
109 | def __init__(self, config, registry): |
|
106 | def __init__(self, config, registry): | |
110 | self.registry = registry |
|
107 | self.registry = registry | |
@@ -113,15 +110,14 b' class SimpleVCS(object):' | |||||
113 | self.repo_vcs_config = base.Config() |
|
110 | self.repo_vcs_config = base.Config() | |
114 |
|
111 | |||
115 | rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False) |
|
112 | rc_settings = SettingsModel().get_all_settings(cache=True, from_request=False) | |
116 | realm = rc_settings.get('rhodecode_realm') or 'RhodeCode AUTH' |
|
113 | realm = rc_settings.get("rhodecode_realm") or "RhodeCode AUTH" | |
117 |
|
114 | |||
118 | # authenticate this VCS request using authfunc |
|
115 | # authenticate this VCS request using authfunc | |
119 | auth_ret_code_detection = \ |
|
116 | auth_ret_code_detection = str2bool(self.config.get("auth_ret_code_detection", False)) | |
120 | str2bool(self.config.get('auth_ret_code_detection', False)) |
|
|||
121 | self.authenticate = BasicAuth( |
|
117 | self.authenticate = BasicAuth( | |
122 | '', authenticate, registry, config.get('auth_ret_code'), |
|
118 | "", authenticate, registry, config.get("auth_ret_code"), auth_ret_code_detection, rc_realm=realm | |
123 | auth_ret_code_detection, rc_realm=realm) |
|
119 | ) | |
124 | self.ip_addr = '0.0.0.0' |
|
120 | self.ip_addr = "0.0.0.0" | |
125 |
|
121 | |||
126 | @LazyProperty |
|
122 | @LazyProperty | |
127 | def global_vcs_config(self): |
|
123 | def global_vcs_config(self): | |
@@ -132,10 +128,10 b' class SimpleVCS(object):' | |||||
132 |
|
128 | |||
133 | @property |
|
129 | @property | |
134 | def base_path(self): |
|
130 | def base_path(self): | |
135 | settings_path = self.config.get('repo_store.path') |
|
131 | settings_path = self.config.get("repo_store.path") | |
136 |
|
132 | |||
137 | if not settings_path: |
|
133 | if not settings_path: | |
138 | raise ValueError('FATAL: repo_store.path is empty') |
|
134 | raise ValueError("FATAL: repo_store.path is empty") | |
139 | return settings_path |
|
135 | return settings_path | |
140 |
|
136 | |||
141 | def set_repo_names(self, environ): |
|
137 | def set_repo_names(self, environ): | |
@@ -164,17 +160,16 b' class SimpleVCS(object):' | |||||
164 | match_dict = match.groupdict() |
|
160 | match_dict = match.groupdict() | |
165 |
|
161 | |||
166 | # Build acl repo name from regex match. |
|
162 | # Build acl repo name from regex match. | |
167 | acl_repo_name = safe_str('{groups}{target}'.format( |
|
163 | acl_repo_name = safe_str( | |
168 | groups=match_dict['groups'] or '', |
|
164 | "{groups}{target}".format(groups=match_dict["groups"] or "", target=match_dict["target"]) | |
169 | target=match_dict['target'])) |
|
165 | ) | |
170 |
|
166 | |||
171 | # Retrieve pull request instance by ID from regex match. |
|
167 | # Retrieve pull request instance by ID from regex match. | |
172 | pull_request = PullRequest.get(match_dict['pr_id']) |
|
168 | pull_request = PullRequest.get(match_dict["pr_id"]) | |
173 |
|
169 | |||
174 | # Only proceed if we got a pull request and if acl repo name from |
|
170 | # Only proceed if we got a pull request and if acl repo name from | |
175 | # URL equals the target repo name of the pull request. |
|
171 | # URL equals the target repo name of the pull request. | |
176 | if pull_request and (acl_repo_name == pull_request.target_repo.repo_name): |
|
172 | if pull_request and (acl_repo_name == pull_request.target_repo.repo_name): | |
177 |
|
||||
178 | # Get file system path to shadow repository. |
|
173 | # Get file system path to shadow repository. | |
179 | workspace_id = PullRequestModel()._workspace_id(pull_request) |
|
174 | workspace_id = PullRequestModel()._workspace_id(pull_request) | |
180 | vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id) |
|
175 | vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id) | |
@@ -184,21 +179,23 b' class SimpleVCS(object):' | |||||
184 | self.acl_repo_name = acl_repo_name |
|
179 | self.acl_repo_name = acl_repo_name | |
185 | self.is_shadow_repo = True |
|
180 | self.is_shadow_repo = True | |
186 |
|
181 | |||
187 | log.debug('Setting all VCS repository names: %s', { |
|
182 | log.debug( | |
188 | 'acl_repo_name': self.acl_repo_name, |
|
183 | "Setting all VCS repository names: %s", | |
189 | 'url_repo_name': self.url_repo_name, |
|
184 | { | |
190 |
|
|
185 | "acl_repo_name": self.acl_repo_name, | |
191 | }) |
|
186 | "url_repo_name": self.url_repo_name, | |
|
187 | "vcs_repo_name": self.vcs_repo_name, | |||
|
188 | }, | |||
|
189 | ) | |||
192 |
|
190 | |||
193 | @property |
|
191 | @property | |
194 | def scm_app(self): |
|
192 | def scm_app(self): | |
195 | custom_implementation = self.config['vcs.scm_app_implementation'] |
|
193 | custom_implementation = self.config["vcs.scm_app_implementation"] | |
196 | if custom_implementation == 'http': |
|
194 | if custom_implementation == "http": | |
197 | log.debug('Using HTTP implementation of scm app.') |
|
195 | log.debug("Using HTTP implementation of scm app.") | |
198 | scm_app_impl = scm_app_http |
|
196 | scm_app_impl = scm_app_http | |
199 | else: |
|
197 | else: | |
200 | log.debug('Using custom implementation of scm_app: "{}"'.format( |
|
198 | log.debug('Using custom implementation of scm_app: "{}"'.format(custom_implementation)) | |
201 | custom_implementation)) |
|
|||
202 | scm_app_impl = importlib.import_module(custom_implementation) |
|
199 | scm_app_impl = importlib.import_module(custom_implementation) | |
203 | return scm_app_impl |
|
200 | return scm_app_impl | |
204 |
|
201 | |||
@@ -208,17 +205,18 b' class SimpleVCS(object):' | |||||
208 | with a repository_name for support of _<ID> non changeable urls |
|
205 | with a repository_name for support of _<ID> non changeable urls | |
209 | """ |
|
206 | """ | |
210 |
|
207 | |||
211 | data = repo_name.split('/') |
|
208 | data = repo_name.split("/") | |
212 | if len(data) >= 2: |
|
209 | if len(data) >= 2: | |
213 | from rhodecode.model.repo import RepoModel |
|
210 | from rhodecode.model.repo import RepoModel | |
|
211 | ||||
214 | by_id_match = RepoModel().get_repo_by_id(repo_name) |
|
212 | by_id_match = RepoModel().get_repo_by_id(repo_name) | |
215 | if by_id_match: |
|
213 | if by_id_match: | |
216 | data[1] = by_id_match.repo_name |
|
214 | data[1] = by_id_match.repo_name | |
217 |
|
215 | |||
218 | # Because PEP-3333-WSGI uses bytes-tunneled-in-latin-1 as PATH_INFO |
|
216 | # Because PEP-3333-WSGI uses bytes-tunneled-in-latin-1 as PATH_INFO | |
219 | # and we use this data |
|
217 | # and we use this data | |
220 | maybe_new_path = '/'.join(data) |
|
218 | maybe_new_path = "/".join(data) | |
221 | return safe_bytes(maybe_new_path).decode('latin1') |
|
219 | return safe_bytes(maybe_new_path).decode("latin1") | |
222 |
|
220 | |||
223 | def _invalidate_cache(self, repo_name): |
|
221 | def _invalidate_cache(self, repo_name): | |
224 | """ |
|
222 | """ | |
@@ -231,21 +229,18 b' class SimpleVCS(object):' | |||||
231 | def is_valid_and_existing_repo(self, repo_name, base_path, scm_type): |
|
229 | def is_valid_and_existing_repo(self, repo_name, base_path, scm_type): | |
232 | db_repo = Repository.get_by_repo_name(repo_name) |
|
230 | db_repo = Repository.get_by_repo_name(repo_name) | |
233 | if not db_repo: |
|
231 | if not db_repo: | |
234 | log.debug('Repository `%s` not found inside the database.', |
|
232 | log.debug("Repository `%s` not found inside the database.", repo_name) | |
235 | repo_name) |
|
|||
236 | return False |
|
233 | return False | |
237 |
|
234 | |||
238 | if db_repo.repo_type != scm_type: |
|
235 | if db_repo.repo_type != scm_type: | |
239 | log.warning( |
|
236 | log.warning( | |
240 | 'Repository `%s` has incorrect scm_type, expected %s got %s', |
|
237 | "Repository `%s` have incorrect scm_type, expected %s got %s", repo_name, db_repo.repo_type, scm_type | |
241 | repo_name, db_repo.repo_type, scm_type) |
|
238 | ) | |
242 | return False |
|
239 | return False | |
243 |
|
240 | |||
244 | config = db_repo._config |
|
241 | config = db_repo._config | |
245 | config.set('extensions', 'largefiles', '') |
|
242 | config.set("extensions", "largefiles", "") | |
246 | return is_valid_repo( |
|
243 | return is_valid_repo(repo_name, base_path, explicit_scm=scm_type, expect_scm=scm_type, config=config) | |
247 | repo_name, base_path, |
|
|||
248 | explicit_scm=scm_type, expect_scm=scm_type, config=config) |
|
|||
249 |
|
244 | |||
250 | def valid_and_active_user(self, user): |
|
245 | def valid_and_active_user(self, user): | |
251 | """ |
|
246 | """ | |
@@ -267,8 +262,9 b' class SimpleVCS(object):' | |||||
267 | def is_shadow_repo_dir(self): |
|
262 | def is_shadow_repo_dir(self): | |
268 | return os.path.isdir(self.vcs_repo_name) |
|
263 | return os.path.isdir(self.vcs_repo_name) | |
269 |
|
264 | |||
270 | def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None, |
|
265 | def _check_permission( | |
271 |
|
|
266 | self, action, user, auth_user, repo_name, ip_addr=None, plugin_id="", plugin_cache_active=False, cache_ttl=0 | |
|
267 | ): | |||
272 | """ |
|
268 | """ | |
273 | Checks permissions using action (push/pull) user and repository |
|
269 | Checks permissions using action (push/pull) user and repository | |
274 | name. If plugin_cache and ttl is set it will use the plugin which |
|
270 | name. If plugin_cache and ttl is set it will use the plugin which | |
@@ -280,71 +276,67 b' class SimpleVCS(object):' | |||||
280 | :param repo_name: repository name |
|
276 | :param repo_name: repository name | |
281 | """ |
|
277 | """ | |
282 |
|
278 | |||
283 | log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)', |
|
279 | log.debug("AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)", plugin_id, plugin_cache_active, cache_ttl) | |
284 | plugin_id, plugin_cache_active, cache_ttl) |
|
|||
285 |
|
280 | |||
286 | user_id = user.user_id |
|
281 | user_id = user.user_id | |
287 | cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}' |
|
282 | cache_namespace_uid = f"cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}" | |
288 | region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid) |
|
283 | region = rc_cache.get_or_create_region("cache_perms", cache_namespace_uid) | |
289 |
|
284 | |||
290 | @region.conditional_cache_on_arguments( |
|
285 | @region.conditional_cache_on_arguments( | |
291 | expiration_time=cache_ttl, |
|
286 | namespace=cache_namespace_uid, expiration_time=cache_ttl, condition=plugin_cache_active | |
292 | condition=plugin_cache_active) |
|
287 | ) | |
293 | def compute_perm_vcs( |
|
288 | def compute_perm_vcs(cache_name, plugin_id, action, user_id, repo_name, ip_addr): | |
294 | cache_name, plugin_id, action, user_id, repo_name, ip_addr): |
|
289 | log.debug("auth: calculating permission access now for vcs operation: %s", action) | |
295 |
|
||||
296 | log.debug('auth: calculating permission access now for vcs operation: %s', action) |
|
|||
297 | # check IP |
|
290 | # check IP | |
298 | inherit = user.inherit_default_permissions |
|
291 | inherit = user.inherit_default_permissions | |
299 | ip_allowed = AuthUser.check_ip_allowed( |
|
292 | ip_allowed = AuthUser.check_ip_allowed(user_id, ip_addr, inherit_from_default=inherit) | |
300 | user_id, ip_addr, inherit_from_default=inherit) |
|
|||
301 | if ip_allowed: |
|
293 | if ip_allowed: | |
302 | log.info('Access for IP:%s allowed', ip_addr) |
|
294 | log.info("Access for IP:%s allowed", ip_addr) | |
303 | else: |
|
295 | else: | |
304 | return False |
|
296 | return False | |
305 |
|
297 | |||
306 | if action == 'push': |
|
298 | if action == "push": | |
307 | perms = ('repository.write', 'repository.admin') |
|
299 | perms = ("repository.write", "repository.admin") | |
308 | if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name): |
|
300 | if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name): | |
309 | return False |
|
301 | return False | |
310 |
|
302 | |||
311 | else: |
|
303 | else: | |
312 | # any other action need at least read permission |
|
304 | # any other action need at least read permission | |
313 | perms = ( |
|
305 | perms = ("repository.read", "repository.write", "repository.admin") | |
314 | 'repository.read', 'repository.write', 'repository.admin') |
|
|||
315 | if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name): |
|
306 | if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name): | |
316 | return False |
|
307 | return False | |
317 |
|
308 | |||
318 | return True |
|
309 | return True | |
319 |
|
310 | |||
320 | start = time.time() |
|
311 | start = time.time() | |
321 | log.debug('Running plugin `%s` permissions check', plugin_id) |
|
312 | log.debug("Running plugin `%s` permissions check", plugin_id) | |
322 |
|
313 | |||
323 | # for environ based auth, password can be empty, but then the validation is |
|
314 | # for environ based auth, password can be empty, but then the validation is | |
324 | # on the server that fills in the env data needed for authentication |
|
315 | # on the server that fills in the env data needed for authentication | |
325 | perm_result = compute_perm_vcs( |
|
316 | perm_result = compute_perm_vcs("vcs_permissions", plugin_id, action, user.user_id, repo_name, ip_addr) | |
326 | 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr) |
|
|||
327 |
|
317 | |||
328 | auth_time = time.time() - start |
|
318 | auth_time = time.time() - start | |
329 | log.debug('Permissions for plugin `%s` completed in %.4fs, ' |
|
319 | log.debug( | |
330 |
|
|
320 | "Permissions for plugin `%s` completed in %.4fs, " "expiration time of fetched cache %.1fs.", | |
331 |
|
|
321 | plugin_id, | |
|
322 | auth_time, | |||
|
323 | cache_ttl, | |||
|
324 | ) | |||
332 |
|
325 | |||
333 | return perm_result |
|
326 | return perm_result | |
334 |
|
327 | |||
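Note: a condensed view of the VCS permission matrix that `compute_perm_vcs` enforces above (sketch only; the permission names are taken from the surrounding code):

    VCS_ACTION_PERMS = {
        'push': ('repository.write', 'repository.admin'),
        # any non-push action needs at least read access
        'pull': ('repository.read', 'repository.write', 'repository.admin'),
    }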
335 | def _get_http_scheme(self, environ): |
|
328 | def _get_http_scheme(self, environ): | |
336 | try: |
|
329 | try: | |
337 | return environ['wsgi.url_scheme'] |
|
330 | return environ["wsgi.url_scheme"] | |
338 | except Exception: |
|
331 | except Exception: | |
339 | log.exception('Failed to read http scheme') |
|
332 | log.exception("Failed to read http scheme") | |
340 | return 'http' |
|
333 | return "http" | |
341 |
|
334 | |||
342 | def _get_default_cache_ttl(self): |
|
335 | def _get_default_cache_ttl(self): | |
343 | # take AUTH_CACHE_TTL from the `rhodecode` auth plugin |
|
336 | # take AUTH_CACHE_TTL from the `rhodecode` auth plugin | |
344 | plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode') |
|
337 | plugin = loadplugin("egg:rhodecode-enterprise-ce#rhodecode") | |
345 | plugin_settings = plugin.get_settings() |
|
338 | plugin_settings = plugin.get_settings() | |
346 | plugin_cache_active, cache_ttl = plugin.get_ttl_cache( |
|
339 | plugin_cache_active, cache_ttl = plugin.get_ttl_cache(plugin_settings) or (False, 0) | |
347 | plugin_settings) or (False, 0) |
|
|||
348 | return plugin_cache_active, cache_ttl |
|
340 | return plugin_cache_active, cache_ttl | |
349 |
|
341 | |||
350 | def __call__(self, environ, start_response): |
|
342 | def __call__(self, environ, start_response): | |
@@ -359,17 +351,17 b' class SimpleVCS(object):' | |||||
359 |
|
351 | |||
360 | def _handle_request(self, environ, start_response): |
|
352 | def _handle_request(self, environ, start_response): | |
361 | if not self.url_repo_name: |
|
353 | if not self.url_repo_name: | |
362 | log.warning('Repository name is empty: %s', self.url_repo_name) |
|
354 | log.warning("Repository name is empty: %s", self.url_repo_name) | |
363 | # failed to get repo name, we fail now |
|
355 | # failed to get repo name, we fail now | |
364 | return HTTPNotFound()(environ, start_response) |
|
356 | return HTTPNotFound()(environ, start_response) | |
365 | log.debug('Extracted repo name is %s', self.url_repo_name) |
|
357 | log.debug("Extracted repo name is %s", self.url_repo_name) | |
366 |
|
358 | |||
367 | ip_addr = get_ip_addr(environ) |
|
359 | ip_addr = get_ip_addr(environ) | |
368 | user_agent = get_user_agent(environ) |
|
360 | user_agent = get_user_agent(environ) | |
369 | username = None |
|
361 | username = None | |
370 |
|
362 | |||
371 | # skip passing error to error controller |
|
363 | # skip passing error to error controller | |
372 | environ['pylons.status_code_redirect'] = True |
|
364 | environ["pylons.status_code_redirect"] = True | |
373 |
|
365 | |||
374 | # ====================================================================== |
|
366 | # ====================================================================== | |
375 | # GET ACTION PULL or PUSH |
|
367 | # GET ACTION PULL or PUSH | |
@@ -380,17 +372,15 b' class SimpleVCS(object):' | |||||
380 | # Check if this is a request to a shadow repository of a pull request. |
|
372 | # Check if this is a request to a shadow repository of a pull request. | |
381 | # In this case only pull action is allowed. |
|
373 | # In this case only pull action is allowed. | |
382 | # ====================================================================== |
|
374 | # ====================================================================== | |
383 | if self.is_shadow_repo and action != 'pull': |
|
375 | if self.is_shadow_repo and action != "pull": | |
384 | reason = 'Only pull action is allowed for shadow repositories.' |
|
376 | reason = "Only pull action is allowed for shadow repositories." | |
385 | log.debug('User not allowed to proceed, %s', reason) |
|
377 | log.debug("User not allowed to proceed, %s", reason) | |
386 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
378 | return HTTPNotAcceptable(reason)(environ, start_response) | |
387 |
|
379 | |||
388 | # Check if the shadow repo actually exists, in case someone refers |
|
380 | # Check if the shadow repo actually exists, in case someone refers | |
389 | # to it, and it has been deleted because of successful merge. |
|
381 | # to it, and it has been deleted because of successful merge. | |
390 | if self.is_shadow_repo and not self.is_shadow_repo_dir: |
|
382 | if self.is_shadow_repo and not self.is_shadow_repo_dir: | |
391 | log.debug( |
|
383 | log.debug("Shadow repo detected, and shadow repo dir `%s` is missing", self.is_shadow_repo_dir) | |
392 | 'Shadow repo detected, and shadow repo dir `%s` is missing', |
|
|||
393 | self.is_shadow_repo_dir) |
|
|||
394 | return HTTPNotFound()(environ, start_response) |
|
384 | return HTTPNotFound()(environ, start_response) | |
395 |
|
385 | |||
396 | # ====================================================================== |
|
386 | # ====================================================================== | |
@@ -398,7 +388,7 b' class SimpleVCS(object):' | |||||
398 | # ====================================================================== |
|
388 | # ====================================================================== | |
399 | detect_force_push = False |
|
389 | detect_force_push = False | |
400 | check_branch_perms = False |
|
390 | check_branch_perms = False | |
401 | if action in ['pull', 'push']: |
|
391 | if action in ["pull", "push"]: | |
402 | user_obj = anonymous_user = User.get_default_user() |
|
392 | user_obj = anonymous_user = User.get_default_user() | |
403 | auth_user = user_obj.AuthUser() |
|
393 | auth_user = user_obj.AuthUser() | |
404 | username = anonymous_user.username |
|
394 | username = anonymous_user.username | |
@@ -406,8 +396,12 b' class SimpleVCS(object):' | |||||
406 | plugin_cache_active, cache_ttl = self._get_default_cache_ttl() |
|
396 | plugin_cache_active, cache_ttl = self._get_default_cache_ttl() | |
407 | # ONLY check permissions if the user is activated |
|
397 | # ONLY check permissions if the user is activated | |
408 | anonymous_perm = self._check_permission( |
|
398 | anonymous_perm = self._check_permission( | |
409 | action, anonymous_user, auth_user, self.acl_repo_name, ip_addr, |
|
399 | action, | |
410 |
|
|
400 | anonymous_user, | |
|
401 | auth_user, | |||
|
402 | self.acl_repo_name, | |||
|
403 | ip_addr, | |||
|
404 | plugin_id="anonymous_access", | |||
411 | plugin_cache_active=plugin_cache_active, |
|
405 | plugin_cache_active=plugin_cache_active, | |
412 | cache_ttl=cache_ttl, |
|
406 | cache_ttl=cache_ttl, | |
413 | ) |
|
407 | ) | |
@@ -416,12 +410,13 b' class SimpleVCS(object):' | |||||
416 |
|
410 | |||
417 | if not anonymous_user.active or not anonymous_perm: |
|
411 | if not anonymous_user.active or not anonymous_perm: | |
418 | if not anonymous_user.active: |
|
412 | if not anonymous_user.active: | |
419 | log.debug('Anonymous access is disabled, running ' |
|
413 | log.debug("Anonymous access is disabled, running " "authentication") | |
420 | 'authentication') |
|
|||
421 |
|
414 | |||
422 | if not anonymous_perm: |
|
415 | if not anonymous_perm: | |
423 | log.debug('Not enough credentials to access repo: `%s` ' |
|
416 | log.debug( | |
424 |
|
|
417 | "Not enough credentials to access repo: `%s` " "repository as anonymous user", | |
|
418 | self.acl_repo_name, | |||
|
419 | ) | |||
425 |
|
420 | |||
426 | username = None |
|
421 | username = None | |
427 | # ============================================================== |
|
422 | # ============================================================== | |
@@ -430,19 +425,18 b' class SimpleVCS(object):' | |||||
430 | # ============================================================== |
|
425 | # ============================================================== | |
431 |
|
426 | |||
432 | # try to auth based on environ, container auth methods |
|
427 | # try to auth based on environ, container auth methods | |
433 | log.debug('Running PRE-AUTH for container|headers based authentication') |
|
428 | log.debug("Running PRE-AUTH for container|headers based authentication") | |
434 |
|
429 | |||
435 | # headers auth, by just reading special headers and bypass the auth with user/passwd |
|
430 | # headers auth, by just reading special headers and bypass the auth with user/passwd | |
436 | pre_auth = authenticate( |
|
431 | pre_auth = authenticate( | |
437 |
|
|
432 | "", "", environ, VCS_TYPE, registry=self.registry, acl_repo_name=self.acl_repo_name | |
438 | acl_repo_name=self.acl_repo_name) |
|
433 | ) | |
439 |
|
434 | |||
440 |
if pre_auth and pre_auth.get( |
|
435 | if pre_auth and pre_auth.get("username"): | |
441 |
username = pre_auth[ |
|
436 | username = pre_auth["username"] | |
442 |
log.debug( |
|
437 | log.debug("PRE-AUTH got `%s` as username", username) | |
443 | if pre_auth: |
|
438 | if pre_auth: | |
444 |
log.debug( |
|
439 | log.debug("PRE-AUTH successful from %s", pre_auth.get("auth_data", {}).get("_plugin")) | |
445 | pre_auth.get('auth_data', {}).get('_plugin')) |
|
|||
446 |
|
440 | |||
447 | # If not authenticated by the container, running basic auth |
|
441 | # If not authenticated by the container, running basic auth | |
448 | # before inject the calling repo_name for special scope checks |
|
442 | # before inject the calling repo_name for special scope checks | |
@@ -463,16 +457,16 b' class SimpleVCS(object):' | |||||
463 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
457 | return HTTPNotAcceptable(reason)(environ, start_response) | |
464 |
|
458 | |||
465 | if isinstance(auth_result, dict): |
|
459 | if isinstance(auth_result, dict): | |
466 | AUTH_TYPE.update(environ, 'basic') |
|
460 | AUTH_TYPE.update(environ, "basic") | |
467 | REMOTE_USER.update(environ, auth_result['username']) |
|
461 | REMOTE_USER.update(environ, auth_result["username"]) | |
468 | username = auth_result['username'] |
|
462 | username = auth_result["username"] | |
469 | plugin = auth_result.get('auth_data', {}).get('_plugin') |
|
463 | plugin = auth_result.get("auth_data", {}).get("_plugin") | |
470 | log.info( |
|
464 | log.info("MAIN-AUTH successful for user `%s` from %s plugin", username, plugin) | |
471 | 'MAIN-AUTH successful for user `%s` from %s plugin', |
|
|||
472 | username, plugin) |
|
|||
473 |
|
465 | |||
474 | plugin_cache_active, cache_ttl = auth_result.get( |
|
466 | plugin_cache_active, cache_ttl = auth_result.get("auth_data", {}).get("_ttl_cache") or ( | |
475 |
|
|
467 | False, | |
|
468 | 0, | |||
|
469 | ) | |||
476 | else: |
|
470 | else: | |
477 | return auth_result.wsgi_application(environ, start_response) |
|
471 | return auth_result.wsgi_application(environ, start_response) | |
478 |
|
472 | |||
@@ -488,21 +482,24 b' class SimpleVCS(object):' | |||||
488 | # check user attributes for password change flag |
|
482 | # check user attributes for password change flag | |
489 | user_obj = user |
|
483 | user_obj = user | |
490 | auth_user = user_obj.AuthUser() |
|
484 | auth_user = user_obj.AuthUser() | |
491 | if user_obj and user_obj.username != User.DEFAULT_USER and \ |
|
485 | if ( | |
492 | user_obj.user_data.get('force_password_change'): |
|
486 | user_obj | |
493 | reason = 'password change required' |
|
487 | and user_obj.username != User.DEFAULT_USER | |
494 | log.debug('User not allowed to authenticate, %s', reason) |
|
488 | and user_obj.user_data.get("force_password_change") | |
|
489 | ): | |||
|
490 | reason = "password change required" | |||
|
491 | log.debug("User not allowed to authenticate, %s", reason) | |||
495 | return HTTPNotAcceptable(reason)(environ, start_response) |
|
492 | return HTTPNotAcceptable(reason)(environ, start_response) | |
496 |
|
493 | |||
497 | # check permissions for this repository |
|
494 | # check permissions for this repository | |
498 | perm = self._check_permission( |
|
495 | perm = self._check_permission( | |
499 | action, user, auth_user, self.acl_repo_name, ip_addr, |
|
496 | action, user, auth_user, self.acl_repo_name, ip_addr, plugin, plugin_cache_active, cache_ttl | |
500 | plugin, plugin_cache_active, cache_ttl) |
|
497 | ) | |
501 | if not perm: |
|
498 | if not perm: | |
502 | return HTTPForbidden()(environ, start_response) |
|
499 | return HTTPForbidden()(environ, start_response) | |
503 | environ['rc_auth_user_id'] = str(user_id) |
|
500 | environ["rc_auth_user_id"] = str(user_id) | |
504 |
|
501 | |||
505 | if action == 'push': |
|
502 | if action == "push": | |
506 | perms = auth_user.get_branch_permissions(self.acl_repo_name) |
|
503 | perms = auth_user.get_branch_permissions(self.acl_repo_name) | |
507 | if perms: |
|
504 | if perms: | |
508 | check_branch_perms = True |
|
505 | check_branch_perms = True | |
@@ -510,41 +507,48 b' class SimpleVCS(object):' | |||||
510 |
|
507 | |||
511 | # extras are injected into UI object and later available |
|
508 | # extras are injected into UI object and later available | |
512 | # in hooks executed by RhodeCode |
|
509 | # in hooks executed by RhodeCode | |
513 | check_locking = _should_check_locking(environ.get('QUERY_STRING')) |
|
510 | check_locking = _should_check_locking(environ.get("QUERY_STRING")) | |
514 |
|
511 | |||
515 | extras = vcs_operation_context( |
|
512 | extras = vcs_operation_context( | |
516 | environ, repo_name=self.acl_repo_name, username=username, |
|
513 | environ, | |
517 | action=action, scm=self.SCM, check_locking=check_locking, |
|
514 | repo_name=self.acl_repo_name, | |
518 | is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms, |
|
515 | username=username, | |
519 | detect_force_push=detect_force_push |
|
516 | action=action, | |
|
517 | scm=self.SCM, | |||
|
518 | check_locking=check_locking, | |||
|
519 | is_shadow_repo=self.is_shadow_repo, | |||
|
520 | check_branch_perms=check_branch_perms, | |||
|
521 | detect_force_push=detect_force_push, | |||
520 | ) |
|
522 | ) | |
521 |
|
523 | |||
522 | # ====================================================================== |
|
524 | # ====================================================================== | |
523 | # REQUEST HANDLING |
|
525 | # REQUEST HANDLING | |
524 | # ====================================================================== |
|
526 | # ====================================================================== | |
525 | repo_path = os.path.join( |
|
527 | repo_path = os.path.join(safe_str(self.base_path), safe_str(self.vcs_repo_name)) | |
526 | safe_str(self.base_path), safe_str(self.vcs_repo_name)) |
|
528 | log.debug("Repository path is %s", repo_path) | |
527 | log.debug('Repository path is %s', repo_path) |
|
|||
528 |
|
529 | |||
529 | fix_PATH() |
|
530 | fix_PATH() | |
530 |
|
531 | |||
531 | log.info( |
|
532 | log.info( | |
532 | '%s action on %s repo "%s" by "%s" from %s %s', |
|
533 | '%s action on %s repo "%s" by "%s" from %s %s', | |
533 | action, self.SCM, safe_str(self.url_repo_name), |
|
534 | action, | |
534 | safe_str(username), ip_addr, user_agent) |
|
535 | self.SCM, | |
|
536 | safe_str(self.url_repo_name), | |||
|
537 | safe_str(username), | |||
|
538 | ip_addr, | |||
|
539 | user_agent, | |||
|
540 | ) | |||
535 |
|
541 | |||
536 | return self._generate_vcs_response( |
|
542 | return self._generate_vcs_response(environ, start_response, repo_path, extras, action) | |
537 | environ, start_response, repo_path, extras, action) |
|
|||
538 |
|
543 | |||
539 | def _get_txn_id(self, environ): |
|
544 | def _get_txn_id(self, environ): | |
540 |

|||
541 | for k in ['RAW_URI', 'HTTP_DESTINATION']: |

545 | for k in ["RAW_URI", "HTTP_DESTINATION"]: | |
542 | url = environ.get(k) |
|
546 | url = environ.get(k) | |
543 | if not url: |
|
547 | if not url: | |
544 | continue |
|
548 | continue | |
545 |
|
549 | |||
546 | # regex to search for svn-txn-id |
|
550 | # regex to search for svn-txn-id | |
547 |
pattern = r |
|
551 | pattern = r"/!svn/txr/([^/]+)/" | |
548 |
|
552 | |||
549 | # Search for the pattern in the URL |
|
553 | # Search for the pattern in the URL | |
550 | match = re.search(pattern, url) |
|
554 | match = re.search(pattern, url) | |
@@ -555,8 +559,7 b' class SimpleVCS(object):' | |||||
555 | return txn_id |
|
559 | return txn_id | |
556 |
|
560 | |||
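Note: an illustrative extraction with the `/!svn/txr/<txn_id>/` pattern used by `_get_txn_id` above (the URL is made up):

    import re

    url = '/repo-name/!svn/txr/123-4abc/some/path'
    match = re.search(r'/!svn/txr/([^/]+)/', url)
    assert match and match.group(1) == '123-4abc'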
557 | @initialize_generator |
|
561 | @initialize_generator | |
558 | def _generate_vcs_response( |
|
562 | def _generate_vcs_response(self, environ, start_response, repo_path, extras, action): | |
559 | self, environ, start_response, repo_path, extras, action): |
|
|||
560 | """ |
|
563 | """ | |
561 | Returns a generator for the response content. |
|
564 | Returns a generator for the response content. | |
562 |
|
565 | |||
@@ -565,24 +568,20 b' class SimpleVCS(object):' | |||||
565 | also handles the locking exceptions which will be triggered when |
|
568 | also handles the locking exceptions which will be triggered when | |
566 | the first chunk is produced by the underlying WSGI application. |
|
569 | the first chunk is produced by the underlying WSGI application. | |
567 | """ |
|
570 | """ | |
568 |
svn_txn_id = |
|
571 | svn_txn_id = "" | |
569 |
if action == |
|
572 | if action == "push": | |
570 | svn_txn_id = self._get_txn_id(environ) |
|
573 | svn_txn_id = self._get_txn_id(environ) | |
571 |
|
574 | |||
572 | callback_daemon, extras = self._prepare_callback_daemon( |
|
575 | callback_daemon, extras = self._prepare_callback_daemon(extras, environ, action, txn_id=svn_txn_id) | |
573 | extras, environ, action, txn_id=svn_txn_id) |
|
|||
574 |
|
576 | |||
575 | if svn_txn_id: |
|
577 | if svn_txn_id: | |
576 |
|
||||
577 | port = safe_int(extras['hooks_uri'].split(':')[-1]) |
|
|||
578 | txn_id_data = extras.copy() |
|
578 | txn_id_data = extras.copy() | |
579 | txn_id_data.update({ |
|
579 | txn_id_data.update({"req_method": environ["REQUEST_METHOD"]}) | |
580 | txn_id_data.update({'req_method': environ['REQUEST_METHOD']}) |
|
|||
581 |
|
580 | |||
582 | full_repo_path = repo_path |
|
581 | full_repo_path = repo_path | |
583 | store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data) |
|
582 | store_txn_id_data(full_repo_path, svn_txn_id, txn_id_data) | |
584 |
|
583 | |||
585 |
log.debug( |
|
584 | log.debug("HOOKS extras is %s", extras) | |
586 |
|
585 | |||
587 | http_scheme = self._get_http_scheme(environ) |
|
586 | http_scheme = self._get_http_scheme(environ) | |
588 |
|
587 | |||
@@ -609,7 +608,7 b' class SimpleVCS(object):' | |||||
609 |
|
608 | |||
610 | try: |
|
609 | try: | |
611 | # invalidate cache on push |
|
610 | # invalidate cache on push | |
612 | if action == 'push': |
|
611 | if action == "push": | |
613 | self._invalidate_cache(self.url_repo_name) |
|
612 | self._invalidate_cache(self.url_repo_name) | |
614 | finally: |
|
613 | finally: | |
615 | meta.Session.remove() |
|
614 | meta.Session.remove() | |
@@ -632,12 +631,12 b' class SimpleVCS(object):' | |||||
632 | """Return the WSGI app that will finally handle the request.""" |
|
631 | """Return the WSGI app that will finally handle the request.""" | |
633 | raise NotImplementedError() |
|
632 | raise NotImplementedError() | |
634 |
|
633 | |||
635 | def _create_config(self, extras, repo_name, scheme='http'): |
|
634 | def _create_config(self, extras, repo_name, scheme="http"): | |
636 | """Create a safe config representation.""" |
|
635 | """Create a safe config representation.""" | |
637 | raise NotImplementedError() |
|
636 | raise NotImplementedError() | |
638 |
|
637 | |||
639 | def _should_use_callback_daemon(self, extras, environ, action): |
|
638 | def _should_use_callback_daemon(self, extras, environ, action): | |
640 | if extras.get('is_shadow_repo'): |
|
639 | if extras.get("is_shadow_repo"): | |
641 | # we don't want to execute hooks, and callback daemon for shadow repos |
|
640 | # we don't want to execute hooks, and callback daemon for shadow repos | |
642 | return False |
|
641 | return False | |
643 | return True |
|
642 | return True | |
@@ -647,11 +646,9 b' class SimpleVCS(object):' | |||||
647 |
|
646 | |||
648 | if not self._should_use_callback_daemon(extras, environ, action): |
|
647 | if not self._should_use_callback_daemon(extras, environ, action): | |
649 | # disable callback daemon for actions that don't require it |
|
648 | # disable callback daemon for actions that don't require it | |
650 | protocol = 'local' |
|
649 | protocol = "local" | |
651 |
|
650 | |||
652 | return prepare_callback_daemon( |
|
651 | return prepare_callback_daemon(extras, protocol=protocol, txn_id=txn_id) | |
653 | extras, protocol=protocol, |
|
|||
654 | host=vcs_settings.HOOKS_HOST, txn_id=txn_id) |
|
|||
655 |
|
652 | |||
656 |
|
653 | |||
657 | def _should_check_locking(query_string): |
|
654 | def _should_check_locking(query_string): | |
@@ -659,4 +656,4 b' def _should_check_locking(query_string):' | |||||
659 | # server see all operation on commit; bookmarks, phases and |
|
656 | # server see all operation on commit; bookmarks, phases and | |
660 | # obsolescence marker in different transaction, we don't want to check |
|
657 | # obsolescence marker in different transaction, we don't want to check | |
661 | # locking on those |
|
658 | # locking on those | |
662 | return query_string not in ['cmd=listkeys'] |
|
659 | return query_string not in ["cmd=listkeys"] |
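Note: a quick illustration of the guard above; Mercurial sends bookmarks, phases and obsolescence markers as separate `listkeys` transactions, which are exempt from lock checks:

    assert _should_check_locking('cmd=listkeys') is False
    assert _should_check_locking('cmd=unbundle') is True  # pushes are checked
    assert _should_check_locking(None) is True            # anything else too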
@@ -21,6 +21,7 b' Utilities library for RhodeCode' | |||||
21 | """ |
|
21 | """ | |
22 |
|
22 | |||
23 | import datetime |
|
23 | import datetime | |
|
24 | import importlib | |||
24 |
|
25 | |||
25 | import decorator |
|
26 | import decorator | |
26 | import logging |
|
27 | import logging | |
@@ -42,8 +43,9 b' from webhelpers2.text import collapse, s' | |||||
42 |
|
43 | |||
43 | from mako import exceptions |
|
44 | from mako import exceptions | |
44 |
|
45 | |||
|
46 | import rhodecode | |||
45 | from rhodecode import ConfigGet |
|
47 | from rhodecode import ConfigGet | |
46 | from rhodecode.lib.exceptions import HTTPBranchProtected, HTTPLockedRC |
|
48 | from rhodecode.lib.exceptions import HTTPBranchProtected, HTTPLockedRepo, ClientNotSupported | |
47 | from rhodecode.lib.hash_utils import sha256_safe, md5, sha1 |
|
49 | from rhodecode.lib.hash_utils import sha256_safe, md5, sha1 | |
48 | from rhodecode.lib.type_utils import AttributeDict |
|
50 | from rhodecode.lib.type_utils import AttributeDict | |
49 | from rhodecode.lib.str_utils import safe_bytes, safe_str |
|
51 | from rhodecode.lib.str_utils import safe_bytes, safe_str | |
@@ -86,6 +88,7 b' def adopt_for_celery(func):' | |||||
86 | @wraps(func) |
|
88 | @wraps(func) | |
87 | def wrapper(extras): |
|
89 | def wrapper(extras): | |
88 | extras = AttributeDict(extras) |
|
90 | extras = AttributeDict(extras) | |
|
91 | ||||
89 | try: |
|
92 | try: | |
90 | # HooksResponse implements to_json method which must be used there. |
|
93 | # HooksResponse implements to_json method which must be used there. | |
91 | return func(extras).to_json() |
|
94 | return func(extras).to_json() | |
@@ -100,7 +103,18 b' def adopt_for_celery(func):' | |||||
100 | 'exception_args': error_args, |
|
103 | 'exception_args': error_args, | |
101 | 'exception_traceback': '', |
|
104 | 'exception_traceback': '', | |
102 | } |
|
105 | } | |
103 | except (HTTPBranchProtected, HTTPLockedRC) as error: |
|
106 | except ClientNotSupported as error: | |
|
107 | # Those special cases don't need error reporting. It's a case of | |||
|
108 | # an unsupported or incorrect VCS client | |||
|
109 | error_args = error.args | |||
|
110 | return { | |||
|
111 | 'status': error.code, | |||
|
112 | 'output': error.explanation, | |||
|
113 | 'exception': type(error).__name__, | |||
|
114 | 'exception_args': error_args, | |||
|
115 | 'exception_traceback': '', | |||
|
116 | } | |||
|
117 | except HTTPLockedRepo as error: | |||
104 | # Those special cases don't need error reporting. It's a case of |
|
118 | # Those special cases don't need error reporting. It's a case of | |
105 | # locked repo or protected branch |
|
119 | # locked repo or protected branch | |
106 | error_args = error.args |
|
120 | error_args = error.args | |
@@ -117,7 +131,7 b' def adopt_for_celery(func):' | |||||
117 | 'output': '', |
|
131 | 'output': '', | |
118 | 'exception': type(e).__name__, |
|
132 | 'exception': type(e).__name__, | |
119 | 'exception_args': e.args, |
|
133 | 'exception_args': e.args, | |
120 | 'exception_traceback': '', |
|
134 | 'exception_traceback': traceback.format_exc(), | |
121 | } |
|
135 | } | |
122 | return wrapper |
|
136 | return wrapper | |
123 |
|
137 | |||
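Note: with `adopt_for_celery`, expected rejections cross the Celery boundary as plain dicts rather than raised exceptions; an illustrative payload for a locked-repo case (all values are made up):

    {
        'status': 2,                        # code carried by the exception
        'output': 'Repository `foo` locked by user `bar`',
        'exception': 'HTTPLockedRepo',
        'exception_args': ('...',),
        'exception_traceback': '',          # empty: an expected condition
    }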
@@ -411,6 +425,10 b' def prepare_config_data(clear_session=Tr' | |||||
411 | ('web', 'push_ssl', 'false'), |
|
425 | ('web', 'push_ssl', 'false'), | |
412 | ] |
|
426 | ] | |
413 | for setting in ui_settings: |
|
427 | for setting in ui_settings: | |
|
428 | # skip certain deprecated keys that might be still in DB | |||
|
429 | if f"{setting.section}_{setting.key}" in ['extensions_hgsubversion']: | |||
|
430 | continue | |||
|
431 | ||||
414 | # Todo: remove this section once transition to *.ini files will be completed |
|
432 | # Todo: remove this section once transition to *.ini files will be completed | |
415 | if setting.section in ('largefiles', 'vcs_git_lfs'): |
|
433 | if setting.section in ('largefiles', 'vcs_git_lfs'): | |
416 | if setting.key != 'enabled': |
|
434 | if setting.key != 'enabled': | |
@@ -686,22 +704,41 b' def repo2db_mapper(initial_repo_list, re' | |||||
686 |
|
704 | |||
687 | return added, removed |
|
705 | return added, removed | |
688 |
|
706 | |||
|
707 | def deep_reload_package(package_name): | |||
|
708 | """ | |||
|
709 | Deeply reload a package by removing it and its submodules from sys.modules, | |||
|
710 | then re-importing it. | |||
|
711 | """ | |||
|
712 | # Remove the package and its submodules from sys.modules | |||
|
713 | to_reload = [name for name in sys.modules if name == package_name or name.startswith(package_name + ".")] | |||
|
714 | for module_name in to_reload: | |||
|
715 | del sys.modules[module_name] | |||
|
716 | log.debug(f"Removed module from cache: {module_name}") | |||
|
717 | ||||
|
718 | # Re-import the package | |||
|
719 | package = importlib.import_module(package_name) | |||
|
720 | log.debug(f"Re-imported package: {package_name}") | |||
|
721 | ||||
|
722 | return package | |||
689 |
|
723 | |||
690 | def load_rcextensions(root_path): |
|
724 | def load_rcextensions(root_path): | |
691 | import rhodecode |
|
725 | import rhodecode | |
692 | from rhodecode.config import conf |
|
726 | from rhodecode.config import conf | |
693 |
|
727 | |||
694 | path = os.path.join(root_path) |
|
728 | path = os.path.join(root_path) | |
695 | sys.path.append(path) |
|
729 | deep_reload = path in sys.path | |
|
730 | sys.path.insert(0, path) | |||
696 |
|
731 | |||
697 | try: |
|
732 | try: | |
698 | rcextensions = __import__('rcextensions') |
|
733 | rcextensions = __import__('rcextensions', fromlist=['']) | |
699 | except ImportError: |
|
734 | except ImportError: | |
700 | if os.path.isdir(os.path.join(path, 'rcextensions')): |
|
735 | if os.path.isdir(os.path.join(path, 'rcextensions')): | |
701 | log.warning('Unable to load rcextensions from %s', path) |
|
736 | log.warning('Unable to load rcextensions from %s', path) | |
702 | rcextensions = None |
|
737 | rcextensions = None | |
703 |
|
738 | |||
704 | if rcextensions: |
|
739 | if rcextensions: | |
|
740 | if deep_reload: | |||
|
741 | rcextensions = deep_reload_package('rcextensions') | |||
705 | log.info('Loaded rcextensions from %s...', rcextensions) |
|
742 | log.info('Loaded rcextensions from %s...', rcextensions) | |
706 | rhodecode.EXTENSIONS = rcextensions |
|
743 | rhodecode.EXTENSIONS = rcextensions | |
707 |
|
744 | |||
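Note: the loader now prefers the given path (`sys.path.insert(0, path)`) and, when that path was already importable, purges and re-imports the package; an illustrative sequence (the path is made up):

    load_rcextensions('/etc/rhodecode')  # first call: regular import
    load_rcextensions('/etc/rhodecode')  # second call: deep_reload_package
                                         # drops cached 'rcextensions*' modules
                                         # and re-imports, picking up edits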
@@ -741,6 +778,7 b' def create_test_index(repo_location, con' | |||||
741 | except ImportError: |
|
778 | except ImportError: | |
742 | raise ImportError('Failed to import rc_testdata, ' |
|
779 | raise ImportError('Failed to import rc_testdata, ' | |
743 | 'please make sure this package is installed from requirements_test.txt') |
|
780 | 'please make sure this package is installed from requirements_test.txt') | |
|
781 | ||||
744 | rc_testdata.extract_search_index( |
|
782 | rc_testdata.extract_search_index( | |
745 | 'vcs_search_index', os.path.dirname(config['search.location'])) |
|
783 | 'vcs_search_index', os.path.dirname(config['search.location'])) | |
746 |
|
784 | |||
@@ -785,22 +823,15 b' def create_test_repositories(test_path, ' | |||||
785 | Creates test repositories in the temporary directory. Repositories are |
|
823 | Creates test repositories in the temporary directory. Repositories are | |
786 | extracted from archives within the rc_testdata package. |
|
824 | extracted from archives within the rc_testdata package. | |
787 | """ |
|
825 | """ | |
788 | import rc_testdata |
|
826 | try: | |
|
827 | import rc_testdata | |||
|
828 | except ImportError: | |||
|
829 | raise ImportError('Failed to import rc_testdata, ' | |||
|
830 | 'please make sure this package is installed from requirements_test.txt') | |||
|
831 | ||||
789 | from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO |
|
832 | from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO | |
790 |
|
833 | |||
791 | log.debug('making test vcs repositories') |
|
834 | log.debug('making test vcs repositories at %s', test_path) | |
792 |
|
||||
793 | idx_path = config['search.location'] |
|
|||
794 | data_path = config['cache_dir'] |
|
|||
795 |
|
||||
796 | # clean index and data |
|
|||
797 | if idx_path and os.path.exists(idx_path): |
|
|||
798 | log.debug('remove %s', idx_path) |
|
|||
799 | shutil.rmtree(idx_path) |
|
|||
800 |
|
||||
801 | if data_path and os.path.exists(data_path): |
|
|||
802 | log.debug('remove %s', data_path) |
|
|||
803 | shutil.rmtree(data_path) |
|
|||
804 |
|
835 | |||
805 | rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO)) |
|
836 | rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO)) | |
806 | rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO)) |
|
837 | rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO)) |
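A note on the change above: create_test_repositories gains the same guarded import that create_test_index already performs, and the index/cache cleanup moves out of this helper. The shared import pattern, as a sketch (helper name hypothetical):

    def import_rc_testdata():
        # rc_testdata ships only with the test requirements, so fail with a
        # hint instead of a bare ImportError.
        try:
            import rc_testdata
        except ImportError:
            raise ImportError(
                'Failed to import rc_testdata, please make sure this '
                'package is installed from requirements_test.txt')
        return rc_testdata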
@@ -140,7 +140,7 b' class CurlSession(object):' | |||||
140 | try: |
|
140 | try: | |
141 | curl.perform() |
|
141 | curl.perform() | |
142 | except pycurl.error as exc: |
|
142 | except pycurl.error as exc: | |
143 | log.error('Failed to call endpoint url: %s', url)
|
143 | log.error('Failed to call endpoint url: %s using pycurl', url)
144 | raise |
|
144 | raise | |
145 |
|
145 | |||
146 | status_code = curl.getinfo(pycurl.HTTP_CODE) |
|
146 | status_code = curl.getinfo(pycurl.HTTP_CODE) |
@@ -45,10 +45,3 b' def discover_git_version(raise_on_exc=Fa' | |||||
45 | if raise_on_exc: |
|
45 | if raise_on_exc: | |
46 | raise |
|
46 | raise | |
47 | return '' |
|
47 | return '' | |
48 |
|
||||
49 |
|
||||
50 | def lfs_store(base_location): |
|
|||
51 | """ |
|
|||
52 | Return a lfs store relative to base_location |
|
|||
53 | """ |
|
|||
54 | return os.path.join(base_location, '.cache', 'lfs_store') |
|
@@ -45,10 +45,3 b' def discover_hg_version(raise_on_exc=Fal' | |||||
45 | if raise_on_exc: |
|
45 | if raise_on_exc: | |
46 | raise |
|
46 | raise | |
47 | return '' |
|
47 | return '' | |
48 |
|
||||
49 |
|
||||
50 | def largefiles_store(base_location): |
|
|||
51 | """ |
|
|||
52 | Return a largefile store relative to base_location |
|
|||
53 | """ |
|
|||
54 | return os.path.join(base_location, '.cache', 'largefiles') |
|
@@ -216,7 +216,7 b' class RemoteRepo(object):' | |||||
216 | self._cache_region, self._cache_namespace = \ |
|
216 | self._cache_region, self._cache_namespace = \ | |
217 | remote_maker.init_cache_region(cache_repo_id) |
|
217 | remote_maker.init_cache_region(cache_repo_id) | |
218 |
|
218 | |||
219 | with_wire = with_wire or {} |
|
219 | with_wire = with_wire or {"cache": False} | |
220 |
|
220 | |||
221 | repo_state_uid = with_wire.get('repo_state_uid') or 'state' |
|
221 | repo_state_uid = with_wire.get('repo_state_uid') or 'state' | |
222 |
|
222 |
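A note on the one-line change above: the fallback for a missing with_wire moves from an empty dict to {"cache": False}, so an unconfigured remote repo now explicitly opts out of wire caching. Because the fallback uses `or`, an explicitly passed empty dict collapses to the same default; a behaviour sketch (function name hypothetical):

    def resolve_wire(with_wire=None):
        # Any falsy value (None, {}) collapses to the cache-disabled default.
        with_wire = with_wire or {"cache": False}
        repo_state_uid = with_wire.get('repo_state_uid') or 'state'
        return with_wire, repo_state_uid

    assert resolve_wire(None)[0] == {"cache": False}
    assert resolve_wire({})[0] == {"cache": False}
    assert resolve_wire({"cache": True})[0] == {"cache": True}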
@@ -373,6 +373,7 b' class CommentsModel(BaseModel):' | |||||
373 |
|
373 | |||
374 | Session().add(comment) |
|
374 | Session().add(comment) | |
375 | Session().flush() |
|
375 | Session().flush() | |
|
376 | ||||
376 | kwargs = { |
|
377 | kwargs = { | |
377 | 'user': user, |
|
378 | 'user': user, | |
378 | 'renderer_type': renderer, |
|
379 | 'renderer_type': renderer, | |
@@ -387,8 +388,7 b' class CommentsModel(BaseModel):' | |||||
387 | } |
|
388 | } | |
388 |
|
389 | |||
389 | if commit_obj: |
|
390 | if commit_obj: | |
390 | recipients = ChangesetComment.get_users( |
|
391 | recipients = ChangesetComment.get_users(revision=commit_obj.raw_id) | |
391 | revision=commit_obj.raw_id) |
|
|||
392 | # add commit author if it's in RhodeCode system |
|
392 | # add commit author if it's in RhodeCode system | |
393 | cs_author = User.get_from_cs_author(commit_obj.author) |
|
393 | cs_author = User.get_from_cs_author(commit_obj.author) | |
394 | if not cs_author: |
|
394 | if not cs_author: | |
@@ -397,16 +397,13 b' class CommentsModel(BaseModel):' | |||||
397 | recipients += [cs_author] |
|
397 | recipients += [cs_author] | |
398 |
|
398 | |||
399 | commit_comment_url = self.get_url(comment, request=request) |
|
399 | commit_comment_url = self.get_url(comment, request=request) | |
400 | commit_comment_reply_url = self.get_url( |
|
400 | commit_comment_reply_url = self.get_url(comment, request=request, anchor=f'comment-{comment.comment_id}/?/ReplyToComment') | |
401 | comment, request=request, |
|
|||
402 | anchor=f'comment-{comment.comment_id}/?/ReplyToComment') |
|
|||
403 |
|
401 | |||
404 | target_repo_url = h.link_to( |
|
402 | target_repo_url = h.link_to( | |
405 | repo.repo_name, |
|
403 | repo.repo_name, | |
406 | h.route_url('repo_summary', repo_name=repo.repo_name)) |
|
404 | h.route_url('repo_summary', repo_name=repo.repo_name)) | |
407 |
|
405 | |||
408 | commit_url = h.route_url('repo_commit', repo_name=repo.repo_name, |
|
406 | commit_url = h.route_url('repo_commit', repo_name=repo.repo_name, commit_id=commit_id) | |
409 | commit_id=commit_id) |
|
|||
410 |
|
407 | |||
411 | # commit specifics |
|
408 | # commit specifics | |
412 | kwargs.update({ |
|
409 | kwargs.update({ | |
@@ -489,7 +486,6 b' class CommentsModel(BaseModel):' | |||||
489 |
|
486 | |||
490 | if not is_draft: |
|
487 | if not is_draft: | |
491 | comment_data = comment.get_api_data() |
|
488 | comment_data = comment.get_api_data() | |
492 |
|
||||
493 | self._log_audit_action( |
|
489 | self._log_audit_action( | |
494 | action, {'data': comment_data}, auth_user, comment) |
|
490 | action, {'data': comment_data}, auth_user, comment) | |
495 |
|
491 |
@@ -38,7 +38,7 b' from rhodecode.translation import lazy_u' | |||||
38 | from rhodecode.lib import helpers as h, hooks_utils, diffs |
|
38 | from rhodecode.lib import helpers as h, hooks_utils, diffs | |
39 | from rhodecode.lib import audit_logger |
|
39 | from rhodecode.lib import audit_logger | |
40 | from collections import OrderedDict |
|
40 | from collections import OrderedDict | |
41 | from rhodecode.lib.hook_daemon.base import prepare_callback_daemon
|
41 | from rhodecode.lib.hook_daemon.utils import prepare_callback_daemon
42 | from rhodecode.lib.ext_json import sjson as json |
|
42 | from rhodecode.lib.ext_json import sjson as json | |
43 | from rhodecode.lib.markup_renderer import ( |
|
43 | from rhodecode.lib.markup_renderer import ( | |
44 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
44 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) | |
@@ -980,9 +980,7 b' class PullRequestModel(BaseModel):' | |||||
980 | target_ref = self._refresh_reference( |
|
980 | target_ref = self._refresh_reference( | |
981 | pull_request.target_ref_parts, target_vcs) |
|
981 | pull_request.target_ref_parts, target_vcs) | |
982 |
|
982 | |||
983 | callback_daemon, extras = prepare_callback_daemon( |
|
983 | callback_daemon, extras = prepare_callback_daemon(extras, protocol=vcs_settings.HOOKS_PROTOCOL) | |
984 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
|||
985 | host=vcs_settings.HOOKS_HOST) |
|
|||
986 |
|
984 | |||
987 | with callback_daemon: |
|
985 | with callback_daemon: | |
988 | # TODO: johbo: Implement a clean way to run a config_override |
|
986 | # TODO: johbo: Implement a clean way to run a config_override |
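A note on the call-site change above: the host kwarg disappears because the refactored callback daemon is no longer a network listener, so callers pass only the extras and the hook protocol. The new call shape, sketched (the wrapped action is a placeholder):

    from rhodecode.lib.hook_daemon.utils import prepare_callback_daemon

    def run_with_hooks(extras, protocol, do_vcs_action):
        # The returned daemon doubles as a context manager around whatever
        # VCS operation ends up firing the hooks.
        callback_daemon, extras = prepare_callback_daemon(extras, protocol=protocol)
        with callback_daemon:
            return do_vcs_action(extras)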
@@ -862,27 +862,3 b' class VcsSettingsModel(object):' | |||||
862 | raise ValueError( |
|
862 | raise ValueError( | |
863 | f'The given data does not contain {data_key} key') |
|
863 | f'The given data does not contain {data_key} key') | |
864 | return data_keys |
|
864 | return data_keys | |
865 |
|
||||
866 | def create_largeobjects_dirs_if_needed(self, repo_store_path): |
|
|||
867 | """ |
|
|||
868 | This is subscribed to the `pyramid.events.ApplicationCreated` event. It |
|
|||
869 | does a repository scan if enabled in the settings. |
|
|||
870 | """ |
|
|||
871 |
|
||||
872 | from rhodecode.lib.vcs.backends.hg import largefiles_store |
|
|||
873 | from rhodecode.lib.vcs.backends.git import lfs_store |
|
|||
874 |
|
||||
875 | paths = [ |
|
|||
876 | largefiles_store(repo_store_path), |
|
|||
877 | lfs_store(repo_store_path)] |
|
|||
878 |
|
||||
879 | for path in paths: |
|
|||
880 | if os.path.isdir(path): |
|
|||
881 | continue |
|
|||
882 | if os.path.isfile(path): |
|
|||
883 | continue |
|
|||
884 | # not a file nor dir, we try to create it |
|
|||
885 | try: |
|
|||
886 | os.makedirs(path) |
|
|||
887 | except Exception: |
|
|||
888 | log.warning('Failed to create largefiles dir:%s', path) |
|
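A note on the removal above: create_largeobjects_dirs_if_needed pre-created the Mercurial largefiles and Git LFS stores under <repo_store>/.cache at application start. A compact equivalent of what it did, using os.makedirs(..., exist_ok=True) instead of the isdir/isfile probing (sketch only; this replacement is not part of the diff):

    import logging
    import os

    log = logging.getLogger(__name__)

    def ensure_store_dirs(repo_store_path):
        # Mirrors largefiles_store()/lfs_store(): both live under .cache
        for store in ('largefiles', 'lfs_store'):
            path = os.path.join(repo_store_path, '.cache', store)
            try:
                os.makedirs(path, exist_ok=True)
            except Exception:
                log.warning('Failed to create store dir: %s', path)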
@@ -1,5 +1,4 b'' | |||||
1 |
|
1 | # Copyright (C) 2010-2024 RhodeCode GmbH | ||
2 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
|||
3 | # |
|
2 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
@@ -38,7 +37,7 b' from rhodecode.lib.hash_utils import sha' | |||||
38 | log = logging.getLogger(__name__) |
|
37 | log = logging.getLogger(__name__) | |
39 |
|
38 | |||
40 | __all__ = [ |
|
39 | __all__ = [ | |
41 | 'get_new_dir', 'TestController', |
|
40 | 'get_new_dir', 'TestController', 'console_printer', | |
42 | 'clear_cache_regions', |
|
41 | 'clear_cache_regions', | |
43 | 'assert_session_flash', 'login_user', 'no_newline_id_generator', |
|
42 | 'assert_session_flash', 'login_user', 'no_newline_id_generator', | |
44 | 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO', |
|
43 | 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO', | |
@@ -244,3 +243,11 b' def no_newline_id_generator(test_name):' | |||||
244 |
|
243 | |||
245 | return test_name or 'test-with-empty-name' |
|
244 | return test_name or 'test-with-empty-name' | |
246 |
|
245 | |||
|
246 | def console_printer(*msg): | |||
|
247 | print_func = print | |||
|
248 | try: | |||
|
249 | from rich import print as print_func | |||
|
250 | except ImportError: | |||
|
251 | pass | |||
|
252 | ||||
|
253 | print_func(*msg) |
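A note on the new console_printer helper: it is an optional-dependency shim that prefers rich's print (which understands console markup) and silently degrades to the builtin when rich is absent. Usage sketch:

    from rhodecode.tests import console_printer

    # With `rich` installed this renders as bold red; without it, the plain
    # builtin print() simply emits the literal markup string.
    console_printer("[bold red]connection check failed[/bold red]")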
@@ -1,5 +1,4 b'' | |||||
1 |
|
1 | # Copyright (C) 2010-2024 RhodeCode GmbH | ||
2 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
|||
3 | # |
|
2 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
@@ -90,7 +89,7 b' class RhodeCodeAuthPlugin(RhodeCodeExter' | |||||
90 | 'firstname': firstname, |
|
89 | 'firstname': firstname, | |
91 | 'lastname': lastname, |
|
90 | 'lastname': lastname, | |
92 | 'groups': [], |
|
91 | 'groups': [], | |
93 | 'email': '
|
92 | 'email': f'{username}@rhodecode.com',
94 | 'admin': admin, |
|
93 | 'admin': admin, | |
95 | 'active': active, |
|
94 | 'active': active, | |
96 | "active_from_extern": None, |
|
95 | "active_from_extern": None, |
@@ -20,14 +20,14 b'' | |||||
20 | import pytest |
|
20 | import pytest | |
21 | import requests |
|
21 | import requests | |
22 | from rhodecode.config import routing_links |
|
22 | from rhodecode.config import routing_links | |
23 |
|
23 | from rhodecode.tests import console_printer | ||
24 |
|
24 | |||
25 | def check_connection(): |
|
25 | def check_connection(): | |
26 | try: |
|
26 | try: | |
27 | response = requests.get('https://rhodecode.com') |
|
27 | response = requests.get('https://rhodecode.com') | |
28 | return response.status_code == 200 |
|
28 | return response.status_code == 200 | |
29 | except Exception as e: |
|
29 | except Exception as e: | |
30 | print(e) |
|
30 | console_printer(e) | |
31 |
|
31 | |||
32 | return False |
|
32 | return False | |
33 |
|
33 |
@@ -1,4 +1,4 b'' | |||||
1 | # Copyright (C) 2010-2023 RhodeCode GmbH
|
1 | # Copyright (C) 2010-2024 RhodeCode GmbH
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
@@ -16,23 +16,10 b'' | |||||
16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
18 |
|
18 | |||
19 | """ |
|
19 | import pytest # noqa | |
20 | py.test config for test suite for making push/pull operations. |
|
|||
21 |
|
||||
22 | .. important:: |
|
|||
23 |
|
||||
24 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started |
|
|||
25 | to redirect things to stderr instead of stdout. |
|
|||
26 | """ |
|
|||
27 |
|
||||
28 | import pytest |
|
|||
29 | import logging |
|
20 | import logging | |
30 |
|
21 | import collections | ||
31 | from rhodecode.authentication import AuthenticationPluginRegistry |
|
22 | import rhodecode | |
32 | from rhodecode.model.db import Permission, User |
|
|||
33 | from rhodecode.model.meta import Session |
|
|||
34 | from rhodecode.model.settings import SettingsModel |
|
|||
35 | from rhodecode.model.user import UserModel |
|
|||
36 |
|
23 | |||
37 |
|
24 | |||
38 | log = logging.getLogger(__name__) |
|
25 | log = logging.getLogger(__name__) | |
@@ -40,99 +27,3 b' log = logging.getLogger(__name__)' | |||||
40 | # Docker image running httpbin... |
|
27 | # Docker image running httpbin... | |
41 | HTTPBIN_DOMAIN = 'http://httpbin' |
|
28 | HTTPBIN_DOMAIN = 'http://httpbin' | |
42 | HTTPBIN_POST = HTTPBIN_DOMAIN + '/post' |
|
29 | HTTPBIN_POST = HTTPBIN_DOMAIN + '/post' | |
43 |
|
||||
44 |
|
||||
45 | @pytest.fixture() |
|
|||
46 | def enable_auth_plugins(request, baseapp, csrf_token): |
|
|||
47 | """ |
|
|||
48 | Return a factory object that when called, allows to control which |
|
|||
49 | authentication plugins are enabled. |
|
|||
50 | """ |
|
|||
51 |
|
||||
52 | class AuthPluginManager(object): |
|
|||
53 |
|
||||
54 | def cleanup(self): |
|
|||
55 | self._enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode']) |
|
|||
56 |
|
||||
57 | def enable(self, plugins_list, override=None): |
|
|||
58 | return self._enable_plugins(plugins_list, override) |
|
|||
59 |
|
||||
60 | def _enable_plugins(self, plugins_list, override=None): |
|
|||
61 | override = override or {} |
|
|||
62 | params = { |
|
|||
63 | 'auth_plugins': ','.join(plugins_list), |
|
|||
64 | } |
|
|||
65 |
|
||||
66 | # helper translate some names to others, to fix settings code |
|
|||
67 | name_map = { |
|
|||
68 | 'token': 'authtoken' |
|
|||
69 | } |
|
|||
70 | log.debug('enable_auth_plugins: enabling following auth-plugins: %s', plugins_list) |
|
|||
71 |
|
||||
72 | for module in plugins_list: |
|
|||
73 | plugin_name = module.partition('#')[-1] |
|
|||
74 | if plugin_name in name_map: |
|
|||
75 | plugin_name = name_map[plugin_name] |
|
|||
76 | enabled_plugin = f'auth_{plugin_name}_enabled' |
|
|||
77 | cache_ttl = f'auth_{plugin_name}_cache_ttl' |
|
|||
78 |
|
||||
79 | # default params that are needed for each plugin, |
|
|||
80 | # `enabled` and `cache_ttl` |
|
|||
81 | params.update({ |
|
|||
82 | enabled_plugin: True, |
|
|||
83 | cache_ttl: 0 |
|
|||
84 | }) |
|
|||
85 | if override.get: |
|
|||
86 | params.update(override.get(module, {})) |
|
|||
87 |
|
||||
88 | validated_params = params |
|
|||
89 |
|
||||
90 | for k, v in validated_params.items(): |
|
|||
91 | setting = SettingsModel().create_or_update_setting(k, v) |
|
|||
92 | Session().add(setting) |
|
|||
93 | Session().commit() |
|
|||
94 |
|
||||
95 | AuthenticationPluginRegistry.invalidate_auth_plugins_cache(hard=True) |
|
|||
96 |
|
||||
97 | enabled_plugins = SettingsModel().get_auth_plugins() |
|
|||
98 | assert plugins_list == enabled_plugins |
|
|||
99 |
|
||||
100 | enabler = AuthPluginManager() |
|
|||
101 | request.addfinalizer(enabler.cleanup) |
|
|||
102 |
|
||||
103 | return enabler |
|
|||
104 |
|
||||
105 |
|
||||
106 | @pytest.fixture() |
|
|||
107 | def test_user_factory(request, baseapp): |
|
|||
108 |
|
||||
109 | def user_factory(username='test_user', password='qweqwe', first_name='John', last_name='Testing', **kwargs): |
|
|||
110 | usr = UserModel().create_or_update( |
|
|||
111 | username=username, |
|
|||
112 | password=password, |
|
|||
113 | email=f'{username}@rhodecode.org', |
|
|||
114 | firstname=first_name, lastname=last_name) |
|
|||
115 | Session().commit() |
|
|||
116 |
|
||||
117 | for k, v in kwargs.items(): |
|
|||
118 | setattr(usr, k, v) |
|
|||
119 | Session().add(usr) |
|
|||
120 |
|
||||
121 | new_usr = User.get_by_username(username) |
|
|||
122 | new_usr_id = new_usr.user_id |
|
|||
123 | assert new_usr == usr |
|
|||
124 |
|
||||
125 | @request.addfinalizer |
|
|||
126 | def cleanup(): |
|
|||
127 | if User.get(new_usr_id) is None: |
|
|||
128 | return |
|
|||
129 |
|
||||
130 | perm = Permission.query().all() |
|
|||
131 | for p in perm: |
|
|||
132 | UserModel().revoke_perm(usr, p) |
|
|||
133 |
|
||||
134 | UserModel().delete(new_usr_id) |
|
|||
135 | Session().commit() |
|
|||
136 | return usr |
|
|||
137 |
|
||||
138 | return user_factory |
|
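A note on the two fixtures removed above (enable_auth_plugins and test_user_factory): both follow pytest's factory-with-finalizer pattern, in which the fixture returns a callable and registers cleanup via request.addfinalizer so teardown runs even when the test fails. A generic skeleton of the pattern (names are placeholders, not RhodeCode APIs):

    import pytest

    @pytest.fixture()
    def record_factory(request):
        created = []

        def factory(name):
            record = {'name': name}  # stand-in for e.g. UserModel().create_or_update()
            created.append(record)
            return record

        @request.addfinalizer
        def cleanup():
            # runs after the test, pass or fail; tear down in reverse order
            while created:
                record = created.pop()  # stand-in for the delete + commit

        return factory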
@@ -1,4 +1,4 b'' | |||||
1 | # Copyright (C) 2010-2023 RhodeCode GmbH
|
1 | # Copyright (C) 2010-2024 RhodeCode GmbH
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
@@ -98,16 +98,16 b' def pytest_addoption(parser):' | |||||
98 | 'pyramid_config', |
|
98 | 'pyramid_config', | |
99 | "Set up a Pyramid environment with the specified config file.") |
|
99 | "Set up a Pyramid environment with the specified config file.") | |
100 |
|
100 | |||
|
101 | parser.addini('rhodecode_config', 'rhodecode config ini for tests') | |||
|
102 | parser.addini('celery_config', 'celery config ini for tests') | |||
|
103 | parser.addini('vcsserver_config', 'vcsserver config ini for tests') | |||
|
104 | ||||
101 | vcsgroup = parser.getgroup('vcs') |
|
105 | vcsgroup = parser.getgroup('vcs') | |
|
106 | ||||
102 | vcsgroup.addoption( |
|
107 | vcsgroup.addoption( | |
103 | '--without-vcsserver', dest='with_vcsserver', action='store_false', |
|
108 | '--without-vcsserver', dest='with_vcsserver', action='store_false', | |
104 | help="Do not start the VCSServer in a background process.") |
|
109 | help="Do not start the VCSServer in a background process.") | |
105 | vcsgroup.addoption( |
|
110 | ||
106 | '--with-vcsserver-http', dest='vcsserver_config_http', |
|
|||
107 | help="Start the HTTP VCSServer with the specified config file.") |
|
|||
108 | vcsgroup.addoption( |
|
|||
109 | '--vcsserver-protocol', dest='vcsserver_protocol', |
|
|||
110 | help="Start the VCSServer with HTTP protocol support.") |
|
|||
111 | vcsgroup.addoption( |
|
111 | vcsgroup.addoption( | |
112 | '--vcsserver-config-override', action='store', type=_parse_json, |
|
112 | '--vcsserver-config-override', action='store', type=_parse_json, | |
113 | default=None, dest='vcsserver_config_override', help=( |
|
113 | default=None, dest='vcsserver_config_override', help=( | |
@@ -122,12 +122,6 b' def pytest_addoption(parser):' | |||||
122 | "Allows to set the port of the vcsserver. Useful when testing " |
|
122 | "Allows to set the port of the vcsserver. Useful when testing " | |
123 | "against an already running server and random ports cause " |
|
123 | "against an already running server and random ports cause " | |
124 | "trouble.")) |
|
124 | "trouble.")) | |
125 | parser.addini( |
|
|||
126 | 'vcsserver_config_http', |
|
|||
127 | "Start the HTTP VCSServer with the specified config file.") |
|
|||
128 | parser.addini( |
|
|||
129 | 'vcsserver_protocol', |
|
|||
130 | "Start the VCSServer with HTTP protocol support.") |
|
|||
131 |
|
125 | |||
132 |
|
126 | |||
133 | @pytest.hookimpl(tryfirst=True, hookwrapper=True) |
|
127 | @pytest.hookimpl(tryfirst=True, hookwrapper=True) |
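A note on the option changes above: the rhodecode_config, celery_config and vcsserver_config ini options registered via parser.addini() replace the removed --with-vcsserver-http/--vcsserver-protocol flags and their ini twins. Values declared this way are read back with config.getini(); a consumption sketch (fixture name hypothetical):

    import pytest

    @pytest.fixture(scope='session')
    def vcsserver_config_ini(request):
        # returns whatever the active test ini declares for this key
        return request.config.getini('vcsserver_config')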
@@ -1,4 +1,3 b'' | |||||
1 |
|
||||
2 | # Copyright (C) 2010-2023 RhodeCode GmbH
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH
3 | # |
|
2 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
@@ -17,7 +16,7 b'' | |||||
17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
19 |
|
18 | |||
20 | from subprocess import Popen, PIPE |
|
19 | import subprocess | |
21 | import os |
|
20 | import os | |
22 | import sys |
|
21 | import sys | |
23 | import tempfile |
|
22 | import tempfile | |
@@ -26,87 +25,71 b' import pytest' | |||||
26 | from sqlalchemy.engine import url |
|
25 | from sqlalchemy.engine import url | |
27 |
|
26 | |||
28 | from rhodecode.lib.str_utils import safe_str, safe_bytes |
|
27 | from rhodecode.lib.str_utils import safe_str, safe_bytes | |
29 | from rhodecode.tests.fixture import TestINI |
|
28 | from rhodecode.tests.fixtures.rc_fixture import TestINI | |
30 |
|
29 | |||
31 |
|
30 | |||
32 | def _get_dbs_from_metafunc(metafunc): |
|
31 | def _get_dbs_from_metafunc(metafunc): | |
33 | dbs_mark = metafunc.definition.get_closest_marker('dbs')
|
32 | dbs_mark = metafunc.definition.get_closest_marker("dbs")
34 |
|
33 | |||
35 | if dbs_mark: |
|
34 | if dbs_mark: | |
36 | # Supported backends by this test function, created from pytest.mark.dbs |
|
35 | # Supported backends by this test function, created from pytest.mark.dbs | |
37 | backends = dbs_mark.args |
|
36 | backends = dbs_mark.args | |
38 | else: |
|
37 | else: | |
39 | backends = metafunc.config.getoption('--dbs')
|
38 | backends = metafunc.config.getoption("--dbs")
40 | return backends |
|
39 | return backends | |
41 |
|
40 | |||
42 |
|
41 | |||
43 | def pytest_generate_tests(metafunc): |
|
42 | def pytest_generate_tests(metafunc): | |
44 | # Support test generation based on --dbs parameter |
|
43 | # Support test generation based on --dbs parameter | |
45 | if 'db_backend' in metafunc.fixturenames:
|
44 | if "db_backend" in metafunc.fixturenames:
46 | requested_backends = set(metafunc.config.getoption('--dbs'))
|
45 | requested_backends = set(metafunc.config.getoption("--dbs"))
47 | backends = _get_dbs_from_metafunc(metafunc) |
|
46 | backends = _get_dbs_from_metafunc(metafunc) | |
48 | backends = requested_backends.intersection(backends) |
|
47 | backends = requested_backends.intersection(backends) | |
49 | # TODO: johbo: Disabling a backend did not work out with |
|
48 | # TODO: johbo: Disabling a backend did not work out with | |
50 | # parametrization, find better way to achieve this. |
|
49 | # parametrization, find better way to achieve this. | |
51 | if not backends: |
|
50 | if not backends: | |
52 | metafunc.function._skip = True |
|
51 | metafunc.function._skip = True | |
53 | metafunc.parametrize('db_backend_name', backends)
|
52 | metafunc.parametrize("db_backend_name", backends)
54 |
|
53 | |||
55 |
|
54 | |||
56 | def pytest_collection_modifyitems(session, config, items): |
|
55 | def pytest_collection_modifyitems(session, config, items): | |
57 | remaining = [ |
|
56 | remaining = [i for i in items if not getattr(i.obj, "_skip", False)] | |
58 | i for i in items if not getattr(i.obj, '_skip', False)] |
|
|||
59 | items[:] = remaining |
|
57 | items[:] = remaining | |
60 |
|
58 | |||
61 |
|
59 | |||
62 | @pytest.fixture() |
|
60 | @pytest.fixture() | |
63 | def db_backend( |
|
61 | def db_backend(request, db_backend_name, ini_config, tmpdir_factory): | |
64 | request, db_backend_name, ini_config, tmpdir_factory): |
|
|||
65 | basetemp = tmpdir_factory.getbasetemp().strpath |
|
62 | basetemp = tmpdir_factory.getbasetemp().strpath | |
66 | klass = _get_backend(db_backend_name) |
|
63 | klass = _get_backend(db_backend_name) | |
67 |
|
64 | |||
68 | option_name = '--{}-connection-string'.format(db_backend_name)
|
65 | option_name = "--{}-connection-string".format(db_backend_name)
69 | connection_string = request.config.getoption(option_name) or None |
|
66 | connection_string = request.config.getoption(option_name) or None | |
70 |
|
67 | |||
71 | return klass( |
|
68 | return klass(config_file=ini_config, basetemp=basetemp, connection_string=connection_string) | |
72 | config_file=ini_config, basetemp=basetemp, |
|
|||
73 | connection_string=connection_string) |
|
|||
74 |
|
69 | |||
75 |
|
70 | |||
76 | def _get_backend(backend_type): |
|
71 | def _get_backend(backend_type): | |
77 | return { |
|
72 | return {"sqlite": SQLiteDBBackend, "postgres": PostgresDBBackend, "mysql": MySQLDBBackend, "": EmptyDBBackend}[ | |
78 | 'sqlite': SQLiteDBBackend, |
|
73 | backend_type | |
79 | 'postgres': PostgresDBBackend, |
|
74 | ] | |
80 | 'mysql': MySQLDBBackend, |
|
|||
81 | '': EmptyDBBackend |
|
|||
82 | }[backend_type] |
|
|||
83 |
|
75 | |||
84 |
|
76 | |||
85 | class DBBackend(object): |
|
77 | class DBBackend(object): | |
86 | _store = os.path.dirname(os.path.abspath(__file__)) |
|
78 | _store = os.path.dirname(os.path.abspath(__file__)) | |
87 | _type = None |
|
79 | _type = None | |
88 | _base_ini_config = [{'app:main': {'vcs.start_server': 'false',
|
80 | _base_ini_config = [{"app:main": {"vcs.start_server": "false", "startup.import_repos": "false"}}]
89 | 'startup.import_repos': 'false'}}] |
|
81 | _db_url = [{"app:main": {"sqlalchemy.db1.url": ""}}] | |
90 | _db_url = [{'app:main': {'sqlalchemy.db1.url': ''}}] |
|
82 | _base_db_name = "rhodecode_test_db_backend" | |
91 | _base_db_name = 'rhodecode_test_db_backend' |
|
83 | std_env = {"RC_TEST": "0"} | |
92 | std_env = {'RC_TEST': '0'} |
|
|||
93 |
|
84 | |||
94 | def __init__( |
|
85 | def __init__(self, config_file, db_name=None, basetemp=None, connection_string=None): | |
95 | self, config_file, db_name=None, basetemp=None, |
|
|||
96 | connection_string=None): |
|
|||
97 |
|
||||
98 | from rhodecode.lib.vcs.backends.hg import largefiles_store |
|
|||
99 | from rhodecode.lib.vcs.backends.git import lfs_store |
|
|||
100 |
|
||||
101 | self.fixture_store = os.path.join(self._store, self._type) |
|
86 | self.fixture_store = os.path.join(self._store, self._type) | |
102 | self.db_name = db_name or self._base_db_name |
|
87 | self.db_name = db_name or self._base_db_name | |
103 | self._base_ini_file = config_file |
|
88 | self._base_ini_file = config_file | |
104 | self.stderr = ''
|
89 | self.stderr = ""
105 | self.stdout = ''
|
90 | self.stdout = ""
106 | self._basetemp = basetemp or tempfile.gettempdir() |
|
91 | self._basetemp = basetemp or tempfile.gettempdir() | |
107 | self._repos_location = os.path.join(self._basetemp, 'rc_test_repos')
|
92 | self._repos_location = os.path.join(self._basetemp, "rc_test_repos")
108 | self._repos_hg_largefiles_store = largefiles_store(self._basetemp) |
|
|||
109 | self._repos_git_lfs_store = lfs_store(self._basetemp) |
|
|||
110 | self.connection_string = connection_string |
|
93 | self.connection_string = connection_string | |
111 |
|
94 | |||
112 | @property |
|
95 | @property | |
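A note on the collection logic above: pytest_generate_tests intersects a test's @pytest.mark.dbs(...) marker with the --dbs command-line option, so a test is only materialised for backends present in both, and pytest_collection_modifyitems drops it entirely when the intersection is empty. How a test opts in (sketch):

    import pytest

    @pytest.mark.dbs('postgres', 'mysql')
    def test_migration_on_real_db(db_backend):
        # collected once per backend listed both in the marker and in --dbs
        ...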
@@ -118,8 +101,7 b' class DBBackend(object):' | |||||
118 | if not new_connection_string: |
|
101 | if not new_connection_string: | |
119 | new_connection_string = self.get_default_connection_string() |
|
102 | new_connection_string = self.get_default_connection_string() | |
120 | else: |
|
103 | else: | |
121 | new_connection_string = new_connection_string.format( |
|
104 | new_connection_string = new_connection_string.format(db_name=self.db_name) | |
122 | db_name=self.db_name) |
|
|||
123 | url_parts = url.make_url(new_connection_string) |
|
105 | url_parts = url.make_url(new_connection_string) | |
124 | self._connection_string = new_connection_string |
|
106 | self._connection_string = new_connection_string | |
125 | self.user = url_parts.username |
|
107 | self.user = url_parts.username | |
@@ -127,73 +109,67 b' class DBBackend(object):' | |||||
127 | self.host = url_parts.host |
|
109 | self.host = url_parts.host | |
128 |
|
110 | |||
129 | def get_default_connection_string(self): |
|
111 | def get_default_connection_string(self): | |
130 | raise NotImplementedError('default connection_string is required.')
|
112 | raise NotImplementedError("default connection_string is required.")
131 |
|
113 | |||
132 | def execute(self, cmd, env=None, *args): |
|
114 | def execute(self, cmd, env=None, *args): | |
133 | """ |
|
115 | """ | |
134 | Runs command on the system with given ``args``. |
|
116 | Runs command on the system with given ``args``. | |
135 | """ |
|
117 | """ | |
136 |
|
118 | |||
137 | command = cmd + ' ' + ' '.join(args)
|
119 | command = cmd + " " + " ".join(args)
138 | sys.stdout.write(f'CMD: {command}')
|
120 | sys.stdout.write(f"CMD: {command}")
139 |
|
121 | |||
140 | # Tell Python to use UTF-8 encoding out stdout |
|
122 | # Tell Python to use UTF-8 encoding out stdout | |
141 | _env = os.environ.copy() |
|
123 | _env = os.environ.copy() | |
142 | _env['PYTHONIOENCODING'] = 'UTF-8'
|
124 | _env["PYTHONIOENCODING"] = "UTF-8"
143 | _env.update(self.std_env) |
|
125 | _env.update(self.std_env) | |
144 | if env: |
|
126 | if env: | |
145 | _env.update(env) |
|
127 | _env.update(env) | |
146 |
|
128 | |||
147 | self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env) |
|
129 | self.p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=_env) | |
148 | self.stdout, self.stderr = self.p.communicate() |
|
130 | self.stdout, self.stderr = self.p.communicate() | |
149 | stdout_str = safe_str(self.stdout) |
|
131 | stdout_str = safe_str(self.stdout) | |
150 | sys.stdout.write(f'COMMAND:{command}\n')
|
132 | sys.stdout.write(f"COMMAND:{command}\n")
151 | sys.stdout.write(stdout_str) |
|
133 | sys.stdout.write(stdout_str) | |
152 | return self.stdout, self.stderr |
|
134 | return self.stdout, self.stderr | |
153 |
|
135 | |||
154 | def assert_returncode_success(self): |
|
136 | def assert_returncode_success(self): | |
155 | from rich import print as pprint |
|
137 | from rich import print as pprint | |
|
138 | ||||
156 | if not self.p.returncode == 0: |
|
139 | if not self.p.returncode == 0: | |
157 | pprint(safe_str(self.stderr)) |
|
140 | pprint(safe_str(self.stderr)) | |
158 | raise AssertionError(f'non 0 retcode:{self.p.returncode}')
|
141 | raise AssertionError(f"non 0 retcode:{self.p.returncode}")
159 |
|
142 | |||
160 | def assert_correct_output(self, stdout, version): |
|
143 | def assert_correct_output(self, stdout, version): | |
161 | assert b'UPGRADE FOR STEP %b COMPLETED' % safe_bytes(version) in stdout
|
144 | assert b"UPGRADE FOR STEP %b COMPLETED" % safe_bytes(version) in stdout
162 |
|
145 | |||
163 | def setup_rhodecode_db(self, ini_params=None, env=None): |
|
146 | def setup_rhodecode_db(self, ini_params=None, env=None): | |
164 | if not ini_params: |
|
147 | if not ini_params: | |
165 | ini_params = self._base_ini_config |
|
148 | ini_params = self._base_ini_config | |
166 |
|
149 | |||
167 | ini_params.extend(self._db_url) |
|
150 | ini_params.extend(self._db_url) | |
168 | with TestINI(self._base_ini_file, ini_params, |
|
151 | with TestINI(self._base_ini_file, ini_params, self._type, destroy=True) as _ini_file: | |
169 | self._type, destroy=True) as _ini_file: |
|
|||
170 |
|
||||
171 | if not os.path.isdir(self._repos_location): |
|
152 | if not os.path.isdir(self._repos_location): | |
172 | os.makedirs(self._repos_location) |
|
153 | os.makedirs(self._repos_location) | |
173 | if not os.path.isdir(self._repos_hg_largefiles_store): |
|
|||
174 | os.makedirs(self._repos_hg_largefiles_store) |
|
|||
175 | if not os.path.isdir(self._repos_git_lfs_store): |
|
|||
176 | os.makedirs(self._repos_git_lfs_store) |
|
|||
177 |
|
154 | |||
178 | return self.execute( |
|
155 | return self.execute( | |
179 | "rc-setup-app {0} --user=marcink " |
|
156 | "rc-setup-app {0} --user=marcink " | |
180 | "--email=marcin@rhodeocode.com --password={1} " |
|
157 | "--email=marcin@rhodeocode.com --password={1} " | |
181 | "--repos={2} --force-yes".format( |
|
158 | "--repos={2} --force-yes".format(_ini_file, "qweqwe", self._repos_location), | |
182 | _ini_file, 'qweqwe', self._repos_location), env=env) |
|
159 | env=env, | |
|
160 | ) | |||
183 |
|
161 | |||
184 | def upgrade_database(self, ini_params=None): |
|
162 | def upgrade_database(self, ini_params=None): | |
185 | if not ini_params: |
|
163 | if not ini_params: | |
186 | ini_params = self._base_ini_config |
|
164 | ini_params = self._base_ini_config | |
187 | ini_params.extend(self._db_url) |
|
165 | ini_params.extend(self._db_url) | |
188 |
|
166 | |||
189 | test_ini = TestINI( |
|
167 | test_ini = TestINI(self._base_ini_file, ini_params, self._type, destroy=True) | |
190 | self._base_ini_file, ini_params, self._type, destroy=True) |
|
|||
191 | with test_ini as ini_file: |
|
168 | with test_ini as ini_file: | |
192 | if not os.path.isdir(self._repos_location): |
|
169 | if not os.path.isdir(self._repos_location): | |
193 | os.makedirs(self._repos_location) |
|
170 | os.makedirs(self._repos_location) | |
194 |
|
171 | |||
195 | return self.execute( |
|
172 | return self.execute("rc-upgrade-db {0} --force-yes".format(ini_file)) | |
196 | "rc-upgrade-db {0} --force-yes".format(ini_file)) |
|
|||
197 |
|
173 | |||
198 | def setup_db(self): |
|
174 | def setup_db(self): | |
199 | raise NotImplementedError |
|
175 | raise NotImplementedError | |
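A note on the execute() refactor above: the bare `from subprocess import Popen, PIPE` import gives way to fully qualified subprocess.Popen/subprocess.PIPE calls, with output captured for logging. The essential shape of the helper, isolated as a sketch:

    import os
    import subprocess

    def run_shell(command, extra_env=None):
        env = os.environ.copy()
        env['PYTHONIOENCODING'] = 'UTF-8'  # force UTF-8 on the child's stdout
        env.update(extra_env or {})
        proc = subprocess.Popen(
            command, shell=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
        stdout, stderr = proc.communicate()  # both come back as bytes
        return proc.returncode, stdout, stderr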
@@ -206,7 +182,7 b' class DBBackend(object):' | |||||
206 |
|
182 | |||
207 |
|
183 | |||
208 | class EmptyDBBackend(DBBackend): |
|
184 | class EmptyDBBackend(DBBackend): | |
209 | _type = ''
|
185 | _type = ""
210 |
|
186 | |||
211 | def setup_db(self): |
|
187 | def setup_db(self): | |
212 | pass |
|
188 | pass | |
@@ -222,21 +198,20 b' class EmptyDBBackend(DBBackend):' | |||||
222 |
|
198 | |||
223 |
|
199 | |||
224 | class SQLiteDBBackend(DBBackend): |
|
200 | class SQLiteDBBackend(DBBackend): | |
225 | _type = 'sqlite'
|
201 | _type = "sqlite"
226 |
|
202 | |||
227 | def get_default_connection_string(self): |
|
203 | def get_default_connection_string(self): | |
228 | return 'sqlite:///{}/{}.sqlite'.format(self._basetemp, self.db_name)
|
204 | return "sqlite:///{}/{}.sqlite".format(self._basetemp, self.db_name)
229 |
|
205 | |||
230 | def setup_db(self): |
|
206 | def setup_db(self): | |
231 | # dump schema for tests |
|
207 | # dump schema for tests | |
232 | # cp -v $TEST_DB_NAME |
|
208 | # cp -v $TEST_DB_NAME | |
233 | self._db_url = [{'app:main': { |
|
209 | self._db_url = [{"app:main": {"sqlalchemy.db1.url": self.connection_string}}] | |
234 | 'sqlalchemy.db1.url': self.connection_string}}] |
|
|||
235 |
|
210 | |||
236 | def import_dump(self, dumpname): |
|
211 | def import_dump(self, dumpname): | |
237 | dump = os.path.join(self.fixture_store, dumpname) |
|
212 | dump = os.path.join(self.fixture_store, dumpname) | |
238 | target = os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self))
|
213 | target = os.path.join(self._basetemp, "{0.db_name}.sqlite".format(self))
239 | return self.execute(f'cp -v {dump} {target}')
|
214 | return self.execute(f"cp -v {dump} {target}")
240 |
|
215 | |||
241 | def teardown_db(self): |
|
216 | def teardown_db(self): | |
242 | target_db = os.path.join(self._basetemp, self.db_name) |
|
217 | target_db = os.path.join(self._basetemp, self.db_name) | |
@@ -244,39 +219,39 b' class SQLiteDBBackend(DBBackend):' | |||||
244 |
|
219 | |||
245 |
|
220 | |||
246 | class MySQLDBBackend(DBBackend): |
|
221 | class MySQLDBBackend(DBBackend): | |
247 | _type = 'mysql'
|
222 | _type = "mysql"
248 |
|
223 | |||
249 | def get_default_connection_string(self): |
|
224 | def get_default_connection_string(self): | |
250 | return 'mysql://root:qweqwe@127.0.0.1/{}'.format(self.db_name)
|
225 | return "mysql://root:qweqwe@127.0.0.1/{}".format(self.db_name)
251 |
|
226 | |||
252 | def setup_db(self): |
|
227 | def setup_db(self): | |
253 | # dump schema for tests |
|
228 | # dump schema for tests | |
254 | # mysqldump -uroot -pqweqwe $TEST_DB_NAME |
|
229 | # mysqldump -uroot -pqweqwe $TEST_DB_NAME | |
255 | self._db_url = [{'app:main': { |
|
230 | self._db_url = [{"app:main": {"sqlalchemy.db1.url": self.connection_string}}] | |
256 | 'sqlalchemy.db1.url': self.connection_string}}] |
|
231 | return self.execute( | |
257 | return self.execute("mysql -v -u{} -p{} -e 'create database '{}';'".format(
|
232 | "mysql -v -u{} -p{} -e 'create database '{}';'".format(self.user, self.password, self.db_name)
258 | self.user, self.password, self.db_name)) |
|
233 | ) | |
259 |
|
234 | |||
260 | def import_dump(self, dumpname): |
|
235 | def import_dump(self, dumpname): | |
261 | dump = os.path.join(self.fixture_store, dumpname) |
|
236 | dump = os.path.join(self.fixture_store, dumpname) | |
262 | return self.execute("mysql -u{} -p{} {} < {}".format( |
|
237 | return self.execute("mysql -u{} -p{} {} < {}".format(self.user, self.password, self.db_name, dump)) | |
263 | self.user, self.password, self.db_name, dump)) |
|
|||
264 |
|
238 | |||
265 | def teardown_db(self): |
|
239 | def teardown_db(self): | |
266 | return self.execute("mysql -v -u{} -p{} -e 'drop database '{}';'".format( |
|
240 | return self.execute( | |
267 | self.user, self.password, self.db_name))
|
241 | "mysql -v -u{} -p{} -e 'drop database '{}';'".format(self.user, self.password, self.db_name)
|
242 | ) | |||
268 |
|
243 | |||
269 |
|
244 | |||
270 | class PostgresDBBackend(DBBackend): |
|
245 | class PostgresDBBackend(DBBackend): | |
271 | _type = 'postgres'
|
246 | _type = "postgres"
272 |
|
247 | |||
273 | def get_default_connection_string(self): |
|
248 | def get_default_connection_string(self): | |
274 | return 'postgresql://postgres:qweqwe@localhost/{}'.format(self.db_name)
|
249 | return "postgresql://postgres:qweqwe@localhost/{}".format(self.db_name)
275 |
|
250 | |||
276 | def setup_db(self): |
|
251 | def setup_db(self): | |
277 | # dump schema for tests |
|
252 | # dump schema for tests | |
278 | # pg_dump -U postgres -h localhost $TEST_DB_NAME |
|
253 | # pg_dump -U postgres -h localhost $TEST_DB_NAME | |
279 | self._db_url = [{'app:main': {'sqlalchemy.db1.url': self.connection_string}}]
|
254 | self._db_url = [{"app:main": {"sqlalchemy.db1.url": self.connection_string}}]
280 | cmd = f"PGPASSWORD={self.password} psql -U {self.user} -h localhost -c 'create database '{self.db_name}';'" |
|
255 | cmd = f"PGPASSWORD={self.password} psql -U {self.user} -h localhost -c 'create database '{self.db_name}';'" | |
281 | return self.execute(cmd) |
|
256 | return self.execute(cmd) | |
282 |
|
257 |
@@ -1,4 +1,3 b'' | |||||
1 |
|
||||
2 | # Copyright (C) 2010-2023 RhodeCode GmbH
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH
3 | # |
|
2 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify |
@@ -1,4 +1,3 b'' | |||||
1 |
|
||||
2 | # Copyright (C) 2010-2023 RhodeCode GmbH
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH
3 | # |
|
2 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
@@ -21,33 +20,42 b' import pytest' | |||||
21 |
|
20 | |||
22 |
|
21 | |||
23 | @pytest.mark.dbs("postgres") |
|
22 | @pytest.mark.dbs("postgres") | |
24 | @pytest.mark.parametrize('dumpname', [
|
23 | @pytest.mark.parametrize(
25 | '1.4.4.sql', |
|
24 | "dumpname", | |
26 | '1.5.0.sql', |
|
25 | [ | |
27 | '1.6.0.sql',
|
26 | "1.4.4.sql",
28 | '1.6.0_no_repo_name_index.sql', |
|
27 | "1.5.0.sql", | |
29 | ]) |
|
28 | "1.6.0.sql", | |
|
29 | "1.6.0_no_repo_name_index.sql", | |||
|
30 | ], | |||
|
31 | ) | |||
30 | def test_migrate_postgres_db(db_backend, dumpname): |
|
32 | def test_migrate_postgres_db(db_backend, dumpname): | |
31 | _run_migration_test(db_backend, dumpname) |
|
33 | _run_migration_test(db_backend, dumpname) | |
32 |
|
34 | |||
33 |
|
35 | |||
34 | @pytest.mark.dbs("sqlite") |
|
36 | @pytest.mark.dbs("sqlite") | |
35 | @pytest.mark.parametrize('dumpname', [
|
37 | @pytest.mark.parametrize(
36 | 'rhodecode.1.4.4.sqlite', |
|
38 | "dumpname", | |
37 | 'rhodecode.1.4.4_with_groups.sqlite', |
|
39 | [ | |
38 | 'rhodecode.1.4.4_with_ldap_active.sqlite',
|
40 | "rhodecode.1.4.4.sqlite",
39 | ]) |
|
41 | "rhodecode.1.4.4_with_groups.sqlite", | |
|
42 | "rhodecode.1.4.4_with_ldap_active.sqlite", | |||
|
43 | ], | |||
|
44 | ) | |||
40 | def test_migrate_sqlite_db(db_backend, dumpname): |
|
45 | def test_migrate_sqlite_db(db_backend, dumpname): | |
41 | _run_migration_test(db_backend, dumpname) |
|
46 | _run_migration_test(db_backend, dumpname) | |
42 |
|
47 | |||
43 |
|
48 | |||
44 | @pytest.mark.dbs("mysql") |
|
49 | @pytest.mark.dbs("mysql") | |
45 | @pytest.mark.parametrize('dumpname', [
|
50 | @pytest.mark.parametrize(
46 | '1.4.4.sql', |
|
51 | "dumpname", | |
47 | '1.5.0.sql', |
|
52 | [ | |
48 | '1.6.0.sql',
|
53 | "1.4.4.sql",
49 | '1.6.0_no_repo_name_index.sql', |
|
54 | "1.5.0.sql", | |
50 | ]) |
|
55 | "1.6.0.sql", | |
|
56 | "1.6.0_no_repo_name_index.sql", | |||
|
57 | ], | |||
|
58 | ) | |||
51 | def test_migrate_mysql_db(db_backend, dumpname): |
|
59 | def test_migrate_mysql_db(db_backend, dumpname): | |
52 | _run_migration_test(db_backend, dumpname) |
|
60 | _run_migration_test(db_backend, dumpname) | |
53 |
|
61 | |||
@@ -60,5 +68,5 b' def _run_migration_test(db_backend, dump' | |||||
60 | db_backend.import_dump(dumpname) |
|
68 | db_backend.import_dump(dumpname) | |
61 | stdout, stderr = db_backend.upgrade_database() |
|
69 | stdout, stderr = db_backend.upgrade_database() | |
62 |
|
70 | |||
63 | db_backend.assert_correct_output(stdout+stderr, version='16')
|
71 | db_backend.assert_correct_output(stdout + stderr, version="16")
64 | db_backend.assert_returncode_success() |
|
72 | db_backend.assert_returncode_success() |
NO CONTENT: file renamed from rhodecode/tests/fixture_mods/__init__.py to rhodecode/tests/fixtures/__init__.py
NO CONTENT: file renamed from rhodecode/tests/fixtures/diff_with_diff_data.diff to rhodecode/tests/fixtures/diff_fixtures/diff_with_diff_data.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_binary_and_normal.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_and_normal.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_binary_special_files.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_special_files.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_binary_special_files_2.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_binary_special_files_2.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_chmod.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_chmod.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_js_chars.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_js_chars.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_mod_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_mod_single_binary_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_rename_file.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_rename_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_diff_rename_file_with_spaces.diff to rhodecode/tests/fixtures/diff_fixtures/git_diff_rename_file_with_spaces.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/git_node_history_response.json to rhodecode/tests/fixtures/diff_fixtures/git_node_history_response.json
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_add_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_add_single_binary_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_binary_and_normal.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_binary_and_normal.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_chmod.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_chmod.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_chmod_and_mod_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_chmod_and_mod_single_binary_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_and_chmod_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_and_chmod_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_and_modify_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_and_modify_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_chmod_and_edit_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_chmod_and_edit_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_copy_file_with_spaces.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_copy_file_with_spaces.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_del_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_del_single_binary_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_double_file_change_double_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_double_file_change_double_newline.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_double_file_change_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_double_file_change_newline.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_four_file_change_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_four_file_change_newline.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_mixed_filename_encodings.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mixed_filename_encodings.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_mod_file_and_rename.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_file_and_rename.diff
NO CONTENT: file copied from rhodecode/tests/fixtures/git_diff_mod_single_binary_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_single_binary_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_mod_single_file_and_rename_and_chmod.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_mod_single_file_and_rename_and_chmod.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_no_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_no_newline.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_rename_and_chmod_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_and_chmod_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_rename_file.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_rename_file_with_spaces.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_rename_file_with_spaces.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_diff_single_file_change_newline.diff to rhodecode/tests/fixtures/diff_fixtures/hg_diff_single_file_change_newline.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/hg_node_history_response.json to rhodecode/tests/fixtures/diff_fixtures/hg_node_history_response.json
NO CONTENT: file renamed from rhodecode/tests/fixtures/journal_dump.csv to rhodecode/tests/fixtures/diff_fixtures/journal_dump.csv
NO CONTENT: file renamed from rhodecode/tests/fixtures/large_diff.diff to rhodecode/tests/fixtures/diff_fixtures/large_diff.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/svn_diff_binary_add_file.diff to rhodecode/tests/fixtures/diff_fixtures/svn_diff_binary_add_file.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/svn_diff_multiple_changes.diff to rhodecode/tests/fixtures/diff_fixtures/svn_diff_multiple_changes.diff
NO CONTENT: file renamed from rhodecode/tests/fixtures/svn_node_history_branches.json to rhodecode/tests/fixtures/diff_fixtures/svn_node_history_branches.json
NO CONTENT: file renamed from rhodecode/tests/fixtures/svn_node_history_response.json to rhodecode/tests/fixtures/diff_fixtures/svn_node_history_response.json
@@ -1,5 +1,4 b'' | |||||
1 |
|
1 | # Copyright (C) 2010-2024 RhodeCode GmbH | ||
2 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
|||
3 | # |
|
2 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
@@ -20,61 +19,128 b'' | |||||
20 | import pytest |
|
19 | import pytest | |
21 |
|
20 | |||
22 | from rhodecode.lib.config_utils import get_app_config |
|
21 | from rhodecode.lib.config_utils import get_app_config | |
23 | from rhodecode.tests.fixture import TestINI |
|
22 | from rhodecode.tests.fixtures.rc_fixture import TestINI | |
24 | from rhodecode.tests import TESTS_TMP_PATH |
|
23 | from rhodecode.tests import TESTS_TMP_PATH | |
25 | from rhodecode.tests.server_utils import RcVCSServer |
|
24 | from rhodecode.tests.server_utils import RcVCSServer | |
|
25 | from rhodecode.tests.server_utils import RcWebServer | |||
|
26 | from rhodecode.tests.server_utils import CeleryServer | |||
26 |
|
27 | |||
27 |
|
28 | |||
28 | @pytest.fixture(scope='session')
|
29 | @pytest.fixture(scope="session")
29 | def vcsserver(request, vcsserver_port, vcsserver_factory): |
|
30 | def vcsserver_factory(): | |
30 | """ |
|
|||
31 | Session scope VCSServer. |
|
|||
32 |
|
||||
33 | Tests which need the VCSServer have to rely on this fixture in order |
|
|||
34 | to ensure it will be running. |
|
|||
35 |
|
||||
36 | For specific needs, the fixture vcsserver_factory can be used. It allows to |
|
|||
37 | adjust the configuration file for the test run. |
|
|||
38 |
|
||||
39 | Command line args: |
|
|||
40 |
|
||||
41 | --without-vcsserver: Allows to switch this fixture off. You have to |
|
|||
42 | manually start the server. |
|
|||
43 |
|
||||
44 | --vcsserver-port: Will expect the VCSServer to listen on this port. |
|
|||
45 | """ |
|
|||
46 |
|
||||
47 | if not request.config.getoption('with_vcsserver'): |
|
|||
48 | return None |
|
|||
49 |
|
||||
50 | return vcsserver_factory( |
|
|||
51 | request, vcsserver_port=vcsserver_port) |
|
|||
52 |
|
||||
53 |
|
||||
54 | @pytest.fixture(scope='session') |
|
|||
55 | def vcsserver_factory(tmpdir_factory): |
|
|||
56 | """ |
|
31 | """ | |
57 | Use this if you need a running vcsserver with a special configuration. |
|
32 | Use this if you need a running vcsserver with a special configuration. | |
58 | """ |
|
33 | """ | |
59 |
|
34 | |||
60 | def factory(request, overrides=(), vcsserver_port=None, |
|
35 | def factory(request, store_dir, overrides=(), config_file=None, port=None, log_file=None, workers="3", env=None, info_prefix=""): | |
61 | log_file=None, workers='3'): |
|
36 | env = env or {"RC_NO_TEST_ENV": "1"} | |
62 |
|
37 | vcsserver_port = port | ||
63 |
if |
|
38 | if port is None: | |
64 | vcsserver_port = get_available_port() |
|
39 | vcsserver_port = get_available_port() | |
65 |
|
40 | |||
66 | overrides = list(overrides) |
|
41 | overrides = list(overrides) | |
67 |
overrides.append({ |
|
42 | overrides.append({"server:main": {"port": vcsserver_port}}) | |
|
43 | ||||
|
44 | if getattr(request, 'param', None): | |||
|
45 | config_overrides = [request.param] | |||
|
46 | overrides.extend(config_overrides) | |||
|
47 | ||||
|
48 | option_name = "vcsserver_config" | |||
|
49 | override_option_name = None | |||
|
50 | if not config_file: | |||
|
51 | config_file = get_config( | |||
|
52 | request.config, | |||
|
53 | option_name=option_name, | |||
|
54 | override_option_name=override_option_name, | |||
|
55 | overrides=overrides, | |||
|
56 | basetemp=store_dir, | |||
|
57 | prefix=f"{info_prefix}test_vcsserver_ini_", | |||
|
58 | ) | |||
|
59 | server = RcVCSServer(config_file, log_file, workers, env=env, info_prefix=info_prefix) | |||
|
60 | server.start() | |||
|
61 | ||||
|
62 | @request.addfinalizer | |||
|
63 | def cleanup(): | |||
|
64 | server.shutdown() | |||
|
65 | ||||
|
66 | server.wait_until_ready() | |||
|
67 | return server | |||
|
68 | ||||
|
69 | return factory | |||
|
70 | ||||
|
71 | ||||
|
72 | @pytest.fixture(scope="session") | |||
|
73 | def rhodecode_factory(): | |||
|
74 | def factory(request, store_dir, overrides=(), config_file=None, port=None, log_file=None, workers="3", env=None, info_prefix=""): | |||
|
75 | env = env or {"RC_NO_TEST_ENV": "1"} | |||
|
76 | rhodecode_port = port | |||
|
77 | if port is None: | |||
|
78 | rhodecode_port = get_available_port() | |||
|
79 | ||||
|
80 | overrides = list(overrides) | |||
|
81 | overrides.append({"server:main": {"port": rhodecode_port}}) | |||
|
82 | overrides.append({"app:main": {"use_celery": "true"}}) | |||
|
83 | overrides.append({"app:main": {"celery.task_always_eager": "false"}}) | |||
|
84 | ||||
|
85 | if getattr(request, 'param', None): | |||
|
86 | config_overrides = [request.param] | |||
|
87 | overrides.extend(config_overrides) | |||
|
88 | ||||
68 |
|
89 | |||
69 |
option_name = |
|
90 | option_name = "rhodecode_config" | |
70 |
override_option_name = |
|
91 | override_option_name = None | |
71 |
config_file |
|
92 | if not config_file: | |
72 | request.config, option_name=option_name, |
|
93 | config_file = get_config( | |
73 | override_option_name=override_option_name, overrides=overrides, |
|
94 | request.config, | |
74 | basetemp=tmpdir_factory.getbasetemp().strpath, |
|
95 | option_name=option_name, | |
75 | prefix='test_vcs_') |
|
96 | override_option_name=override_option_name, | |
|
97 | overrides=overrides, | |||
|
98 | basetemp=store_dir, | |||
|
99 | prefix=f"{info_prefix}test_rhodecode_ini", | |||
|
100 | ) | |||
|
101 | ||||
|
102 | server = RcWebServer(config_file, log_file, workers, env, info_prefix=info_prefix) | |||
|
103 | server.start() | |||
|
104 | ||||
|
105 | @request.addfinalizer | |||
|
106 | def cleanup(): | |||
|
107 | server.shutdown() | |||
|
108 | ||||
|
109 | server.wait_until_ready() | |||
|
110 | return server | |||
|
111 | ||||
|
112 | return factory | |||
|
113 | ||||
76 |
|
114 | |||
77 | server = RcVCSServer(config_file, log_file, workers) |
|
115 | @pytest.fixture(scope="session") | |
|
116 | def celery_factory(): | |||
|
117 | def factory(request, store_dir, overrides=(), config_file=None, port=None, log_file=None, workers="3", env=None, info_prefix=""): | |||
|
118 | env = env or {"RC_NO_TEST_ENV": "1"} | |||
|
119 | rhodecode_port = port | |||
|
120 | ||||
|
121 | overrides = list(overrides) | |||
|
122 | overrides.append({"app:main": {"use_celery": "true"}}) | |||
|
123 | overrides.append({"app:main": {"celery.task_always_eager": "false"}}) | |||
|
124 | config_overrides = None | |||
|
125 | ||||
|
126 | if getattr(request, 'param', None): | |||
|
127 | config_overrides = [request.param] | |||
|
128 | overrides.extend(config_overrides) | |||
|
129 | ||||
|
130 | option_name = "celery_config" | |||
|
131 | override_option_name = None | |||
|
132 | ||||
|
133 | if not config_file: | |||
|
134 | config_file = get_config( | |||
|
135 | request.config, | |||
|
136 | option_name=option_name, | |||
|
137 | override_option_name=override_option_name, | |||
|
138 | overrides=overrides, | |||
|
139 | basetemp=store_dir, | |||
|
140 | prefix=f"{info_prefix}test_celery_ini_", | |||
|
141 | ) | |||
|
142 | ||||
|
143 | server = CeleryServer(config_file, log_file, workers, env, info_prefix=info_prefix) | |||
78 | server.start() |
|
144 | server.start() | |
79 |
|
145 | |||
80 | @request.addfinalizer |
|
146 | @request.addfinalizer | |
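Aside (not part of the diff): the three new fixtures above share one shape, a session-scoped "factory fixture" that returns a callable; each call boots a server process, registers its shutdown on the calling request's finalizer, and blocks until the server is ready. A minimal standalone sketch of that pattern, with a hypothetical DummyServer standing in for RcVCSServer/RcWebServer/CeleryServer:

    import pytest


    class DummyServer:
        """Illustrative stand-in for the real server wrappers."""

        def __init__(self, port):
            self.port = port
            self.running = False

        def start(self):
            self.running = True

        def shutdown(self):
            self.running = False

        def wait_until_ready(self):
            assert self.running


    @pytest.fixture(scope="session")
    def dummy_server_factory():
        def factory(request, port):
            server = DummyServer(port)
            server.start()

            @request.addfinalizer
            def cleanup():
                # tied to the *calling* request, so each test or session
                # that builds a server also tears it down
                server.shutdown()

            server.wait_until_ready()
            return server

        return factory


    def test_uses_factory(request, dummy_server_factory):
        server = dummy_server_factory(request, port=40001)
        assert server.running

Binding cleanup to the request passed into the factory (rather than to the factory fixture itself) is what lets one session-scoped factory serve callers of any scope.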
@@ -88,52 +154,68 @@ def vcsserver_factory(tmpdir_factory):


 def _use_log_level(config):
-    level = config.getoption('test_loglevel') or 'critical'
+    level = config.getoption("test_loglevel") or "critical"
     return level.upper()


-@pytest.fixture(scope='session')
-def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
-    option_name = 'pyramid_config'
+def _ini_config_factory(request, base_dir, rcserver_port, vcsserver_port):
+    option_name = "pyramid_config"
     log_level = _use_log_level(request.config)

     overrides = [
-        {'server:main': {'port': rcserver_port}},
-        {'app:main': {
-            'cache_dir': '%(here)s/rc-tests/rc_data',
-            'vcs.server': f'localhost:{vcsserver_port}',
-            # johbo: We will always start the VCSServer on our own based on the
-            # fixtures of the test cases. For the test run it must always be
-            # off in the INI file.
-            'vcs.start_server': 'false',
-            'vcs.server.protocol': 'http',
-            'vcs.scm_app_implementation': 'http',
-            'vcs.svn.proxy.enabled': 'true',
-            'vcs.hooks.protocol': 'http',
-            'vcs.hooks.host': '*',
-            'repo_store.path': TESTS_TMP_PATH,
-            'app.service_api.token': 'service_secret_token',
-        }},
-        {'handler_console': {
-            'class': 'StreamHandler',
-            'args': '(sys.stderr,)',
-            'level': log_level,
-        }},
+        {"server:main": {"port": rcserver_port}},
+        {
+            "app:main": {
+                #'cache_dir': '%(here)s/rc-tests/rc_data',
+                "vcs.server": f"localhost:{vcsserver_port}",
+                # johbo: We will always start the VCSServer on our own based on the
+                # fixtures of the test cases. For the test run it must always be
+                # off in the INI file.
+                "vcs.start_server": "false",
+                "vcs.server.protocol": "http",
+                "vcs.scm_app_implementation": "http",
+                "vcs.svn.proxy.enabled": "true",
+                "vcs.hooks.protocol.v2": "celery",
+                "vcs.hooks.host": "*",
+                "repo_store.path": TESTS_TMP_PATH,
+                "app.service_api.token": "service_secret_token",
+            }
+        },
+        {
+            "handler_console": {
+                "class": "StreamHandler",
+                "args": "(sys.stderr,)",
+                "level": log_level,
+            }
+        },
     ]

     filename = get_config(
-        request.config, option_name=option_name,
-        override_option_name='{}_override'.format(option_name),
-        overrides=overrides,
-        basetemp=tmpdir_factory.getbasetemp().strpath,
-        prefix='test_rce_')
+        request.config,
+        option_name=option_name,
+        override_option_name=f"{option_name}_override",
+        overrides=overrides,
+        basetemp=base_dir,
+        prefix="test_rce_",
+    )
     return filename


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
+def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
+    base_dir = tmpdir_factory.getbasetemp().strpath
+    return _ini_config_factory(request, base_dir, rcserver_port, vcsserver_port)
+
+
+@pytest.fixture(scope="session")
+def ini_config_factory(request, tmpdir_factory, rcserver_port, vcsserver_port):
+    def _factory(ini_config_basedir, overrides=()):
+        return _ini_config_factory(request, ini_config_basedir, rcserver_port, vcsserver_port)
+
+    return _factory
+
+
+@pytest.fixture(scope="session")
 def ini_settings(ini_config):
     ini_path = ini_config
     return get_app_config(ini_path)
@@ -141,26 +223,25 @@ def ini_settings(ini_config):

 def get_available_port(min_port=40000, max_port=55555):
     from rhodecode.lib.utils2 import get_available_port as _get_port
+
     return _get_port(min_port, max_port)


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def rcserver_port(request):
     port = get_available_port()
-    print(f'Using rhodecode port {port}')
     return port


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def vcsserver_port(request):
-    port = request.config.getoption('--vcsserver-port')
+    port = request.config.getoption("--vcsserver-port")
     if port is None:
         port = get_available_port()
-    print(f'Using vcsserver port {port}')
     return port


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def available_port_factory() -> get_available_port:
     """
     Returns a callable which returns free port numbers.
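Aside (not part of the diff): the port fixtures above delegate to rhodecode.lib.utils2.get_available_port, whose body is not shown here. A common way to probe for a free port in a range looks roughly like this sketch; it is an assumption about the approach, not the actual implementation:

    import random
    import socket


    def find_free_port(min_port=40000, max_port=55555, attempts=100):
        for _ in range(attempts):
            port = random.randint(min_port, max_port)
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                try:
                    sock.bind(("127.0.0.1", port))
                except OSError:
                    continue  # port already taken, try another candidate
                return port  # bind succeeded, so the port was free
        raise RuntimeError("no free port found in range")

Note the inherent race: a port that was free at probe time can be taken before the server binds it, which is one reason test runs retry with fresh ports.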
@@ -178,7 +259,7 @@ def available_port(available_port_factor
     return available_port_factory()


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def testini_factory(tmpdir_factory, ini_config):
     """
     Factory to create an INI file based on TestINI.
@@ -190,37 +271,38 @@ def testini_factory(tmpdir_factory, ini_


 class TestIniFactory(object):
-
-    def __init__(self, basetemp, template_ini):
-        self._basetemp = basetemp
+    def __init__(self, ini_store_dir, template_ini):
+        self._ini_store_dir = ini_store_dir
         self._template_ini = template_ini

-    def __call__(self, ini_params, new_file_prefix='test'):
+    def __call__(self, ini_params, new_file_prefix="test"):
         ini_file = TestINI(
-            self._template_ini, ini_params=ini_params,
-            new_file_prefix=new_file_prefix, dir=self._basetemp)
+            self._template_ini, ini_params=ini_params, new_file_prefix=new_file_prefix, dir=self._ini_store_dir
+        )
         result = ini_file.create()
         return result


-def get_config(
-        config, option_name, override_option_name, overrides=None,
-        basetemp=None, prefix='test'):
+def get_config(config, option_name, override_option_name, overrides=None, basetemp=None, prefix="test"):
     """
     Find a configuration file and apply overrides for the given `prefix`.
     """
-    config_file = (
-        config.getoption(option_name))
+    try:
+        config_file = config.getoption(option_name)
+    except ValueError:
+        config_file = None
+
     if not config_file:
-        pytest.exit(
-            "Configuration error, could not extract {}.".format(option_name))
+        config_file = config.getini(option_name)
+
+    if not config_file:
+        pytest.exit(f"Configuration error, could not extract {option_name}.")

     overrides = overrides or []
-    config_override = config.getoption(override_option_name)
-    if config_override:
-        overrides.append(config_override)
-    temp_ini_file = TestINI(
-        config_file, ini_params=overrides, new_file_prefix=prefix,
-        dir=basetemp)
+    if override_option_name:
+        config_override = config.getoption(override_option_name)
+        if config_override:
+            overrides.append(config_override)
+    temp_ini_file = TestINI(config_file, ini_params=overrides, new_file_prefix=prefix, dir=basetemp)

     return temp_ini_file.create()
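Aside (not part of the diff): the new get_config resolves the configuration file in three steps, and tolerates options that were never registered for the run. A standalone restatement of that lookup order, taken directly from the hunk above (the helper name is illustrative):

    import pytest


    def resolve_config_file(config, option_name):
        # 1) prefer a command-line option, if registered for this run;
        #    pytest raises ValueError for unknown option names
        try:
            config_file = config.getoption(option_name)
        except ValueError:
            config_file = None
        # 2) fall back to the ini setting of the same name
        if not config_file:
            config_file = config.getini(option_name)
        # 3) abort the whole test run when neither source provides one
        if not config_file:
            pytest.exit(f"Configuration error, could not extract {option_name}.")
        return config_file

Guarding the override lookup behind `if override_option_name:` is what lets the new factories pass `override_option_name=None` without tripping the same ValueError.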
(The following file's diff was collapsed in the original view: 775 lines changed.)
@@ -1,5 +1,4 @@
-
-# Copyright (C) 2010-2023 RhodeCode GmbH
+# Copyright (C) 2010-2024 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
@@ -30,6 +29,7 @@ import uuid
 import dateutil.tz
 import logging
 import functools
+import textwrap

 import mock
 import pyramid.testing
@@ -43,8 +43,17 @@ import rhodecode.lib
 from rhodecode.model.changeset_status import ChangesetStatusModel
 from rhodecode.model.comment import CommentsModel
 from rhodecode.model.db import (
-    PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
-    RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
+    PullRequest,
+    PullRequestReviewers,
+    Repository,
+    RhodeCodeSetting,
+    ChangesetStatus,
+    RepoGroup,
+    UserGroup,
+    RepoRhodeCodeUi,
+    RepoRhodeCodeSetting,
+    RhodeCodeUi,
+)
 from rhodecode.model.meta import Session
 from rhodecode.model.pull_request import PullRequestModel
 from rhodecode.model.repo import RepoModel
@@ -60,12 +69,20 @@ from rhodecode.lib.str_utils import safe
 from rhodecode.lib.hash_utils import sha1_safe
 from rhodecode.lib.vcs.backends import get_backend
 from rhodecode.lib.vcs.nodes import FileNode
+from rhodecode.lib.base import bootstrap_config
 from rhodecode.tests import (
-    login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
-    TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
-    TEST_USER_REGULAR_PASS)
-from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
-from rhodecode.tests.fixture import Fixture
+    login_user_session,
+    get_new_dir,
+    utils,
+    TESTS_TMP_PATH,
+    TEST_USER_ADMIN_LOGIN,
+    TEST_USER_REGULAR_LOGIN,
+    TEST_USER_REGULAR2_LOGIN,
+    TEST_USER_REGULAR_PASS,
+    console_printer,
+)
+from rhodecode.tests.utils import set_anonymous_access
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.config import utils as config_utils

 log = logging.getLogger(__name__)
@@ -76,36 +93,7 @@ def cmp(a, b):
     return (a > b) - (a < b)


-@pytest.fixture(scope='session')
-def activate_example_rcextensions(request):
-    """
-    Patch in an example rcextensions module which verifies passed in kwargs.
-    """
-    from rhodecode.config import rcextensions
-
-    old_extensions = rhodecode.EXTENSIONS
-    rhodecode.EXTENSIONS = rcextensions
-    rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
-
-    @request.addfinalizer
-    def cleanup():
-        rhodecode.EXTENSIONS = old_extensions
-
-
-@pytest.fixture()
-def capture_rcextensions():
-    """
-    Returns the recorded calls to entry points in rcextensions.
-    """
-    calls = rhodecode.EXTENSIONS.calls
-    calls.clear()
-    # Note: At this moment, it is still the empty dict, but that will
-    # be filled during the test run and since it is a reference this
-    # is enough to make it work.
-    return calls
-
-
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def http_environ_session():
     """
     Allow to use "http_environ" in session scope.
@@ -117,7 +105,31 @@ def plain_http_host_stub():
     """
     Value of HTTP_HOST in the test run.
     """
-    return 'example.com:80'
+    return "example.com:80"
+
+
+def plain_config_stub(request, request_stub):
+    """
+    Set up pyramid.testing and return the Configurator.
+    """
+
+    config = bootstrap_config(request=request_stub)
+
+    @request.addfinalizer
+    def cleanup():
+        pyramid.testing.tearDown()
+
+    return config
+
+
+def plain_request_stub():
+    """
+    Stub request object.
+    """
+    from rhodecode.lib.base import bootstrap_request
+
+    _request = bootstrap_request(scheme="https")
+    return _request


 @pytest.fixture()
@@ -132,7 +144,7 @@ def plain_http_host_only_stub():
     """
     Value of HTTP_HOST in the test run.
     """
-    return plain_http_host_stub().split(':')[0]
+    return plain_http_host_stub().split(":")[0]


 @pytest.fixture()
@@ -147,33 +159,21 @@ def plain_http_environ():
     """
     HTTP extra environ keys.

-    User by the test application and as well for setting up the pylons
+    Used by the test application and as well for setting up the pylons
     environment. In the case of the fixture "app" it should be possible
     to override this for a specific test case.
     """
     return {
-        'SERVER_NAME': plain_http_host_only_stub(),
-        'SERVER_PORT': plain_http_host_stub().split(':')[1],
-        'HTTP_HOST': plain_http_host_stub(),
-        'HTTP_USER_AGENT': 'rc-test-agent',
-        'REQUEST_METHOD': 'GET',
+        "SERVER_NAME": plain_http_host_only_stub(),
+        "SERVER_PORT": plain_http_host_stub().split(":")[1],
+        "HTTP_HOST": plain_http_host_stub(),
+        "HTTP_USER_AGENT": "rc-test-agent",
+        "REQUEST_METHOD": "GET",
     }


-@pytest.fixture()
-def http_environ():
-    """
-    HTTP extra environ keys.
-
-    User by the test application and as well for setting up the pylons
-    environment. In the case of the fixture "app" it should be possible
-    to override this for a specific test case.
-    """
-    return plain_http_environ()
-
-
-@pytest.fixture(scope='session')
-def baseapp(ini_config, vcsserver, http_environ_session):
+@pytest.fixture(scope="session")
+def baseapp(request, ini_config, http_environ_session, available_port_factory, vcsserver_factory, celery_factory):
     from rhodecode.lib.config_utils import get_app_config
     from rhodecode.config.middleware import make_pyramid_app

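Aside (not part of the diff): plain_http_environ above returns the extra WSGI environ merged into every test request. With the stub host "example.com:80", the resulting dict evaluates to the literal below; this is an illustration of the shape, with values derived from the hunk:

    extra_environ = {
        "SERVER_NAME": "example.com",
        "SERVER_PORT": "80",
        "HTTP_HOST": "example.com:80",
        "HTTP_USER_AGENT": "rc-test-agent",
        "REQUEST_METHOD": "GET",
    }
    # SERVER_NAME is the host part of HTTP_HOST, by construction
    assert extra_environ["HTTP_HOST"].split(":")[0] == extra_environ["SERVER_NAME"]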
@@ -181,22 +181,41 @@ def baseapp(ini_config, vcsserver, http_
     pyramid.paster.setup_logging(ini_config)

     settings = get_app_config(ini_config)
-    app = make_pyramid_app({'__file__': ini_config}, **settings)
+    store_dir = os.path.dirname(ini_config)
+
+    # start vcsserver
+    _vcsserver_port = available_port_factory()
+    vcsserver_instance = vcsserver_factory(
+        request,
+        store_dir=store_dir,
+        port=_vcsserver_port,
+        info_prefix="base-app-"
+    )
+
+    settings["vcs.server"] = vcsserver_instance.bind_addr

-    return app
+    # we skip setting store_dir for baseapp, it's internally set via testing rhodecode.ini
+    # settings['repo_store.path'] = str(store_dir)
+    console_printer(f' :warning: [green]pytest-setup[/green] Starting base pyramid-app: {ini_config}')
+    pyramid_baseapp = make_pyramid_app({"__file__": ini_config}, **settings)
+
+    # start celery
+    celery_factory(
+        request,
+        store_dir=store_dir,
+        port=None,
+        info_prefix="base-app-",
+        overrides=(
+            {'handler_console': {'level': 'DEBUG'}},
+            {'app:main': {'vcs.server': vcsserver_instance.bind_addr}},
+            {'app:main': {'repo_store.path': store_dir}}
+        )
+    )
+
+    return pyramid_baseapp


-@pytest.fixture(scope='session')
-def app(request, config_stub, baseapp, http_environ):
-    app = CustomTestApp(
-        baseapp,
-        extra_environ=http_environ)
-    if request.cls:
-        request.cls.app = app
-    return app
-
-
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def app_settings(baseapp, ini_config):
     """
     Settings dictionary used to create the app.
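Aside (not part of the diff): the overrides baseapp hands to celery_factory are a sequence of one-key dicts, each mapping an INI section to settings, applied in order with later entries winning. A tiny standalone merger illustrating that shape; it is an assumption about how TestINI folds the list, not its actual code:

    def merge_overrides(overrides):
        merged = {}
        for override in overrides:
            for section, settings in override.items():
                # later dicts for the same section update earlier ones
                merged.setdefault(section, {}).update(settings)
        return merged


    assert merge_overrides(
        [
            {"app:main": {"use_celery": "true"}},
            {"app:main": {"celery.task_always_eager": "false"}},
        ]
    ) == {"app:main": {"use_celery": "true", "celery.task_always_eager": "false"}}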
@@ -207,19 +226,19 @@ def app_settings(baseapp, ini_config):
     return baseapp.config.get_settings()


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def db_connection(ini_settings):
     # Initialize the database connection.
     config_utils.initialize_database(ini_settings)


-LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
+LoginData = collections.namedtuple("LoginData", ("csrf_token", "user"))


 def _autologin_user(app, *args):
     session = login_user_session(app, *args)
     csrf_token = rhodecode.lib.auth.get_csrf_token(session)
-    return LoginData(csrf_token, session['rhodecode_user'])
+    return LoginData(csrf_token, session["rhodecode_user"])


 @pytest.fixture()
@@ -235,18 +254,17 @@ def autologin_regular_user(app):
     """
     Utility fixture which makes sure that the regular user is logged in
     """
-    return _autologin_user(
-        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
+    return _autologin_user(app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)


-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
 def csrf_token(request, autologin_user):
     return autologin_user.csrf_token


-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
 def xhr_header(request):
-    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
+    return {"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"}


 @pytest.fixture()
@@ -257,18 +275,18 @@ def real_crypto_backend(monkeypatch):
     During the test run the crypto backend is replaced with a faster
     implementation based on the MD5 algorithm.
     """
-    monkeypatch.setattr(rhodecode, 'is_test', False)
+    monkeypatch.setattr(rhodecode, "is_test", False)


-@pytest.fixture(scope='class')
+@pytest.fixture(scope="class")
 def index_location(request, baseapp):
-    index_location = baseapp.config.get_settings()['search.location']
+    index_location = baseapp.config.get_settings()["search.location"]
     if request.cls:
         request.cls.index_location = index_location
     return index_location


-@pytest.fixture(scope='session', autouse=True)
+@pytest.fixture(scope="session", autouse=True)
 def tests_tmp_path(request):
     """
     Create temporary directory to be used during the test session.
276 | if not os.path.exists(TESTS_TMP_PATH): |
|
294 | if not os.path.exists(TESTS_TMP_PATH): | |
277 | os.makedirs(TESTS_TMP_PATH) |
|
295 | os.makedirs(TESTS_TMP_PATH) | |
278 |
|
296 | |||
279 |
if not request.config.getoption( |
|
297 | if not request.config.getoption("--keep-tmp-path"): | |
|
298 | ||||
280 | @request.addfinalizer |
|
299 | @request.addfinalizer | |
281 | def remove_tmp_path(): |
|
300 | def remove_tmp_path(): | |
282 | shutil.rmtree(TESTS_TMP_PATH) |
|
301 | shutil.rmtree(TESTS_TMP_PATH) | |
@@ -291,7 +310,7 @@ def test_repo_group(request):
     usage automatically
     """
     fixture = Fixture()
-    repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
+    repogroupid = "test_repo_group_%s" % str(time.time()).replace(".", "")
     repo_group = fixture.create_repo_group(repogroupid)

     def _cleanup():
@@ -308,7 +327,7 @@ def test_user_group(request):
     usage automatically
     """
     fixture = Fixture()
-    usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
+    usergroupid = "test_user_group_%s" % str(time.time()).replace(".", "")
     user_group = fixture.create_user_group(usergroupid)

     def _cleanup():
@@ -318,7 +337,7 @@ def test_user_group(request):
     return user_group


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def test_repo(request):
     container = TestRepoContainer()
     request.addfinalizer(container._cleanup)
@@ -340,9 +359,9 @@ class TestRepoContainer(object):
     """

     dump_extractors = {
-        'git': utils.extract_git_repo_from_dump,
-        'hg': utils.extract_hg_repo_from_dump,
-        'svn': utils.extract_svn_repo_from_dump,
+        "git": utils.extract_git_repo_from_dump,
+        "hg": utils.extract_hg_repo_from_dump,
+        "svn": utils.extract_svn_repo_from_dump,
     }

     def __init__(self):
@@ -358,7 +377,7 @@ class TestRepoContainer(object):
         return Repository.get(self._repos[key])

     def _create_repo(self, dump_name, backend_alias, config):
-        repo_name = f'{backend_alias}-{dump_name}'
+        repo_name = f"{backend_alias}-{dump_name}"
         backend = get_backend(backend_alias)
         dump_extractor = self.dump_extractors[backend_alias]
         repo_path = dump_extractor(dump_name, repo_name)
@@ -375,19 +394,17 @@ class TestRepoContainer(object):
         self._fixture.destroy_repo(repo_name)


-def backend_base(request, backend_alias, baseapp, test_repo):
-    if backend_alias not in request.config.getoption('--backends'):
-        pytest.skip("Backend %s not selected." % (backend_alias, ))
+def backend_base(request, backend_alias, test_repo):
+    if backend_alias not in request.config.getoption("--backends"):
+        pytest.skip(f"Backend {backend_alias} not selected.")

     utils.check_xfail_backends(request.node, backend_alias)
     utils.check_skip_backends(request.node, backend_alias)

-    repo_name = 'vcs_test_%s' % (backend_alias, )
+    repo_name = "vcs_test_%s" % (backend_alias,)
     backend = Backend(
-        alias=backend_alias,
-        repo_name=repo_name,
-        test_name=request.node.name,
-        test_repo_container=test_repo)
+        alias=backend_alias, repo_name=repo_name, test_name=request.node.name, test_repo_container=test_repo
+    )
     request.addfinalizer(backend.cleanup)
     return backend

@@ -404,22 +421,22 @@ def backend(request, backend_alias, base
     for specific backends. This is intended as a utility for incremental
     development of a new backend implementation.
     """
-    return backend_base(request, backend_alias, baseapp, test_repo)
+    return backend_base(request, backend_alias, test_repo)


 @pytest.fixture()
 def backend_git(request, baseapp, test_repo):
-    return backend_base(request, 'git', baseapp, test_repo)
+    return backend_base(request, "git", test_repo)


 @pytest.fixture()
 def backend_hg(request, baseapp, test_repo):
-    return backend_base(request, 'hg', baseapp, test_repo)
+    return backend_base(request, "hg", test_repo)


 @pytest.fixture()
 def backend_svn(request, baseapp, test_repo):
-    return backend_base(request, 'svn', baseapp, test_repo)
+    return backend_base(request, "svn", test_repo)


 @pytest.fixture()
467 | session. |
|
484 | session. | |
468 | """ |
|
485 | """ | |
469 |
|
486 | |||
470 |
invalid_repo_name = re.compile(r |
|
487 | invalid_repo_name = re.compile(r"[^0-9a-zA-Z]+") | |
471 | _master_repo = None |
|
488 | _master_repo = None | |
472 |
_master_repo_path = |
|
489 | _master_repo_path = "" | |
473 | _commit_ids = {} |
|
490 | _commit_ids = {} | |
474 |
|
491 | |||
475 | def __init__(self, alias, repo_name, test_name, test_repo_container): |
|
492 | def __init__(self, alias, repo_name, test_name, test_repo_container): | |
@@ -500,6 +517,7 b' class Backend(object):' | |||||
500 | last repo which has been created with `create_repo`. |
|
517 | last repo which has been created with `create_repo`. | |
501 | """ |
|
518 | """ | |
502 | from rhodecode.model.db import Repository |
|
519 | from rhodecode.model.db import Repository | |
|
520 | ||||
503 | return Repository.get_by_repo_name(self.repo_name) |
|
521 | return Repository.get_by_repo_name(self.repo_name) | |
504 |
|
522 | |||
505 | @property |
|
523 | @property | |
@@ -517,9 +535,7 b' class Backend(object):' | |||||
517 | which can serve as the base to create a new commit on top of it. |
|
535 | which can serve as the base to create a new commit on top of it. | |
518 | """ |
|
536 | """ | |
519 | vcsrepo = self.repo.scm_instance() |
|
537 | vcsrepo = self.repo.scm_instance() | |
520 | head_id = ( |
|
538 | head_id = vcsrepo.DEFAULT_BRANCH_NAME or vcsrepo.commit_ids[-1] | |
521 | vcsrepo.DEFAULT_BRANCH_NAME or |
|
|||
522 | vcsrepo.commit_ids[-1]) |
|
|||
523 | return head_id |
|
539 | return head_id | |
524 |
|
540 | |||
525 | @property |
|
541 | @property | |
@@ -543,9 +559,7 b' class Backend(object):' | |||||
543 |
|
559 | |||
544 | return self._commit_ids |
|
560 | return self._commit_ids | |
545 |
|
561 | |||
546 | def create_repo( |
|
562 | def create_repo(self, commits=None, number_of_commits=0, heads=None, name_suffix="", bare=False, **kwargs): | |
547 | self, commits=None, number_of_commits=0, heads=None, |
|
|||
548 | name_suffix='', bare=False, **kwargs): |
|
|||
549 | """ |
|
563 | """ | |
550 | Create a repository and record it for later cleanup. |
|
564 | Create a repository and record it for later cleanup. | |
551 |
|
565 | |||
@@ -559,13 +573,10 @@ class Backend(object):
         :param bare: set a repo as bare (no checkout)
         """
         self.repo_name = self._next_repo_name() + name_suffix
-        repo = self._fixture.create_repo(
-            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
+        repo = self._fixture.create_repo(self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
         self._cleanup_repos.append(repo.repo_name)

-        commits = commits or [
-            {'message': f'Commit {x} of {self.repo_name}'}
-            for x in range(number_of_commits)]
+        commits = commits or [{"message": f"Commit {x} of {self.repo_name}"} for x in range(number_of_commits)]
         vcs_repo = repo.scm_instance()
         vcs_repo.count()
         self._add_commits_to_repo(vcs_repo, commits)
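Aside (not part of the diff): when no explicit commits are passed, create_repo synthesizes them exactly as the hunk above shows. A standalone restatement, with an illustrative repository name:

    repo_name = "vcs_test_git_example"  # hypothetical name for illustration
    number_of_commits = 3
    commits = [{"message": f"Commit {x} of {repo_name}"} for x in range(number_of_commits)]
    assert commits[0] == {"message": "Commit 0 of vcs_test_git_example"}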
@@ -579,7 +590,7 @@ class Backend(object):
         Make sure that repo contains all commits mentioned in `heads`
         """
         vcsrepo = repo.scm_instance()
-        vcsrepo.config.clear_section('hooks')
+        vcsrepo.config.clear_section("hooks")
         commit_ids = [self._commit_ids[h] for h in heads]
         if do_fetch:
             vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids)
@@ -592,21 +603,22 @@ class Backend(object):
         self._cleanup_repos.append(self.repo_name)
         return repo

-    def new_repo_name(self, suffix=''):
+    def new_repo_name(self, suffix=""):
         self.repo_name = self._next_repo_name() + suffix
         self._cleanup_repos.append(self.repo_name)
         return self.repo_name

     def _next_repo_name(self):
-        return "%s_%s" % (
-            self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos))
+        return "%s_%s" % (self.invalid_repo_name.sub("_", self._test_name), len(self._cleanup_repos))

-    def ensure_file(self, filename, content=b'Test content\n'):
+    def ensure_file(self, filename, content=b"Test content\n"):
         assert self._cleanup_repos, "Avoid writing into vcs_test repos"
         commits = [
-            {'added': [
-                FileNode(filename, content=content),
-            ]},
+            {
+                "added": [
+                    FileNode(filename, content=content),
+                ]
+            },
         ]
         self._add_commits_to_repo(self.repo.scm_instance(), commits)

@@ -627,11 +639,11 @@ class Backend(object):
         self._commit_ids = commit_ids

         # Creating refs for Git to allow fetching them from remote repository
-        if self.alias == 'git':
+        if self.alias == "git":
             refs = {}
             for message in self._commit_ids:
-                cleanup_message = message.replace(' ', '')
-                ref_name = f'refs/test-refs/{cleanup_message}'
+                cleanup_message = message.replace(" ", "")
+                ref_name = f"refs/test-refs/{cleanup_message}"
                 refs[ref_name] = self._commit_ids[message]
             self._create_refs(repo, refs)

@@ -645,7 +657,7 @@ class VcsBackend(object):
     Represents the test configuration for one supported vcs backend.
     """

-    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
+    invalid_repo_name = re.compile(r"[^0-9a-zA-Z]+")

     def __init__(self, alias, repo_path, test_name, test_repo_container):
         self.alias = alias
@@ -658,7 +670,7 @@ class VcsBackend(object):
         return self._test_repo_container(key, self.alias).scm_instance()

     def __repr__(self):
-        return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})'
+        return f"{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})"

     @property
     def repo(self):
@@ -676,8 +688,7 @@ class VcsBackend(object):
         """
         return get_backend(self.alias)

-    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
-                    bare=False):
+    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None, bare=False):
         repo_name = self._next_repo_name()
         self._repo_path = get_new_dir(repo_name)
         repo_class = get_backend(self.alias)
@@ -687,9 +698,7 @@ class VcsBackend(object):
         repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
         self._cleanup_repos.append(repo)

-        commits = commits or [
-            {'message': 'Commit %s of %s' % (x, repo_name)}
-            for x in range(number_of_commits)]
+        commits = commits or [{"message": "Commit %s of %s" % (x, repo_name)} for x in range(number_of_commits)]
         _add_commits_to_repo(repo, commits)
         return repo

@@ -706,38 +715,30 @@ class VcsBackend(object):
         return self._repo_path

     def _next_repo_name(self):
+        return "{}_{}".format(self.invalid_repo_name.sub("_", self._test_name), len(self._cleanup_repos))

-        return "{}_{}".format(
-            self.invalid_repo_name.sub('_', self._test_name),
-            len(self._cleanup_repos)
-        )
-
-    def add_file(self, repo, filename, content='Test content\n'):
+    def add_file(self, repo, filename, content="Test content\n"):
         imc = repo.in_memory_commit
         imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
-        imc.commit(
-            message='Automatic commit from vcsbackend fixture',
-            author='Automatic <automatic@rhodecode.com>')
+        imc.commit(message="Automatic commit from vcsbackend fixture", author="Automatic <automatic@rhodecode.com>")

-    def ensure_file(self, filename, content='Test content\n'):
+    def ensure_file(self, filename, content="Test content\n"):
         assert self._cleanup_repos, "Avoid writing into vcs_test repos"
         self.add_file(self.repo, filename, content)


 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
-    if backend_alias not in request.config.getoption('--backends'):
-        pytest.skip("Backend %s not selected." % (backend_alias, ))
+    if backend_alias not in request.config.getoption("--backends"):
+        pytest.skip("Backend %s not selected." % (backend_alias,))

     utils.check_xfail_backends(request.node, backend_alias)
     utils.check_skip_backends(request.node, backend_alias)

-    repo_name = f'vcs_test_{backend_alias}'
+    repo_name = f"vcs_test_{backend_alias}"
     repo_path = os.path.join(tests_tmp_path, repo_name)
     backend = VcsBackend(
-        alias=backend_alias,
-        repo_path=repo_path,
-        test_name=request.node.name,
-        test_repo_container=test_repo)
+        alias=backend_alias, repo_path=repo_path, test_name=request.node.name, test_repo_container=test_repo
+    )
     request.addfinalizer(backend.cleanup)
     return backend

@@ -758,17 +759,17 @@ def vcsbackend(request, backend_alias, t

 @pytest.fixture()
 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
-    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
+    return vcsbackend_base(request, "git", tests_tmp_path, baseapp, test_repo)


 @pytest.fixture()
 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
-    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
+    return vcsbackend_base(request, "hg", tests_tmp_path, baseapp, test_repo)


 @pytest.fixture()
 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
-    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
+    return vcsbackend_base(request, "svn", tests_tmp_path, baseapp, test_repo)


 @pytest.fixture()
@@ -789,29 +790,28 @@ def _add_commits_to_repo(vcs_repo, commi
     imc = vcs_repo.in_memory_commit

     for idx, commit in enumerate(commits):
-        message = str(commit.get('message', f'Commit {idx}'))
+        message = str(commit.get("message", f"Commit {idx}"))

-        for node in commit.get('added', []):
+        for node in commit.get("added", []):
             imc.add(FileNode(safe_bytes(node.path), content=node.content))
-        for node in commit.get('changed', []):
+        for node in commit.get("changed", []):
             imc.change(FileNode(safe_bytes(node.path), content=node.content))
-        for node in commit.get('removed', []):
+        for node in commit.get("removed", []):
             imc.remove(FileNode(safe_bytes(node.path)))

-        parents = [
-            vcs_repo.get_commit(commit_id=commit_ids[p])
-            for p in commit.get('parents', [])]
+        parents = [vcs_repo.get_commit(commit_id=commit_ids[p]) for p in commit.get("parents", [])]

-        operations = ('added', 'changed', 'removed')
+        operations = ("added", "changed", "removed")
         if not any((commit.get(o) for o in operations)):
-            imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message)))
+            imc.add(FileNode(b"file_%b" % safe_bytes(str(idx)), content=safe_bytes(message)))

         commit = imc.commit(
             message=message,
-            author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
-            date=commit.get('date'),
-            branch=commit.get('branch'),
-            parents=parents)
+            author=str(commit.get("author", "Automatic <automatic@rhodecode.com>")),
+            date=commit.get("date"),
+            branch=commit.get("branch"),
+            parents=parents,
+        )

         commit_ids[commit.message] = commit.raw_id

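Aside (not part of the diff): _add_commits_to_repo consumes commit specs whose recognized keys can be read straight off the loop above ("message", "added", "changed", "removed", "parents", "author", "date", "branch"). A standalone illustration of that shape; NodeSpec is a hypothetical stand-in for the FileNode-like inputs, not a real rhodecode type:

    from dataclasses import dataclass


    @dataclass
    class NodeSpec:
        # stand-in for the objects whose .path/.content the loop reads
        path: str
        content: bytes


    commit_spec = {
        "message": "Add readme",
        "added": [NodeSpec("README.rst", b"hello\n")],
        "changed": [],
        "removed": [],
        "parents": [],  # messages of earlier commits, resolved via commit_ids
        "branch": None,
    }

If none of "added"/"changed"/"removed" is given, the fixture invents a file_<idx> node so every spec still produces a non-empty commit.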
@@ -842,14 +842,14 @@ class RepoServer(object):
         self._cleanup_servers = []

     def serve(self, vcsrepo):
-        if vcsrepo.alias != 'svn':
+        if vcsrepo.alias != "svn":
             raise TypeError("Backend %s not supported" % vcsrepo.alias)

         proc = subprocess.Popen(
-            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
-             '--root', vcsrepo.path])
+            ["svnserve", "-d", "--foreground", "--listen-host", "localhost", "--root", vcsrepo.path]
+        )
         self._cleanup_servers.append(proc)
-        self.url = 'svn://localhost'
+        self.url = "svn://localhost"

     def cleanup(self):
         for proc in self._cleanup_servers:
@@ -874,7 +874,6 @@ def pr_util(backend, request, config_stu


 class PRTestUtility(object):
-
     pull_request = None
     pull_request_id = None
     mergeable_patcher = None
@@ -886,48 +885,55 @@ class PRTestUtility(object):
         self.backend = backend

     def create_pull_request(
-            self, commits=None, target_head=None, source_head=None,
-            revisions=None, approved=False, author=None, mergeable=False,
-            enable_notifications=True, name_suffix='', reviewers=None, observers=None,
-            title="Test", description="Description"):
+        self,
+        commits=None,
+        target_head=None,
+        source_head=None,
+        revisions=None,
+        approved=False,
+        author=None,
+        mergeable=False,
+        enable_notifications=True,
+        name_suffix="",
+        reviewers=None,
+        observers=None,
+        title="Test",
+        description="Description",
+    ):
         self.set_mergeable(mergeable)
         if not enable_notifications:
             # mock notification side effect
-            self.notification_patcher = mock.patch(
-                'rhodecode.model.notification.NotificationModel.create')
+            self.notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create")
             self.notification_patcher.start()

         if not self.pull_request:
             if not commits:
                 commits = [
-                    {'message': 'c1'},
-                    {'message': 'c2'},
-                    {'message': 'c3'},
+                    {"message": "c1"},
+                    {"message": "c2"},
+                    {"message": "c3"},
                 ]
-                target_head = 'c1'
-                source_head = 'c2'
-                revisions = ['c2']
+                target_head = "c1"
+                source_head = "c2"
+                revisions = ["c2"]

             self.commit_ids = self.backend.create_master_repo(commits)
-            self.target_repository = self.backend.create_repo(
-                heads=[target_head], name_suffix=name_suffix)
-            self.source_repository = self.backend.create_repo(
-                heads=[source_head], name_suffix=name_suffix)
-            self.author = author or UserModel().get_by_username(
-                TEST_USER_ADMIN_LOGIN)
+            self.target_repository = self.backend.create_repo(heads=[target_head], name_suffix=name_suffix)
+            self.source_repository = self.backend.create_repo(heads=[source_head], name_suffix=name_suffix)
+            self.author = author or UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)

             model = PullRequestModel()
             self.create_parameters = {
-                'created_by': self.author,
-                'source_repo': self.source_repository.repo_name,
-                'source_ref': self._default_branch_reference(source_head),
-                'target_repo': self.target_repository.repo_name,
-                'target_ref': self._default_branch_reference(target_head),
-                'revisions': [self.commit_ids[r] for r in revisions],
-                'reviewers': reviewers or self._get_reviewers(),
-                'observers': observers or self._get_observers(),
-                'title': title,
-                'description': description,
+                "created_by": self.author,
+                "source_repo": self.source_repository.repo_name,
+                "source_ref": self._default_branch_reference(source_head),
+                "target_repo": self.target_repository.repo_name,
+                "target_ref": self._default_branch_reference(target_head),
+                "revisions": [self.commit_ids[r] for r in revisions],
+                "reviewers": reviewers or self._get_reviewers(),
+                "observers": observers or self._get_observers(),
+                "title": title,
+                "description": description,
            }
            self.pull_request = model.create(**self.create_parameters)
            assert model.get_versions(self.pull_request) == []
943 | return self.pull_request |
|
949 | return self.pull_request | |
944 |
|
950 | |||
945 | def approve(self): |
|
951 | def approve(self): | |
946 | self.create_status_votes( |
|
952 | self.create_status_votes(ChangesetStatus.STATUS_APPROVED, *self.pull_request.reviewers) | |
947 | ChangesetStatus.STATUS_APPROVED, |
|
|||
948 | *self.pull_request.reviewers) |
|
|||
949 |
|
953 | |||
950 | def close(self): |
|
954 | def close(self): | |
951 | PullRequestModel().close_pull_request(self.pull_request, self.author) |
|
955 | PullRequestModel().close_pull_request(self.pull_request, self.author) | |
@@ -953,28 +957,26 b' class PRTestUtility(object):' | |||||
953 | def _default_branch_reference(self, commit_message, branch: str = None) -> str: |
|
957 | def _default_branch_reference(self, commit_message, branch: str = None) -> str: | |
954 | default_branch = branch or self.backend.default_branch_name |
|
958 | default_branch = branch or self.backend.default_branch_name | |
955 | message = self.commit_ids[commit_message] |
|
959 | message = self.commit_ids[commit_message] | |
956 |
reference = f |
|
960 | reference = f"branch:{default_branch}:{message}" | |
957 |
|
961 | |||
958 | return reference |
|
962 | return reference | |
959 |
|
963 | |||
960 | def _get_reviewers(self): |
|
964 | def _get_reviewers(self): | |
961 | role = PullRequestReviewers.ROLE_REVIEWER |
|
965 | role = PullRequestReviewers.ROLE_REVIEWER | |
962 | return [ |
|
966 | return [ | |
963 |
(TEST_USER_REGULAR_LOGIN, [ |
|
967 | (TEST_USER_REGULAR_LOGIN, ["default1"], False, role, []), | |
964 |
(TEST_USER_REGULAR2_LOGIN, [ |
|
968 | (TEST_USER_REGULAR2_LOGIN, ["default2"], False, role, []), | |
965 | ] |
|
969 | ] | |
966 |
|
970 | |||
967 | def _get_observers(self): |
|
971 | def _get_observers(self): | |
968 | return [ |
|
972 | return [] | |
969 |
|
||||
970 | ] |
|
|||
971 |
|
973 | |||
972 | def update_source_repository(self, head=None, do_fetch=False): |
|
974 | def update_source_repository(self, head=None, do_fetch=False): | |
973 |
heads = [head or |
|
975 | heads = [head or "c3"] | |
974 | self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch) |
|
976 | self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch) | |
975 |
|
977 | |||
976 | def update_target_repository(self, head=None, do_fetch=False): |
|
978 | def update_target_repository(self, head=None, do_fetch=False): | |
977 |
heads = [head or |
|
979 | heads = [head or "c3"] | |
978 | self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch) |
|
980 | self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch) | |
979 |
|
981 | |||
980 | def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str: |
|
982 | def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str: | |
@@ -1004,7 +1006,7 b' class PRTestUtility(object):' | |||||
1004 | # TODO: johbo: Git and Mercurial have an inconsistent vcs api here, |
|
1006 | # TODO: johbo: Git and Mercurial have an inconsistent vcs api here, | |
1005 | # remove the if once that's sorted out. |
|
1007 | # remove the if once that's sorted out. | |
1006 | if self.backend.alias == "git": |
|
1008 | if self.backend.alias == "git": | |
1007 |
kwargs = { |
|
1009 | kwargs = {"branch_name": self.backend.default_branch_name} | |
1008 | else: |
|
1010 | else: | |
1009 | kwargs = {} |
|
1011 | kwargs = {} | |
1010 | source_vcs.strip(removed_commit_id, **kwargs) |
|
1012 | source_vcs.strip(removed_commit_id, **kwargs) | |
@@ -1015,10 +1017,8 b' class PRTestUtility(object):' | |||||
1015 |
|
1017 | |||
1016 | def create_comment(self, linked_to=None): |
|
1018 | def create_comment(self, linked_to=None): | |
1017 | comment = CommentsModel().create( |
|
1019 | comment = CommentsModel().create( | |
1018 | text="Test comment", |
|
1020 | text="Test comment", repo=self.target_repository.repo_name, user=self.author, pull_request=self.pull_request | |
1019 | repo=self.target_repository.repo_name, |
|
1021 | ) | |
1020 | user=self.author, |
|
|||
1021 | pull_request=self.pull_request) |
|
|||
1022 | assert comment.pull_request_version_id is None |
|
1022 | assert comment.pull_request_version_id is None | |
1023 |
|
1023 | |||
1024 | if linked_to: |
|
1024 | if linked_to: | |
@@ -1026,15 +1026,15 b' class PRTestUtility(object):' | |||||
1026 |
|
1026 | |||
1027 | return comment |
|
1027 | return comment | |
1028 |
|
1028 | |||
1029 | def create_inline_comment( |
|
1029 | def create_inline_comment(self, linked_to=None, line_no="n1", file_path="file_1"): | |
1030 | self, linked_to=None, line_no='n1', file_path='file_1'): |
|
|||
1031 | comment = CommentsModel().create( |
|
1030 | comment = CommentsModel().create( | |
1032 | text="Test comment", |
|
1031 | text="Test comment", | |
1033 | repo=self.target_repository.repo_name, |
|
1032 | repo=self.target_repository.repo_name, | |
1034 | user=self.author, |
|
1033 | user=self.author, | |
1035 | line_no=line_no, |
|
1034 | line_no=line_no, | |
1036 | f_path=file_path, |
|
1035 | f_path=file_path, | |
1037 |
pull_request=self.pull_request |
|
1036 | pull_request=self.pull_request, | |
|
1037 | ) | |||
1038 | assert comment.pull_request_version_id is None |
|
1038 | assert comment.pull_request_version_id is None | |
1039 |
|
1039 | |||
1040 | if linked_to: |
|
1040 | if linked_to: | |
@@ -1044,25 +1044,20 b' class PRTestUtility(object):' | |||||
1044 |
|
1044 | |||
1045 | def create_version_of_pull_request(self): |
|
1045 | def create_version_of_pull_request(self): | |
1046 | pull_request = self.create_pull_request() |
|
1046 | pull_request = self.create_pull_request() | |
1047 | version = PullRequestModel()._create_version_from_snapshot( |
|
1047 | version = PullRequestModel()._create_version_from_snapshot(pull_request) | |
1048 | pull_request) |
|
|||
1049 | return version |
|
1048 | return version | |
1050 |
|
1049 | |||
1051 | def create_status_votes(self, status, *reviewers): |
|
1050 | def create_status_votes(self, status, *reviewers): | |
1052 | for reviewer in reviewers: |
|
1051 | for reviewer in reviewers: | |
1053 | ChangesetStatusModel().set_status( |
|
1052 | ChangesetStatusModel().set_status( | |
1054 | repo=self.pull_request.target_repo, |
|
1053 | repo=self.pull_request.target_repo, status=status, user=reviewer.user_id, pull_request=self.pull_request | |
1055 | status=status, |
|
1054 | ) | |
1056 | user=reviewer.user_id, |
|
|||
1057 | pull_request=self.pull_request) |
|
|||
1058 |
|
1055 | |||
1059 | def set_mergeable(self, value): |
|
1056 | def set_mergeable(self, value): | |
1060 | if not self.mergeable_patcher: |
|
1057 | if not self.mergeable_patcher: | |
1061 | self.mergeable_patcher = mock.patch.object( |
|
1058 | self.mergeable_patcher = mock.patch.object(VcsSettingsModel, "get_general_settings") | |
1062 | VcsSettingsModel, 'get_general_settings') |
|
|||
1063 | self.mergeable_mock = self.mergeable_patcher.start() |
|
1059 | self.mergeable_mock = self.mergeable_patcher.start() | |
1064 | self.mergeable_mock.return_value = { |
|
1060 | self.mergeable_mock.return_value = {"rhodecode_pr_merge_enabled": value} | |
1065 | 'rhodecode_pr_merge_enabled': value} |
|
|||
1066 |
|
1061 | |||
1067 | def cleanup(self): |
|
1062 | def cleanup(self): | |
1068 | # In case the source repository is already cleaned up, the pull |
|
1063 | # In case the source repository is already cleaned up, the pull | |
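
A note on the pattern in the hunks above: `set_mergeable` and the notification patcher start `mock.patch` objects by hand and rely on `cleanup()` to stop them later. A minimal self-contained sketch of that lifecycle, with an illustrative `json.dumps` target standing in for the RhodeCode models:

    import json
    from unittest import mock

    class PatcherDemo:
        """Sketch of the manual start/stop mock lifecycle used above."""

        def __init__(self):
            self.patcher = None

        def enable(self):
            # mock.patch() only builds a patcher; nothing is patched until .start()
            self.patcher = mock.patch("json.dumps", return_value="{}")
            self.mocked = self.patcher.start()

        def cleanup(self):
            # mirrors PRTestUtility.cleanup(): a started patcher must be stopped,
            # otherwise the patch leaks into every later test
            if self.patcher:
                self.patcher.stop()
                self.patcher = None

    demo = PatcherDemo()
    demo.enable()
    assert json.dumps({"a": 1}) == "{}"        # patched
    demo.cleanup()
    assert json.dumps({"a": 1}) == '{"a": 1}'  # restored

Stopping the patcher in a finalizer is what keeps the patch from leaking into unrelated tests.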
@@ -1109,7 +1104,6 b' def user_util(request, db_connection):'

 # TODO: johbo: Split this up into utilities per domain or something similar
 class UserUtility(object):
-
     def __init__(self, test_name="test"):
         self._test_name = self._sanitize_name(test_name)
         self.fixture = Fixture()
@@ -1126,37 +1120,29 b' class UserUtility(object):'
         self.user_permissions = []

     def _sanitize_name(self, name):
-        for char in ['[', ']']:
-            name = name.replace(char, '_')
+        for char in ["[", "]"]:
+            name = name.replace(char, "_")
         return name

-    def create_repo_group(
-            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
-        group_name = "{prefix}_repogroup_{count}".format(
-            prefix=self._test_name,
-            count=len(self.repo_group_ids))
-        repo_group = self.fixture.create_repo_group(
-            group_name, cur_user=owner)
+    def create_repo_group(self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
+        group_name = "{prefix}_repogroup_{count}".format(prefix=self._test_name, count=len(self.repo_group_ids))
+        repo_group = self.fixture.create_repo_group(group_name, cur_user=owner)
         if auto_cleanup:
             self.repo_group_ids.append(repo_group.group_id)
         return repo_group

-    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
-                    auto_cleanup=True, repo_type='hg', bare=False):
-        repo_name = "{prefix}_repository_{count}".format(
-            prefix=self._test_name,
-            count=len(self.repos_ids))
+    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, auto_cleanup=True, repo_type="hg", bare=False):
+        repo_name = "{prefix}_repository_{count}".format(prefix=self._test_name, count=len(self.repos_ids))

         repository = self.fixture.create_repo(
-            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
+            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare
+        )
         if auto_cleanup:
             self.repos_ids.append(repository.repo_id)
         return repository

     def create_user(self, auto_cleanup=True, **kwargs):
-        user_name = "{prefix}_user_{count}".format(
-            prefix=self._test_name,
-            count=len(self.user_ids))
+        user_name = "{prefix}_user_{count}".format(prefix=self._test_name, count=len(self.user_ids))
         user = self.fixture.create_user(user_name, **kwargs)
         if auto_cleanup:
             self.user_ids.append(user.user_id)
@@ -1171,13 +1157,9 b' class UserUtility(object):'
         user_group = self.create_user_group(members=[user])
         return user, user_group

-    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
-                          auto_cleanup=True, **kwargs):
-        group_name = "{prefix}_usergroup_{count}".format(
-            prefix=self._test_name,
-            count=len(self.user_group_ids))
-        user_group = self.fixture.create_user_group(
-            group_name, cur_user=owner, **kwargs)
+    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None, auto_cleanup=True, **kwargs):
+        group_name = "{prefix}_usergroup_{count}".format(prefix=self._test_name, count=len(self.user_group_ids))
+        user_group = self.fixture.create_user_group(group_name, cur_user=owner, **kwargs)

         if auto_cleanup:
             self.user_group_ids.append(user_group.users_group_id)
@@ -1190,52 +1172,34 b' class UserUtility(object):'
         self.inherit_default_user_permissions(user_name, False)
         self.user_permissions.append((user_name, permission_name))

-    def grant_user_permission_to_repo_group(
-            self, repo_group, user, permission_name):
-        permission = RepoGroupModel().grant_user_permission(
-            repo_group, user, permission_name)
-        self.user_repo_group_permission_ids.append(
-            (repo_group.group_id, user.user_id))
+    def grant_user_permission_to_repo_group(self, repo_group, user, permission_name):
+        permission = RepoGroupModel().grant_user_permission(repo_group, user, permission_name)
+        self.user_repo_group_permission_ids.append((repo_group.group_id, user.user_id))
         return permission

-    def grant_user_group_permission_to_repo_group(
-            self, repo_group, user_group, permission_name):
-        permission = RepoGroupModel().grant_user_group_permission(
-            repo_group, user_group, permission_name)
-        self.user_group_repo_group_permission_ids.append(
-            (repo_group.group_id, user_group.users_group_id))
+    def grant_user_group_permission_to_repo_group(self, repo_group, user_group, permission_name):
+        permission = RepoGroupModel().grant_user_group_permission(repo_group, user_group, permission_name)
+        self.user_group_repo_group_permission_ids.append((repo_group.group_id, user_group.users_group_id))
         return permission

-    def grant_user_permission_to_repo(
-            self, repo, user, permission_name):
-        permission = RepoModel().grant_user_permission(
-            repo, user, permission_name)
-        self.user_repo_permission_ids.append(
-            (repo.repo_id, user.user_id))
+    def grant_user_permission_to_repo(self, repo, user, permission_name):
+        permission = RepoModel().grant_user_permission(repo, user, permission_name)
+        self.user_repo_permission_ids.append((repo.repo_id, user.user_id))
         return permission

-    def grant_user_group_permission_to_repo(
-            self, repo, user_group, permission_name):
-        permission = RepoModel().grant_user_group_permission(
-            repo, user_group, permission_name)
-        self.user_group_repo_permission_ids.append(
-            (repo.repo_id, user_group.users_group_id))
+    def grant_user_group_permission_to_repo(self, repo, user_group, permission_name):
+        permission = RepoModel().grant_user_group_permission(repo, user_group, permission_name)
+        self.user_group_repo_permission_ids.append((repo.repo_id, user_group.users_group_id))
         return permission

-    def grant_user_permission_to_user_group(
-            self, target_user_group, user, permission_name):
-        permission = UserGroupModel().grant_user_permission(
-            target_user_group, user, permission_name)
-        self.user_user_group_permission_ids.append(
-            (target_user_group.users_group_id, user.user_id))
+    def grant_user_permission_to_user_group(self, target_user_group, user, permission_name):
+        permission = UserGroupModel().grant_user_permission(target_user_group, user, permission_name)
+        self.user_user_group_permission_ids.append((target_user_group.users_group_id, user.user_id))
         return permission

-    def grant_user_group_permission_to_user_group(
-            self, target_user_group, user_group, permission_name):
-        permission = UserGroupModel().grant_user_group_permission(
-            target_user_group, user_group, permission_name)
-        self.user_group_user_group_permission_ids.append(
-            (target_user_group.users_group_id, user_group.users_group_id))
+    def grant_user_group_permission_to_user_group(self, target_user_group, user_group, permission_name):
+        permission = UserGroupModel().grant_user_group_permission(target_user_group, user_group, permission_name)
+        self.user_group_user_group_permission_ids.append((target_user_group.users_group_id, user_group.users_group_id))
         return permission

     def revoke_user_permission(self, user_name, permission_name):
@@ -1285,14 +1249,11 b' class UserUtility(object):'
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
-            first_group_parts = (
-                len(first_group.group_name.split('/')) if first_group else 0)
-            second_group_parts = (
-                len(second_group.group_name.split('/')) if second_group else 0)
+            first_group_parts = len(first_group.group_name.split("/")) if first_group else 0
+            second_group_parts = len(second_group.group_name.split("/")) if second_group else 0
            return cmp(second_group_parts, first_group_parts)

-        sorted_repo_group_ids = sorted(
-            self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
+        sorted_repo_group_ids = sorted(self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
         for repo_group_id in sorted_repo_group_ids:
             self.fixture.destroy_repo_group(repo_group_id)

@@ -1308,16 +1269,11 b' class UserUtility(object):'
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
-            first_group_parts = (
-                len(first_group.users_group_name.split('/'))
-                if first_group else 0)
-            second_group_parts = (
-                len(second_group.users_group_name.split('/'))
-                if second_group else 0)
+            first_group_parts = len(first_group.users_group_name.split("/")) if first_group else 0
+            second_group_parts = len(second_group.users_group_name.split("/")) if second_group else 0
            return cmp(second_group_parts, first_group_parts)

-        sorted_user_group_ids = sorted(
-            self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
+        sorted_user_group_ids = sorted(self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
         for user_group_id in sorted_user_group_ids:
             self.fixture.destroy_user_group(user_group_id)

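
The two cleanup loops above destroy deeper groups before their parents by sorting on path depth. A standalone sketch of the same ordering; note that `cmp` does not exist on Python 3, so the codebase is assumed to provide its own (the version below is illustrative):

    import functools

    def cmp(a, b):
        # Python 2 style three-way compare, as assumed by the diff
        return (a > b) - (a < b)

    def _depth_compare(first, second):
        # more path segments == deeper group == destroy first
        return cmp(len(second.split("/")), len(first.split("/")))

    groups = ["parent", "parent/child", "zombie", "parent/child/leaf"]
    ordered = sorted(groups, key=functools.cmp_to_key(_depth_compare))
    assert ordered[0] == "parent/child/leaf"  # children go before parents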
@@ -1326,22 +1282,19 b' class UserUtility(object):'
         self.fixture.destroy_user(user_id)


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def testrun():
     return {
-        'uuid': uuid.uuid4(),
-        'start': datetime.datetime.utcnow().isoformat(),
-        'timestamp': int(time.time()),
+        "uuid": uuid.uuid4(),
+        "start": datetime.datetime.utcnow().isoformat(),
+        "timestamp": int(time.time()),
     }


 class AppenlightClient(object):
-
-    url_template = '{url}?protocol_version=0.5'
+    url_template = "{url}?protocol_version=0.5"

-    def __init__(
-            self, url, api_key, add_server=True, add_timestamp=True,
-            namespace=None, request=None, testrun=None):
+    def __init__(self, url, api_key, add_server=True, add_timestamp=True, namespace=None, request=None, testrun=None):
         self.url = self.url_template.format(url=url)
         self.api_key = api_key
         self.add_server = add_server
@@ -1362,40 +1315,41 b' class AppenlightClient(object):'

     def collect(self, data):
         if self.add_server:
-            data.setdefault('server', self.server)
+            data.setdefault("server", self.server)
         if self.add_timestamp:
-            data.setdefault('date', datetime.datetime.utcnow().isoformat())
+            data.setdefault("date", datetime.datetime.utcnow().isoformat())
         if self.namespace:
-            data.setdefault('namespace', self.namespace)
+            data.setdefault("namespace", self.namespace)
         if self.request:
-            data.setdefault('request', self.request)
+            data.setdefault("request", self.request)
         self.stats.append(data)

     def send_stats(self):
         tags = [
-            ('testrun', self.request),
-            ('testrun.start', self.testrun['start']),
-            ('testrun.timestamp', self.testrun['timestamp']),
-            ('test', self.namespace),
+            ("testrun", self.request),
+            ("testrun.start", self.testrun["start"]),
+            ("testrun.timestamp", self.testrun["timestamp"]),
+            ("test", self.namespace),
         ]
         for key, value in self.tags_before.items():
-            tags.append((key + '.before', value))
+            tags.append((key + ".before", value))
             try:
                 delta = self.tags_after[key] - value
-                tags.append((key + '.delta', delta))
+                tags.append((key + ".delta", delta))
             except Exception:
                 pass
         for key, value in self.tags_after.items():
-            tags.append((key + '.after', value))
-        self.collect({
-            'message': "Collected tags",
-            'tags': tags,
-        })
+            tags.append((key + ".after", value))
+        self.collect(
+            {
+                "message": "Collected tags",
+                "tags": tags,
+            }
+        )

         response = requests.post(
             self.url,
-            headers={
-                'X-appenlight-api-key': self.api_key},
+            headers={"X-appenlight-api-key": self.api_key},
             json=self.stats,
         )

@@ -1403,7 +1357,7 b' class AppenlightClient(object):'
         pprint.pprint(self.stats)
         print(response.headers)
         print(response.text)
-        raise Exception('Sending to appenlight failed')
+        raise Exception("Sending to appenlight failed")


 @pytest.fixture()
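
An illustrative use of the client above; the endpoint URL and API key are placeholders, not real values. Per the code shown, `collect()` fills in server, date, namespace, and request defaults, and `send_stats()` posts the accumulated stats as JSON with the `X-appenlight-api-key` header, raising on failure:

    client = AppenlightClient(
        url="https://appenlight.example.com/api/general",  # placeholder endpoint
        api_key="PLACEHOLDER-KEY",
        namespace="test_pull_requests",
    )
    client.collect({"message": "suite started"})  # defaults applied via setdefault
    client.send_stats()                           # one POST for all collected stats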
@@ -1454,9 +1408,8 b' class SettingsUtility(object):'
         self.repo_rhodecode_ui_ids = []
         self.repo_rhodecode_setting_ids = []

-    def create_repo_rhodecode_ui(
-            self, repo, section, value, key=None, active=True, cleanup=True):
-        key = key or sha1_safe(f'{section}{value}{repo.repo_id}')
+    def create_repo_rhodecode_ui(self, repo, section, value, key=None, active=True, cleanup=True):
+        key = key or sha1_safe(f"{section}{value}{repo.repo_id}")

         setting = RepoRhodeCodeUi()
         setting.repository_id = repo.repo_id
@@ -1471,9 +1424,8 b' class SettingsUtility(object):'
         self.repo_rhodecode_ui_ids.append(setting.ui_id)
         return setting

-    def create_rhodecode_ui(
-            self, section, value, key=None, active=True, cleanup=True):
-        key = key or sha1_safe(f'{section}{value}')
+    def create_rhodecode_ui(self, section, value, key=None, active=True, cleanup=True):
+        key = key or sha1_safe(f"{section}{value}")

         setting = RhodeCodeUi()
         setting.ui_section = section
@@ -1487,10 +1439,8 b' class SettingsUtility(object):'
         self.rhodecode_ui_ids.append(setting.ui_id)
         return setting

-    def create_repo_rhodecode_setting(
-            self, repo, name, value, type_, cleanup=True):
-        setting = RepoRhodeCodeSetting(
-            repo.repo_id, key=name, val=value, type=type_)
+    def create_repo_rhodecode_setting(self, repo, name, value, type_, cleanup=True):
+        setting = RepoRhodeCodeSetting(repo.repo_id, key=name, val=value, type=type_)
         Session().add(setting)
         Session().commit()

@@ -1530,13 +1480,12 b' class SettingsUtility(object):'

 @pytest.fixture()
 def no_notifications(request):
-    notification_patcher = mock.patch(
-        'rhodecode.model.notification.NotificationModel.create')
+    notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create")
     notification_patcher.start()
     request.addfinalizer(notification_patcher.stop)


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def repeat(request):
     """
     The number of repetitions is based on this fixture.
@@ -1544,7 +1493,7 b' def repeat(request):'
     Slower calls may divide it by 10 or 100. It is chosen in a way so that the
     tests are not too slow in our default test suite.
     """
-    return request.config.getoption('--repeat')
+    return request.config.getoption("--repeat")


 @pytest.fixture()
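
A sketch of how a test might consume the `no_notifications` fixture above; the test body and the `pr_util` fixture name are illustrative, not taken from the diff:

    def test_comment_does_not_notify(no_notifications, pr_util):
        # NotificationModel.create is patched out for the duration of this test;
        # request.addfinalizer(notification_patcher.stop) undoes it afterwards
        pr_util.create_pull_request(enable_notifications=False)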
@@ -1562,42 +1511,17 b' def context_stub():'


 @pytest.fixture()
-def request_stub():
-    """
-    Stub request object.
-    """
-    from rhodecode.lib.base import bootstrap_request
-    request = bootstrap_request(scheme='https')
-    return request
-
-
-@pytest.fixture()
-def config_stub(request, request_stub):
-    """
-    Set up pyramid.testing and return the Configurator.
-    """
-    from rhodecode.lib.base import bootstrap_config
-    config = bootstrap_config(request=request_stub)
-
-    @request.addfinalizer
-    def cleanup():
-        pyramid.testing.tearDown()
-
-    return config
-
-
-@pytest.fixture()
 def StubIntegrationType():
     class _StubIntegrationType(IntegrationTypeBase):
-        """ Test integration type class """
+        """Test integration type class"""

-        key = 'test'
-        display_name = 'Test integration type'
-        description = 'A test integration type for testing'
+        key = "test"
+        display_name = "Test integration type"
+        description = "A test integration type for testing"

         @classmethod
         def icon(cls):
-            return 'test_icon_html_image'
+            return "test_icon_html_image"

         def __init__(self, settings):
             super(_StubIntegrationType, self).__init__(settings)
@@ -1611,15 +1535,15 b' def StubIntegrationType():'
             test_string_field = colander.SchemaNode(
                 colander.String(),
                 missing=colander.required,
-                title='test string field',
+                title="test string field",
             )
             test_int_field = colander.SchemaNode(
                 colander.Int(),
-                title='some integer setting',
+                title="some integer setting",
             )
+
             return SettingsSchema()

-
     integration_type_registry.register_integration_type(_StubIntegrationType)
     return _StubIntegrationType

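
For readers unfamiliar with colander, a short sketch of how a schema like the `SettingsSchema` above validates a settings dict. This follows colander's documented `deserialize` behaviour; the input values mirror `stub_integration_settings` in the next hunk:

    import colander

    class SettingsSchema(colander.Schema):
        test_string_field = colander.SchemaNode(
            colander.String(),
            missing=colander.required,
            title="test string field",
        )
        test_int_field = colander.SchemaNode(
            colander.Int(),
            title="some integer setting",
        )

    schema = SettingsSchema()
    clean = schema.deserialize({"test_string_field": "some data", "test_int_field": "100"})
    assert clean == {"test_string_field": "some data", "test_int_field": 100}  # Int() coerces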
@@ -1627,18 +1551,22 b' def StubIntegrationType():'
 @pytest.fixture()
 def stub_integration_settings():
     return {
-        'test_string_field': 'some data',
-        'test_int_field': 100,
+        "test_string_field": "some data",
+        "test_int_field": 100,
     }


 @pytest.fixture()
-def repo_integration_stub(request, repo_stub, StubIntegrationType,
-                          stub_integration_settings):
+def repo_integration_stub(request, repo_stub, StubIntegrationType, stub_integration_settings):
     integration = IntegrationModel().create(
-        StubIntegrationType, settings=stub_integration_settings, enabled=True,
-        name='test repo integration',
-        repo=repo_stub, repo_group=None, child_repos_only=None)
+        StubIntegrationType,
+        settings=stub_integration_settings,
+        enabled=True,
+        name="test repo integration",
+        repo=repo_stub,
+        repo_group=None,
+        child_repos_only=None,
+    )

     @request.addfinalizer
     def cleanup():
@@ -1648,12 +1576,16 b' def repo_integration_stub(request, repo_'


 @pytest.fixture()
-def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
-                               stub_integration_settings):
+def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, stub_integration_settings):
     integration = IntegrationModel().create(
-        StubIntegrationType, settings=stub_integration_settings, enabled=True,
-        name='test repogroup integration',
-        repo=None, repo_group=test_repo_group, child_repos_only=True)
+        StubIntegrationType,
+        settings=stub_integration_settings,
+        enabled=True,
+        name="test repogroup integration",
+        repo=None,
+        repo_group=test_repo_group,
+        child_repos_only=True,
+    )

     @request.addfinalizer
     def cleanup():
@@ -1663,12 +1595,16 b' def repogroup_integration_stub(request, '


 @pytest.fixture()
-def repogroup_recursive_integration_stub(request, test_repo_group,
-                                         StubIntegrationType, stub_integration_settings):
+def repogroup_recursive_integration_stub(request, test_repo_group, StubIntegrationType, stub_integration_settings):
    integration = IntegrationModel().create(
-        StubIntegrationType, settings=stub_integration_settings, enabled=True,
-        name='test recursive repogroup integration',
-        repo=None, repo_group=test_repo_group, child_repos_only=False)
+        StubIntegrationType,
+        settings=stub_integration_settings,
+        enabled=True,
+        name="test recursive repogroup integration",
+        repo=None,
+        repo_group=test_repo_group,
+        child_repos_only=False,
+    )

     @request.addfinalizer
     def cleanup():
@@ -1678,12 +1614,16 b' def repogroup_recursive_integration_stub'


 @pytest.fixture()
-def global_integration_stub(request, StubIntegrationType,
-                            stub_integration_settings):
+def global_integration_stub(request, StubIntegrationType, stub_integration_settings):
     integration = IntegrationModel().create(
-        StubIntegrationType, settings=stub_integration_settings, enabled=True,
-        name='test global integration',
-        repo=None, repo_group=None, child_repos_only=None)
+        StubIntegrationType,
+        settings=stub_integration_settings,
+        enabled=True,
+        name="test global integration",
+        repo=None,
+        repo_group=None,
+        child_repos_only=None,
+    )

     @request.addfinalizer
     def cleanup():
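
For quick reference, the integration-stub fixtures in this file differ only in the scope arguments they pass to `IntegrationModel().create(...)`. The values below are exactly as given in these hunks (`root_repos_integration_stub` follows in the next hunk):

    fixture                                  repo       repo_group       child_repos_only
    repo_integration_stub                    repo_stub  None             None
    repogroup_integration_stub               None       test_repo_group  True
    repogroup_recursive_integration_stub     None       test_repo_group  False
    global_integration_stub                  None       None             None
    root_repos_integration_stub              None       None             True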
@@ -1693,12 +1633,16 b' def global_integration_stub(request, Stu'


 @pytest.fixture()
-def root_repos_integration_stub(request, StubIntegrationType,
-                                stub_integration_settings):
+def root_repos_integration_stub(request, StubIntegrationType, stub_integration_settings):
     integration = IntegrationModel().create(
-        StubIntegrationType, settings=stub_integration_settings, enabled=True,
-        name='test global integration',
-        repo=None, repo_group=None, child_repos_only=True)
+        StubIntegrationType,
+        settings=stub_integration_settings,
+        enabled=True,
+        name="test global integration",
+        repo=None,
+        repo_group=None,
+        child_repos_only=True,
+    )

     @request.addfinalizer
     def cleanup():
@@ -1710,8 +1654,8 b' def root_repos_integration_stub(request,'
 @pytest.fixture()
 def local_dt_to_utc():
     def _factory(dt):
-        return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
-            dateutil.tz.tzutc()).replace(tzinfo=None)
+        return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
+
     return _factory


@@ -1724,7 +1668,7 b' def disable_anonymous_user(request, base'
     set_anonymous_access(True)


-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
 def rc_fixture(request):
     return Fixture()


@@ -1734,9 +1678,9 b' def repo_groups(request):'
     fixture = Fixture()

     session = Session()
-    zombie_group = fixture.create_repo_group('zombie')
-    parent_group = fixture.create_repo_group('parent')
-    child_group = fixture.create_repo_group('parent/child')
+    zombie_group = fixture.create_repo_group("zombie")
+    parent_group = fixture.create_repo_group("parent")
+    child_group = fixture.create_repo_group("parent/child")
     groups_in_db = session.query(RepoGroup).all()
     assert len(groups_in_db) == 3
     assert child_group.group_parent_id == parent_group.group_id
@@ -1748,3 +1692,4 b' def repo_groups(request):'
     fixture.destroy_repo_group(parent_group)

     return zombie_group, parent_group, child_group
+
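
The `local_dt_to_utc` factory above converts a naive local datetime to a naive UTC datetime. A standalone sketch of the same conversion (requires python-dateutil, which this module already imports):

    import datetime
    import dateutil.tz

    def local_dt_to_utc(dt):
        # interpret naive dt as local time, convert to UTC, then drop tzinfo again
        return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)

    local = datetime.datetime(2024, 1, 1, 12, 0, 0)
    print(local_dt_to_utc(local))  # shifted by the local UTC offset, still naive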
@@ -1,4 +1,3 b''
-
 # Copyright (C) 2010-2023 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
@@ -37,17 +36,16 b' from rhodecode.model.user_group import U'
 from rhodecode.model.gist import GistModel
 from rhodecode.model.auth_token import AuthTokenModel
 from rhodecode.model.scm import ScmModel
-from rhodecode.authentication.plugins.auth_rhodecode import \
-    RhodeCodeAuthPlugin
+from rhodecode.authentication.plugins.auth_rhodecode import RhodeCodeAuthPlugin

 from rhodecode.tests import TEST_USER_ADMIN_LOGIN

 dn = os.path.dirname
-FIXTURES = os.path.join(dn(
+FIXTURES = os.path.join(dn(os.path.abspath(__file__)), "diff_fixtures")


 def error_function(*args, **kwargs):
-    raise Exception('Total Crash !')
+    raise Exception("Total Crash !")


 class TestINI(object):
@@ -59,8 +57,7 b' class TestINI(object):'
         print('paster server %s' % new_test_ini)
     """

-    def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT',
-                 destroy=True, dir=None):
+    def __init__(self, ini_file_path, ini_params, new_file_prefix="DEFAULT", destroy=True, dir=None):
         self.ini_file_path = ini_file_path
         self.ini_params = ini_params
         self.new_path = None
@@ -85,9 +82,8 b' class TestINI(object):'
                 parser[section][key] = str(val)

         with tempfile.NamedTemporaryFile(
-                mode='w',
-                prefix=self.new_path_prefix, suffix='.ini', dir=self._dir,
-                delete=False) as new_ini_file:
+            mode="w", prefix=self.new_path_prefix, suffix=".ini", dir=self._dir, delete=False
+        ) as new_ini_file:
             parser.write(new_ini_file)
             self.new_path = new_ini_file.name

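
Judging from the docstring fragment above (`print('paster server %s' % new_test_ini)`), TestINI is used as a context manager that writes an overridden copy of an ini file to a temp path. An assumed usage sketch; the file name and section values are placeholders, and the context-manager contract is inferred, not confirmed by this diff:

    # assumed usage, matching the docstring fragment above
    with TestINI("test.ini", [{"server:main": {"port": 8080}}]) as new_test_ini:
        print("paster server %s" % new_test_ini)  # temp .ini with overrides applied
    # destroy=True presumably removes the generated file on exit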
@@ -99,7 +95,6 b' class TestINI(object):'


 class Fixture(object):
-
     def anon_access(self, status):
         """
         Context process for disabling anonymous access. use like:
@@ -139,22 +134,19 b' class Fixture(object):'

         class context(object):
             def _get_plugin(self):
-                plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
+                plugin_id = "egg:rhodecode-enterprise-ce#{}".format(RhodeCodeAuthPlugin.uid)
                 plugin = RhodeCodeAuthPlugin(plugin_id)
                 return plugin

             def __enter__(self):
-
                 plugin = self._get_plugin()
-                plugin.create_or_update_setting('auth_restriction', auth_restriction)
+                plugin.create_or_update_setting("auth_restriction", auth_restriction)
                 Session().commit()
                 SettingsModel().invalidate_settings_cache(hard=True)

             def __exit__(self, exc_type, exc_val, exc_tb):
-
                 plugin = self._get_plugin()
-                plugin.create_or_update_setting(
-                    'auth_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE)
+                plugin.create_or_update_setting("auth_restriction", RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE)
                 Session().commit()
                 SettingsModel().invalidate_settings_cache(hard=True)

@@ -173,62 +165,61 b' class Fixture(object):'

         class context(object):
             def _get_plugin(self):
-                plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
+                plugin_id = "egg:rhodecode-enterprise-ce#{}".format(RhodeCodeAuthPlugin.uid)
                 plugin = RhodeCodeAuthPlugin(plugin_id)
                 return plugin

             def __enter__(self):
                 plugin = self._get_plugin()
-                plugin.create_or_update_setting('scope_restriction', scope_restriction)
+                plugin.create_or_update_setting("scope_restriction", scope_restriction)
                 Session().commit()
                 SettingsModel().invalidate_settings_cache(hard=True)

             def __exit__(self, exc_type, exc_val, exc_tb):
                 plugin = self._get_plugin()
-                plugin.create_or_update_setting(
-                    'scope_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL)
+                plugin.create_or_update_setting("scope_restriction", RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL)
                 Session().commit()
                 SettingsModel().invalidate_settings_cache(hard=True)

         return context()

     def _get_repo_create_params(self, **custom):
-        repo_type = custom.get('repo_type') or 'hg'
+        repo_type = custom.get("repo_type") or "hg"

         default_landing_ref, landing_ref_lbl = ScmModel.backend_landing_ref(repo_type)

         defs = {
-            'repo_name': None,
-            'repo_type': repo_type,
-            'clone_uri': '',
-            'push_uri': '',
-            'repo_group': '-1',
-            'repo_description': 'DESC',
-            'repo_private': False,
-            'repo_landing_commit_ref': default_landing_ref,
-            'repo_copy_permissions': False,
-            'repo_state': Repository.STATE_CREATED,
+            "repo_name": None,
+            "repo_type": repo_type,
+            "clone_uri": "",
+            "push_uri": "",
+            "repo_group": "-1",
+            "repo_description": "DESC",
+            "repo_private": False,
+            "repo_landing_commit_ref": default_landing_ref,
+            "repo_copy_permissions": False,
+            "repo_state": Repository.STATE_CREATED,
         }
         defs.update(custom)
-        if 'repo_name_full' not in custom:
-            defs.update({'repo_name_full': defs['repo_name']})
+        if "repo_name_full" not in custom:
+            defs.update({"repo_name_full": defs["repo_name"]})

         # fix the repo name if passed as repo_name_full
-        if defs['repo_name']:
-            defs['repo_name'] = defs['repo_name'].split('/')[-1]
+        if defs["repo_name"]:
+            defs["repo_name"] = defs["repo_name"].split("/")[-1]

         return defs

     def _get_group_create_params(self, **custom):
         defs = {
-            'group_name': None,
-            'group_description': 'DESC',
-            'perm_updates': [],
-            'perm_additions': [],
-            'perm_deletions': [],
-            'group_parent_id': -1,
-            'enable_locking': False,
-            'recursive': False,
+            "group_name": None,
+            "group_description": "DESC",
+            "perm_updates": [],
+            "perm_additions": [],
+            "perm_deletions": [],
+            "group_parent_id": -1,
+            "enable_locking": False,
+            "recursive": False,
         }
         defs.update(custom)

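
All of the `_get_*_create_params` helpers above follow the same defaults-then-override shape. A minimal standalone sketch of that pattern, with a trimmed set of keys:

    def get_create_params(**custom):
        defs = {
            "repo_name": None,
            "repo_type": "hg",
            "repo_private": False,
        }
        defs.update(custom)  # caller-supplied keys win over defaults
        if "repo_name_full" not in custom:
            defs["repo_name_full"] = defs["repo_name"]
        return defs

    params = get_create_params(repo_name="group/repo", repo_private=True)
    assert params["repo_name_full"] == "group/repo"
    assert params["repo_private"] is True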
@@ -236,16 +227,16 b' class Fixture(object):'

     def _get_user_create_params(self, name, **custom):
         defs = {
-            'username': name,
-            'password': 'qweqwe',
-            'email': '%s+test@rhodecode.org' % name,
-            'firstname': 'TestUser',
-            'lastname': 'Test',
-            'description': 'test description',
-            'active': True,
-            'admin': False,
-            'extern_type': 'rhodecode',
-            'extern_name': None,
+            "username": name,
+            "password": "qweqwe",
+            "email": "%s+test@rhodecode.org" % name,
+            "firstname": "TestUser",
+            "lastname": "Test",
+            "description": "test description",
+            "active": True,
+            "admin": False,
+            "extern_type": "rhodecode",
+            "extern_name": None,
         }
         defs.update(custom)

@@ -253,30 +244,30 b' class Fixture(object):'

     def _get_user_group_create_params(self, name, **custom):
         defs = {
-            'users_group_name': name,
-            'user_group_description': 'DESC',
-            'users_group_active': True,
-            'user_group_data': {},
+            "users_group_name": name,
+            "user_group_description": "DESC",
+            "users_group_active": True,
+            "user_group_data": {},
         }
         defs.update(custom)

         return defs

     def create_repo(self, name, **kwargs):
-        repo_group = kwargs.get('repo_group')
+        repo_group = kwargs.get("repo_group")
         if isinstance(repo_group, RepoGroup):
-            kwargs['repo_group'] = repo_group.group_id
+            kwargs["repo_group"] = repo_group.group_id
             name = name.split(Repository.NAME_SEP)[-1]
             name = Repository.NAME_SEP.join((repo_group.group_name, name))

-        if 'skip_if_exists' in kwargs:
-            del kwargs['skip_if_exists']
+        if "skip_if_exists" in kwargs:
+            del kwargs["skip_if_exists"]
             r = Repository.get_by_repo_name(name)
             if r:
                 return r

         form_data = self._get_repo_create_params(repo_name=name, **kwargs)
-        cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
+        cur_user = kwargs.get("cur_user", TEST_USER_ADMIN_LOGIN)
         RepoModel().create(form_data, cur_user)
         Session().commit()
         repo = Repository.get_by_repo_name(name)
@@ -287,17 +278,15 b' class Fixture(object):'
         repo_to_fork = Repository.get_by_repo_name(repo_to_fork)

         form_data = self._get_repo_create_params(
-            repo_name=fork_name,
-            fork_parent_id=repo_to_fork.repo_id,
-            repo_type=repo_to_fork.repo_type,
-            **kwargs)
+            repo_name=fork_name, fork_parent_id=repo_to_fork.repo_id, repo_type=repo_to_fork.repo_type, **kwargs
+        )

         # TODO: fix it !!
-        form_data['description'] = form_data['repo_description']
-        form_data['private'] = form_data['repo_private']
-        form_data['landing_rev'] = form_data['repo_landing_commit_ref']
+        form_data["description"] = form_data["repo_description"]
+        form_data["private"] = form_data["repo_private"]
+        form_data["landing_rev"] = form_data["repo_landing_commit_ref"]

-        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
+        owner = kwargs.get("cur_user", TEST_USER_ADMIN_LOGIN)
         RepoModel().create_fork(form_data, cur_user=owner)
         Session().commit()
         r = Repository.get_by_repo_name(fork_name)
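
An illustrative call of the fork helper above; the repository names are placeholders, and only the first argument's use (looked up via `Repository.get_by_repo_name`) is confirmed by this hunk:

    fixture = Fixture()
    fork = fixture.create_fork("vcs_test_hg", "my_fork")  # returns the new Repository
    fixture.destroy_repo("my_fork")  # per the next hunk, PRs and artifacts go too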
@@ -305,7 +294,7 b' class Fixture(object):'
         return r

     def destroy_repo(self, repo_name, **kwargs):
-        RepoModel().delete(repo_name, pull_requests='delete', artifacts='delete', **kwargs)
+        RepoModel().delete(repo_name, pull_requests="delete", artifacts="delete", **kwargs)
         Session().commit()

     def destroy_repo_on_filesystem(self, repo_name):
@@ -314,17 +303,16 b' class Fixture(object):'
         shutil.rmtree(rm_path)

     def create_repo_group(self, name, **kwargs):
-        if 'skip_if_exists' in kwargs:
-            del kwargs['skip_if_exists']
+        if "skip_if_exists" in kwargs:
+            del kwargs["skip_if_exists"]
             gr = RepoGroup.get_by_group_name(group_name=name)
             if gr:
                 return gr
         form_data = self._get_group_create_params(group_name=name, **kwargs)
-        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
+        owner = kwargs.get("cur_user", TEST_USER_ADMIN_LOGIN)
         gr = RepoGroupModel().create(
-            group_name=form_data['group_name'],
-            group_description=form_data['group_name'],
-            owner=owner)
+            group_name=form_data["group_name"], group_description=form_data["group_name"], owner=owner
+        )
         Session().commit()
         gr = RepoGroup.get_by_group_name(gr.group_name)
         return gr
@@ -334,8 +322,8 b' class Fixture(object):'
         Session().commit()

     def create_user(self, name, **kwargs):
-        if 'skip_if_exists' in kwargs:
-            del kwargs['skip_if_exists']
+        if "skip_if_exists" in kwargs:
+            del kwargs["skip_if_exists"]
             user = User.get_by_username(name)
             if user:
                 return user
@@ -343,8 +331,7 b' class Fixture(object):'
         user = UserModel().create(form_data)

         # create token for user
-        AuthTokenModel().create(
-            user=user, description=u'TEST_USER_TOKEN')
+        AuthTokenModel().create(user=user, description="TEST_USER_TOKEN")

         Session().commit()
         user = User.get_by_username(user.username)
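
`skip_if_exists` makes the create helpers above idempotent: when the flag is passed and the row already exists, the existing object is returned instead of a duplicate being created. A sketch of the calling pattern:

    user = fixture.create_user("test_regular", skip_if_exists=True)
    same = fixture.create_user("test_regular", skip_if_exists=True)
    assert user.user_id == same.user_id  # second call returns the existing row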
@@ -368,22 +355,24 b' class Fixture(object):' | |||||
368 | Session().commit() |
|
355 | Session().commit() | |
369 |
|
356 | |||
370 | def create_user_group(self, name, **kwargs): |
|
357 | def create_user_group(self, name, **kwargs): | |
371 |
if |
|
358 | if "skip_if_exists" in kwargs: | |
372 |
del kwargs[ |
|
359 | del kwargs["skip_if_exists"] | |
373 | gr = UserGroup.get_by_group_name(group_name=name) |
|
360 | gr = UserGroup.get_by_group_name(group_name=name) | |
374 | if gr: |
|
361 | if gr: | |
375 | return gr |
|
362 | return gr | |
376 | # map active flag to the real attribute. For API consistency of fixtures |
|
363 | # map active flag to the real attribute. For API consistency of fixtures | |
377 |
if |
|
364 | if "active" in kwargs: | |
378 |
kwargs[ |
|
365 | kwargs["users_group_active"] = kwargs["active"] | |
379 |
del kwargs[ |
|
366 | del kwargs["active"] | |
380 | form_data = self._get_user_group_create_params(name, **kwargs) |
|
367 | form_data = self._get_user_group_create_params(name, **kwargs) | |
381 |
owner = kwargs.get( |
|
368 | owner = kwargs.get("cur_user", TEST_USER_ADMIN_LOGIN) | |
382 | user_group = UserGroupModel().create( |
|
369 | user_group = UserGroupModel().create( | |
383 |
name=form_data[ |
|
370 | name=form_data["users_group_name"], | |
384 |
description=form_data[ |
|
371 | description=form_data["user_group_description"], | |
385 | owner=owner, active=form_data['users_group_active'], |
|
372 | owner=owner, | |
386 |
|
|
373 | active=form_data["users_group_active"], | |
|
374 | group_data=form_data["user_group_data"], | |||
|
375 | ) | |||
387 | Session().commit() |
|
376 | Session().commit() | |
388 | user_group = UserGroup.get_by_group_name(user_group.users_group_name) |
|
377 | user_group = UserGroup.get_by_group_name(user_group.users_group_name) | |
389 | return user_group |
|
378 | return user_group | |
@@ -394,18 +383,23 b' class Fixture(object):' | |||||
394 |
|
383 | |||
395 | def create_gist(self, **kwargs): |
|
384 | def create_gist(self, **kwargs): | |
396 | form_data = { |
|
385 | form_data = { | |
397 |
|
|
386 | "description": "new-gist", | |
398 |
|
|
387 | "owner": TEST_USER_ADMIN_LOGIN, | |
399 |
|
|
388 | "gist_type": GistModel.cls.GIST_PUBLIC, | |
400 |
|
|
389 | "lifetime": -1, | |
401 |
|
|
390 | "acl_level": Gist.ACL_LEVEL_PUBLIC, | |
402 | 'gist_mapping': {b'filename1.txt': {'content': b'hello world'},} |
|
391 | "gist_mapping": { | |
|
392 | b"filename1.txt": {"content": b"hello world"}, | |||
|
393 | }, | |||
403 | } |
|
394 | } | |
404 | form_data.update(kwargs) |
|
395 | form_data.update(kwargs) | |
405 | gist = GistModel().create( |
|
396 | gist = GistModel().create( | |
406 |
description=form_data[ |
|
397 | description=form_data["description"], | |
407 | gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'], |
|
398 | owner=form_data["owner"], | |
408 | lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level'] |
|
399 | gist_mapping=form_data["gist_mapping"], | |
|
400 | gist_type=form_data["gist_type"], | |||
|
401 | lifetime=form_data["lifetime"], | |||
|
402 | gist_acl_level=form_data["acl_level"], | |||
409 | ) |
|
403 | ) | |
410 | Session().commit() |
|
404 | Session().commit() | |
411 | return gist |
|
405 | return gist | |
@@ -420,7 +414,7 b' class Fixture(object):' | |||||
420 | Session().commit() |
|
414 | Session().commit() | |
421 |
|
415 | |||
422 | def load_resource(self, resource_name, strip=False): |
|
416 | def load_resource(self, resource_name, strip=False): | |
423 |
with open(os.path.join(FIXTURES, resource_name), |
|
417 | with open(os.path.join(FIXTURES, resource_name), "rb") as f: | |
424 | source = f.read() |
|
418 | source = f.read() | |
425 | if strip: |
|
419 | if strip: | |
426 | source = source.strip() |
|
420 | source = source.strip() |
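
A quick usage sketch of the reworked fixture API (hypothetical test snippet; only Fixture, create_user_group() and the create_gist() defaults come from the hunks above, the argument values are made up):

    from rhodecode.tests.fixtures.rc_fixture import Fixture

    fixture = Fixture()

    # 'active' is remapped onto 'users_group_active' inside create_user_group()
    group = fixture.create_user_group('demo-group', active=True)

    # kwargs override the "new-gist" defaults assembled in create_gist()
    gist = fixture.create_gist(description='demo-gist', lifetime=60)
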
@@ -21,7 +21,7 b''
 import pytest

 from rhodecode.tests import TestController
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests.routes import route_path


@@ -20,7 +20,7 b' import time'
 import pytest

 from rhodecode import events
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.model.db import Session, Integration
 from rhodecode.model.integration import IntegrationModel

@@ -123,10 +123,14 b' def test_get_config(user_util, baseapp, '
         ('web', 'allow_push', '*'),
         ('web', 'allow_archive', 'gz zip bz2'),
         ('web', 'baseurl', '/'),
+
+        # largefiles data...
         ('vcs_git_lfs', 'store_location', hg_config_org.get('vcs_git_lfs', 'store_location')),
+        ('largefiles', 'usercache', hg_config_org.get('largefiles', 'usercache')),
+
         ('vcs_svn_branch', '9aac1a38c3b8a0cdc4ae0f960a5f83332bc4fa5e', '/branches/*'),
         ('vcs_svn_branch', 'c7e6a611c87da06529fd0dd733308481d67c71a8', '/trunk'),
-        ('largefiles', 'usercache', hg_config_org.get('largefiles', 'usercache')),
+
         ('hooks', 'preoutgoing.pre_pull', 'python:vcsserver.hooks.pre_pull'),
         ('hooks', 'prechangegroup.pre_push', 'python:vcsserver.hooks.pre_push'),
         ('hooks', 'outgoing.pull_logger', 'python:vcsserver.hooks.log_pull_action'),
@@ -22,7 +22,8 b' import pytest'

 from rhodecode.lib.str_utils import base64_to_str
 from rhodecode.lib.utils2 import AttributeDict
-from rhodecode.tests.utils import CustomTestApp
+from rhodecode.tests.fixtures.fixture_pyramid import ini_config
+from rhodecode.tests.utils import CustomTestApp, AuthPluginManager

 from rhodecode.lib.caching_query import FromCache
 from rhodecode.lib.middleware import simplevcs
@@ -34,6 +35,57 b' from rhodecode.tests import ('
     HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
 from rhodecode.tests.lib.middleware import mock_scm_app

+from rhodecode.model.db import Permission, User
+from rhodecode.model.meta import Session
+from rhodecode.model.user import UserModel
+
+
+@pytest.fixture()
+def enable_auth_plugins(request, app):
+    """
+    Return a factory object that when called, allows to control which
+    authentication plugins are enabled.
+    """
+
+    enabler = AuthPluginManager()
+    request.addfinalizer(enabler.cleanup)
+
+    return enabler
+
+
+@pytest.fixture()
+def test_user_factory(request, baseapp):
+
+    def user_factory(username='test_user', password='qweqwe', first_name='John', last_name='Testing', **kwargs):
+        usr = UserModel().create_or_update(
+            username=username,
+            password=password,
+            email=f'{username}@rhodecode.org',
+            firstname=first_name, lastname=last_name)
+        Session().commit()
+
+        for k, v in kwargs.items():
+            setattr(usr, k, v)
+        Session().add(usr)
+
+        new_usr = User.get_by_username(username)
+        new_usr_id = new_usr.user_id
+        assert new_usr == usr
+
+        @request.addfinalizer
+        def cleanup():
+            if User.get(new_usr_id) is None:
+                return
+
+            perm = Permission.query().all()
+            for p in perm:
+                UserModel().revoke_perm(usr, p)
+
+            UserModel().delete(new_usr_id)
+            Session().commit()
+        return usr
+
+    return user_factory

 class StubVCSController(simplevcs.SimpleVCS):

@@ -107,8 +159,7 b' def _remove_default_user_from_query_cach'
     Session().expire(user)


-def test_handles_exceptions_during_permissions_checks(
-        vcscontroller, disable_anonymous_user, enable_auth_plugins, test_user_factory):
+def test_handles_exceptions_during_permissions_checks(vcscontroller, disable_anonymous_user, enable_auth_plugins, test_user_factory):

    test_password = 'qweqwe'
    test_user = test_user_factory(password=test_password, extern_type='headers', extern_name='headers')
@@ -373,29 +424,30 b' class TestShadowRepoExposure(object):'
             controller.vcs_repo_name)


-@pytest.mark.usefixtures('baseapp')
 class TestGenerateVcsResponse(object):

-    def test_ensures_that_start_response_is_called_early_enough(self):
-        self.call_controller_with_response_body(iter(['a', 'b']))
+    def test_ensures_that_start_response_is_called_early_enough(self, baseapp):
+        app_ini_config = baseapp.config.registry.settings['__file__']
+        self.call_controller_with_response_body(app_ini_config, iter(['a', 'b']))
         assert self.start_response.called

-    def test_invalidates_cache_after_body_is_consumed(self):
-        result = self.call_controller_with_response_body(iter(['a', 'b']))
+    def test_invalidates_cache_after_body_is_consumed(self, baseapp):
+        app_ini_config = baseapp.config.registry.settings['__file__']
+        result = self.call_controller_with_response_body(app_ini_config, iter(['a', 'b']))
         assert not self.was_cache_invalidated()
         # Consume the result
         list(result)
         assert self.was_cache_invalidated()

-    def test_raises_unknown_exceptions(self):
-        result = self.call_controller_with_response_body(
-            self.raise_result_iter(vcs_kind='unknown'))
+    def test_raises_unknown_exceptions(self, baseapp):
+        app_ini_config = baseapp.config.registry.settings['__file__']
+        result = self.call_controller_with_response_body(app_ini_config, self.raise_result_iter(vcs_kind='unknown'))
         with pytest.raises(Exception):
             list(result)

-    def call_controller_with_response_body(self, response_body):
+    def call_controller_with_response_body(self, ini_config, response_body):
+
         settings = {
-            'base_path': 'fake_base_path',
             'vcs.hooks.protocol.v2': 'celery',
             'vcs.hooks.direct_calls': False,
         }
@@ -407,7 +459,7 b' class TestGenerateVcsResponse(object):'
         result = controller._generate_vcs_response(
             environ={}, start_response=self.start_response,
             repo_path='fake_repo_path',
-            extras={}, action='push')
+            extras={'config': ini_config}, action='push')
         self.controller = controller
         return result

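
For orientation, a sketch of how the new test_user_factory fixture is meant to be consumed (hypothetical test body; the factory signature and the extern_type/extern_name kwargs are the ones added in this diff):

    def test_header_auth_user(test_user_factory):
        # extra keyword arguments are set as attributes on the created user
        user = test_user_factory(username='demo_user', password='qweqwe',
                                 extern_type='headers', extern_name='headers')
        assert user.username == 'demo_user'
        # the finalizer registered inside the factory revokes permissions and
        # deletes the user once the test finishes
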
@@ -19,6 +19,7 b''
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import pytest
+import tempfile

 from rhodecode.tests.utils import CustomTestApp
 from rhodecode.lib.middleware.utils import scm_app_http, scm_app
@@ -41,10 +42,13 b' def vcsserver_http_echo_app(request, vcs'
     """
     A running VCSServer with the EchoApp activated via HTTP.
     """
-    vcsserver = vcsserver_factory(
+    store_dir = tempfile.gettempdir()
+
+    vcsserver_instance = vcsserver_factory(
         request=request,
+        store_dir=store_dir,
         overrides=[{'app:main': {'dev.use_echo_app': 'true'}}])
-    return vcsserver
+    return vcsserver_instance


 @pytest.fixture(scope='session')
@@ -1,5 +1,4 b''
-# -*- coding: utf-8 -*-
-# Copyright (C) 2010-2023 RhodeCode GmbH
+# Copyright (C) 2010-2024 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
@@ -30,7 +30,7 b' from rhodecode.lib.diffs import ('

 from rhodecode.lib.utils2 import AttributeDict
 from rhodecode.lib.vcs.backends.git import GitCommit
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.tests import no_newline_id_generator
 from rhodecode.lib.vcs.backends.git.repository import GitDiff
 from rhodecode.lib.vcs.backends.hg.repository import MercurialDiff
@@ -1,5 +1,4 b''
-# -*- coding: utf-8 -*-
-# Copyright (C) 2010-2023 RhodeCode GmbH
+# Copyright (C) 2010-2024 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
@@ -18,305 +17,71 b''
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import logging
-import io

 import mock
-import msgpack
 import pytest
 import tempfile

-from rhodecode.lib.hook_daemon import http_hooks_deamon
 from rhodecode.lib.hook_daemon import celery_hooks_deamon
-from rhodecode.lib.hook_daemon import hook_module
+from rhodecode.lib.hook_daemon import utils as hooks_utils
 from rhodecode.lib.hook_daemon import base as hook_base
-from rhodecode.lib.str_utils import safe_bytes
+
 from rhodecode.tests.utils import assert_message_in_log
-from rhodecode.lib.ext_json import json
-
-test_proto = http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO


 class TestHooks(object):
     def test_hooks_can_be_used_as_a_context_processor(self):
-        hooks = hook_module.Hooks()
+        hooks = hook_base.Hooks()
         with hooks as return_value:
             pass
         assert hooks == return_value

-
-class TestHooksHttpHandler(object):
-    def test_read_request_parses_method_name_and_arguments(self):
-        data = {
-            'method': 'test',
-            'extras': {
-                'param1': 1,
-                'param2': 'a'
-            }
-        }
-        request = self._generate_post_request(data)
-        hooks_patcher = mock.patch.object(
-            hook_module.Hooks, data['method'], create=True, return_value=1)
-
-        with hooks_patcher as hooks_mock:
-            handler = http_hooks_deamon.HooksHttpHandler
-            handler.DEFAULT_HOOKS_PROTO = test_proto
-            handler.wbufsize = 10240
-            MockServer(handler, request)
-
-        hooks_mock.assert_called_once_with(data['extras'])
-
-    def test_hooks_serialized_result_is_returned(self):
-        request = self._generate_post_request({})
-        rpc_method = 'test'
-        hook_result = {
-            'first': 'one',
-            'second': 2
-        }
-        extras = {}
-
-        # patching our _read to return test method and proto used
-        read_patcher = mock.patch.object(
-            http_hooks_deamon.HooksHttpHandler, '_read_request',
-            return_value=(test_proto, rpc_method, extras))
-
-        # patch Hooks instance to return hook_result data on 'test' call
-        hooks_patcher = mock.patch.object(
-            hook_module.Hooks, rpc_method, create=True,
-            return_value=hook_result)
-
-        with read_patcher, hooks_patcher:
-            handler = http_hooks_deamon.HooksHttpHandler
-            handler.DEFAULT_HOOKS_PROTO = test_proto
-            handler.wbufsize = 10240
-            server = MockServer(handler, request)
-
-        expected_result = http_hooks_deamon.HooksHttpHandler.serialize_data(hook_result)
-
-        server.request.output_stream.seek(0)
-        assert server.request.output_stream.readlines()[-1] == expected_result
-
-    def test_exception_is_returned_in_response(self):
-        request = self._generate_post_request({})
-        rpc_method = 'test'
-
-        read_patcher = mock.patch.object(
-            http_hooks_deamon.HooksHttpHandler, '_read_request',
-            return_value=(test_proto, rpc_method, {}))
-
-        hooks_patcher = mock.patch.object(
-            hook_module.Hooks, rpc_method, create=True,
-            side_effect=Exception('Test exception'))
-
-        with read_patcher, hooks_patcher:
-            handler = http_hooks_deamon.HooksHttpHandler
-            handler.DEFAULT_HOOKS_PROTO = test_proto
-            handler.wbufsize = 10240
-            server = MockServer(handler, request)
-
-        server.request.output_stream.seek(0)
-        data = server.request.output_stream.readlines()
-        msgpack_data = b''.join(data[5:])
-        org_exc = http_hooks_deamon.HooksHttpHandler.deserialize_data(msgpack_data)
-        expected_result = {
-            'exception': 'Exception',
-            'exception_traceback': org_exc['exception_traceback'],
-            'exception_args': ['Test exception']
-        }
-        assert org_exc == expected_result
-
-    def test_log_message_writes_to_debug_log(self, caplog):
-        ip_port = ('0.0.0.0', 8888)
-        handler = http_hooks_deamon.HooksHttpHandler(MockRequest('POST /'), ip_port, mock.Mock())
-        fake_date = '1/Nov/2015 00:00:00'
-        date_patcher = mock.patch.object(
-            handler, 'log_date_time_string', return_value=fake_date)
-
-        with date_patcher, caplog.at_level(logging.DEBUG):
-            handler.log_message('Some message %d, %s', 123, 'string')
-
-        expected_message = f"HOOKS: client={ip_port} - - [{fake_date}] Some message 123, string"
-
-        assert_message_in_log(
-            caplog.records, expected_message,
-            levelno=logging.DEBUG, module='http_hooks_deamon')
-
-    def _generate_post_request(self, data, proto=test_proto):
-        if proto == http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO:
-            payload = msgpack.packb(data)
-        else:
-            payload = json.dumps(data)
-
-        return b'POST / HTTP/1.0\nContent-Length: %d\n\n%b' % (
-            len(payload), payload)
-
-
-class ThreadedHookCallbackDaemon(object):
-    def test_constructor_calls_prepare(self):
-        prepare_daemon_patcher = mock.patch.object(
-            http_hooks_deamon.ThreadedHookCallbackDaemon, '_prepare')
-        with prepare_daemon_patcher as prepare_daemon_mock:
-            http_hooks_deamon.ThreadedHookCallbackDaemon()
-        prepare_daemon_mock.assert_called_once_with()
-
-    def test_run_is_called_on_context_start(self):
-        patchers = mock.patch.multiple(
-            http_hooks_deamon.ThreadedHookCallbackDaemon,
-            _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
-
-        with patchers as mocks:
-            daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
-            with daemon as daemon_context:
-                pass
-        mocks['_run'].assert_called_once_with()
-        assert daemon_context == daemon
-
-    def test_stop_is_called_on_context_exit(self):
-        patchers = mock.patch.multiple(
-            http_hooks_deamon.ThreadedHookCallbackDaemon,
-            _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
-
-        with patchers as mocks:
-            daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
-            with daemon as daemon_context:
-                assert mocks['_stop'].call_count == 0
-
-        mocks['_stop'].assert_called_once_with()
-        assert daemon_context == daemon
-
-
-class TestHttpHooksCallbackDaemon(object):
-    def test_hooks_callback_generates_new_port(self, caplog):
-        with caplog.at_level(logging.DEBUG):
-            daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
-            assert daemon._daemon.server_address == ('127.0.0.1', 8881)
-
-        with caplog.at_level(logging.DEBUG):
-            daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host=None, port=None)
-            assert daemon._daemon.server_address[1] in range(0, 66000)
-            assert daemon._daemon.server_address[0] != '127.0.0.1'
-
-    def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
-        with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
-            daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
-            assert daemon._daemon == tcp_server
-
-        _, port = tcp_server.server_address
-
-        msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
-              f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
-        assert_message_in_log(
-            caplog.records, msg, levelno=logging.DEBUG, module='http_hooks_deamon')
-
-    def test_prepare_inits_hooks_uri_and_logs_it(
-            self, tcp_server, caplog):
-        with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
-            daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
-
-        _, port = tcp_server.server_address
-        expected_uri = '{}:{}'.format('127.0.0.1', port)
-        assert daemon.hooks_uri == expected_uri
-
-        msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
-              f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
-
-        assert_message_in_log(
-            caplog.records, msg,
-            levelno=logging.DEBUG, module='http_hooks_deamon')
-
-    def test_run_creates_a_thread(self, tcp_server):
-        thread = mock.Mock()
-
-        with self._tcp_patcher(tcp_server):
-            daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
-
-        with self._thread_patcher(thread) as thread_mock:
-            daemon._run()
-
-        thread_mock.assert_called_once_with(
-            target=tcp_server.serve_forever,
-            kwargs={'poll_interval': daemon.POLL_INTERVAL})
-        assert thread.daemon is True
-        thread.start.assert_called_once_with()
-
-    def test_run_logs(self, tcp_server, caplog):
-
-        with self._tcp_patcher(tcp_server):
-            daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
-
-        with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
-            daemon._run()
-
-        assert_message_in_log(
-            caplog.records,
-            'Running thread-based loop of callback daemon in background',
-            levelno=logging.DEBUG, module='http_hooks_deamon')
-
-    def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
-        thread = mock.Mock()
-
-        with self._tcp_patcher(tcp_server):
-            daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
-
-        with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
-            with daemon:
-                assert daemon._daemon == tcp_server
-                assert daemon._callback_thread == thread
-
-        assert daemon._daemon is None
-        assert daemon._callback_thread is None
-        tcp_server.shutdown.assert_called_with()
-        thread.join.assert_called_once_with()
-
-        assert_message_in_log(
-            caplog.records, 'Waiting for background thread to finish.',
-            levelno=logging.DEBUG, module='http_hooks_deamon')
-
-    def _tcp_patcher(self, tcp_server):
-        return mock.patch.object(
-            http_hooks_deamon, 'TCPServer', return_value=tcp_server)
-
-    def _thread_patcher(self, thread):
-        return mock.patch.object(
-            http_hooks_deamon.threading, 'Thread', return_value=thread)
-
-
 class TestPrepareHooksDaemon(object):

     @pytest.mark.parametrize('protocol', ('celery',))
-    def test_returns_celery_hooks_callback_daemon_when_celery_protocol_specified(
-            self, protocol):
+    def test_returns_celery_hooks_callback_daemon_when_celery_protocol_specified(self, protocol):
         with tempfile.NamedTemporaryFile(mode='w') as temp_file:
-            temp_file.write("[app:main]\ncelery.broker_url = redis://redis/0\n"
-                            "celery.result_backend = redis://redis/0")
+            temp_file.write(
+                "[app:main]\n"
+                "celery.broker_url = redis://redis/0\n"
+                "celery.result_backend = redis://redis/0\n"
+            )
             temp_file.flush()
             expected_extras = {'config': temp_file.name}
-            callback, extras = hook_base.prepare_callback_daemon(
-                expected_extras, protocol=protocol, host='')
+            callback, extras = hooks_utils.prepare_callback_daemon(expected_extras, protocol=protocol)
             assert isinstance(callback, celery_hooks_deamon.CeleryHooksCallbackDaemon)

     @pytest.mark.parametrize('protocol, expected_class', (
-        ('http', http_hooks_deamon.HttpHooksCallbackDaemon),
+        ('celery', celery_hooks_deamon.CeleryHooksCallbackDaemon),
     ))
-    def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
-            self, protocol, expected_class):
-        expected_extras = {
-            'extra1': 'value1',
-            'txn_id': 'txnid2',
-            'hooks_protocol': protocol.lower(),
-            'task_backend': '',
-            'task_queue': '',
-            'repo_store': '/var/opt/rhodecode_repo_store',
-            'repository': 'rhodecode',
-        }
-        from rhodecode import CONFIG
-        CONFIG['vcs.svn.redis_conn'] = 'redis://redis:6379/0'
-        callback, extras = hook_base.prepare_callback_daemon(
-            expected_extras.copy(), protocol=protocol, host='127.0.0.1',
-            txn_id='txnid2')
-        assert isinstance(callback, expected_class)
-        extras.pop('hooks_uri')
-        expected_extras['time'] = extras['time']
-        assert extras == expected_extras
+    def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(self, protocol, expected_class):
+
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
+            temp_file.write(
+                "[app:main]\n"
+                "celery.broker_url = redis://redis:6379/0\n"
+                "celery.result_backend = redis://redis:6379/0\n"
+            )
+            temp_file.flush()
+
+            expected_extras = {
+                'extra1': 'value1',
+                'txn_id': 'txnid2',
+                'hooks_protocol': protocol.lower(),
+                'hooks_config': {
+                    'broker_url': 'redis://redis:6379/0',
+                    'result_backend': 'redis://redis:6379/0',
+                },
+                'repo_store': '/var/opt/rhodecode_repo_store',
+                'repository': 'rhodecode',
+                'config': temp_file.name
+            }
+            from rhodecode import CONFIG
+            CONFIG['vcs.svn.redis_conn'] = 'redis://redis:6379/0'
+            callback, extras = hooks_utils.prepare_callback_daemon(expected_extras.copy(), protocol=protocol, txn_id='txnid2')
+            assert isinstance(callback, expected_class)
+            expected_extras['time'] = extras['time']
+            assert extras == expected_extras

     @pytest.mark.parametrize('protocol', (
         'invalid',
@@ -330,35 +95,4 b' class TestPrepareHooksDaemon(object):'
             'hooks_protocol': protocol.lower()
         }
         with pytest.raises(Exception):
-            callback, extras = hook_base.prepare_callback_daemon(
-                expected_extras.copy(),
-                protocol=protocol, host='127.0.0.1')
+            callback, extras = hooks_utils.prepare_callback_daemon(expected_extras.copy(), protocol=protocol)
-
-
-class MockRequest(object):
-
-    def __init__(self, request):
-        self.request = request
-        self.input_stream = io.BytesIO(safe_bytes(self.request))
-        self.output_stream = io.BytesIO()  # make it un-closable for testing invesitagion
-        self.output_stream.close = lambda: None
-
-    def makefile(self, mode, *args, **kwargs):
-        return self.output_stream if mode == 'wb' else self.input_stream
-
-
-class MockServer(object):
-
-    def __init__(self, handler_cls, request):
-        ip_port = ('0.0.0.0', 8888)
-        self.request = MockRequest(request)
-        self.server_address = ip_port
-        self.handler = handler_cls(self.request, ip_port, self)
-
-
-@pytest.fixture()
-def tcp_server():
-    server = mock.Mock()
-    server.server_address = ('127.0.0.1', 8881)
-    server.wbufsize = 1024
-    return server
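
A condensed sketch of the celery-only call path these tests now exercise (the ini keys and the resulting extras entries mirror the assertions above; the ini path itself is a placeholder):

    from rhodecode.lib.hook_daemon import utils as hooks_utils

    # the ini file must define celery.broker_url and celery.result_backend
    extras = {'config': '/path/to/app.ini'}
    callback_daemon, extras = hooks_utils.prepare_callback_daemon(
        extras, protocol='celery', txn_id='txnid2')
    # extras now also carries 'hooks_config' (broker_url/result_backend),
    # 'hooks_protocol', 'txn_id' and a 'time' stamp
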
@@ -33,7 +33,7 b' from rhodecode.model import meta'
 from rhodecode.model.repo import RepoModel
 from rhodecode.model.repo_group import RepoGroupModel
 from rhodecode.model.settings import UiSetting, SettingsModel
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode_tools.lib.hash_utils import md5_safe
 from rhodecode.lib.ext_json import json

@@ -403,12 +403,9 b' class TestPrepareConfigData(object):'

         self._assert_repo_name_passed(model_mock, repo_name)

-        expected_result = [
-            ('section1', 'option1', 'value1'),
-            ('section2', 'option2', 'value2'),
-        ]
-        # We have extra config items returned, so we're ignoring two last items
-        assert result[:2] == expected_result
+        assert ('section1', 'option1', 'value1') in result
+        assert ('section2', 'option2', 'value2') in result
+        assert ('section3', 'option3', 'value3') not in result

     def _assert_repo_name_passed(self, model_mock, repo_name):
         assert model_mock.call_count == 1
@@ -25,7 +25,7 b' It works by replaying a group of commits'

 import argparse
 import collections
-import ConfigParser
+import configparser
 import functools
 import itertools
 import os
@@ -294,7 +294,7 b' class HgMixin(object):'
     def add_remote(self, repo, remote_url, remote_name='upstream'):
         self.remove_remote(repo, remote_name)
         os.chdir(repo)
-        hgrc = ConfigParser.RawConfigParser()
+        hgrc = configparser.RawConfigParser()
         hgrc.read('.hg/hgrc')
         hgrc.set('paths', remote_name, remote_url)
         with open('.hg/hgrc', 'w') as f:
@@ -303,7 +303,7 b' class HgMixin(object):'
     @keep_cwd
     def remove_remote(self, repo, remote_name='upstream'):
         os.chdir(repo)
-        hgrc = ConfigParser.RawConfigParser()
+        hgrc = configparser.RawConfigParser()
         hgrc.read('.hg/hgrc')
         hgrc.remove_option('paths', remote_name)
         with open('.hg/hgrc', 'w') as f:
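
The same Python 3 configparser round trip in isolation (a minimal sketch; the .hg/hgrc path and the 'paths' section are the ones edited above, the remote URL is invented):

    import configparser

    hgrc = configparser.RawConfigParser()
    hgrc.read('.hg/hgrc')
    if not hgrc.has_section('paths'):
        hgrc.add_section('paths')
    hgrc.set('paths', 'upstream', 'https://example.com/upstream-repo')
    with open('.hg/hgrc', 'w') as f:
        hgrc.write(f)
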
@@ -59,16 +59,6 b' def parse_options():'
     parser.add_argument(
         '--interval', '-i', type=float, default=5,
         help="Interval in secods.")
-    parser.add_argument(
-        '--appenlight', '--ae', action='store_true')
-    parser.add_argument(
-        '--appenlight-url', '--ae-url',
-        default='https://ae.rhodecode.com/api/logs',
-        help='URL of the Appenlight API endpoint, defaults to "%(default)s".')
-    parser.add_argument(
-        '--appenlight-api-key', '--ae-key',
-        help='API key to use when sending data to appenlight. This has to be '
-             'set if Appenlight is enabled.')
     return parser.parse_args()


@@ -1,4 +1,3 b''
-# -*- coding: utf-8 -*-
 # Copyright (C) 2010-2023 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
@@ -1,5 +1,3 b''
-# -*- coding: utf-8 -*-
-
 # Copyright (C) 2016-2023 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
@@ -22,7 +22,7 b' from rhodecode.model.meta import Session'
 from rhodecode.model.repo_group import RepoGroupModel
 from rhodecode.model.repo import RepoModel
 from rhodecode.model.user import UserModel
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture


 fixture = Fixture()
@@ -19,7 +19,7 b''

 import pytest

-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture

 from rhodecode.model.db import User, Notification, UserNotification
 from rhodecode.model.meta import Session
@@ -29,7 +29,7 b' from rhodecode.model.repo import RepoMod'
 from rhodecode.model.repo_group import RepoGroupModel
 from rhodecode.model.user import UserModel
 from rhodecode.model.user_group import UserGroupModel
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture


 fixture = Fixture()
This diff has been collapsed as it changes many lines (785 lines changed).
@@ -1,4 +1,3 b''
-# -*- coding: utf-8 -*-
 # Copyright (C) 2010-2023 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
@@ -16,6 +15,7 b''
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
+import os

 import mock
 import pytest
@@ -23,8 +23,7 b' import textwrap'

 import rhodecode
 from rhodecode.lib.vcs.backends import get_backend
-from rhodecode.lib.vcs.backends.base import (
-    MergeResponse, MergeFailureReason, Reference)
+from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason, Reference
 from rhodecode.lib.vcs.exceptions import RepositoryError
 from rhodecode.lib.vcs.nodes import FileNode
 from rhodecode.model.comment import CommentsModel
@@ -39,54 +38,42 b' pytestmark = ['
 ]


-@pytest.mark.usefixtures('config_stub')
+@pytest.mark.usefixtures("config_stub")
 class TestPullRequestModel(object):
-
     @pytest.fixture()
     def pull_request(self, request, backend, pr_util):
         """
         A pull request combined with multiples patches.
         """
         BackendClass = get_backend(backend.alias)
-        merge_resp = MergeResponse(
-            False, False, None, MergeFailureReason.UNKNOWN,
-            metadata={'exception': 'MockError'})
-        self.merge_patcher = mock.patch.object(
-            BackendClass, 'merge', return_value=merge_resp)
-        self.workspace_remove_patcher = mock.patch.object(
-            BackendClass, 'cleanup_merge_workspace')
+        merge_resp = MergeResponse(False, False, None, MergeFailureReason.UNKNOWN, metadata={"exception": "MockError"})
+        self.merge_patcher = mock.patch.object(BackendClass, "merge", return_value=merge_resp)
+        self.workspace_remove_patcher = mock.patch.object(BackendClass, "cleanup_merge_workspace")

         self.workspace_remove_mock = self.workspace_remove_patcher.start()
         self.merge_mock = self.merge_patcher.start()
-        self.comment_patcher = mock.patch(
-            'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
+        self.comment_patcher = mock.patch("rhodecode.model.changeset_status.ChangesetStatusModel.set_status")
         self.comment_patcher.start()
-        self.notification_patcher = mock.patch(
-            'rhodecode.model.notification.NotificationModel.create')
+        self.notification_patcher = mock.patch("rhodecode.model.notification.NotificationModel.create")
         self.notification_patcher.start()
-        self.helper_patcher = mock.patch(
-            'rhodecode.lib.helpers.route_path')
+        self.helper_patcher = mock.patch("rhodecode.lib.helpers.route_path")
         self.helper_patcher.start()

-        self.hook_patcher = mock.patch.object(PullRequestModel,
-                                              'trigger_pull_request_hook')
+        self.hook_patcher = mock.patch.object(PullRequestModel, "trigger_pull_request_hook")
         self.hook_mock = self.hook_patcher.start()

-        self.invalidation_patcher = mock.patch(
-            'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
+        self.invalidation_patcher = mock.patch("rhodecode.model.pull_request.ScmModel.mark_for_invalidation")
         self.invalidation_mock = self.invalidation_patcher.start()

-        self.pull_request = pr_util.create_pull_request(
-            mergeable=True, name_suffix=u'ąć')
+        self.pull_request = pr_util.create_pull_request(mergeable=True, name_suffix="ąć")
         self.source_commit = self.pull_request.source_ref_parts.commit_id
         self.target_commit = self.pull_request.target_ref_parts.commit_id
-        self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
+        self.workspace_id = f"pr-{self.pull_request.pull_request_id}"
         self.repo_id = self.pull_request.target_repo.repo_id

         @request.addfinalizer
         def cleanup_pull_request():
-            calls = [mock.call(
-                self.pull_request, self.pull_request.author, 'create')]
+            calls = [mock.call(self.pull_request, self.pull_request.author, "create")]
             self.hook_mock.assert_has_calls(calls)

             self.workspace_remove_patcher.stop()
@@ -114,29 +101,30 b' class TestPullRequestModel(object):'
         assert len(prs) == 1

     def test_count_awaiting_review(self, pull_request):
-        pr_count = PullRequestModel().count_awaiting_review(
-            pull_request.target_repo)
+        pr_count = PullRequestModel().count_awaiting_review(pull_request.target_repo)
         assert pr_count == 1

     def test_get_awaiting_my_review(self, pull_request):
         PullRequestModel().update_reviewers(
-            pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
-            pull_request.author)
+            pull_request, [(pull_request.author, ["author"], False, "reviewer", [])], pull_request.author
+        )
         Session().commit()

         prs = PullRequestModel().get_awaiting_my_review(
-            pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
+            pull_request.target_repo.repo_name, user_id=pull_request.author.user_id
+        )
         assert isinstance(prs, list)
         assert len(prs) == 1

     def test_count_awaiting_my_review(self, pull_request):
         PullRequestModel().update_reviewers(
-            pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
-            pull_request.author)
+            pull_request, [(pull_request.author, ["author"], False, "reviewer", [])], pull_request.author
+        )
         Session().commit()

         pr_count = PullRequestModel().count_awaiting_my_review(
-            pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
+            pull_request.target_repo.repo_name, user_id=pull_request.author.user_id
+        )
         assert pr_count == 1

     def test_delete_calls_cleanup_merge(self, pull_request):
144 | PullRequestModel().delete(pull_request, pull_request.author) |
|
132 | PullRequestModel().delete(pull_request, pull_request.author) | |
145 | Session().commit() |
|
133 | Session().commit() | |
146 |
|
134 | |||
147 | self.workspace_remove_mock.assert_called_once_with( |
|
135 | self.workspace_remove_mock.assert_called_once_with(repo_id, self.workspace_id) | |
148 | repo_id, self.workspace_id) |
|
|||
149 |
|
136 | |||
150 | def test_close_calls_cleanup_and_hook(self, pull_request): |
|
137 | def test_close_calls_cleanup_and_hook(self, pull_request): | |
151 | PullRequestModel().close_pull_request( |
|
138 | PullRequestModel().close_pull_request(pull_request, pull_request.author) | |
152 | pull_request, pull_request.author) |
|
|||
153 | Session().commit() |
|
139 | Session().commit() | |
154 |
|
140 | |||
155 | repo_id = pull_request.target_repo.repo_id |
|
141 | repo_id = pull_request.target_repo.repo_id | |
156 |
|
142 | |||
157 | self.workspace_remove_mock.assert_called_once_with( |
|
143 | self.workspace_remove_mock.assert_called_once_with(repo_id, self.workspace_id) | |
158 | repo_id, self.workspace_id) |
|
144 | self.hook_mock.assert_called_with(self.pull_request, self.pull_request.author, "close") | |
159 | self.hook_mock.assert_called_with( |
|
|||
160 | self.pull_request, self.pull_request.author, 'close') |
|
|||
161 |
|
145 | |||
162 | def test_merge_status(self, pull_request): |
|
146 | def test_merge_status(self, pull_request): | |
163 | self.merge_mock.return_value = MergeResponse( |
|
147 | self.merge_mock.return_value = MergeResponse(True, False, None, MergeFailureReason.NONE) | |
164 | True, False, None, MergeFailureReason.NONE) |
|
|||
165 |
|
148 | |||
166 | assert pull_request._last_merge_source_rev is None |
|
149 | assert pull_request._last_merge_source_rev is None | |
167 | assert pull_request._last_merge_target_rev is None |
|
150 | assert pull_request._last_merge_target_rev is None | |
@@ -169,13 +152,17 b' class TestPullRequestModel(object):' | |||||
169 |
|
152 | |||
170 | merge_response, status, msg = PullRequestModel().merge_status(pull_request) |
|
153 | merge_response, status, msg = PullRequestModel().merge_status(pull_request) | |
171 | assert status is True |
|
154 | assert status is True | |
172 |
assert msg == |
|
155 | assert msg == "This pull request can be automatically merged." | |
173 | self.merge_mock.assert_called_with( |
|
156 | self.merge_mock.assert_called_with( | |
174 |
self.repo_id, |
|
157 | self.repo_id, | |
|
158 | self.workspace_id, | |||
175 | pull_request.target_ref_parts, |
|
159 | pull_request.target_ref_parts, | |
176 | pull_request.source_repo.scm_instance(), |
|
160 | pull_request.source_repo.scm_instance(), | |
177 |
pull_request.source_ref_parts, |
|
161 | pull_request.source_ref_parts, | |
178 | use_rebase=False, close_branch=False) |
|
162 | dry_run=True, | |
|
163 | use_rebase=False, | |||
|
164 | close_branch=False, | |||
|
165 | ) | |||
179 |
|
166 | |||
180 | assert pull_request._last_merge_source_rev == self.source_commit |
|
167 | assert pull_request._last_merge_source_rev == self.source_commit | |
181 | assert pull_request._last_merge_target_rev == self.target_commit |
|
168 | assert pull_request._last_merge_target_rev == self.target_commit | |
@@ -184,13 +171,13 b' class TestPullRequestModel(object):'
         self.merge_mock.reset_mock()
         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
         assert status is True
-        assert msg == 'This pull request can be automatically merged.'
+        assert msg == "This pull request can be automatically merged."
         assert self.merge_mock.called is False

     def test_merge_status_known_failure(self, pull_request):
         self.merge_mock.return_value = MergeResponse(
-            False, False, None, MergeFailureReason.MERGE_FAILED,
-            metadata={'unresolved_files': 'file1'})
+            False, False, None, MergeFailureReason.MERGE_FAILED, metadata={"unresolved_files": "file1"}
+        )

         assert pull_request._last_merge_source_rev is None
         assert pull_request._last_merge_target_rev is None
@@ -198,13 +185,17 b' class TestPullRequestModel(object):'

         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
         assert status is False
-        assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
+        assert msg == "This pull request cannot be merged because of merge conflicts. file1"
         self.merge_mock.assert_called_with(
-            self.repo_id, self.workspace_id,
+            self.repo_id,
+            self.workspace_id,
             pull_request.target_ref_parts,
             pull_request.source_repo.scm_instance(),
-            pull_request.source_ref_parts, dry_run=True,
-            use_rebase=False, close_branch=False)
+            pull_request.source_ref_parts,
+            dry_run=True,
+            use_rebase=False,
+            close_branch=False,
+        )

         assert pull_request._last_merge_source_rev == self.source_commit
         assert pull_request._last_merge_target_rev == self.target_commit
@@ -213,13 +204,13 b' class TestPullRequestModel(object):'
         self.merge_mock.reset_mock()
         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
         assert status is False
-        assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
+        assert msg == "This pull request cannot be merged because of merge conflicts. file1"
         assert self.merge_mock.called is False

     def test_merge_status_unknown_failure(self, pull_request):
         self.merge_mock.return_value = MergeResponse(
-            False, False, None, MergeFailureReason.UNKNOWN,
-            metadata={'exception': 'MockError'})
+            False, False, None, MergeFailureReason.UNKNOWN, metadata={"exception": "MockError"}
+        )

         assert pull_request._last_merge_source_rev is None
         assert pull_request._last_merge_target_rev is None
@@ -227,15 +218,17 b' class TestPullRequestModel(object):'

         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
         assert status is False
-        assert msg == (
-            'This pull request cannot be merged because of an unhandled exception. '
-            'MockError')
+        assert msg == "This pull request cannot be merged because of an unhandled exception. MockError"
         self.merge_mock.assert_called_with(
-            self.repo_id, self.workspace_id,
+            self.repo_id,
+            self.workspace_id,
             pull_request.target_ref_parts,
             pull_request.source_repo.scm_instance(),
-            pull_request.source_ref_parts, dry_run=True,
-            use_rebase=False, close_branch=False)
+            pull_request.source_ref_parts,
+            dry_run=True,
+            use_rebase=False,
+            close_branch=False,
+        )

         assert pull_request._last_merge_source_rev is None
         assert pull_request._last_merge_target_rev is None
@@ -244,155 +237,136 b' class TestPullRequestModel(object):'
         self.merge_mock.reset_mock()
         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
         assert status is False
-        assert msg == (
-            'This pull request cannot be merged because of an unhandled exception. '
-            'MockError')
+        assert msg == "This pull request cannot be merged because of an unhandled exception. MockError"
         assert self.merge_mock.called is True
 
     def test_merge_status_when_target_is_locked(self, pull_request):
-        pull_request.target_repo.locked = [1, '12345.50', 'lock_web']
+        pull_request.target_repo.locked = [1, "12345.50", "lock_web"]
         merge_response, status, msg = PullRequestModel().merge_status(pull_request)
         assert status is False
-        assert msg == (
-            'This pull request cannot be merged because the target repository '
-            'is locked by user:1.')
+        assert msg == "This pull request cannot be merged because the target repository is locked by user:1."
 
     def test_merge_status_requirements_check_target(self, pull_request):
-
         def has_largefiles(self, repo):
             return repo == pull_request.source_repo
 
-        patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
+        patcher = mock.patch.object(PullRequestModel, "_has_largefiles", has_largefiles)
         with patcher:
             merge_response, status, msg = PullRequestModel().merge_status(pull_request)
 
         assert status is False
-        assert msg == 'Target repository large files support is disabled.'
+        assert msg == "Target repository large files support is disabled."
 
     def test_merge_status_requirements_check_source(self, pull_request):
-
         def has_largefiles(self, repo):
             return repo == pull_request.target_repo
 
-        patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
+        patcher = mock.patch.object(PullRequestModel, "_has_largefiles", has_largefiles)
        with patcher:
             merge_response, status, msg = PullRequestModel().merge_status(pull_request)
 
         assert status is False
-        assert msg == 'Source repository large files support is disabled.'
+        assert msg == "Source repository large files support is disabled."
 
     def test_merge(self, pull_request, merge_extras):
         user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
-        merge_ref = Reference(
-            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
-        self.merge_mock.return_value = MergeResponse(
-            True, True, merge_ref, MergeFailureReason.NONE)
+        merge_ref = Reference("type", "name", "6126b7bfcc82ad2d3deaee22af926b082ce54cc6")
+        self.merge_mock.return_value = MergeResponse(True, True, merge_ref, MergeFailureReason.NONE)
 
-        merge_extras['repository'] = pull_request.target_repo.repo_name
-        PullRequestModel().merge_repo(
-            pull_request, pull_request.author, extras=merge_extras)
+        merge_extras["repository"] = pull_request.target_repo.repo_name
+        PullRequestModel().merge_repo(pull_request, pull_request.author, extras=merge_extras)
         Session().commit()
 
-        message = (
-            u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
-            u'\n\n {pr_title}'.format(
-                pr_id=pull_request.pull_request_id,
-                source_repo=safe_str(
-                    pull_request.source_repo.scm_instance().name),
-                source_ref_name=pull_request.source_ref_parts.name,
-                pr_title=safe_str(pull_request.title)
-            )
-        )
+        message = "Merge pull request !{pr_id} from {source_repo} {source_ref_name}" "\n\n {pr_title}".format(
+            pr_id=pull_request.pull_request_id,
+            source_repo=safe_str(pull_request.source_repo.scm_instance().name),
+            source_ref_name=pull_request.source_ref_parts.name,
+            pr_title=safe_str(pull_request.title),
+        )
         self.merge_mock.assert_called_with(
-            self.repo_id, self.workspace_id,
+            self.repo_id,
+            self.workspace_id,
             pull_request.target_ref_parts,
             pull_request.source_repo.scm_instance(),
             pull_request.source_ref_parts,
-            user_name=user.short_contact, user_email=user.email, message=message,
-            use_rebase=False, close_branch=False
+            user_name=user.short_contact,
+            user_email=user.email,
+            message=message,
+            use_rebase=False,
+            close_branch=False,
         )
-        self.invalidation_mock.assert_called_once_with(
-            pull_request.target_repo.repo_name)
+        self.invalidation_mock.assert_called_once_with(pull_request.target_repo.repo_name)
 
-        self.hook_mock.assert_called_with(
-            self.pull_request, self.pull_request.author, 'merge')
+        self.hook_mock.assert_called_with(self.pull_request, self.pull_request.author, "merge")
 
         pull_request = PullRequest.get(pull_request.pull_request_id)
-        assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
+        assert pull_request.merge_rev == "6126b7bfcc82ad2d3deaee22af926b082ce54cc6"
 
     def test_merge_with_status_lock(self, pull_request, merge_extras):
         user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
-        merge_ref = Reference(
-            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
-        self.merge_mock.return_value = MergeResponse(
-            True, True, merge_ref, MergeFailureReason.NONE)
+        merge_ref = Reference("type", "name", "6126b7bfcc82ad2d3deaee22af926b082ce54cc6")
+        self.merge_mock.return_value = MergeResponse(True, True, merge_ref, MergeFailureReason.NONE)
 
-        merge_extras['repository'] = pull_request.target_repo.repo_name
+        merge_extras["repository"] = pull_request.target_repo.repo_name
 
         with pull_request.set_state(PullRequest.STATE_UPDATING):
             assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
-            PullRequestModel().merge_repo(
-                pull_request, pull_request.author, extras=merge_extras)
+            PullRequestModel().merge_repo(pull_request, pull_request.author, extras=merge_extras)
             Session().commit()
 
         assert pull_request.pull_request_state == PullRequest.STATE_CREATED
 
-        message = (
-            u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
-            u'\n\n {pr_title}'.format(
-                pr_id=pull_request.pull_request_id,
-                source_repo=safe_str(
-                    pull_request.source_repo.scm_instance().name),
-                source_ref_name=pull_request.source_ref_parts.name,
-                pr_title=safe_str(pull_request.title)
-            )
-        )
+        message = "Merge pull request !{pr_id} from {source_repo} {source_ref_name}" "\n\n {pr_title}".format(
+            pr_id=pull_request.pull_request_id,
+            source_repo=safe_str(pull_request.source_repo.scm_instance().name),
+            source_ref_name=pull_request.source_ref_parts.name,
+            pr_title=safe_str(pull_request.title),
+        )
         self.merge_mock.assert_called_with(
-            self.repo_id, self.workspace_id,
+            self.repo_id,
+            self.workspace_id,
             pull_request.target_ref_parts,
             pull_request.source_repo.scm_instance(),
             pull_request.source_ref_parts,
-            user_name=user.short_contact, user_email=user.email, message=message,
-            use_rebase=False, close_branch=False
+            user_name=user.short_contact,
+            user_email=user.email,
+            message=message,
+            use_rebase=False,
+            close_branch=False,
        )
-        self.invalidation_mock.assert_called_once_with(
-            pull_request.target_repo.repo_name)
+        self.invalidation_mock.assert_called_once_with(pull_request.target_repo.repo_name)
 
-        self.hook_mock.assert_called_with(
-            self.pull_request, self.pull_request.author, 'merge')
+        self.hook_mock.assert_called_with(self.pull_request, self.pull_request.author, "merge")
 
         pull_request = PullRequest.get(pull_request.pull_request_id)
-        assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
+        assert pull_request.merge_rev == "6126b7bfcc82ad2d3deaee22af926b082ce54cc6"
 
     def test_merge_failed(self, pull_request, merge_extras):
         user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
-        merge_ref = Reference(
-            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
-        self.merge_mock.return_value = MergeResponse(
-            False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
+        merge_ref = Reference("type", "name", "6126b7bfcc82ad2d3deaee22af926b082ce54cc6")
+        self.merge_mock.return_value = MergeResponse(False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
 
-        merge_extras['repository'] = pull_request.target_repo.repo_name
-        PullRequestModel().merge_repo(
-            pull_request, pull_request.author, extras=merge_extras)
+        merge_extras["repository"] = pull_request.target_repo.repo_name
+        PullRequestModel().merge_repo(pull_request, pull_request.author, extras=merge_extras)
         Session().commit()
 
-        message = (
-            u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
-            u'\n\n {pr_title}'.format(
-                pr_id=pull_request.pull_request_id,
-                source_repo=safe_str(
-                    pull_request.source_repo.scm_instance().name),
-                source_ref_name=pull_request.source_ref_parts.name,
-                pr_title=safe_str(pull_request.title)
-            )
-        )
+        message = "Merge pull request !{pr_id} from {source_repo} {source_ref_name}" "\n\n {pr_title}".format(
+            pr_id=pull_request.pull_request_id,
+            source_repo=safe_str(pull_request.source_repo.scm_instance().name),
+            source_ref_name=pull_request.source_ref_parts.name,
+            pr_title=safe_str(pull_request.title),
+        )
         self.merge_mock.assert_called_with(
-            self.repo_id, self.workspace_id,
+            self.repo_id,
+            self.workspace_id,
             pull_request.target_ref_parts,
             pull_request.source_repo.scm_instance(),
             pull_request.source_ref_parts,
-            user_name=user.short_contact, user_email=user.email, message=message,
-            use_rebase=False, close_branch=False
+            user_name=user.short_contact,
+            user_email=user.email,
+            message=message,
+            use_rebase=False,
+            close_branch=False,
         )
 
         pull_request = PullRequest.get(pull_request.pull_request_id)
@@ -410,7 +384,7 b' class TestPullRequestModel(object):'
         assert commit_ids == pull_request.revisions
 
         # Merge revision is not in the revisions list
-        pull_request.merge_rev = 'f000' * 10
+        pull_request.merge_rev = "f000" * 10
         commit_ids = PullRequestModel()._get_commit_ids(pull_request)
         assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
 
@@ -419,147 +393,126 b' class TestPullRequestModel(object):'
         source_ref_id = pull_request.source_ref_parts.commit_id
         target_ref_id = pull_request.target_ref_parts.commit_id
         diff = PullRequestModel()._get_diff_from_pr_or_version(
-            source_repo, source_ref_id, target_ref_id,
-            hide_whitespace_changes=False, diff_context=6)
-        assert b'file_1' in diff.raw.tobytes()
+            source_repo, source_ref_id, target_ref_id, hide_whitespace_changes=False, diff_context=6
+        )
+        assert b"file_1" in diff.raw.tobytes()
 
     def test_generate_title_returns_unicode(self):
         title = PullRequestModel().generate_pullrequest_title(
-            source='source-dummy',
-            source_ref='source-ref-dummy',
-            target='target-dummy',
+            source="source-dummy",
+            source_ref="source-ref-dummy",
+            target="target-dummy",
         )
         assert type(title) == str
 
-    @pytest.mark.parametrize('title, has_wip', [
-        ('hello', False),
-        ('hello wip', False),
-        ('hello wip: xxx', False),
-        ('[wip] hello', True),
-        ('[wip] hello', True),
-        ('wip: hello', True),
-        ('wip hello', True),
-
-    ])
+    @pytest.mark.parametrize(
+        "title, has_wip",
+        [
+            ("hello", False),
+            ("hello wip", False),
+            ("hello wip: xxx", False),
+            ("[wip] hello", True),
+            ("[wip] hello", True),
+            ("wip: hello", True),
+            ("wip hello", True),
+        ],
+    )
     def test_wip_title_marker(self, pull_request, title, has_wip):
         pull_request.title = title
         assert pull_request.work_in_progress == has_wip
 
 
-@pytest.mark.usefixtures('config_stub')
+@pytest.mark.usefixtures("config_stub")
 class TestIntegrationMerge(object):
-    @pytest.mark.parametrize('extra_config', (
-        {'vcs.hooks.protocol.v2': 'celery', 'vcs.hooks.direct_calls': False},
-    ))
-    def test_merge_triggers_push_hooks(
-            self, pr_util, user_admin, capture_rcextensions, merge_extras,
-            extra_config):
-
-        pull_request = pr_util.create_pull_request(
-            approved=True, mergeable=True)
-        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
-        merge_extras['repository'] = pull_request.target_repo.repo_name
-        Session().commit()
-
-        with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
-            merge_state = PullRequestModel().merge_repo(
-                pull_request, user_admin, extras=merge_extras)
-            Session().commit()
-
-        assert merge_state.executed
-        assert '_pre_push_hook' in capture_rcextensions
-        assert '_push_hook' in capture_rcextensions
 
-    def test_merge_can_be_rejected_by_pre_push_hook(
-            self, pr_util, user_admin, capture_rcextensions, merge_extras):
-        pull_request = pr_util.create_pull_request(
-            approved=True, mergeable=True)
-        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
-        merge_extras['repository'] = pull_request.target_repo.repo_name
-        Session().commit()
-
-        with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
-            pre_pull.side_effect = RepositoryError("Disallow push!")
-            merge_status = PullRequestModel().merge_repo(
-                pull_request, user_admin, extras=merge_extras)
-            Session().commit()
-
-        assert not merge_status.executed
-        assert 'pre_push' not in capture_rcextensions
-        assert 'post_push' not in capture_rcextensions
-
-    def test_merge_fails_if_target_is_locked(
-            self, pr_util, user_regular, merge_extras):
-        pull_request = pr_util.create_pull_request(
-            approved=True, mergeable=True)
-        locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
+    def test_merge_fails_if_target_is_locked(self, pr_util, user_regular, merge_extras):
+        pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
+        locked_by = [user_regular.user_id + 1, 12345.50, "lock_web"]
         pull_request.target_repo.locked = locked_by
         # TODO: johbo: Check if this can work based on the database, currently
         # all data is pre-computed, that's why just updating the DB is not
         # enough.
-        merge_extras['locked_by'] = locked_by
-        merge_extras['repository'] = pull_request.target_repo.repo_name
+        merge_extras["locked_by"] = locked_by
+        merge_extras["repository"] = pull_request.target_repo.repo_name
         # TODO: johbo: Needed for sqlite, try to find an automatic way for it
         Session().commit()
-        merge_status = PullRequestModel().merge_repo(
-            pull_request, user_regular, extras=merge_extras)
+        merge_status = PullRequestModel().merge_repo(pull_request, user_regular, extras=merge_extras)
         Session().commit()
 
         assert not merge_status.executed
 
 
-@pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
-    (False, 1, 0),
-    (True, 0, 1),
-])
-def test_outdated_comments(
-        pr_util, use_outdated, inlines_count, outdated_count, config_stub):
+@pytest.mark.parametrize(
+    "use_outdated, inlines_count, outdated_count",
+    [
+        (False, 1, 0),
+        (True, 0, 1),
+    ],
+)
+def test_outdated_comments(pr_util, use_outdated, inlines_count, outdated_count, config_stub):
     pull_request = pr_util.create_pull_request()
-    pr_util.create_inline_comment(file_path='not_in_updated_diff')
+    pr_util.create_inline_comment(file_path="not_in_updated_diff")
 
     with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
         pr_util.add_one_commit()
-        assert_inline_comments(
-            pull_request, visible=inlines_count, outdated=outdated_count)
+        assert_inline_comments(pull_request, visible=inlines_count, outdated=outdated_count)
     outdated_comment_mock.assert_called_with(pull_request)
 
 
-@pytest.mark.parametrize('mr_type, expected_msg', [
-    (MergeFailureReason.NONE,
-     'This pull request can be automatically merged.'),
-    (MergeFailureReason.UNKNOWN,
-     'This pull request cannot be merged because of an unhandled exception. CRASH'),
-    (MergeFailureReason.MERGE_FAILED,
-     'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
-    (MergeFailureReason.PUSH_FAILED,
-     'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
-    (MergeFailureReason.TARGET_IS_NOT_HEAD,
-     'This pull request cannot be merged because the target `ref_name` is not a head.'),
-    (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
-     'This pull request cannot be merged because the source contains more branches than the target.'),
-    (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
-     'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
-    (MergeFailureReason.TARGET_IS_LOCKED,
-     'This pull request cannot be merged because the target repository is locked by user:123.'),
-    (MergeFailureReason.MISSING_TARGET_REF,
-     'This pull request cannot be merged because the target reference `ref_name` is missing.'),
-    (MergeFailureReason.MISSING_SOURCE_REF,
-     'This pull request cannot be merged because the source reference `ref_name` is missing.'),
-    (MergeFailureReason.SUBREPO_MERGE_FAILED,
-     'This pull request cannot be merged because of conflicts related to sub repositories.'),
-
-])
+@pytest.mark.parametrize(
+    "mr_type, expected_msg",
+    [
+        (MergeFailureReason.NONE, "This pull request can be automatically merged."),
+        (MergeFailureReason.UNKNOWN, "This pull request cannot be merged because of an unhandled exception. CRASH"),
+        (
+            MergeFailureReason.MERGE_FAILED,
+            "This pull request cannot be merged because of merge conflicts. CONFLICT_FILE",
+        ),
+        (
+            MergeFailureReason.PUSH_FAILED,
+            "This pull request could not be merged because push to target:`some-repo@merge_commit` failed.",
+        ),
+        (
+            MergeFailureReason.TARGET_IS_NOT_HEAD,
+            "This pull request cannot be merged because the target `ref_name` is not a head.",
+        ),
+        (
+            MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
+            "This pull request cannot be merged because the source contains more branches than the target.",
+        ),
+        (
+            MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
+            "This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.",
+        ),
+        (
+            MergeFailureReason.TARGET_IS_LOCKED,
+            "This pull request cannot be merged because the target repository is locked by user:123.",
+        ),
+        (
+            MergeFailureReason.MISSING_TARGET_REF,
+            "This pull request cannot be merged because the target reference `ref_name` is missing.",
+        ),
+        (
+            MergeFailureReason.MISSING_SOURCE_REF,
+            "This pull request cannot be merged because the source reference `ref_name` is missing.",
+        ),
+        (
+            MergeFailureReason.SUBREPO_MERGE_FAILED,
+            "This pull request cannot be merged because of conflicts related to sub repositories.",
+        ),
+    ],
+)
 def test_merge_response_message(mr_type, expected_msg):
-    merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
+    merge_ref = Reference("type", "ref_name", "6126b7bfcc82ad2d3deaee22af926b082ce54cc6")
     metadata = {
-        'unresolved_files': 'CONFLICT_FILE',
-        'exception': 'CRASH',
-        'target': 'some-repo',
-        'merge_commit': 'merge_commit',
-        'target_ref': merge_ref,
-        'source_ref': merge_ref,
-        'heads': ','.join(['a', 'b', 'c']),
-        'locked_by': 'user:123',
+        "unresolved_files": "CONFLICT_FILE",
+        "exception": "CRASH",
+        "target": "some-repo",
+        "merge_commit": "merge_commit",
+        "target_ref": merge_ref,
+        "source_ref": merge_ref,
+        "heads": ",".join(["a", "b", "c"]),
+        "locked_by": "user:123",
     }
 
     merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
@@ -573,30 +526,28 b' def merge_extras(request, user_regular):'
     """
 
     extras = {
-        'ip': '127.0.0.1',
-        'username': user_regular.username,
-        'user_id': user_regular.user_id,
-        'action': 'push',
-        'repository': 'fake_target_repo_name',
-        'scm': 'git',
-        'config': request.config.getini('pyramid_config'),
-        'repo_store': '',
-        'make_lock': None,
-        'locked_by': [None, None, None],
-        'server_url': 'http://test.example.com:5000',
-        'hooks': ['push', 'pull'],
-        'is_shadow_repo': False,
+        "ip": "127.0.0.1",
+        "username": user_regular.username,
+        "user_id": user_regular.user_id,
+        "action": "push",
+        "repository": "fake_target_repo_name",
+        "scm": "git",
+        "config": request.config.getini("pyramid_config"),
+        "repo_store": "",
+        "make_lock": None,
+        "locked_by": [None, None, None],
+        "server_url": "http://test.example.com:5000",
+        "hooks": ["push", "pull"],
+        "is_shadow_repo": False,
     }
     return extras
 
 
-@pytest.mark.usefixtures('config_stub')
+@pytest.mark.usefixtures("config_stub")
 class TestUpdateCommentHandling(object):
-
-    @pytest.fixture(autouse=True, scope='class')
+    @pytest.fixture(autouse=True, scope="class")
     def enable_outdated_comments(self, request, baseapp):
-        config_patch = mock.patch.dict(
-            'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
+        config_patch = mock.patch.dict("rhodecode.CONFIG", {"rhodecode_use_outdated_comments": True})
         config_patch.start()
 
         @request.addfinalizer
@@ -605,206 +556,194 b' class TestUpdateCommentHandling(object):'
 
     def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
         commits = [
-            {'message': 'a'},
-            {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]},
-            {'message': 'c', 'added': [FileNode(b'file_c', b'test_content\n')]},
+            {"message": "a"},
+            {"message": "b", "added": [FileNode(b"file_b", b"test_content\n")]},
+            {"message": "c", "added": [FileNode(b"file_c", b"test_content\n")]},
         ]
-        pull_request = pr_util.create_pull_request(
-            commits=commits, target_head='a', source_head='b', revisions=['b'])
-        pr_util.create_inline_comment(file_path='file_b')
-        pr_util.add_one_commit(head='c')
+        pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"])
+        pr_util.create_inline_comment(file_path="file_b")
+        pr_util.add_one_commit(head="c")
 
         assert_inline_comments(pull_request, visible=1, outdated=0)
 
     def test_comment_stays_unflagged_on_change_above(self, pr_util):
-        original_content = b''.join((b'line %d\n' % x for x in range(1, 11)))
-        updated_content = b'new_line_at_top\n' + original_content
+        original_content = b"".join((b"line %d\n" % x for x in range(1, 11)))
+        updated_content = b"new_line_at_top\n" + original_content
         commits = [
-            {'message': 'a'},
-            {'message': 'b', 'added': [FileNode(b'file_b', original_content)]},
-            {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]},
+            {"message": "a"},
+            {"message": "b", "added": [FileNode(b"file_b", original_content)]},
+            {"message": "c", "changed": [FileNode(b"file_b", updated_content)]},
         ]
-        pull_request = pr_util.create_pull_request(
-            commits=commits, target_head='a', source_head='b', revisions=['b'])
+        pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"])
 
         with outdated_comments_patcher():
-            comment = pr_util.create_inline_comment(
-                line_no=u'n8', file_path='file_b')
-            pr_util.add_one_commit(head='c')
+            comment = pr_util.create_inline_comment(line_no="n8", file_path="file_b")
+            pr_util.add_one_commit(head="c")
 
         assert_inline_comments(pull_request, visible=1, outdated=0)
-        assert comment.line_no == u'n9'
+        assert comment.line_no == "n9"
 
     def test_comment_stays_unflagged_on_change_below(self, pr_util):
-        original_content = b''.join([b'line %d\n' % x for x in range(10)])
-        updated_content = original_content + b'new_line_at_end\n'
+        original_content = b"".join([b"line %d\n" % x for x in range(10)])
+        updated_content = original_content + b"new_line_at_end\n"
         commits = [
-            {'message': 'a'},
-            {'message': 'b', 'added': [FileNode(b'file_b', original_content)]},
-            {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]},
+            {"message": "a"},
+            {"message": "b", "added": [FileNode(b"file_b", original_content)]},
+            {"message": "c", "changed": [FileNode(b"file_b", updated_content)]},
         ]
-        pull_request = pr_util.create_pull_request(
-            commits=commits, target_head='a', source_head='b', revisions=['b'])
-        pr_util.create_inline_comment(file_path='file_b')
-        pr_util.add_one_commit(head='c')
+        pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"])
+        pr_util.create_inline_comment(file_path="file_b")
+        pr_util.add_one_commit(head="c")
 
         assert_inline_comments(pull_request, visible=1, outdated=0)
 
-    @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
+    @pytest.mark.parametrize("line_no", ["n4", "o4", "n10", "o9"])
     def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
-        base_lines = [b'line %d\n' % x for x in range(1, 13)]
+        base_lines = [b"line %d\n" % x for x in range(1, 13)]
         change_lines = list(base_lines)
-        change_lines.insert(6, b'line 6a added\n')
+        change_lines.insert(6, b"line 6a added\n")
 
         # Changes on the last line of sight
         update_lines = list(change_lines)
-        update_lines[0] = b'line 1 changed\n'
-        update_lines[-1] = b'line 12 changed\n'
+        update_lines[0] = b"line 1 changed\n"
+        update_lines[-1] = b"line 12 changed\n"
 
         def file_b(lines):
-            return FileNode(b'file_b', b''.join(lines))
+            return FileNode(b"file_b", b"".join(lines))
 
         commits = [
-            {'message': 'a', 'added': [file_b(base_lines)]},
-            {'message': 'b', 'changed': [file_b(change_lines)]},
-            {'message': 'c', 'changed': [file_b(update_lines)]},
+            {"message": "a", "added": [file_b(base_lines)]},
+            {"message": "b", "changed": [file_b(change_lines)]},
+            {"message": "c", "changed": [file_b(update_lines)]},
         ]
 
-        pull_request = pr_util.create_pull_request(
-            commits=commits, target_head='a', source_head='b', revisions=['b'])
-        pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
+        pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"])
+        pr_util.create_inline_comment(line_no=line_no, file_path="file_b")
 
         with outdated_comments_patcher():
-            pr_util.add_one_commit(head='c')
+            pr_util.add_one_commit(head="c")
             assert_inline_comments(pull_request, visible=0, outdated=1)
 
-    @pytest.mark.parametrize('change, content', [
-        ('changed', b'changed\n'),
-        ('removed', b''),
-    ], ids=['changed', b'removed'])
+    @pytest.mark.parametrize(
+        "change, content",
+        [
+            ("changed", b"changed\n"),
+            ("removed", b""),
+        ],
+        ids=["changed", b"removed"],
+    )
     def test_comment_flagged_on_change(self, pr_util, change, content):
         commits = [
-            {'message': 'a'},
-            {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]},
-            {'message': 'c', change: [FileNode(b'file_b', content)]},
+            {"message": "a"},
+            {"message": "b", "added": [FileNode(b"file_b", b"test_content\n")]},
+            {"message": "c", change: [FileNode(b"file_b", content)]},
         ]
-        pull_request = pr_util.create_pull_request(
-            commits=commits, target_head='a', source_head='b', revisions=['b'])
-        pr_util.create_inline_comment(file_path='file_b')
+        pull_request = pr_util.create_pull_request(commits=commits, target_head="a", source_head="b", revisions=["b"])
+        pr_util.create_inline_comment(file_path="file_b")
 
         with outdated_comments_patcher():
-            pr_util.add_one_commit(head='c')
+            pr_util.add_one_commit(head="c")
             assert_inline_comments(pull_request, visible=0, outdated=1)
 
 
-@pytest.mark.usefixtures('config_stub')
+@pytest.mark.usefixtures("config_stub")
 class TestUpdateChangedFiles(object):
-
     def test_no_changes_on_unchanged_diff(self, pr_util):
         commits = [
-            {'message': 'a'},
-            {'message': 'b',
-             'added': [FileNode(b'file_b', b'test_content b\n')]},
-            {'message': 'c',
-             'added': [FileNode(b'file_c', b'test_content c\n')]},
+            {"message": "a"},
+            {"message": "b", "added": [FileNode(b"file_b", b"test_content b\n")]},
+            {"message": "c", "added": [FileNode(b"file_c", b"test_content c\n")]},
         ]
         # open a PR from a to b, adding file_b
         pull_request = pr_util.create_pull_request(
-            commits=commits, target_head='a', source_head='b', revisions=['b'],
-            name_suffix='per-file-review')
+            commits=commits, target_head="a", source_head="b", revisions=["b"], name_suffix="per-file-review"
+        )
 
         # modify PR adding new file file_c
-        pr_util.add_one_commit(head='c')
+        pr_util.add_one_commit(head="c")
 
-        assert_pr_file_changes(
-            pull_request,
-            added=['file_c'],
-            modified=[],
-            removed=[])
+        assert_pr_file_changes(pull_request, added=["file_c"], modified=[], removed=[])
 
     def test_modify_and_undo_modification_diff(self, pr_util):
         commits = [
-            {'message': 'a'},
-            {'message': 'b',
-             'added': [FileNode(b'file_b', b'test_content b\n')]},
-            {'message': 'c',
-             'changed': [FileNode(b'file_b', b'test_content b modified\n')]},
-            {'message': 'd',
-             'changed': [FileNode(b'file_b', b'test_content b\n')]},
+            {"message": "a"},
+            {"message": "b", "added": [FileNode(b"file_b", b"test_content b\n")]},
+            {"message": "c", "changed": [FileNode(b"file_b", b"test_content b modified\n")]},
+            {"message": "d", "changed": [FileNode(b"file_b", b"test_content b\n")]},
         ]
         # open a PR from a to b, adding file_b
         pull_request = pr_util.create_pull_request(
-            commits=commits, target_head='a', source_head='b', revisions=['b'],
-            name_suffix='per-file-review')
+            commits=commits, target_head="a", source_head="b", revisions=["b"], name_suffix="per-file-review"
+        )
 
         # modify PR modifying file file_b
-        pr_util.add_one_commit(head='c')
+        pr_util.add_one_commit(head="c")
 
-        assert_pr_file_changes(
-            pull_request,
-            added=[],
-            modified=['file_b'],
-            removed=[])
+        assert_pr_file_changes(pull_request, added=[], modified=["file_b"], removed=[])
 
         # move the head again to d, which rollbacks change,
         # meaning we should indicate no changes
-        pr_util.add_one_commit(head='d')
+        pr_util.add_one_commit(head="d")
 
-        assert_pr_file_changes(
-            pull_request,
-            added=[],
-            modified=[],
-            removed=[])
+        assert_pr_file_changes(pull_request, added=[], modified=[], removed=[])
 
     def test_updated_all_files_in_pr(self, pr_util):
         commits = [
-            {'message': 'a'},
-            {'message': 'b', 'added': [
-                FileNode(b'file_a', b'test_content a\n'),
-                FileNode(b'file_b', b'test_content b\n'),
-                FileNode(b'file_c', b'test_content c\n')]},
-            {'message': 'c', 'changed': [
-                FileNode(b'file_a', b'test_content a changed\n'),
-                FileNode(b'file_b', b'test_content b changed\n'),
-                FileNode(b'file_c', b'test_content c changed\n')]},
+            {"message": "a"},
+            {
+                "message": "b",
+                "added": [
+                    FileNode(b"file_a", b"test_content a\n"),
+                    FileNode(b"file_b", b"test_content b\n"),
+                    FileNode(b"file_c", b"test_content c\n"),
+                ],
+            },
+            {
+                "message": "c",
+                "changed": [
+                    FileNode(b"file_a", b"test_content a changed\n"),
+                    FileNode(b"file_b", b"test_content b changed\n"),
+                    FileNode(b"file_c", b"test_content c changed\n"),
+                ],
+            },
         ]
         # open a PR from a to b, changing 3 files
         pull_request = pr_util.create_pull_request(
-            commits=commits, target_head='a', source_head='b', revisions=['b'],
-            name_suffix='per-file-review')
-
-        pr_util.add_one_commit(head='c')
+            commits=commits, target_head="a", source_head="b", revisions=["b"], name_suffix="per-file-review"
+        )
 
-        assert_pr_file_changes(
-            pull_request,
-            added=[],
-            modified=['file_a', 'file_b', 'file_c'],
-            removed=[])
+        pr_util.add_one_commit(head="c")
+
+        assert_pr_file_changes(pull_request, added=[], modified=["file_a", "file_b", "file_c"], removed=[])
 
     def test_updated_and_removed_all_files_in_pr(self, pr_util):
         commits = [
-            {'message': 'a'},
-            {'message': 'b', 'added': [
-                FileNode(b'file_a', b'test_content a\n'),
-                FileNode(b'file_b', b'test_content b\n'),
-                FileNode(b'file_c', b'test_content c\n')]},
-            {'message': 'c', 'removed': [
-                FileNode(b'file_a', b'test_content a changed\n'),
-                FileNode(b'file_b', b'test_content b changed\n'),
-                FileNode(b'file_c', b'test_content c changed\n')]},
+            {"message": "a"},
+            {
+                "message": "b",
+                "added": [
+                    FileNode(b"file_a", b"test_content a\n"),
+                    FileNode(b"file_b", b"test_content b\n"),
+                    FileNode(b"file_c", b"test_content c\n"),
+                ],
+            },
+            {
+                "message": "c",
+                "removed": [
+                    FileNode(b"file_a", b"test_content a changed\n"),
+                    FileNode(b"file_b", b"test_content b changed\n"),
+                    FileNode(b"file_c", b"test_content c changed\n"),
+                ],
+            },
         ]
         # open a PR from a to b, removing 3 files
         pull_request = pr_util.create_pull_request(
-            commits=commits, target_head='a', source_head='b', revisions=['b'],
-            name_suffix='per-file-review')
-
-        pr_util.add_one_commit(head='c')
+            commits=commits, target_head="a", source_head="b", revisions=["b"], name_suffix="per-file-review"
+        )
 
-        assert_pr_file_changes(
-            pull_request,
-            added=[],
-            modified=[],
-            removed=['file_a', 'file_b', 'file_c'])
+        pr_util.add_one_commit(head="c")
+
+        assert_pr_file_changes(pull_request, added=[], modified=[], removed=["file_a", "file_b", "file_c"])
 
 
 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
@@ -866,8 +805,7 b' def test_update_adds_a_comment_to_the_pu'
 
         .. |under_review| replace:: *"Under Review"*"""
     ).format(commit_id[:12])
-    pull_request_comments = sorted(
-        pull_request.comments, key=lambda c: c.modified_at)
+    pull_request_comments = sorted(pull_request.comments, key=lambda c: c.modified_at)
     update_comment = pull_request_comments[-1]
     assert update_comment.text == expected_message
 
@@ -890,8 +828,8 b' def test_create_version_from_snapshot_up'
     version = PullRequestModel()._create_version_from_snapshot(pull_request)
 
     # Check attributes
-    assert version.title == pr_util.create_parameters['title']
-    assert version.description == pr_util.create_parameters['description']
+    assert version.title == pr_util.create_parameters["title"]
+    assert version.description == pr_util.create_parameters["description"]
     assert version.status == PullRequest.STATUS_CLOSED
 
     # versions get updated created_on
@@ -899,11 +837,11 b' def test_create_version_from_snapshot_up'
 
     assert version.updated_on == updated_on
     assert version.user_id == pull_request.user_id
-    assert version.revisions == pr_util.create_parameters['revisions']
+    assert version.revisions == pr_util.create_parameters["revisions"]
     assert version.source_repo == pr_util.source_repository
-    assert version.source_ref == pr_util.create_parameters['source_ref']
+    assert version.source_ref == pr_util.create_parameters["source_ref"]
     assert version.target_repo == pr_util.target_repository
-    assert version.target_ref == pr_util.create_parameters['target_ref']
+    assert version.target_ref == pr_util.create_parameters["target_ref"]
     assert version._last_merge_source_rev == pull_request._last_merge_source_rev
     assert version._last_merge_target_rev == pull_request._last_merge_target_rev
     assert version.last_merge_status == pull_request.last_merge_status
@@ -921,15 +859,9 b' def test_link_comments_to_version_only_u'
     Session().commit()
 
     # Expect that only the new comment is linked to version2
-    assert (
-        comment_unlinked.pull_request_version_id ==
-        version2.pull_request_version_id)
-    assert (
-        comment_linked.pull_request_version_id ==
-        version1.pull_request_version_id)
-    assert (
-        comment_unlinked.pull_request_version_id !=
-        comment_linked.pull_request_version_id)
+    assert comment_unlinked.pull_request_version_id == version2.pull_request_version_id
+    assert comment_linked.pull_request_version_id == version1.pull_request_version_id
+    assert comment_unlinked.pull_request_version_id != comment_linked.pull_request_version_id
 
 
 def test_calculate_commits():
@@ -945,35 +877,26 b' def test_calculate_commits():'
 def assert_inline_comments(pull_request, visible=None, outdated=None):
     if visible is not None:
         inline_comments = CommentsModel().get_inline_comments(
-            pull_request.target_repo.repo_id, pull_request=pull_request)
-        inline_cnt = len(CommentsModel().get_inline_comments_as_list(
-            inline_comments))
+            pull_request.target_repo.repo_id, pull_request=pull_request
+        )
+        inline_cnt = len(CommentsModel().get_inline_comments_as_list(inline_comments))
         assert inline_cnt == visible
     if outdated is not None:
-        outdated_comments = CommentsModel().get_outdated_comments(
-            pull_request.target_repo.repo_id, pull_request)
+        outdated_comments = CommentsModel().get_outdated_comments(pull_request.target_repo.repo_id, pull_request)
         assert len(outdated_comments) == outdated
 
 
-def assert_pr_file_changes(
-        pull_request, added=None, modified=None, removed=None):
+def assert_pr_file_changes(pull_request, added=None, modified=None, removed=None):
     pr_versions = PullRequestModel().get_versions(pull_request)
     # always use first version, ie original PR to calculate changes
     pull_request_version = pr_versions[0]
-    old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
-        pull_request, pull_request_version)
-    file_changes = PullRequestModel()._calculate_file_changes(
-        old_diff_data, new_diff_data)
+    old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(pull_request, pull_request_version)
+    file_changes = PullRequestModel()._calculate_file_changes(old_diff_data, new_diff_data)
 
-    assert added == file_changes.added, \
-        'expected added:%s vs value:%s' % (added, file_changes.added)
-    assert modified == file_changes.modified, \
-        'expected modified:%s vs value:%s' % (modified, file_changes.modified)
-    assert removed == file_changes.removed, \
-        'expected removed:%s vs value:%s' % (removed, file_changes.removed)
+    assert added == file_changes.added, "expected added:%s vs value:%s" % (added, file_changes.added)
+    assert modified == file_changes.modified, "expected modified:%s vs value:%s" % (modified, file_changes.modified)
+    assert removed == file_changes.removed, "expected removed:%s vs value:%s" % (removed, file_changes.removed)
 
 
 def outdated_comments_patcher(use_outdated=True):
-    return mock.patch.object(
-        CommentsModel, 'use_outdated_comments',
-        return_value=use_outdated)
+    return mock.patch.object(CommentsModel, "use_outdated_comments", return_value=use_outdated)
@@ -23,7 +23,7 b' from sqlalchemy.exc import IntegrityErro'
 import pytest
 
 from rhodecode.tests import TESTS_TMP_PATH
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 
 from rhodecode.model.repo_group import RepoGroupModel
 from rhodecode.model.repo import RepoModel
@@ -28,7 +28,7 b' from rhodecode.model.user_group import U'
 from rhodecode.tests.models.common import (
     _create_project_tree, check_tree_perms, _get_perms, _check_expected_count,
     expected_count, _destroy_project_tree)
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 
 
 fixture = Fixture()
@@ -22,7 +22,7 b' import pytest'
 
 from rhodecode.model.db import User
 from rhodecode.tests import TEST_USER_REGULAR_LOGIN
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.model.user_group import UserGroupModel
 from rhodecode.model.meta import Session
 
@@ -27,7 +27,7 b' from rhodecode.model.user import UserMod'
 from rhodecode.model.user_group import UserGroupModel
 from rhodecode.model.repo import RepoModel
 from rhodecode.model.repo_group import RepoGroupModel
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 from rhodecode.lib.str_utils import safe_str
 
 
@@ -32,11 +32,11 b' from rhodecode.model.meta import Session'
 from rhodecode.model.repo_group import RepoGroupModel
 from rhodecode.model.db import ChangesetStatus, Repository
 from rhodecode.model.changeset_status import ChangesetStatusModel
-from rhodecode.tests.fixture import Fixture
+from rhodecode.tests.fixtures.rc_fixture import Fixture
 
 fixture = Fixture()
 
-pytestmark = pytest.mark.usefixtures('baseapp')
+pytestmark = pytest.mark.usefixtures("baseapp")
 
 
 @pytest.fixture()
@@ -111,7 +111,7 b' app.base_url = http://rhodecode.local'
 app.service_api.host = http://rhodecode.local:10020
 
 ; Secret for Service API authentication.
-app.service_api.token =
+app.service_api.token = secret4
 
 ; Unique application ID. Should be a random unique string for security.
 app_instance_uuid = rc-production
@@ -351,7 +351,7 b' archive_cache.objectstore.retry_attempts'
 ; and served from the cache during subsequent requests for the same archive of
 ; the repository. This path is important to be shared across filesystems and with
 ; RhodeCode and vcsserver
-archive_cache.filesystem.store_dir = %(here)s/rc-test-data/archive_cache
+archive_cache.filesystem.store_dir = %(here)s/.rc-test-data/archive_cache
 
 ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb
 archive_cache.filesystem.cache_size_gb = 2
@@ -406,7 +406,7 b' celery.task_store_eager_result = true'
 
 ; Default cache dir for caches. Putting this into a ramdisk can boost performance.
 ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space
-cache_dir = %(here)s/rc-test-data
+cache_dir = %(here)s/.rc-test-data
 
 ; *********************************************
 ; `sql_cache_short` cache for heavy SQL queries
@@ -435,7 +435,7 b' rc_cache.cache_repo_longterm.max_size = '
 rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace
 rc_cache.cache_general.expiration_time = 43200
 ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set
-rc_cache.cache_general.arguments.filename = %(here)s/rc-test-data/cache-backend/cache_general_db
+rc_cache.cache_general.arguments.filename = %(here)s/.rc-test-data/cache-backend/cache_general_db
 
 ; alternative `cache_general` redis backend with distributed lock
 #rc_cache.cache_general.backend = dogpile.cache.rc.redis
@@ -454,6 +454,10 b' rc_cache.cache_general.arguments.filenam'
 ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen
 #rc_cache.cache_general.arguments.lock_auto_renewal = true
 
+; prefix for redis keys used for this cache backend, the final key is constructed using {custom-prefix}{key}
+#rc_cache.cache_general.arguments.key_prefix = custom-prefix-
+
+
 ; *************************************************
 ; `cache_perms` cache for permission tree, auth TTL
 ; for simplicity use rc.file_namespace backend,
@@ -462,7 +466,7 b' rc_cache.cache_general.arguments.filenam' | |||||
462 | rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace |
|
466 | rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace | |
463 | rc_cache.cache_perms.expiration_time = 0 |
|
467 | rc_cache.cache_perms.expiration_time = 0 | |
464 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set |
|
468 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set | |
465 | rc_cache.cache_perms.arguments.filename = %(here)s/rc-test |
|
469 | rc_cache.cache_perms.arguments.filename = %(here)s/.rc-test-data/cache-backend/cache_perms_db | |
466 |
|
470 | |||
467 | ; alternative `cache_perms` redis backend with distributed lock |
|
471 | ; alternative `cache_perms` redis backend with distributed lock | |
468 | #rc_cache.cache_perms.backend = dogpile.cache.rc.redis |
|
472 | #rc_cache.cache_perms.backend = dogpile.cache.rc.redis | |
@@ -481,6 +485,10 b' rc_cache.cache_perms.arguments.filename ' | |||||
481 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen |
|
485 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen | |
482 | #rc_cache.cache_perms.arguments.lock_auto_renewal = true |
|
486 | #rc_cache.cache_perms.arguments.lock_auto_renewal = true | |
483 |
|
487 | |||
|
488 | ; prefix for redis keys used for this cache backend, the final key is constructed using {custom-prefix}{key} | |||
|
489 | #rc_cache.cache_perms.arguments.key_prefix = custom-prefix- | |||
|
490 | ||||
|
491 | ||||
484 | ; *************************************************** |
|
492 | ; *************************************************** | |
485 | ; `cache_repo` cache for file tree, Readme, RSS FEEDS |
|
493 | ; `cache_repo` cache for file tree, Readme, RSS FEEDS | |
486 | ; for simplicity use rc.file_namespace backend, |
|
494 | ; for simplicity use rc.file_namespace backend, | |
@@ -489,7 +497,7 b' rc_cache.cache_perms.arguments.filename ' | |||||
489 | rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace |
|
497 | rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace | |
490 | rc_cache.cache_repo.expiration_time = 2592000 |
|
498 | rc_cache.cache_repo.expiration_time = 2592000 | |
491 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set |
|
499 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set | |
492 | rc_cache.cache_repo.arguments.filename = %(here)s/rc-test |
|
500 | rc_cache.cache_repo.arguments.filename = %(here)s/.rc-test-data/cache-backend/cache_repo_db | |
493 |
|
501 | |||
494 | ; alternative `cache_repo` redis backend with distributed lock |
|
502 | ; alternative `cache_repo` redis backend with distributed lock | |
495 | #rc_cache.cache_repo.backend = dogpile.cache.rc.redis |
|
503 | #rc_cache.cache_repo.backend = dogpile.cache.rc.redis | |
@@ -508,6 +516,10 b' rc_cache.cache_repo.arguments.filename =' | |||||
508 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen |
|
516 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen | |
509 | #rc_cache.cache_repo.arguments.lock_auto_renewal = true |
|
517 | #rc_cache.cache_repo.arguments.lock_auto_renewal = true | |
510 |
|
518 | |||
|
519 | ; prefix for redis keys used for this cache backend, the final key is constructed using {custom-prefix}{key} | |||
|
520 | #rc_cache.cache_repo.arguments.key_prefix = custom-prefix- | |||
|
521 | ||||
|
522 | ||||
511 | ; ############## |
|
523 | ; ############## | |
512 | ; BEAKER SESSION |
|
524 | ; BEAKER SESSION | |
513 | ; ############## |
|
525 | ; ############## | |
@@ -516,7 +528,7 b' rc_cache.cache_repo.arguments.filename =' | |||||
516 | ; types are file, ext:redis, ext:database, ext:memcached |
|
528 | ; types are file, ext:redis, ext:database, ext:memcached | |
517 | ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session |
|
529 | ; Fastest ones are ext:redis and ext:database, DO NOT use memory type for session | |
518 | beaker.session.type = file |
|
530 | beaker.session.type = file | |
519 | beaker.session.data_dir = %(here)s/rc-test |
|
531 | beaker.session.data_dir = %(here)s/.rc-test-data/data/sessions | |
520 |
|
532 | |||
521 | ; Redis based sessions |
|
533 | ; Redis based sessions | |
522 | #beaker.session.type = ext:redis |
|
534 | #beaker.session.type = ext:redis | |
@@ -532,7 +544,7 b' beaker.session.data_dir = %(here)s/rc-te' | |||||
532 |
|
544 | |||
533 | beaker.session.key = rhodecode |
|
545 | beaker.session.key = rhodecode | |
534 | beaker.session.secret = test-rc-uytcxaz |
|
546 | beaker.session.secret = test-rc-uytcxaz | |
535 | beaker.session.lock_dir = %(here)s/rc-test |
|
547 | beaker.session.lock_dir = %(here)s/.rc-test-data/data/sessions/lock | |
536 |
|
548 | |||
537 | ; Secure encrypted cookie. Requires AES and AES python libraries |
|
549 | ; Secure encrypted cookie. Requires AES and AES python libraries | |
538 | ; you must disable beaker.session.secret to use this |
|
550 | ; you must disable beaker.session.secret to use this | |
@@ -564,7 +576,7 b' beaker.session.secure = false' | |||||
564 | ; WHOOSH Backend, doesn't require additional services to run |
|
576 | ; WHOOSH Backend, doesn't require additional services to run | |
565 | ; it works well with a few dozen repos |
 |
577 | ; it works well with a few dozen repos | |
566 | search.module = rhodecode.lib.index.whoosh |
|
578 | search.module = rhodecode.lib.index.whoosh | |
567 | search.location = %(here)s/rc-test |
|
579 | search.location = %(here)s/.rc-test-data/data/index | |
568 |
|
580 | |||
569 | ; #################### |
|
581 | ; #################### | |
570 | ; CHANNELSTREAM CONFIG |
|
582 | ; CHANNELSTREAM CONFIG | |
@@ -584,7 +596,7 b' channelstream.server = channelstream:980' | |||||
584 | ; see Nginx/Apache configuration examples in our docs |
|
596 | ; see Nginx/Apache configuration examples in our docs | |
585 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream |
|
597 | channelstream.ws_url = ws://rhodecode.yourserver.com/_channelstream | |
586 | channelstream.secret = ENV_GENERATED |
|
598 | channelstream.secret = ENV_GENERATED | |
587 | channelstream.history.location = %(here)s/rc-test |
|
599 | channelstream.history.location = %(here)s/.rc-test-data/channelstream_history | |
588 |
|
600 | |||
589 | ; Internal application path that Javascript uses to connect into. |
|
601 | ; Internal application path that Javascript uses to connect into. | |
590 | ; If you use proxy-prefix the prefix should be added before /_channelstream |
|
602 | ; If you use proxy-prefix the prefix should be added before /_channelstream | |
@@ -601,7 +613,7 b' channelstream.proxy_path = /_channelstre' | |||||
601 | ; pymysql is an alternative driver for MySQL, use in case of problems with default one |
|
613 | ; pymysql is an alternative driver for MySQL, use in case of problems with default one | |
602 | #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode |
|
614 | #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode | |
603 |
|
615 | |||
604 | sqlalchemy.db1.url = sqlite:///%(here)s/rc-test |
|
616 | sqlalchemy.db1.url = sqlite:///%(here)s/.rc-test-data/rhodecode_test.db?timeout=30 | |
605 |
|
617 | |||
606 | ; see sqlalchemy docs for other advanced settings |
|
618 | ; see sqlalchemy docs for other advanced settings | |
607 | ; print the sql statements to output |
|
619 | ; print the sql statements to output | |
@@ -737,7 +749,7 b' ssh.generate_authorized_keyfile = true' | |||||
737 | ; Path to the authorized_keys file where the generated entries are placed. |
 |
749 | ; Path to the authorized_keys file where the generated entries are placed. | |
738 | ; It is possible to have multiple key files specified in `sshd_config` e.g. |
|
750 | ; It is possible to have multiple key files specified in `sshd_config` e.g. | |
739 | ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode |
|
751 | ; AuthorizedKeysFile %h/.ssh/authorized_keys %h/.ssh/authorized_keys_rhodecode | |
740 | ssh.authorized_keys_file_path = %(here)s/rc-test |
|
752 | ssh.authorized_keys_file_path = %(here)s/.rc-test-data/authorized_keys_rhodecode | |
741 |
|
753 | |||
742 | ; Command to execute the SSH wrapper. The binary is available in the |
|
754 | ; Command to execute the SSH wrapper. The binary is available in the | |
743 | ; RhodeCode installation directory. |
|
755 | ; RhodeCode installation directory. |
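
All of the relocated `%(here)s/.rc-test-data` paths above rely on standard INI interpolation, where `here` expands to the directory holding the config file. A minimal sketch of loading such a file with Python's configparser follows; the file name `rc-test.ini` and the `[app:main]` section name are assumptions for illustration:

    import configparser
    import os

    # 'here' is supplied as an interpolation default, mirroring how
    # PasteDeploy expands %(here)s to the config file's directory.
    config_path = os.path.abspath('rc-test.ini')  # hypothetical test config
    parser = configparser.ConfigParser(defaults={'here': os.path.dirname(config_path)})
    parser.read(config_path)

    # after interpolation the cache/session paths land under .rc-test-data
    store_dir = parser.get('app:main', 'archive_cache.filesystem.store_dir')
    print(store_dir)  # e.g. /abs/path/.rc-test-data/archive_cache
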
@@ -24,13 +24,13 b' import tempfile' | |||||
24 | import pytest |
|
24 | import pytest | |
25 | import subprocess |
|
25 | import subprocess | |
26 | import logging |
|
26 | import logging | |
27 | from urllib.request import urlopen |
|
27 | import requests | |
28 | from urllib.error import URLError |
|
|||
29 | import configparser |
|
28 | import configparser | |
30 |
|
29 | |||
31 |
|
30 | |||
32 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS |
|
31 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS | |
33 | from rhodecode.tests.utils import is_url_reachable |
|
32 | from rhodecode.tests.utils import is_url_reachable | |
|
33 | from rhodecode.tests import console_printer | |||
34 |
|
34 | |||
35 | log = logging.getLogger(__name__) |
|
35 | log = logging.getLogger(__name__) | |
36 |
|
36 | |||
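
The new `console_printer` import replaces bare `print` calls below, and the `console_marker` string added later (`:warning: [green]pytest-setup[/green]`) looks like rich console markup. `console_printer` is imported from `rhodecode.tests`; a plausible minimal stand-in, assuming it simply renders rich markup to stderr, might be:

    import sys

    try:
        from rich.console import Console
        _console = Console(file=sys.stderr, emoji=True, markup=True)

        def console_printer(msg=''):
            # rich resolves :warning: emoji codes and [green]...[/green] tags
            _console.print(msg)
    except ImportError:
        def console_printer(msg=''):
            # degrade to plain stderr output when rich is unavailable
            print(msg, file=sys.stderr)
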
@@ -49,7 +49,7 b' def get_host_url(pyramid_config):' | |||||
49 |
|
49 | |||
50 | def assert_no_running_instance(url): |
|
50 | def assert_no_running_instance(url): | |
51 | if is_url_reachable(url): |
|
51 | if is_url_reachable(url): | |
52 | print(f"Hint: Usually this means another instance of server " |
|
52 | console_printer(f"Hint: Usually this means another instance of server " | |
53 | f"is running in the background at {url}.") |
|
53 | f"is running in the background at {url}.") | |
54 | pytest.fail(f"Port is not free at {url}, cannot start server at") |
|
54 | pytest.fail(f"Port is not free at {url}, cannot start server at") | |
55 |
|
55 | |||
@@ -58,8 +58,9 b' class ServerBase(object):' | |||||
58 | _args = [] |
|
58 | _args = [] | |
59 | log_file_name = 'NOT_DEFINED.log' |
|
59 | log_file_name = 'NOT_DEFINED.log' | |
60 | status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' |
|
60 | status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' | |
|
61 | console_marker = " :warning: [green]pytest-setup[/green] " | |||
61 |
|
62 | |||
62 | def __init__(self, config_file, log_file): |
|
63 | def __init__(self, config_file, log_file, env): | |
63 | self.config_file = config_file |
|
64 | self.config_file = config_file | |
64 | config = configparser.ConfigParser() |
|
65 | config = configparser.ConfigParser() | |
65 | config.read(config_file) |
|
66 | config.read(config_file) | |
@@ -69,10 +70,10 b' class ServerBase(object):' | |||||
69 | self._args = [] |
|
70 | self._args = [] | |
70 | self.log_file = log_file or os.path.join( |
|
71 | self.log_file = log_file or os.path.join( | |
71 | tempfile.gettempdir(), self.log_file_name) |
|
72 | tempfile.gettempdir(), self.log_file_name) | |
|
73 | self.env = env | |||
72 | self.process = None |
|
74 | self.process = None | |
73 | self.server_out = None |
|
75 | self.server_out = None | |
74 | log.info("Using the {} configuration:{}".format( |
|
76 | log.info(f"Using the {self.__class__.__name__} configuration:{config_file}") | |
75 | self.__class__.__name__, config_file)) |
|
|||
76 |
|
77 | |||
77 | if not os.path.isfile(config_file): |
|
78 | if not os.path.isfile(config_file): | |
78 | raise RuntimeError(f'Failed to get config at {config_file}') |
|
79 | raise RuntimeError(f'Failed to get config at {config_file}') | |
@@ -110,18 +111,17 b' class ServerBase(object):' | |||||
110 |
|
111 | |||
111 | while time.time() - start < timeout: |
|
112 | while time.time() - start < timeout: | |
112 | try: |
|
113 | try: | |
113 | urlopen(status_url) |
|
114 | requests.get(status_url) | |
114 | break |
|
115 | break | |
115 | except URLError: |
|
116 | except requests.exceptions.ConnectionError: | |
116 | time.sleep(0.2) |
|
117 | time.sleep(0.2) | |
117 | else: |
|
118 | else: | |
118 | pytest.fail( |
|
119 | pytest.fail( | |
119 | "Starting the {} failed or took more than {} " |
|
120 | f"Starting the {self.__class__.__name__} failed or took more than {timeout} seconds." | |
120 | "seconds. cmd: `{}`".format( |
|
121 | f"cmd: `{self.command}`" | |
121 | self.__class__.__name__, timeout, self.command)) |
|
122 | ) | |
122 |
|
123 | |||
123 | log.info('Server of {} ready at url {}'.format( |
|
124 | log.info(f'Server of {self.__class__.__name__} ready at url {status_url}') | |
124 | self.__class__.__name__, status_url)) |
|
|||
125 |
|
125 | |||
126 | def shutdown(self): |
|
126 | def shutdown(self): | |
127 | self.process.kill() |
|
127 | self.process.kill() | |
@@ -130,7 +130,7 b' class ServerBase(object):' | |||||
130 |
|
130 | |||
131 | def get_log_file_with_port(self): |
|
131 | def get_log_file_with_port(self): | |
132 | log_file = list(self.log_file.partition('.log')) |
|
132 | log_file = list(self.log_file.partition('.log')) | |
133 | log_file.insert(1, get_port(self.config_file)) |
|
133 | log_file.insert(1, f'-{get_port(self.config_file)}') | |
134 | log_file = ''.join(log_file) |
|
134 | log_file = ''.join(log_file) | |
135 | return log_file |
|
135 | return log_file | |
136 |
|
136 | |||
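
The loop above polls the status URL until the server answers or the timeout elapses; any HTTP response counts as ready, and only refused connections keep the loop going. The same pattern as a self-contained sketch (the URL and timeout are placeholders):

    import time
    import requests

    def wait_until_ready(status_url: str, timeout: float = 30) -> bool:
        # poll status_url until it responds or timeout seconds pass
        start = time.time()
        while time.time() - start < timeout:
            try:
                requests.get(status_url)  # any answer means the port is up
                return True
            except requests.exceptions.ConnectionError:
                time.sleep(0.2)  # not accepting connections yet
        return False

    # wait_until_ready('http://127.0.0.1:10020/_admin/ops/ping')  # hypothetical port
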
@@ -140,11 +140,12 b' class RcVCSServer(ServerBase):' | |||||
140 | Represents a running VCSServer instance. |
|
140 | Represents a running VCSServer instance. | |
141 | """ |
|
141 | """ | |
142 |
|
142 | |||
143 | log_file_name = 'rc-vcsserver.log' |
|
143 | log_file_name = 'rhodecode-vcsserver.log' | |
144 | status_url_tmpl = 'http://{host}:{port}/status' |
|
144 | status_url_tmpl = 'http://{host}:{port}/status' | |
145 |
|
145 | |||
146 | def __init__(self, config_file, log_file=None, workers='3'): |
|
146 | def __init__(self, config_file, log_file=None, workers='3', env=None, info_prefix=''): | |
147 | super(RcVCSServer, self).__init__(config_file, log_file) |
|
147 | super(RcVCSServer, self).__init__(config_file, log_file, env) | |
|
148 | self.info_prefix = info_prefix | |||
148 | self._args = [ |
|
149 | self._args = [ | |
149 | 'gunicorn', |
|
150 | 'gunicorn', | |
150 | '--bind', self.bind_addr, |
|
151 | '--bind', self.bind_addr, | |
@@ -164,9 +165,10 b' class RcVCSServer(ServerBase):' | |||||
164 | host_url = self.host_url() |
|
165 | host_url = self.host_url() | |
165 | assert_no_running_instance(host_url) |
|
166 | assert_no_running_instance(host_url) | |
166 |
|
167 | |||
167 | print(f'rhodecode-vcsserver starting at: {host_url}') |
|
168 | console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-vcsserver starting at: {host_url}') | |
168 | print(f'rhodecode-vcsserver command: {self.command}') |
|
169 | console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-vcsserver command: {self.command}') | |
169 | print(f'rhodecode-vcsserver logfile: {self.log_file}') |
|
170 | console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-vcsserver logfile: {self.log_file}') | |
|
171 | console_printer() | |||
170 |
|
172 | |||
171 | self.process = subprocess.Popen( |
|
173 | self.process = subprocess.Popen( | |
172 | self._args, bufsize=0, env=env, |
|
174 | self._args, bufsize=0, env=env, | |
@@ -178,11 +180,12 b' class RcWebServer(ServerBase):' | |||||
178 | Represents a running RCE web server used as a test fixture. |
|
180 | Represents a running RCE web server used as a test fixture. | |
179 | """ |
|
181 | """ | |
180 |
|
182 | |||
181 | log_file_name = 'r |
|
183 | log_file_name = 'rhodecode-ce.log' | |
182 | status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' |
|
184 | status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' | |
183 |
|
185 | |||
184 | def __init__(self, config_file, log_file=None, workers='2'): |
|
186 | def __init__(self, config_file, log_file=None, workers='2', env=None, info_prefix=''): | |
185 | super(RcWebServer, self).__init__(config_file, log_file) |
|
187 | super(RcWebServer, self).__init__(config_file, log_file, env) | |
|
188 | self.info_prefix = info_prefix | |||
186 | self._args = [ |
|
189 | self._args = [ | |
187 | 'gunicorn', |
|
190 | 'gunicorn', | |
188 | '--bind', self.bind_addr, |
|
191 | '--bind', self.bind_addr, | |
@@ -195,7 +198,8 b' class RcWebServer(ServerBase):' | |||||
195 |
|
198 | |||
196 | def start(self): |
|
199 | def start(self): | |
197 | env = os.environ.copy() |
|
200 | env = os.environ.copy() | |
198 | env['RC_NO_TMP_PATH'] = '1' |
|
201 | if self.env: | |
|
202 | env.update(self.env) | |||
199 |
|
203 | |||
200 | self.log_file = self.get_log_file_with_port() |
|
204 | self.log_file = self.get_log_file_with_port() | |
201 | self.server_out = open(self.log_file, 'w') |
|
205 | self.server_out = open(self.log_file, 'w') | |
@@ -203,9 +207,10 b' class RcWebServer(ServerBase):' | |||||
203 | host_url = self.host_url() |
|
207 | host_url = self.host_url() | |
204 | assert_no_running_instance(host_url) |
|
208 | assert_no_running_instance(host_url) | |
205 |
|
209 | |||
206 | print(f'rhodecode- |
|
210 | console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-ce starting at: {host_url}') | |
207 | print(f'rhodecode- |
|
211 | console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-ce command: {self.command}') | |
208 | print(f'rhodecode- |
|
212 | console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-ce logfile: {self.log_file}') | |
|
213 | console_printer() | |||
209 |
|
214 | |||
210 | self.process = subprocess.Popen( |
|
215 | self.process = subprocess.Popen( | |
211 | self._args, bufsize=0, env=env, |
|
216 | self._args, bufsize=0, env=env, | |
@@ -229,3 +234,44 b' class RcWebServer(ServerBase):' | |||||
229 | } |
|
234 | } | |
230 | params.update(**kwargs) |
|
235 | params.update(**kwargs) | |
231 | return params['user'], params['passwd'] |
|
236 | return params['user'], params['passwd'] | |
|
237 | ||||
|
238 | class CeleryServer(ServerBase): | |||
|
239 | log_file_name = 'rhodecode-celery.log' | |||
|
240 | status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' | |||
|
241 | ||||
|
242 | def __init__(self, config_file, log_file=None, workers='2', env=None, info_prefix=''): | |||
|
243 | super(CeleryServer, self).__init__(config_file, log_file, env) | |||
|
244 | self.info_prefix = info_prefix | |||
|
245 | self._args = \ | |||
|
246 | ['celery', | |||
|
247 | '--no-color', | |||
|
248 | '--app=rhodecode.lib.celerylib.loader', | |||
|
249 | 'worker', | |||
|
250 | '--autoscale=4,2', | |||
|
251 | '--max-tasks-per-child=30', | |||
|
252 | '--task-events', | |||
|
253 | '--loglevel=DEBUG', | |||
|
254 | '--ini=' + self.config_file] | |||
|
255 | ||||
|
256 | def start(self): | |||
|
257 | env = os.environ.copy() | |||
|
258 | env['RC_NO_TEST_ENV'] = '1' | |||
|
259 | ||||
|
260 | self.log_file = self.get_log_file_with_port() | |||
|
261 | self.server_out = open(self.log_file, 'w') | |||
|
262 | ||||
|
263 | host_url = "Celery" #self.host_url() | |||
|
264 | #assert_no_running_instance(host_url) | |||
|
265 | ||||
|
266 | console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-celery starting at: {host_url}') | |||
|
267 | console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-celery command: {self.command}') | |||
|
268 | console_printer(f'{self.console_marker}{self.info_prefix}rhodecode-celery logfile: {self.log_file}') | |||
|
269 | console_printer() | |||
|
270 | ||||
|
271 | self.process = subprocess.Popen( | |||
|
272 | self._args, bufsize=0, env=env, | |||
|
273 | stdout=self.server_out, stderr=self.server_out) | |||
|
274 | ||||
|
275 | ||||
|
276 | def wait_until_ready(self, timeout=30): | |||
|
277 | time.sleep(2) |
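
CeleryServer rounds out the trio of process fixtures; celery exposes no HTTP status endpoint, which is why its `wait_until_ready` falls back to a fixed sleep. A sketch of how the three classes might be combined in a session fixture; the config file names and prefix are placeholders, while the class names and method calls come from the code above:

    import pytest

    @pytest.fixture(scope='session')
    def running_stack(request):
        vcsserver = RcVCSServer('vcsserver.ini', info_prefix='[stack] ')
        rhodecode = RcWebServer('rhodecode.ini', info_prefix='[stack] ')
        celery = CeleryServer('rhodecode.ini', info_prefix='[stack] ')

        for server in (vcsserver, rhodecode, celery):
            server.start()
            server.wait_until_ready()

        def teardown():
            # kill the gunicorn/celery child processes after the session
            for server in (vcsserver, rhodecode, celery):
                server.shutdown()
        request.addfinalizer(teardown)

        return rhodecode
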
@@ -36,24 +36,29 b' from webtest.app import TestResponse, Te' | |||||
36 |
|
36 | |||
37 | import pytest |
|
37 | import pytest | |
38 |
|
38 | |||
39 | try: |
|
|||
40 | import rc_testdata |
|
|||
41 | except ImportError: |
|
|||
42 | raise ImportError('Failed to import rc_testdata, ' |
|
|||
43 | 'please make sure this package is installed from requirements_test.txt') |
|
|||
44 |
|
||||
45 | from rhodecode.model.db import User, Repository |
|
39 | from rhodecode.model.db import User, Repository | |
46 | from rhodecode.model.meta import Session |
|
40 | from rhodecode.model.meta import Session | |
47 | from rhodecode.model.scm import ScmModel |
|
41 | from rhodecode.model.scm import ScmModel | |
48 | from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository |
|
42 | from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository | |
49 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
43 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
50 | from rhodecode.tests import login_user_session |
|
44 | from rhodecode.tests import login_user_session, console_printer | |
|
45 | from rhodecode.authentication import AuthenticationPluginRegistry | |||
|
46 | from rhodecode.model.settings import SettingsModel | |||
51 |
|
47 | |||
52 | log = logging.getLogger(__name__) |
|
48 | log = logging.getLogger(__name__) | |
53 |
|
49 | |||
54 |
|
50 | |||
55 | def print_to_func(value, print_to=sys.stderr): |
|
51 | def console_printer_utils(msg): | |
56 | print(value, file=print_to) |
|
52 | console_printer(f" :white_check_mark: [green]test-utils[/green] {msg}") | |
|
53 | ||||
|
54 | ||||
|
55 | def get_rc_testdata(): | |||
|
56 | try: | |||
|
57 | import rc_testdata | |||
|
58 | except ImportError: | |||
|
59 | raise ImportError('Failed to import rc_testdata, ' | |||
|
60 | 'please make sure this package is installed from requirements_test.txt') | |||
|
61 | return rc_testdata | |||
57 |
|
62 | |||
58 |
|
63 | |||
59 | class CustomTestResponse(TestResponse): |
|
64 | class CustomTestResponse(TestResponse): | |
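
Moving the rc_testdata import out of module scope and into `get_rc_testdata()` defers the hard failure from collection time to first use, so test modules that never touch the dumps can still be imported. The same idea as a generic helper (the helper name is illustrative, not part of the codebase):

    import importlib

    def lazy_import(module_name: str, install_hint: str):
        # resolve the module on first call, failing with a helpful message
        try:
            return importlib.import_module(module_name)
        except ImportError as exc:
            raise ImportError(f'Failed to import {module_name}, {install_hint}') from exc

    # rc_testdata = lazy_import('rc_testdata', 'please install it from requirements_test.txt')
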
@@ -73,7 +78,6 b' class CustomTestResponse(TestResponse):' | |||||
73 | assert string in res |
|
78 | assert string in res | |
74 | """ |
|
79 | """ | |
75 | print_body = kw.pop('print_body', False) |
|
80 | print_body = kw.pop('print_body', False) | |
76 | print_to = kw.pop('print_to', sys.stderr) |
|
|||
77 |
|
81 | |||
78 | if 'no' in kw: |
|
82 | if 'no' in kw: | |
79 | no = kw['no'] |
|
83 | no = kw['no'] | |
@@ -89,18 +93,18 b' class CustomTestResponse(TestResponse):' | |||||
89 |
|
93 | |||
90 | for s in strings: |
|
94 | for s in strings: | |
91 | if s not in self: |
|
95 | if s not in self: | |
92 |
|
|
96 | console_printer_utils(f"Actual response (no {s!r}):") | |
93 |
|
|
97 | console_printer_utils(f"body output saved as `{f}`") | |
94 | if print_body: |
|
98 | if print_body: | |
95 |
|
|
99 | console_printer_utils(str(self)) | |
96 | raise IndexError(f"Body does not contain string {s!r}, body output saved as {f}") |
|
100 | raise IndexError(f"Body does not contain string {s!r}, body output saved as {f}") | |
97 |
|
101 | |||
98 | for no_s in no: |
|
102 | for no_s in no: | |
99 | if no_s in self: |
|
103 | if no_s in self: | |
100 |
|
|
104 | console_printer_utils(f"Actual response (has {no_s!r})") | |
101 |
|
|
105 | console_printer_utils(f"body output saved as `{f}`") | |
102 | if print_body: |
|
106 | if print_body: | |
103 |
|
|
107 | console_printer_utils(str(self)) | |
104 | raise IndexError(f"Body contains bad string {no_s!r}, body output saved as {f}") |
|
108 | raise IndexError(f"Body contains bad string {no_s!r}, body output saved as {f}") | |
105 |
|
109 | |||
106 | def assert_response(self): |
|
110 | def assert_response(self): | |
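
`mustcontain` extends WebTest's response assertion: on failure it now routes diagnostics through `console_printer_utils` and saves the body to a file. Typical use in a test might look like this; the route and expected strings are placeholders:

    # response is a CustomTestResponse returned by the test app
    response = app.get('/_admin/ops/ping')  # hypothetical route
    # body must contain 'ok' and must not contain 'error';
    # print_body=True echoes the whole body on failure
    response.mustcontain('ok', no=['error'], print_body=True)
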
@@ -209,6 +213,7 b' def extract_git_repo_from_dump(dump_name' | |||||
209 | """Create git repo `repo_name` from dump `dump_name`.""" |
|
213 | """Create git repo `repo_name` from dump `dump_name`.""" | |
210 | repos_path = ScmModel().repos_path |
|
214 | repos_path = ScmModel().repos_path | |
211 | target_path = os.path.join(repos_path, repo_name) |
|
215 | target_path = os.path.join(repos_path, repo_name) | |
|
216 | rc_testdata = get_rc_testdata() | |||
212 | rc_testdata.extract_git_dump(dump_name, target_path) |
|
217 | rc_testdata.extract_git_dump(dump_name, target_path) | |
213 | return target_path |
|
218 | return target_path | |
214 |
|
219 | |||
@@ -217,6 +222,7 b' def extract_hg_repo_from_dump(dump_name,' | |||||
217 | """Create hg repo `repo_name` from dump `dump_name`.""" |
|
222 | """Create hg repo `repo_name` from dump `dump_name`.""" | |
218 | repos_path = ScmModel().repos_path |
|
223 | repos_path = ScmModel().repos_path | |
219 | target_path = os.path.join(repos_path, repo_name) |
|
224 | target_path = os.path.join(repos_path, repo_name) | |
|
225 | rc_testdata = get_rc_testdata() | |||
220 | rc_testdata.extract_hg_dump(dump_name, target_path) |
|
226 | rc_testdata.extract_hg_dump(dump_name, target_path) | |
221 | return target_path |
|
227 | return target_path | |
222 |
|
228 | |||
@@ -245,6 +251,7 b' def _load_svn_dump_into_repo(dump_name, ' | |||||
245 | Currently the dumps are in rc_testdata. They might later on be |
|
251 | Currently the dumps are in rc_testdata. They might later on be | |
246 | integrated with the main repository once they stabilize more. |
|
252 | integrated with the main repository once they stabilize more. | |
247 | """ |
|
253 | """ | |
|
254 | rc_testdata = get_rc_testdata() | |||
248 | dump = rc_testdata.load_svn_dump(dump_name) |
|
255 | dump = rc_testdata.load_svn_dump(dump_name) | |
249 | load_dump = subprocess.Popen( |
|
256 | load_dump = subprocess.Popen( | |
250 | ['svnadmin', 'load', repo_path], |
|
257 | ['svnadmin', 'load', repo_path], | |
@@ -254,9 +261,7 b' def _load_svn_dump_into_repo(dump_name, ' | |||||
254 | if load_dump.returncode != 0: |
|
261 | if load_dump.returncode != 0: | |
255 | log.error("Output of load_dump command: %s", out) |
|
262 | log.error("Output of load_dump command: %s", out) | |
256 | log.error("Error output of load_dump command: %s", err) |
|
263 | log.error("Error output of load_dump command: %s", err) | |
257 | raise Exception( |
|
264 | raise Exception(f'Failed to load dump "{dump_name}" into repository at path "{repo_path}".') | |
258 | 'Failed to load dump "%s" into repository at path "%s".' |
|
|||
259 | % (dump_name, repo_path)) |
|
|||
260 |
|
265 | |||
261 |
|
266 | |||
262 | class AssertResponse(object): |
|
267 | class AssertResponse(object): | |
@@ -492,3 +497,54 b' def permission_update_data_generator(csr' | |||||
492 | ('perm_del_member_type_{}'.format(obj_id), obj_type), |
|
497 | ('perm_del_member_type_{}'.format(obj_id), obj_type), | |
493 | ]) |
|
498 | ]) | |
494 | return form_data |
|
499 | return form_data | |
|
500 | ||||
|
501 | ||||
|
502 | ||||
|
503 | class AuthPluginManager: | |||
|
504 | ||||
|
505 | def cleanup(self): | |||
|
506 | self._enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode']) | |||
|
507 | ||||
|
508 | def enable(self, plugins_list, override=None): | |||
|
509 | return self._enable_plugins(plugins_list, override) | |||
|
510 | ||||
|
511 | @classmethod | |||
|
512 | def _enable_plugins(cls, plugins_list, override: object = None): | |||
|
513 | override = override or {} | |||
|
514 | params = { | |||
|
515 | 'auth_plugins': ','.join(plugins_list), | |||
|
516 | } | |||
|
517 | ||||
|
518 | # helper translate some names to others, to fix settings code | |||
|
519 | name_map = { | |||
|
520 | 'token': 'authtoken' | |||
|
521 | } | |||
|
522 | log.debug('enable_auth_plugins: enabling following auth-plugins: %s', plugins_list) | |||
|
523 | ||||
|
524 | for module in plugins_list: | |||
|
525 | plugin_name = module.partition('#')[-1] | |||
|
526 | if plugin_name in name_map: | |||
|
527 | plugin_name = name_map[plugin_name] | |||
|
528 | enabled_plugin = f'auth_{plugin_name}_enabled' | |||
|
529 | cache_ttl = f'auth_{plugin_name}_cache_ttl' | |||
|
530 | ||||
|
531 | # default params that are needed for each plugin, | |||
|
532 | # `enabled` and `cache_ttl` | |||
|
533 | params.update({ | |||
|
534 | enabled_plugin: True, | |||
|
535 | cache_ttl: 0 | |||
|
536 | }) | |||
|
537 | if override: |
|
538 | params.update(override.get(module, {})) | |||
|
539 | ||||
|
540 | validated_params = params | |||
|
541 | ||||
|
542 | for k, v in validated_params.items(): | |||
|
543 | setting = SettingsModel().create_or_update_setting(k, v) | |||
|
544 | Session().add(setting) | |||
|
545 | Session().commit() | |||
|
546 | ||||
|
547 | AuthenticationPluginRegistry.invalidate_auth_plugins_cache(hard=True) | |||
|
548 | ||||
|
549 | enabled_plugins = SettingsModel().get_auth_plugins() | |||
|
550 | assert plugins_list == enabled_plugins |
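
A sketch of how AuthPluginManager could back a pytest fixture, so every test that flips auth plugins restores the built-in `rhodecode` plugin afterwards (the fixture name is illustrative):

    import pytest

    @pytest.fixture()
    def enable_auth_plugins(request):
        manager = AuthPluginManager()
        # cleanup() re-enables only egg:rhodecode-enterprise-ce#rhodecode
        request.addfinalizer(manager.cleanup)
        return manager

    # in a test:
    # enable_auth_plugins.enable(['egg:rhodecode-enterprise-ce#token',
    #                             'egg:rhodecode-enterprise-ce#rhodecode'])
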
@@ -1,4 +1,3 b'' | |||||
1 |
|
||||
2 |
|
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH | |
3 | # |
|
2 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify |
@@ -1,4 +1,3 b'' | |||||
1 |
|
||||
2 |
|
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH | |
3 | # |
|
2 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
@@ -32,8 +31,7 b' from rhodecode.tests.utils import check_' | |||||
32 |
|
31 | |||
33 |
|
32 | |||
34 | @pytest.fixture() |
|
33 | @pytest.fixture() | |
35 | def vcs_repository_support( |
|
34 | def vcs_repository_support(request, backend_alias, baseapp, _vcs_repo_container): | |
36 | request, backend_alias, baseapp, _vcs_repo_container): |
|
|||
37 | """ |
|
35 | """ | |
38 | Provide a test repository for the test run. |
|
36 | Provide a test repository for the test run. | |
39 |
|
37 | |||
@@ -63,7 +61,7 b' def vcs_repository_support(' | |||||
63 | return backend_alias, repo |
|
61 | return backend_alias, repo | |
64 |
|
62 | |||
65 |
|
63 | |||
66 |
@pytest.fixture(scope= |
|
64 | @pytest.fixture(scope="class") | |
67 | def _vcs_repo_container(request): |
|
65 | def _vcs_repo_container(request): | |
68 | """ |
|
66 | """ | |
69 | Internal fixture intended to help support class based scoping on demand. |
|
67 | Internal fixture intended to help support class based scoping on demand. | |
@@ -73,13 +71,12 b' def _vcs_repo_container(request):' | |||||
73 |
|
71 | |||
74 | def _create_vcs_repo_container(request): |
|
72 | def _create_vcs_repo_container(request): | |
75 | repo_container = VcsRepoContainer() |
|
73 | repo_container = VcsRepoContainer() | |
76 | if not request.config.getoption('--keep-tmp-path'): |
|
74 | if not request.config.getoption("--keep-tmp-path"): | |
77 | request.addfinalizer(repo_container.cleanup) |
|
75 | request.addfinalizer(repo_container.cleanup) | |
78 | return repo_container |
|
76 | return repo_container | |
79 |
|
77 | |||
80 |
|
78 | |||
81 | class VcsRepoContainer(object): |
|
79 | class VcsRepoContainer(object): | |
82 |
|
||||
83 | def __init__(self): |
|
80 | def __init__(self): | |
84 | self._cleanup_paths = [] |
|
81 | self._cleanup_paths = [] | |
85 | self._repos = {} |
|
82 | self._repos = {} | |
@@ -98,14 +95,14 b' class VcsRepoContainer(object):' | |||||
98 |
|
95 | |||
99 |
|
96 | |||
100 | def _should_create_repo_per_test(cls): |
|
97 | def _should_create_repo_per_test(cls): | |
101 | return getattr(cls, 'recreate_repo_per_test', False) |
|
98 | return getattr(cls, "recreate_repo_per_test", False) | |
102 |
|
99 | |||
103 |
|
100 | |||
104 | def _create_empty_repository(cls, backend_alias=None): |
|
101 | def _create_empty_repository(cls, backend_alias=None): | |
105 | Backend = get_backend(backend_alias or cls.backend_alias) |
|
102 | Backend = get_backend(backend_alias or cls.backend_alias) | |
106 | repo_path = get_new_dir(str(time.time())) |
|
103 | repo_path = get_new_dir(str(time.time())) | |
107 | repo = Backend(repo_path, create=True) |
|
104 | repo = Backend(repo_path, create=True) | |
108 | if hasattr(cls, '_get_commits'): |
|
105 | if hasattr(cls, "_get_commits"): | |
109 | commits = cls._get_commits() |
|
106 | commits = cls._get_commits() | |
110 | cls.tip = _add_commits_to_repo(repo, commits) |
|
107 | cls.tip = _add_commits_to_repo(repo, commits) | |
111 |
|
108 | |||
@@ -127,7 +124,7 b' def config():' | |||||
127 | specific content is required. |
|
124 | specific content is required. | |
128 | """ |
|
125 | """ | |
129 | config = Config() |
|
126 | config = Config() | |
130 | config.set('section-a', 'a-1', 'value-a-1') |
|
127 | config.set("section-a", "a-1", "value-a-1") | |
131 | return config |
|
128 | return config | |
132 |
|
129 | |||
133 |
|
130 | |||
@@ -136,24 +133,24 b' def _add_commits_to_repo(repo, commits):' | |||||
136 | tip = None |
|
133 | tip = None | |
137 |
|
134 | |||
138 | for commit in commits: |
|
135 | for commit in commits: | |
139 | for node in commit.get('added', []): |
|
136 | for node in commit.get("added", []): | |
140 | if not isinstance(node, FileNode): |
|
137 | if not isinstance(node, FileNode): | |
141 | node = FileNode(safe_bytes(node.path), content=node.content) |
|
138 | node = FileNode(safe_bytes(node.path), content=node.content) | |
142 | imc.add(node) |
|
139 | imc.add(node) | |
143 |
|
140 | |||
144 | for node in commit.get('changed', []): |
|
141 | for node in commit.get("changed", []): | |
145 | if not isinstance(node, FileNode): |
|
142 | if not isinstance(node, FileNode): | |
146 | node = FileNode(safe_bytes(node.path), content=node.content) |
|
143 | node = FileNode(safe_bytes(node.path), content=node.content) | |
147 | imc.change(node) |
|
144 | imc.change(node) | |
148 |
|
145 | |||
149 | for node in commit.get('removed', []): |
|
146 | for node in commit.get("removed", []): | |
150 | imc.remove(FileNode(safe_bytes(node.path))) |
|
147 | imc.remove(FileNode(safe_bytes(node.path))) | |
151 |
|
148 | |||
152 | tip = imc.commit( |
|
149 | tip = imc.commit( | |
153 | message=str(commit['message']), |
|
150 | message=str(commit["message"]), | |
154 | author=str(commit['author']), |
|
151 | author=str(commit["author"]), | |
155 | date=commit['date'], |
|
152 | date=commit["date"], | |
156 | branch=commit.get('branch'), |
|
153 | branch=commit.get("branch"), | |
157 | ) |
|
154 | ) | |
158 |
|
155 | |||
159 | return tip |
|
156 | return tip | |
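
Each element consumed by `_add_commits_to_repo` is a plain dict: `message`, `author` and `date` are read unconditionally, while `added`, `changed`, `removed` and `branch` are optional `.get()` lookups. A minimal example of the expected shape:

    import datetime
    from rhodecode.lib.vcs.nodes import FileNode

    commit = {
        "message": "Add a readme",
        "author": "Joe Doe <joe.doe@example.com>",
        "date": datetime.datetime(2010, 1, 1, 20),
        "branch": "default",  # optional
        "added": [FileNode(b"README.rst", content=b"hello")],
    }
    # tip = _add_commits_to_repo(repo, [commit])
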
@@ -183,16 +180,15 b' def generate_repo_with_commits(vcs_repo)' | |||||
183 | start_date = datetime.datetime(2010, 1, 1, 20) |
|
180 | start_date = datetime.datetime(2010, 1, 1, 20) | |
184 | for x in range(num): |
|
181 | for x in range(num): | |
185 | yield { |
|
182 | yield { | |
186 | 'message': 'Commit %d' % x, |
|
183 | "message": "Commit %d" % x, | |
187 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
184 | "author": "Joe Doe <joe.doe@example.com>", | |
188 | 'date': start_date + datetime.timedelta(hours=12 * x), |
|
185 | "date": start_date + datetime.timedelta(hours=12 * x), | |
189 | 'added': [ |
|
186 | "added": [ | |
190 | FileNode(b'file_%d.txt' % x, content=b'Foobar %d' % x), |
|
187 | FileNode(b"file_%d.txt" % x, content=b"Foobar %d" % x), | |
191 | ], |
|
188 | ], | |
192 | 'modified': [ |
|
189 | "modified": [ | |
193 | FileNode(b'file_%d.txt' % x, |
|
190 | FileNode(b"file_%d.txt" % x, content=b"Foobar %d modified" % (x - 1)), | |
194 | content=b'Foobar %d modified' % (x-1)), |
|
191 | ], | |
195 | ] |
|
|||
196 | } |
|
192 | } | |
197 |
|
193 | |||
198 | def commit_maker(num=5): |
|
194 | def commit_maker(num=5): | |
@@ -231,34 +227,33 b' class BackendTestMixin(object):' | |||||
231 | created |
|
227 | created | |
232 | before every single test. Defaults to ``True``. |
|
228 | before every single test. Defaults to ``True``. | |
233 | """ |
|
229 | """ | |
|
230 | ||||
234 | recreate_repo_per_test = True |
|
231 | recreate_repo_per_test = True | |
235 |
|
232 | |||
236 | @classmethod |
|
233 | @classmethod | |
237 | def _get_commits(cls): |
|
234 | def _get_commits(cls): | |
238 | commits = [ |
|
235 | commits = [ | |
239 | { |
|
236 | { | |
240 | 'message': 'Initial commit', |
|
237 | "message": "Initial commit", | |
241 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
238 | "author": "Joe Doe <joe.doe@example.com>", | |
242 | 'date': datetime.datetime(2010, 1, 1, 20), |
|
239 | "date": datetime.datetime(2010, 1, 1, 20), | |
243 | 'added': [ |
|
240 | "added": [ | |
244 | FileNode(b'foobar', content=b'Foobar'), |
|
241 | FileNode(b"foobar", content=b"Foobar"), | |
245 | FileNode(b'foobar2', content=b'Foobar II'), |
|
242 | FileNode(b"foobar2", content=b"Foobar II"), | |
246 | FileNode(b'foo/bar/baz', content=b'baz here!'), |
|
243 | FileNode(b"foo/bar/baz", content=b"baz here!"), | |
247 | ], |
|
244 | ], | |
248 | }, |
|
245 | }, | |
249 | { |
|
246 | { | |
250 | 'message': 'Changes...', |
|
247 | "message": "Changes...", | |
251 | 'author': 'Jane Doe <jane.doe@example.com>', |
|
248 | "author": "Jane Doe <jane.doe@example.com>", | |
252 | 'date': datetime.datetime(2010, 1, 1, 21), |
|
249 | "date": datetime.datetime(2010, 1, 1, 21), | |
253 | 'added': [ |
|
250 | "added": [ | |
254 | FileNode(b'some/new.txt', content=b'news...'), |
|
251 | FileNode(b"some/new.txt", content=b"news..."), | |
255 | ], |
|
252 | ], | |
256 | 'changed': [ |
|
253 | "changed": [ | |
257 | FileNode(b'foobar', b'Foobar I'), |
|
254 | FileNode(b"foobar", b"Foobar I"), | |
258 | ], |
|
255 | ], | |
259 | 'removed': [], |
|
256 | "removed": [], | |
260 | }, |
|
257 | }, | |
261 | ] |
|
258 | ] | |
262 | return commits |
|
259 | return commits | |
263 |
|
||||
264 |
|
@@ -1,4 +1,3 b'' | |||||
1 |
|
||||
2 |
|
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH | |
3 | # |
|
2 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
@@ -43,121 +42,120 b' def d_cache_config():' | |||||
43 |
|
42 | |||
44 | @pytest.mark.usefixtures("vcs_repository_support") |
|
43 | @pytest.mark.usefixtures("vcs_repository_support") | |
45 | class TestArchives(BackendTestMixin): |
|
44 | class TestArchives(BackendTestMixin): | |
46 |
|
||||
47 | @classmethod |
|
45 | @classmethod | |
48 | def _get_commits(cls): |
|
46 | def _get_commits(cls): | |
49 | start_date = datetime.datetime(2010, 1, 1, 20) |
|
47 | start_date = datetime.datetime(2010, 1, 1, 20) | |
50 | yield { |
|
48 | yield { | |
51 | 'message': 'Initial Commit', |
|
49 | "message": "Initial Commit", | |
52 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
50 | "author": "Joe Doe <joe.doe@example.com>", | |
53 | 'date': start_date + datetime.timedelta(hours=12), |
|
51 | "date": start_date + datetime.timedelta(hours=12), | |
54 | 'added': [ |
|
52 | "added": [ | |
55 | FileNode(b'executable_0o100755', b'mode_755', mode=0o100755), |
|
53 | FileNode(b"executable_0o100755", b"mode_755", mode=0o100755), | |
56 | FileNode(b'executable_0o100500', b'mode_500', mode=0o100500), |
|
54 | FileNode(b"executable_0o100500", b"mode_500", mode=0o100500), | |
57 | FileNode(b'not_executable', b'mode_644', mode=0o100644), |
|
55 | FileNode(b"not_executable", b"mode_644", mode=0o100644), | |
58 | ], |
|
56 | ], | |
59 | } |
|
57 | } | |
60 | for x in range(5): |
|
58 | for x in range(5): | |
61 | yield { |
|
59 | yield { | |
62 | 'message': 'Commit %d' % x, |
|
60 | "message": "Commit %d" % x, | |
63 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
61 | "author": "Joe Doe <joe.doe@example.com>", | |
64 | 'date': start_date + datetime.timedelta(hours=12 * x), |
|
62 | "date": start_date + datetime.timedelta(hours=12 * x), | |
65 | 'added': [ |
|
63 | "added": [ | |
66 | FileNode(b'%d/file_%d.txt' % (x, x), content=b'Foobar %d' % x), |
|
64 | FileNode(b"%d/file_%d.txt" % (x, x), content=b"Foobar %d" % x), | |
67 | ], |
|
65 | ], | |
68 | } |
|
66 | } | |
69 |
|
67 | |||
70 | @pytest.mark.parametrize('compressor', ['gz', 'bz2']) |
|
68 | @pytest.mark.parametrize("compressor", ["gz", "bz2"]) | |
71 | def test_archive_tar(self, compressor, tmpdir, tmp_path, d_cache_config): |
|
69 | def test_archive_tar(self, compressor, tmpdir, tmp_path, d_cache_config): | |
72 |
|
70 | archive_node = tmp_path / "archive-node" | ||
73 | archive_node = tmp_path / 'archive-node' |
|
|||
74 | archive_node.touch() |
|
71 | archive_node.touch() | |
75 |
|
72 | |||
76 | archive_lnk = self.tip.archive_repo( |
|
73 | archive_lnk = self.tip.archive_repo( | |
77 | str(archive_node), kind=f't{compressor}', archive_dir_name='repo', cache_config=d_cache_config) |
|
74 | str(archive_node), kind=f"t{compressor}", archive_dir_name="repo", cache_config=d_cache_config | |
|
75 | ) | |||
78 |
|
76 | |||
79 | out_dir = tmpdir |
|
77 | out_dir = tmpdir | |
80 | out_file = tarfile.open(str(archive_lnk), f'r|{compressor}') |
|
78 | out_file = tarfile.open(str(archive_lnk), f"r|{compressor}") | |
81 | out_file.extractall(out_dir) |
|
79 | out_file.extractall(out_dir) | |
82 | out_file.close() |
|
80 | out_file.close() | |
83 |
|
81 | |||
84 | for x in range(5): |
|
82 | for x in range(5): | |
85 | node_path = '%d/file_%d.txt' % (x, x) |
|
83 | node_path = "%d/file_%d.txt" % (x, x) | |
86 | with open(os.path.join(out_dir, 'repo/' + node_path), 'rb') as f: |
|
84 | with open(os.path.join(out_dir, "repo/" + node_path), "rb") as f: | |
87 | file_content = f.read() |
|
85 | file_content = f.read() | |
88 | assert file_content == self.tip.get_node(node_path).content |
|
86 | assert file_content == self.tip.get_node(node_path).content | |
89 |
|
87 | |||
90 | shutil.rmtree(out_dir) |
|
88 | shutil.rmtree(out_dir) | |
91 |
|
89 | |||
92 | @pytest.mark.parametrize('compressor', ['gz', 'bz2']) |
|
90 | @pytest.mark.parametrize("compressor", ["gz", "bz2"]) | |
93 | def test_archive_tar_symlink(self, compressor): |
|
91 | def test_archive_tar_symlink(self, compressor): | |
94 | pytest.skip('Not supported') |
|
92 | pytest.skip("Not supported") | |
95 |
|
93 | |||
96 | @pytest.mark.parametrize('compressor', ['gz', 'bz2']) |
|
94 | @pytest.mark.parametrize("compressor", ["gz", "bz2"]) | |
97 | def test_archive_tar_file_modes(self, compressor, tmpdir, tmp_path, d_cache_config): |
|
95 | def test_archive_tar_file_modes(self, compressor, tmpdir, tmp_path, d_cache_config): | |
98 | archive_node = tmp_path / 'archive-node' |
|
96 | archive_node = tmp_path / "archive-node" | |
99 | archive_node.touch() |
|
97 | archive_node.touch() | |
100 |
|
98 | |||
101 | archive_lnk = self.tip.archive_repo( |
|
99 | archive_lnk = self.tip.archive_repo( | |
102 | str(archive_node), kind='t{}'.format(compressor), archive_dir_name='repo', cache_config=d_cache_config) |
|
100 | str(archive_node), kind="t{}".format(compressor), archive_dir_name="repo", cache_config=d_cache_config | |
|
101 | ) | |||
103 |
|
102 | |||
104 | out_dir = tmpdir |
|
103 | out_dir = tmpdir | |
105 | out_file = tarfile.open(str(archive_lnk), 'r|{}'.format(compressor)) |
|
104 | out_file = tarfile.open(str(archive_lnk), "r|{}".format(compressor)) | |
106 | out_file.extractall(out_dir) |
|
105 | out_file.extractall(out_dir) | |
107 | out_file.close() |
|
106 | out_file.close() | |
108 |
|
107 | |||
109 | def dest(inp): |
|
108 | def dest(inp): | |
110 | return os.path.join(out_dir, "repo/" + inp) |
|
109 | return os.path.join(out_dir, "repo/" + inp) | |
111 |
|
110 | |||
112 | assert oct(os.stat(dest('not_executable')).st_mode) == '0o100644' |
|
111 | assert oct(os.stat(dest("not_executable")).st_mode) == "0o100644" | |
113 |
|
112 | |||
114 | def test_archive_zip(self, tmp_path, d_cache_config): |
|
113 | def test_archive_zip(self, tmp_path, d_cache_config): | |
115 | archive_node = tmp_path / 'archive-node' |
|
114 | archive_node = tmp_path / "archive-node" | |
116 | archive_node.touch() |
|
|||
117 |
|
||||
118 | archive_lnk = self.tip.archive_repo(str(archive_node), kind='zip', |
|
|||
119 | archive_dir_name='repo', cache_config=d_cache_config) |
|
|||
120 | zip_file = zipfile.ZipFile(str(archive_lnk)) |
|
|||
121 |
|
||||
122 | for x in range(5): |
|
|||
123 | node_path = '%d/file_%d.txt' % (x, x) |
|
|||
124 | data = zip_file.read(f'repo/{node_path}') |
|
|||
125 |
|
||||
126 | decompressed = io.BytesIO() |
|
|||
127 | decompressed.write(data) |
|
|||
128 | assert decompressed.getvalue() == \ |
|
|||
129 | self.tip.get_node(node_path).content |
|
|||
130 | decompressed.close() |
|
|||
131 |
|
||||
132 | def test_archive_zip_with_metadata(self, tmp_path, d_cache_config): |
|
|||
133 | archive_node = tmp_path / 'archive-node' |
|
|||
134 | archive_node.touch() |
|
115 | archive_node.touch() | |
135 |
|
116 | |||
136 | archive_lnk = self.tip.archive_repo( |
|
117 | archive_lnk = self.tip.archive_repo( | |
137 | str(archive_node), kind='zip', |
|
118 | str(archive_node), kind="zip", archive_dir_name="repo", cache_config=d_cache_config | |
138 | archive_dir_name='repo', write_metadata=True, cache_config=d_cache_config) |
|
119 | ) | |
|
120 | zip_file = zipfile.ZipFile(str(archive_lnk)) | |||
|
121 | ||||
|
122 | for x in range(5): | |||
|
123 | node_path = "%d/file_%d.txt" % (x, x) | |||
|
124 | data = zip_file.read(f"repo/{node_path}") | |||
|
125 | ||||
|
126 | decompressed = io.BytesIO() | |||
|
127 | decompressed.write(data) | |||
|
128 | assert decompressed.getvalue() == self.tip.get_node(node_path).content | |||
|
129 | decompressed.close() | |||
|
130 | ||||
|
131 | def test_archive_zip_with_metadata(self, tmp_path, d_cache_config): | |||
|
132 | archive_node = tmp_path / "archive-node" | |||
|
133 | archive_node.touch() | |||
|
134 | ||||
|
135 | archive_lnk = self.tip.archive_repo( | |||
|
136 | str(archive_node), kind="zip", archive_dir_name="repo", write_metadata=True, cache_config=d_cache_config | |||
|
137 | ) | |||
139 |
|
138 | |||
140 | zip_file = zipfile.ZipFile(str(archive_lnk)) |
|
139 | zip_file = zipfile.ZipFile(str(archive_lnk)) | |
141 | metafile = zip_file.read('repo/.archival.txt') |
|
140 | metafile = zip_file.read("repo/.archival.txt") | |
142 |
|
141 | |||
143 | raw_id = ascii_bytes(self.tip.raw_id) |
|
142 | raw_id = ascii_bytes(self.tip.raw_id) | |
144 | assert b'commit_id:%b' % raw_id in metafile |
|
143 | assert b"commit_id:%b" % raw_id in metafile | |
145 |
|
144 | |||
146 | for x in range(5): |
|
145 | for x in range(5): | |
147 | node_path = '%d/file_%d.txt' % (x, x) |
|
146 | node_path = "%d/file_%d.txt" % (x, x) | |
148 | data = zip_file.read(f'repo/{node_path}') |
|
147 | data = zip_file.read(f"repo/{node_path}") | |
149 | decompressed = io.BytesIO() |
|
148 | decompressed = io.BytesIO() | |
150 | decompressed.write(data) |
|
149 | decompressed.write(data) | |
151 | assert decompressed.getvalue() == \ |
|
150 | assert decompressed.getvalue() == self.tip.get_node(node_path).content | |
152 | self.tip.get_node(node_path).content |
|
|||
153 | decompressed.close() |
|
151 | decompressed.close() | |
154 |
|
152 | |||
155 | def test_archive_wrong_kind(self, tmp_path, d_cache_config): |
|
153 | def test_archive_wrong_kind(self, tmp_path, d_cache_config): | |
156 | archive_node = tmp_path / 'archive-node' |
|
154 | archive_node = tmp_path / "archive-node" | |
157 | archive_node.touch() |
|
155 | archive_node.touch() | |
158 |
|
156 | |||
159 | with pytest.raises(ImproperArchiveTypeError): |
|
157 | with pytest.raises(ImproperArchiveTypeError): | |
160 | self.tip.archive_repo(str(archive_node), kind='wrong kind', cache_config=d_cache_config) |
|
158 | self.tip.archive_repo(str(archive_node), kind="wrong kind", cache_config=d_cache_config) | |
161 |
|
159 | |||
162 |
|
160 | |||
163 | @pytest.fixture() |
|
161 | @pytest.fixture() | |
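
For reference, the tar tests above boil down to a write-then-stream-read round-trip. A standalone sketch of the same flow using only the standard library (all paths are temporary):

    import os
    import tarfile
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        src = os.path.join(tmp, 'repo')
        os.makedirs(src)
        with open(os.path.join(src, 'file_0.txt'), 'wb') as f:
            f.write(b'Foobar 0')

        archive = os.path.join(tmp, 'archive.tgz')
        with tarfile.open(archive, 'w:gz') as tar:
            tar.add(src, arcname='repo')

        out = os.path.join(tmp, 'out')
        with tarfile.open(archive, 'r|gz') as tar:  # stream mode, as in the tests
            tar.extractall(out)

        with open(os.path.join(out, 'repo', 'file_0.txt'), 'rb') as f:
            assert f.read() == b'Foobar 0'
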
@@ -167,8 +165,8 b' def base_commit():' | |||||
167 | """ |
|
165 | """ | |
168 | commit = base.BaseCommit() |
|
166 | commit = base.BaseCommit() | |
169 | commit.repository = mock.Mock() |
|
167 | commit.repository = mock.Mock() | |
170 | commit.repository.name = 'fake_repo' |
|
168 | commit.repository.name = "fake_repo" | |
171 | commit.short_id = 'fake_id' |
|
169 | commit.short_id = "fake_id" | |
172 | return commit |
|
170 | return commit | |
173 |
|
171 | |||
174 |
|
172 | |||
@@ -180,19 +178,17 b' def test_validate_archive_prefix_enforce' | |||||
180 | def test_validate_archive_prefix_empty_prefix(base_commit): |
|
178 | def test_validate_archive_prefix_empty_prefix(base_commit): | |
181 | # TODO: johbo: Should raise a ValueError here. |
|
179 | # TODO: johbo: Should raise a ValueError here. | |
182 | with pytest.raises(VCSError): |
|
180 | with pytest.raises(VCSError): | |
183 | base_commit._validate_archive_prefix('') |
|
181 | base_commit._validate_archive_prefix("") | |
184 |
|
182 | |||
185 |
|
183 | |||
186 | def test_validate_archive_prefix_with_leading_slash(base_commit): |
|
184 | def test_validate_archive_prefix_with_leading_slash(base_commit): | |
187 | # TODO: johbo: Should raise a ValueError here. |
|
185 | # TODO: johbo: Should raise a ValueError here. | |
188 | with pytest.raises(VCSError): |
|
186 | with pytest.raises(VCSError): | |
189 | base_commit._validate_archive_prefix('/any') |
|
187 | base_commit._validate_archive_prefix("/any") | |
190 |
|
188 | |||
191 |
|
189 | |||
192 | def test_validate_archive_prefix_falls_back_to_repository_name(base_commit): |
|
190 | def test_validate_archive_prefix_falls_back_to_repository_name(base_commit): | |
193 | prefix = base_commit._validate_archive_prefix(None) |
|
191 | prefix = base_commit._validate_archive_prefix(None) | |
194 | expected_prefix = base_commit._ARCHIVE_PREFIX_TEMPLATE.format( |
|
192 | expected_prefix = base_commit._ARCHIVE_PREFIX_TEMPLATE.format(repo_name="fake_repo", short_id="fake_id") | |
195 | repo_name='fake_repo', |
|
|||
196 | short_id='fake_id') |
|
|||
197 | assert isinstance(prefix, str) |
|
193 | assert isinstance(prefix, str) | |
198 | assert prefix == expected_prefix |
|
194 | assert prefix == expected_prefix |
@@ -64,18 +64,14 b' class TestBranches(BackendTestMixin):' | |||||
64 | def test_new_head(self): |
|
64 | def test_new_head(self): | |
65 | tip = self.repo.get_commit() |
|
65 | tip = self.repo.get_commit() | |
66 |
|
66 | |||
67 | self.imc.add( |
|
67 | self.imc.add(FileNode(b"docs/index.txt", content=b"Documentation\n")) | |
68 | FileNode(b"docs/index.txt", content=b"Documentation\n") |
|
|||
69 | ) |
|
|||
70 | foobar_tip = self.imc.commit( |
|
68 | foobar_tip = self.imc.commit( | |
71 | message="New branch: foobar", |
|
69 | message="New branch: foobar", | |
72 | author="joe <joe@rhodecode.com>", |
|
70 | author="joe <joe@rhodecode.com>", | |
73 | branch="foobar", |
|
71 | branch="foobar", | |
74 | parents=[tip], |
|
72 | parents=[tip], | |
75 | ) |
|
73 | ) | |
76 | self.imc.change( |
|
74 | self.imc.change(FileNode(b"docs/index.txt", content=b"Documentation\nand more...\n")) | |
77 | FileNode(b"docs/index.txt", content=b"Documentation\nand more...\n") |
|
|||
78 | ) |
|
|||
79 | assert foobar_tip.branch == "foobar" |
|
75 | assert foobar_tip.branch == "foobar" | |
80 | newtip = self.imc.commit( |
|
76 | newtip = self.imc.commit( | |
81 | message="At foobar_tip branch", |
|
77 | message="At foobar_tip branch", | |
@@ -96,21 +92,15 b' class TestBranches(BackendTestMixin):' | |||||
96 | @pytest.mark.backends("git", "hg") |
|
92 | @pytest.mark.backends("git", "hg") | |
97 | def test_branch_with_slash_in_name(self): |
|
93 | def test_branch_with_slash_in_name(self): | |
98 | self.imc.add(FileNode(b"extrafile", content=b"Some data\n")) |
|
94 | self.imc.add(FileNode(b"extrafile", content=b"Some data\n")) | |
99 | self.imc.commit( |
|
95 | self.imc.commit("Branch with a slash!", author="joe <joe@rhodecode.com>", branch="issue/123") | |
100 | "Branch with a slash!", author="joe <joe@rhodecode.com>", branch="issue/123" |
|
|||
101 | ) |
|
|||
102 | assert "issue/123" in self.repo.branches |
|
96 | assert "issue/123" in self.repo.branches | |
103 |
|
97 | |||
104 | @pytest.mark.backends("git", "hg") |
|
98 | @pytest.mark.backends("git", "hg") | |
105 | def test_branch_with_slash_in_name_and_similar_without(self): |
|
99 | def test_branch_with_slash_in_name_and_similar_without(self): | |
106 | self.imc.add(FileNode(b"extrafile", content=b"Some data\n")) |
|
100 | self.imc.add(FileNode(b"extrafile", content=b"Some data\n")) | |
107 | self.imc.commit( |
|
101 | self.imc.commit("Branch with a slash!", author="joe <joe@rhodecode.com>", branch="issue/123") | |
108 | "Branch with a slash!", author="joe <joe@rhodecode.com>", branch="issue/123" |
|
|||
109 | ) |
|
|||
110 | self.imc.add(FileNode(b"extrafile II", content=b"Some data\n")) |
|
102 | self.imc.add(FileNode(b"extrafile II", content=b"Some data\n")) | |
111 | self.imc.commit( |
|
103 | self.imc.commit("Branch without a slash...", author="joe <joe@rhodecode.com>", branch="123") | |
112 | "Branch without a slash...", author="joe <joe@rhodecode.com>", branch="123" |
|
|||
113 | ) |
|
|||
114 | assert "issue/123" in self.repo.branches |
|
104 | assert "issue/123" in self.repo.branches | |
115 | assert "123" in self.repo.branches |
|
105 | assert "123" in self.repo.branches | |
116 |
|
106 |
@@ -1,4 +1,3 b'' | |||||
1 |
|
||||
2 |
|
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH | |
3 | # |
|
2 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
@@ -28,9 +27,7 b' from rhodecode.lib.vcs import client_htt' | |||||
28 |
|
27 | |||
29 |
|
28 | |||
30 | def is_new_connection(logger, level, message): |
|
29 | def is_new_connection(logger, level, message): | |
31 | return ( |
|
30 | return logger == "requests.packages.urllib3.connectionpool" and message.startswith("Starting new HTTP") | |
32 | logger == 'requests.packages.urllib3.connectionpool' and |
|
|||
33 | message.startswith('Starting new HTTP')) |
|
|||
34 |
|
31 | |||
35 |
|
32 | |||
36 | @pytest.fixture() |
|
33 | @pytest.fixture() | |
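
`is_new_connection` is applied to pytest's `caplog.record_tuples`, which holds `(logger_name, level, message)` triples. A compact illustration of the same filtering on hand-built records:

    import logging

    def is_new_connection(logger, level, message):
        return logger == "requests.packages.urllib3.connectionpool" and \
            message.startswith("Starting new HTTP")

    # caplog.record_tuples has exactly this shape in pytest
    records = [
        ("requests.packages.urllib3.connectionpool", logging.DEBUG,
         "Starting new HTTP connection (1): localhost"),
        ("rhodecode.lib.vcs", logging.INFO, "served from cache"),
    ]
    assert len([r for r in records if is_new_connection(*r)]) == 1
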
@@ -54,7 +51,7 b' def stub_fail_session():' | |||||
54 | """ |
|
51 | """ | |
55 | session = mock.Mock() |
|
52 | session = mock.Mock() | |
56 | post = session.post() |
|
53 | post = session.post() | |
57 | post.content = msgpack.packb({'error': '500'}) |
|
54 | post.content = msgpack.packb({"error": "500"}) | |
58 | post.status_code = 500 |
|
55 | post.status_code = 500 | |
59 |
|
56 | |||
60 | session.reset_mock() |
|
57 | session.reset_mock() | |
@@ -89,44 +86,37 b' def test_uses_persistent_http_connection' | |||||
89 | for x in range(5): |
|
86 | for x in range(5): | |
90 | remote_call(normal=True, closed=False) |
|
87 | remote_call(normal=True, closed=False) | |
91 |
|
88 | |||
92 | new_connections = [ |
|
89 | new_connections = [r for r in caplog.record_tuples if is_new_connection(*r)] | |
93 | r for r in caplog.record_tuples if is_new_connection(*r)] |
|
|||
94 | assert len(new_connections) <= 1 |
|
90 | assert len(new_connections) <= 1 | |
95 |
|
91 | |||
96 |
|
92 | |||
97 | def test_repo_maker_uses_session_for_classmethods(stub_session_factory): |
|
93 | def test_repo_maker_uses_session_for_classmethods(stub_session_factory): | |
98 | repo_maker = client_http.RemoteVCSMaker( |
|
94 | repo_maker = client_http.RemoteVCSMaker("server_and_port", "endpoint", "test_dummy_scm", stub_session_factory) | |
99 | 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory) |
|
|||
100 | repo_maker.example_call() |
|
95 | repo_maker.example_call() | |
101 | stub_session_factory().post.assert_called_with( |
|
96 | stub_session_factory().post.assert_called_with("http://server_and_port/endpoint", data=mock.ANY) | |
102 | 'http://server_and_port/endpoint', data=mock.ANY) |
|
|||
103 |
|
97 | |||
104 |
|
98 | |||
105 | def test_repo_maker_uses_session_for_instance_methods( |
|
99 | def test_repo_maker_uses_session_for_instance_methods(stub_session_factory, config): | |
106 | stub_session_factory, config): |
|
100 | repo_maker = client_http.RemoteVCSMaker("server_and_port", "endpoint", "test_dummy_scm", stub_session_factory) | |
107 | repo_maker = client_http.RemoteVCSMaker( |
|
101 | repo = repo_maker("stub_path", "stub_repo_id", config) | |
108 | 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory) |
|
|||
109 | repo = repo_maker('stub_path', 'stub_repo_id', config) |
|
|||
110 | repo.example_call() |
|
102 | repo.example_call() | |
111 | stub_session_factory().post.assert_called_with( |
|
103 | stub_session_factory().post.assert_called_with("http://server_and_port/endpoint", data=mock.ANY) | |
112 | 'http://server_and_port/endpoint', data=mock.ANY) |
|
|||
113 |
|
104 | |||
114 |
|
105 | |||
115 |
@mock.patch( |
|
106 | @mock.patch("rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory") | |
116 |
@mock.patch( |
|
107 | @mock.patch("rhodecode.lib.vcs.connection") | |
117 | def test_connect_passes_in_the_same_session( |
|
108 | def test_connect_passes_in_the_same_session(connection, session_factory_class, stub_session): | |
118 | connection, session_factory_class, stub_session): |
|
|||
119 | session_factory = session_factory_class.return_value |
|
109 | session_factory = session_factory_class.return_value | |
120 | session_factory.return_value = stub_session |
|
110 | session_factory.return_value = stub_session | |
121 |
|
111 | |||
122 | vcs.connect_http('server_and_port') |
|
112 | vcs.connect_http("server_and_port") | |
123 |
|
113 | |||
124 |
|
114 | |||
125 | def test_repo_maker_uses_session_that_throws_error( |
|
115 | def test_repo_maker_uses_session_that_throws_error(stub_session_failing_factory, config): | |
126 | stub_session_failing_factory, config): |
|
|||
127 | repo_maker = client_http.RemoteVCSMaker( |
|
116 | repo_maker = client_http.RemoteVCSMaker( | |
128 |
|
|
117 | "server_and_port", "endpoint", "test_dummy_scm", stub_session_failing_factory | |
129 | repo = repo_maker('stub_path', 'stub_repo_id', config) |
|
118 | ) | |
|
119 | repo = repo_maker("stub_path", "stub_repo_id", config) | |||
130 |
|
120 | |||
131 | with pytest.raises(exceptions.HttpVCSCommunicationError): |
|
121 | with pytest.raises(exceptions.HttpVCSCommunicationError): | |
132 | repo.example_call() |
|
122 | repo.example_call() |
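
The failing-session stub packs its fake error payload with msgpack, the wire format the HTTP VCS client speaks. A quick round-trip showing what the stub produces and what a client would decode:

    import msgpack

    payload = msgpack.packb({"error": "500"})   # what stub_fail_session returns
    decoded = msgpack.unpackb(payload, raw=False)
    assert decoded == {"error": "500"}
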
@@ -1,4 +1,3 b'' | |||||
1 |
|
||||
2 |
|
|
1 | # Copyright (C) 2010-2023 RhodeCode GmbH | |
3 | # |
|
2 | # | |
4 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
@@ -23,27 +22,31 b' import time'
 import pytest
 
 from rhodecode.lib.str_utils import safe_bytes
-from rhodecode.lib.vcs.backends.base import (
-    CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit)
+from rhodecode.lib.vcs.backends.base import CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit
 from rhodecode.lib.vcs.exceptions import (
-    BranchDoesNotExistError, CommitDoesNotExistError,
-    RepositoryError, EmptyRepositoryError)
+    BranchDoesNotExistError,
+    CommitDoesNotExistError,
+    RepositoryError,
+    EmptyRepositoryError,
+)
 from rhodecode.lib.vcs.nodes import (
-    FileNode, AddedFileNodesGenerator,
-    ChangedFileNodesGenerator, RemovedFileNodesGenerator)
+    FileNode,
+    AddedFileNodesGenerator,
+    ChangedFileNodesGenerator,
+    RemovedFileNodesGenerator,
+)
 from rhodecode.tests import get_new_dir
 from rhodecode.tests.vcs.conftest import BackendTestMixin
 
 
 class TestBaseChangeset(object):
-
     def test_is_deprecated(self):
         from rhodecode.lib.vcs.backends.base import BaseChangeset
+
         pytest.deprecated_call(BaseChangeset)
 
 
 class TestEmptyCommit(object):
-
     def test_branch_without_alias_returns_none(self):
         commit = EmptyCommit()
         assert commit.branch is None
@@ -58,29 +61,28 b' class TestCommitsInNonEmptyRepo(BackendT'
         start_date = datetime.datetime(2010, 1, 1, 20)
         for x in range(5):
             yield {
-                'message': 'Commit %d' % x,
-                'author': 'Joe Doe <joe.doe@example.com>',
-                'date': start_date + datetime.timedelta(hours=12 * x),
-                'added': [
-                    FileNode(b'file_%d.txt' % x,
-                             content=b'Foobar %d' % x),
+                "message": "Commit %d" % x,
+                "author": "Joe Doe <joe.doe@example.com>",
+                "date": start_date + datetime.timedelta(hours=12 * x),
+                "added": [
+                    FileNode(b"file_%d.txt" % x, content=b"Foobar %d" % x),
                 ],
             }
 
     def test_walk_returns_empty_list_in_case_of_file(self):
-        result = list(self.tip.walk('file_0.txt'))
+        result = list(self.tip.walk("file_0.txt"))
         assert result == []
 
     @pytest.mark.backends("git", "hg")
     def test_new_branch(self):
-        self.imc.add(FileNode(b'docs/index.txt', content=b'Documentation\n'))
+        self.imc.add(FileNode(b"docs/index.txt", content=b"Documentation\n"))
         foobar_tip = self.imc.commit(
-            message='New branch: foobar',
-            author='joe <joe@rhodecode.com>',
-            branch='foobar',
+            message="New branch: foobar",
+            author="joe <joe@rhodecode.com>",
+            branch="foobar",
         )
-        assert 'foobar' in self.repo.branches
-        assert foobar_tip.branch == 'foobar'
+        assert "foobar" in self.repo.branches
+        assert foobar_tip.branch == "foobar"
         # 'foobar' should be the only branch that contains the new commit
         branch = list(self.repo.branches.values())
         assert branch[0] != branch[1]
@@ -89,18 +91,14 b' class TestCommitsInNonEmptyRepo(BackendT'
     def test_new_head_in_default_branch(self):
         tip = self.repo.get_commit()
 
-        self.imc.add(
-            FileNode(b"docs/index.txt", content=b"Documentation\n")
-        )
+        self.imc.add(FileNode(b"docs/index.txt", content=b"Documentation\n"))
         foobar_tip = self.imc.commit(
             message="New branch: foobar",
             author="joe <joe@rhodecode.com>",
             branch="foobar",
             parents=[tip],
         )
-        self.imc.change(
-            FileNode(b"docs/index.txt", content=b"Documentation\nand more...\n")
-        )
+        self.imc.change(FileNode(b"docs/index.txt", content=b"Documentation\nand more...\n"))
         assert foobar_tip.branch == "foobar"
         newtip = self.imc.commit(
             message="At foobar_tip branch",
@@ -132,51 +130,55 b' class TestCommitsInNonEmptyRepo(BackendT'
         :return:
         """
         DEFAULT_BRANCH = self.repo.DEFAULT_BRANCH_NAME
-        TEST_BRANCH = 'docs'
+        TEST_BRANCH = "docs"
         org_tip = self.repo.get_commit()
 
-        self.imc.add(FileNode(b'readme.txt', content=b'Document\n'))
+        self.imc.add(FileNode(b"readme.txt", content=b"Document\n"))
         initial = self.imc.commit(
-            message='Initial commit',
-            author='joe <joe@rhodecode.com>',
+            message="Initial commit",
+            author="joe <joe@rhodecode.com>",
             parents=[org_tip],
-            branch=DEFAULT_BRANCH,)
+            branch=DEFAULT_BRANCH,
+        )
 
-        self.imc.add(FileNode(b'newdoc.txt', content=b'foobar\n'))
+        self.imc.add(FileNode(b"newdoc.txt", content=b"foobar\n"))
         docs_branch_commit1 = self.imc.commit(
-            message='New branch: docs',
-            author='joe <joe@rhodecode.com>',
+            message="New branch: docs",
+            author="joe <joe@rhodecode.com>",
             parents=[initial],
-            branch=TEST_BRANCH,)
+            branch=TEST_BRANCH,
+        )
 
-        self.imc.add(FileNode(b'newdoc2.txt', content=b'foobar2\n'))
+        self.imc.add(FileNode(b"newdoc2.txt", content=b"foobar2\n"))
         docs_branch_commit2 = self.imc.commit(
-            message='New branch: docs2',
-            author='joe <joe@rhodecode.com>',
+            message="New branch: docs2",
+            author="joe <joe@rhodecode.com>",
             parents=[docs_branch_commit1],
-            branch=TEST_BRANCH,)
+            branch=TEST_BRANCH,
+        )
 
-        self.imc.add(FileNode(b'newfile', content=b'hello world\n'))
+        self.imc.add(FileNode(b"newfile", content=b"hello world\n"))
         self.imc.commit(
-            message='Back in default branch',
-            author='joe <joe@rhodecode.com>',
+            message="Back in default branch",
+            author="joe <joe@rhodecode.com>",
             parents=[initial],
-            branch=DEFAULT_BRANCH,)
+            branch=DEFAULT_BRANCH,
+        )
 
         default_branch_commits = self.repo.get_commits(branch_name=DEFAULT_BRANCH)
         assert docs_branch_commit1 not in list(default_branch_commits)
         assert docs_branch_commit2 not in list(default_branch_commits)
 
         docs_branch_commits = self.repo.get_commits(
-            start_id=self.repo.commit_ids[0], end_id=self.repo.commit_ids[-1],
-            branch_name=TEST_BRANCH)
+            start_id=self.repo.commit_ids[0], end_id=self.repo.commit_ids[-1], branch_name=TEST_BRANCH
+        )
         assert docs_branch_commit1 in list(docs_branch_commits)
         assert docs_branch_commit2 in list(docs_branch_commits)
 
     @pytest.mark.backends("svn")
     def test_get_commits_respects_branch_name_svn(self, vcsbackend_svn):
-        repo = vcsbackend_svn['svn-simple-layout']
-        commits = repo.get_commits(branch_name='trunk')
+        repo = vcsbackend_svn["svn-simple-layout"]
+        commits = repo.get_commits(branch_name="trunk")
         commit_indexes = [c.idx for c in commits]
         assert commit_indexes == [1, 2, 3, 7, 12, 15]
 
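
The hunk above ends the branch-filtering test: get_commits(branch_name=...) must yield only commits recorded on that branch, with start_id/end_id bounding the range. A toy model of the asserted property, using plain dicts in place of backend commits (illustrative only, not the real API):

commits = [
    {"idx": 0, "branch": "default", "message": "Initial commit"},
    {"idx": 1, "branch": "docs", "message": "New branch: docs"},
    {"idx": 2, "branch": "docs", "message": "New branch: docs2"},
    {"idx": 3, "branch": "default", "message": "Back in default branch"},
]


def get_commits(branch_name):
    # Keep only commits whose branch label matches, preserving order.
    return [c for c in commits if c["branch"] == branch_name]


assert [c["idx"] for c in get_commits("docs")] == [1, 2]
assert [c["idx"] for c in get_commits("default")] == [0, 3]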
@@ -214,13 +216,10 b' class TestCommits(BackendTestMixin):'
         start_date = datetime.datetime(2010, 1, 1, 20)
         for x in range(5):
             yield {
-                'message': 'Commit %d' % x,
-                'author': 'Joe Doe <joe.doe@example.com>',
-                'date': start_date + datetime.timedelta(hours=12 * x),
-                'added': [
-                    FileNode(b'file_%d.txt' % x,
-                             content=b'Foobar %d' % x)
-                ],
+                "message": "Commit %d" % x,
+                "author": "Joe Doe <joe.doe@example.com>",
+                "date": start_date + datetime.timedelta(hours=12 * x),
+                "added": [FileNode(b"file_%d.txt" % x, content=b"Foobar %d" % x)],
             }
 
     def test_simple(self):
@@ -231,11 +230,11 b' class TestCommits(BackendTestMixin):'
         tip = self.repo.get_commit()
         # json.dumps(tip) uses .__json__() method
         data = tip.__json__()
-        assert 'branch' in data
-        assert data['revision']
+        assert "branch" in data
+        assert data["revision"]
 
     def test_retrieve_tip(self):
-        tip = self.repo.get_commit('tip')
+        tip = self.repo.get_commit("tip")
         assert tip == self.repo.get_commit()
 
     def test_invalid(self):
@@ -259,34 +258,34 b' class TestCommits(BackendTestMixin):'
 
     def test_size(self):
         tip = self.repo.get_commit()
-        size = 5 * len('Foobar N')  # Size of 5 files
+        size = 5 * len("Foobar N")  # Size of 5 files
         assert tip.size == size
 
     def test_size_at_commit(self):
         tip = self.repo.get_commit()
-        size = 5 * len('Foobar N')  # Size of 5 files
+        size = 5 * len("Foobar N")  # Size of 5 files
         assert self.repo.size_at_commit(tip.raw_id) == size
 
     def test_size_at_first_commit(self):
         commit = self.repo[0]
-        size = len('Foobar N')  # Size of 1 file
+        size = len("Foobar N")  # Size of 1 file
         assert self.repo.size_at_commit(commit.raw_id) == size
 
     def test_author(self):
         tip = self.repo.get_commit()
-        assert_text_equal(tip.author, 'Joe Doe <joe.doe@example.com>')
+        assert_text_equal(tip.author, "Joe Doe <joe.doe@example.com>")
 
     def test_author_name(self):
         tip = self.repo.get_commit()
-        assert_text_equal(tip.author_name, 'Joe Doe')
+        assert_text_equal(tip.author_name, "Joe Doe")
 
     def test_author_email(self):
         tip = self.repo.get_commit()
-        assert_text_equal(tip.author_email, 'joe.doe@example.com')
+        assert_text_equal(tip.author_email, "joe.doe@example.com")
 
     def test_message(self):
         tip = self.repo.get_commit()
-        assert_text_equal(tip.message, 'Commit 4')
+        assert_text_equal(tip.message, "Commit 4")
 
     def test_diff(self):
         tip = self.repo.get_commit()
@@ -296,7 +295,7 b' class TestCommits(BackendTestMixin):'
     def test_prev(self):
         tip = self.repo.get_commit()
         prev_commit = tip.prev()
-        assert prev_commit.message == 'Commit 3'
+        assert prev_commit.message == "Commit 3"
 
     def test_prev_raises_on_first_commit(self):
         commit = self.repo.get_commit(commit_idx=0)
@@ -311,7 +310,7 b' class TestCommits(BackendTestMixin):'
     def test_next(self):
         commit = self.repo.get_commit(commit_idx=2)
         next_commit = commit.next()
-        assert next_commit.message == 'Commit 3'
+        assert next_commit.message == "Commit 3"
 
     def test_next_raises_on_tip(self):
         commit = self.repo.get_commit()
@@ -320,36 +319,36 b' class TestCommits(BackendTestMixin):'
 
     def test_get_path_commit(self):
         commit = self.repo.get_commit()
-        commit.get_path_commit('file_4.txt')
-        assert commit.message == 'Commit 4'
+        commit.get_path_commit("file_4.txt")
+        assert commit.message == "Commit 4"
 
     def test_get_filenodes_generator(self):
         tip = self.repo.get_commit()
         filepaths = [node.path for node in tip.get_filenodes_generator()]
-        assert filepaths == ['file_%d.txt' % x for x in range(5)]
+        assert filepaths == ["file_%d.txt" % x for x in range(5)]
 
     def test_get_file_annotate(self):
         file_added_commit = self.repo.get_commit(commit_idx=3)
-        annotations = list(file_added_commit.get_file_annotate('file_3.txt'))
+        annotations = list(file_added_commit.get_file_annotate("file_3.txt"))
 
         line_no, commit_id, commit_loader, line = annotations[0]
 
         assert line_no == 1
         assert commit_id == file_added_commit.raw_id
         assert commit_loader() == file_added_commit
-        assert b'Foobar 3' in line
+        assert b"Foobar 3" in line
 
     def test_get_file_annotate_does_not_exist(self):
         file_added_commit = self.repo.get_commit(commit_idx=2)
         # TODO: Should use a specific exception class here?
         with pytest.raises(Exception):
-            list(file_added_commit.get_file_annotate('file_3.txt'))
+            list(file_added_commit.get_file_annotate("file_3.txt"))
 
     def test_get_file_annotate_tip(self):
         tip = self.repo.get_commit()
         commit = self.repo.get_commit(commit_idx=3)
-        expected_values = list(commit.get_file_annotate('file_3.txt'))
-        annotations = list(tip.get_file_annotate('file_3.txt'))
+        expected_values = list(commit.get_file_annotate("file_3.txt"))
+        annotations = list(tip.get_file_annotate("file_3.txt"))
 
         # Note: Skip index 2 because the loader function is not the same
         for idx in (0, 1, 3):
@@ -398,7 +397,7 b' class TestCommits(BackendTestMixin):'
         repo = self.Backend(repo_path, create=True)
 
         with pytest.raises(EmptyRepositoryError):
-            list(repo.get_commits(start_id='foobar'))
+            list(repo.get_commits(start_id="foobar"))
 
     def test_get_commits_respects_hidden(self):
         commits = self.repo.get_commits(show_hidden=True)
@@ -424,8 +423,7 b' class TestCommits(BackendTestMixin):'
 
     def test_get_commits_respects_start_date_with_branch(self):
         start_date = datetime.datetime(2010, 1, 2)
-        commits = self.repo.get_commits(
-            start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME)
+        commits = self.repo.get_commits(start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME)
         assert isinstance(commits, CollectionGenerator)
         # Should be 4 commits after 2010-01-02 00:00:00
         assert len(commits) == 4
@@ -435,8 +433,7 b' class TestCommits(BackendTestMixin):'
     def test_get_commits_respects_start_date_and_end_date(self):
         start_date = datetime.datetime(2010, 1, 2)
         end_date = datetime.datetime(2010, 1, 3)
-        commits = self.repo.get_commits(start_date=start_date,
-                                        end_date=end_date)
+        commits = self.repo.get_commits(start_date=start_date, end_date=end_date)
         assert isinstance(commits, CollectionGenerator)
         assert len(commits) == 2
         for c in commits:
@@ -459,23 +456,22 b' class TestCommits(BackendTestMixin):'
         assert list(commit_ids) == list(reversed(self.repo.commit_ids))
 
     def test_get_commits_slice_generator(self):
-        commits = self.repo.get_commits(
-            branch_name=self.repo.DEFAULT_BRANCH_NAME)
+        commits = self.repo.get_commits(branch_name=self.repo.DEFAULT_BRANCH_NAME)
         assert isinstance(commits, CollectionGenerator)
         commit_slice = list(commits[1:3])
         assert len(commit_slice) == 2
 
     def test_get_commits_raise_commitdoesnotexist_for_wrong_start(self):
         with pytest.raises(CommitDoesNotExistError):
-            list(self.repo.get_commits(start_id='foobar'))
+            list(self.repo.get_commits(start_id="foobar"))
 
     def test_get_commits_raise_commitdoesnotexist_for_wrong_end(self):
         with pytest.raises(CommitDoesNotExistError):
-            list(self.repo.get_commits(end_id='foobar'))
+            list(self.repo.get_commits(end_id="foobar"))
 
     def test_get_commits_raise_branchdoesnotexist_for_wrong_branch_name(self):
         with pytest.raises(BranchDoesNotExistError):
-            list(self.repo.get_commits(branch_name='foobar'))
+            list(self.repo.get_commits(branch_name="foobar"))
 
     def test_get_commits_raise_repositoryerror_for_wrong_start_end(self):
         start_id = self.repo.commit_ids[-1]
@@ -498,13 +494,16 b' class TestCommits(BackendTestMixin):'
         assert commit1 is not None
         assert commit2 is not None
         assert 1 != commit1
-        assert 'string' != commit1
+        assert "string" != commit1
 
 
-@pytest.mark.parametrize('filename, expected', [
-    ("README.rst", False),
-    ("README", True),
-])
+@pytest.mark.parametrize(
+    "filename, expected",
+    [
+        ("README.rst", False),
+        ("README", True),
+    ],
+)
 def test_commit_is_link(vcsbackend, filename, expected):
     commit = vcsbackend.repo.get_commit()
     link_status = commit.is_link(filename)
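
The parametrize hunk above only explodes the decorator call across lines in black's trailing-comma style; the argnames string and the case list are unchanged, so the generated tests are identical. A minimal runnable sketch of the construct, with a hypothetical assertion body:

import pytest


@pytest.mark.parametrize(
    "filename, expected",
    [
        ("README.rst", False),
        ("README", True),
    ],
)
def test_naming_convention(filename, expected):
    # pytest emits one test per tuple and unpacks each tuple into the
    # arguments named in the first string: filename, expected.
    assert (filename == "README") is expected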
@@ -519,75 +518,74 b' class TestCommitsChanges(BackendTestMixi'
     def _get_commits(cls):
         return [
             {
-                'message': 'Initial',
-                'author': 'Joe Doe <joe.doe@example.com>',
-                'date': datetime.datetime(2010, 1, 1, 20),
-                'added': [
-                    FileNode(b'foo/bar', content=b'foo'),
-                    FileNode(safe_bytes('foo/bał'), content=b'foo'),
-                    FileNode(b'foobar', content=b'foo'),
-                    FileNode(b'qwe', content=b'foo'),
+                "message": "Initial",
+                "author": "Joe Doe <joe.doe@example.com>",
+                "date": datetime.datetime(2010, 1, 1, 20),
+                "added": [
+                    FileNode(b"foo/bar", content=b"foo"),
+                    FileNode(safe_bytes("foo/bał"), content=b"foo"),
+                    FileNode(b"foobar", content=b"foo"),
+                    FileNode(b"qwe", content=b"foo"),
                 ],
             },
             {
-                'message': 'Massive changes',
-                'author': 'Joe Doe <joe.doe@example.com>',
-                'date': datetime.datetime(2010, 1, 1, 22),
-                'added': [FileNode(b'fallout', content=b'War never changes')],
-                'changed': [
-                    FileNode(b'foo/bar', content=b'baz'),
-                    FileNode(b'foobar', content=b'baz'),
+                "message": "Massive changes",
+                "author": "Joe Doe <joe.doe@example.com>",
+                "date": datetime.datetime(2010, 1, 1, 22),
+                "added": [FileNode(b"fallout", content=b"War never changes")],
+                "changed": [
+                    FileNode(b"foo/bar", content=b"baz"),
+                    FileNode(b"foobar", content=b"baz"),
                 ],
-                'removed': [FileNode(b'qwe')],
+                "removed": [FileNode(b"qwe")],
             },
         ]
 
     def test_initial_commit(self, local_dt_to_utc):
         commit = self.repo.get_commit(commit_idx=0)
         assert set(commit.added) == {
-            commit.get_node('foo/bar'),
-            commit.get_node('foo/bał'),
-            commit.get_node('foobar'),
-            commit.get_node('qwe'),
+            commit.get_node("foo/bar"),
+            commit.get_node("foo/bał"),
+            commit.get_node("foobar"),
+            commit.get_node("qwe"),
         }
         assert set(commit.changed) == set()
         assert set(commit.removed) == set()
-        assert set(commit.affected_files) == {'foo/bar', 'foo/bał', 'foobar', 'qwe'}
-        assert commit.date == local_dt_to_utc(
-            datetime.datetime(2010, 1, 1, 20, 0))
+        assert set(commit.affected_files) == {"foo/bar", "foo/bał", "foobar", "qwe"}
+        assert commit.date == local_dt_to_utc(datetime.datetime(2010, 1, 1, 20, 0))
 
     def test_head_added(self):
         commit = self.repo.get_commit()
         assert isinstance(commit.added, AddedFileNodesGenerator)
-        assert set(commit.added) == {commit.get_node('fallout')}
+        assert set(commit.added) == {commit.get_node("fallout")}
         assert isinstance(commit.changed, ChangedFileNodesGenerator)
-        assert set(commit.changed) == {commit.get_node('foo/bar'), commit.get_node('foobar')}
+        assert set(commit.changed) == {commit.get_node("foo/bar"), commit.get_node("foobar")}
         assert isinstance(commit.removed, RemovedFileNodesGenerator)
         assert len(commit.removed) == 1
-        assert list(commit.removed)[0].path == 'qwe'
+        assert list(commit.removed)[0].path == "qwe"
 
     def test_get_filemode(self):
         commit = self.repo.get_commit()
-        assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bar')
+        assert FILEMODE_DEFAULT == commit.get_file_mode("foo/bar")
 
     def test_get_filemode_non_ascii(self):
         commit = self.repo.get_commit()
-        assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bał')
-        assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bał')
+        assert FILEMODE_DEFAULT == commit.get_file_mode("foo/bał")
+        assert FILEMODE_DEFAULT == commit.get_file_mode("foo/bał")
 
     def test_get_path_history(self):
         commit = self.repo.get_commit()
-        history = commit.get_path_history('foo/bar')
+        history = commit.get_path_history("foo/bar")
         assert len(history) == 2
 
     def test_get_path_history_with_limit(self):
         commit = self.repo.get_commit()
-        history = commit.get_path_history('foo/bar', limit=1)
+        history = commit.get_path_history("foo/bar", limit=1)
         assert len(history) == 1
 
     def test_get_path_history_first_commit(self):
         commit = self.repo[0]
-        history = commit.get_path_history('foo/bar')
+        history = commit.get_path_history("foo/bar")
         assert len(history) == 1
 
 
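
Every hunk in this file is a pure formatting change: quote style, line wrapping, trailing commas. A quick way to confirm that such a reformat cannot change behavior is to compare parse trees, the same safety check black performs after rewriting a file; a minimal sketch, assuming you hold the old and new source text as strings:

import ast


def same_ast(old_src: str, new_src: str) -> bool:
    # ast.dump() records neither quote style nor physical line layout,
    # so 'x' versus "x" and wrapped versus merged calls compare equal.
    return ast.dump(ast.parse(old_src)) == ast.dump(ast.parse(new_src))


assert same_ast("m = 'Commit 4'", 'm = "Commit 4"')
assert same_ast("f(a,\n  b)", "f(a, b)")
assert not same_ast("size = 5", "size = 6")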
@@ -1,4 +1,3 b''
-
 # Copyright (C) 2010-2023 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
@@ -21,14 +20,17 b' import pytest'
 
 
 def test_get_existing_value(config):
-    value = config.get('section-a', 'a-1')
-    assert value == 'value-a-1'
+    value = config.get("section-a", "a-1")
+    assert value == "value-a-1"
 
 
-@pytest.mark.parametrize('section, option', [
-    ('section-a', 'does-not-exist'),
-    ('does-not-exist', 'does-not-exist'),
-])
+@pytest.mark.parametrize(
+    "section, option",
+    [
+        ("section-a", "does-not-exist"),
+        ("does-not-exist", "does-not-exist"),
+    ],
+)
 def test_get_unset_value_returns_none(config, section, option):
     value = config.get(section, option)
     assert value is None
@@ -41,11 +43,11 b' def test_allows_to_create_a_copy(config)'
 
 def test_changes_in_the_copy_dont_affect_the_original(config):
     clone = config.copy()
-    clone.set('section-a', 'a-2', 'value-a-2')
-    assert set(config.serialize()) == {('section-a', 'a-1', 'value-a-1')}
+    clone.set("section-a", "a-2", "value-a-2")
+    assert set(config.serialize()) == {("section-a", "a-1", "value-a-1")}
 
 
 def test_changes_in_the_original_dont_affect_the_copy(config):
     clone = config.copy()
-    config.set('section-a', 'a-2', 'value-a-2')
-    assert set(clone.serialize()) == {('section-a', 'a-1', 'value-a-1')}
+    config.set("section-a", "a-2", "value-a-2")
+    assert set(clone.serialize()) == {("section-a", "a-1", "value-a-1")}
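
The two tests above pin down copy isolation: mutating the copy must not leak into the original, and mutating the original must not leak into the copy. A dict-based sketch of the same property; plain nested dicts stand in for the Config object under test, which is an assumption for illustration:

import copy

original = {"section-a": {"a-1": "value-a-1"}}

# A deep copy duplicates the nested dict; a shallow copy would share it
# and the isolation asserts below would fail.
clone = copy.deepcopy(original)
clone["section-a"]["a-2"] = "value-a-2"

assert "a-2" not in original["section-a"]
assert clone["section-a"]["a-2"] == "value-a-2"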
NO CONTENT: modified file
The requested commit or file is too big and content was truncated.
NO CONTENT: file was removed
This diff has been collapsed as it changes many lines, (1094 lines changed)