@@ -0,0 +1,132 b'' | |||
|
1 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
|
2 | # | |
|
3 | # This program is free software: you can redistribute it and/or modify | |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
5 | # (only), as published by the Free Software Foundation. | |
|
6 | # | |
|
7 | # This program is distributed in the hope that it will be useful, | |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
10 | # GNU General Public License for more details. | |
|
11 | # | |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
14 | # | |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
18 | ||
|
19 | """ | |
|
20 | py.test config for the test suite that performs push/pull operations. | |
|
21 | ||
|
22 | .. important:: | |
|
23 | ||
|
24 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started | |
|
25 | to redirect things to stderr instead of stdout. | |
|
26 | """ | |
|
27 | ||
|
28 | import pytest | |
|
29 | import logging | |
|
30 | ||
|
31 | from rhodecode.authentication import AuthenticationPluginRegistry | |
|
32 | from rhodecode.model.db import Permission, User | |
|
33 | from rhodecode.model.meta import Session | |
|
34 | from rhodecode.model.settings import SettingsModel | |
|
35 | from rhodecode.model.user import UserModel | |
|
36 | ||
|
37 | ||
|
38 | log = logging.getLogger(__name__) | |
|
39 | ||
|
40 | ||
|
41 | @pytest.fixture() | |
|
42 | def enable_auth_plugins(request, baseapp, csrf_token): | |
|
43 | """ | |
|
44 | Return a factory object that, when called, allows controlling which | |
|
45 | authentication plugins are enabled. | |
|
46 | """ | |
|
47 | ||
|
48 | class AuthPluginManager(object): | |
|
49 | ||
|
50 | def cleanup(self): | |
|
51 | self._enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode']) | |
|
52 | ||
|
53 | def enable(self, plugins_list, override=None): | |
|
54 | return self._enable_plugins(plugins_list, override) | |
|
55 | ||
|
56 | def _enable_plugins(self, plugins_list, override=None): | |
|
57 | override = override or {} | |
|
58 | params = { | |
|
59 | 'auth_plugins': ','.join(plugins_list), | |
|
60 | } | |
|
61 | ||
|
62 | # helper map translating some plugin names to others, to match the settings code | |
|
63 | name_map = { | |
|
64 | 'token': 'authtoken' | |
|
65 | } | |
|
66 | log.debug('enable_auth_plugins: enabling following auth-plugins: %s', plugins_list) | |
|
67 | ||
|
68 | for module in plugins_list: | |
|
69 | plugin_name = module.partition('#')[-1] | |
|
70 | if plugin_name in name_map: | |
|
71 | plugin_name = name_map[plugin_name] | |
|
72 | enabled_plugin = f'auth_{plugin_name}_enabled' | |
|
73 | cache_ttl = f'auth_{plugin_name}_cache_ttl' | |
|
74 | ||
|
75 | # default params that are needed for each plugin, | |
|
76 | # `enabled` and `cache_ttl` | |
|
77 | params.update({ | |
|
78 | enabled_plugin: True, | |
|
79 | cache_ttl: 0 | |
|
80 | }) | |
|
81 | if override: | |
|
82 | params.update(override.get(module, {})) | |
|
83 | ||
|
84 | validated_params = params | |
|
85 | ||
|
86 | for k, v in validated_params.items(): | |
|
87 | setting = SettingsModel().create_or_update_setting(k, v) | |
|
88 | Session().add(setting) | |
|
89 | Session().commit() | |
|
90 | ||
|
91 | AuthenticationPluginRegistry.invalidate_auth_plugins_cache(hard=True) | |
|
92 | ||
|
93 | enabled_plugins = SettingsModel().get_auth_plugins() | |
|
94 | assert plugins_list == enabled_plugins | |
|
95 | ||
|
96 | enabler = AuthPluginManager() | |
|
97 | request.addfinalizer(enabler.cleanup) | |
|
98 | ||
|
99 | return enabler | |
|
100 | ||
|
101 | ||
|
102 | @pytest.fixture() | |
|
103 | def test_user_factory(request, baseapp): | |
|
104 | ||
|
105 | def user_factory(username='test_user', password='qweqwe', first_name='John', last_name='Testing', **kwargs): | |
|
106 | usr = UserModel().create_or_update( | |
|
107 | username=username, | |
|
108 | password=password, | |
|
109 | email=f'{username}@rhodecode.org', | |
|
110 | firstname=first_name, lastname=last_name) | |
|
111 | Session().commit() | |
|
112 | ||
|
113 | for k, v in kwargs.items(): | |
|
114 | setattr(usr, k, v) | |
|
115 | Session().add(usr) | |
|
116 | ||
|
117 | assert User.get_by_username(username) == usr | |
|
118 | ||
|
119 | @request.addfinalizer | |
|
120 | def cleanup(): | |
|
121 | if UserModel().get_user(usr.user_id) is None: | |
|
122 | return | |
|
123 | ||
|
124 | perm = Permission.query().all() | |
|
125 | for p in perm: | |
|
126 | UserModel().revoke_perm(usr, p) | |
|
127 | ||
|
128 | UserModel().delete(usr.user_id) | |
|
129 | Session().commit() | |
|
130 | return usr | |
|
131 | ||
|
132 | return user_factory |
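
For orientation, a brief usage sketch (not part of the changeset) of how a test could combine the two fixtures above. The plugin egg names follow the ones used in the vcs_operations tests later in this diff; the user attributes and test body are illustrative only.

def test_token_plugin_with_temporary_user(enable_auth_plugins, test_user_factory):
    # switch authentication to token + rhodecode; the fixture finalizer
    # restores the default rhodecode-only plugin after the test
    enable_auth_plugins.enable([
        'egg:rhodecode-enterprise-ce#token',
        'egg:rhodecode-enterprise-ce#rhodecode',
    ])

    # create a throw-away user; the factory finalizer revokes permissions
    # and deletes the user once the test finishes
    user = test_user_factory(username='token_user', password='secret')
    assert user.username == 'token_user'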
@@ -0,0 +1,167 b'' | |||
|
1 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
|
2 | # | |
|
3 | # This program is free software: you can redistribute it and/or modify | |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
5 | # (only), as published by the Free Software Foundation. | |
|
6 | # | |
|
7 | # This program is distributed in the hope that it will be useful, | |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
10 | # GNU General Public License for more details. | |
|
11 | # | |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
14 | # | |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
18 | import pytest | |
|
19 | from rhodecode.lib import ext_json | |
|
20 | ||
|
21 | ||
|
22 | def get_backends_from_metafunc(metafunc): | |
|
23 | requested_backends = set(metafunc.config.getoption('--backends')) | |
|
24 | backend_mark = metafunc.definition.get_closest_marker('backends') | |
|
25 | if backend_mark: | |
|
26 | # Supported backends by this test function, created from | |
|
27 | # pytest.mark.backends | |
|
28 | backends = backend_mark.args | |
|
29 | elif hasattr(metafunc.cls, 'backend_alias'): | |
|
30 | # Support class attribute "backend_alias", this is mainly | |
|
31 | # for legacy reasons for tests not yet using pytest.mark.backends | |
|
32 | backends = [metafunc.cls.backend_alias] | |
|
33 | else: | |
|
34 | backends = metafunc.config.getoption('--backends') | |
|
35 | return requested_backends.intersection(backends) | |
|
36 | ||
|
37 | ||
|
38 | def pytest_addoption(parser): | |
|
39 | ||
|
40 | def _parse_json(value): | |
|
41 | return ext_json.str_json(value) if value else None | |
|
42 | ||
|
43 | def _split_comma(value): | |
|
44 | return value.split(',') | |
|
45 | ||
|
46 | parser.addoption( | |
|
47 | '--keep-tmp-path', action='store_true', | |
|
48 | help="Keep the test temporary directories") | |
|
49 | ||
|
50 | parser.addoption( | |
|
51 | '--backends', action='store', type=_split_comma, | |
|
52 | default=['git', 'hg', 'svn'], | |
|
53 | help="Select which backends to test for backend specific tests.") | |
|
54 | parser.addoption( | |
|
55 | '--dbs', action='store', type=_split_comma, | |
|
56 | default=['sqlite'], | |
|
57 | help="Select which database to test for database specific tests. " | |
|
58 | "Possible options are sqlite,postgres,mysql") | |
|
59 | parser.addoption( | |
|
60 | '--appenlight', '--ae', action='store_true', | |
|
61 | help="Track statistics in appenlight.") | |
|
62 | parser.addoption( | |
|
63 | '--appenlight-api-key', '--ae-key', | |
|
64 | help="API key for Appenlight.") | |
|
65 | parser.addoption( | |
|
66 | '--appenlight-url', '--ae-url', | |
|
67 | default="https://ae.rhodecode.com", | |
|
68 | help="Appenlight service URL, defaults to https://ae.rhodecode.com") | |
|
69 | parser.addoption( | |
|
70 | '--sqlite-connection-string', action='store', | |
|
71 | default='', help="Connection string for the dbs tests with SQLite") | |
|
72 | parser.addoption( | |
|
73 | '--postgres-connection-string', action='store', | |
|
74 | default='', help="Connection string for the dbs tests with Postgres") | |
|
75 | parser.addoption( | |
|
76 | '--mysql-connection-string', action='store', | |
|
77 | default='', help="Connection string for the dbs tests with MySQL") | |
|
78 | parser.addoption( | |
|
79 | '--repeat', type=int, default=100, | |
|
80 | help="Number of repetitions in performance tests.") | |
|
81 | ||
|
82 | parser.addoption( | |
|
83 | '--test-loglevel', dest='test_loglevel', | |
|
84 | help="Set default logging level for tests: critical (default), error, warn, info, debug" | |
|
85 | group = parser.getgroup('pylons') | |
|
86 | group.addoption( | |
|
87 | '--with-pylons', dest='pyramid_config', | |
|
88 | help="Set up a Pylons environment with the specified config file.") | |
|
89 | group.addoption( | |
|
90 | '--ini-config-override', action='store', type=_parse_json, | |
|
91 | default=None, dest='pyramid_config_override', help=( | |
|
92 | "Overrides the .ini file settings. Should be specified in JSON" | |
|
93 | " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'" | |
|
94 | ) | |
|
95 | ) | |
|
96 | parser.addini( | |
|
97 | 'pyramid_config', | |
|
98 | "Set up a Pyramid environment with the specified config file.") | |
|
99 | ||
|
100 | vcsgroup = parser.getgroup('vcs') | |
|
101 | vcsgroup.addoption( | |
|
102 | '--without-vcsserver', dest='with_vcsserver', action='store_false', | |
|
103 | help="Do not start the VCSServer in a background process.") | |
|
104 | vcsgroup.addoption( | |
|
105 | '--with-vcsserver-http', dest='vcsserver_config_http', | |
|
106 | help="Start the HTTP VCSServer with the specified config file.") | |
|
107 | vcsgroup.addoption( | |
|
108 | '--vcsserver-protocol', dest='vcsserver_protocol', | |
|
109 | help="Start the VCSServer with HTTP protocol support.") | |
|
110 | vcsgroup.addoption( | |
|
111 | '--vcsserver-config-override', action='store', type=_parse_json, | |
|
112 | default=None, dest='vcsserver_config_override', help=( | |
|
113 | "Overrides the .ini file settings for the VCSServer. " | |
|
114 | "Should be specified in JSON " | |
|
115 | "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'" | |
|
116 | ) | |
|
117 | ) | |
|
118 | vcsgroup.addoption( | |
|
119 | '--vcsserver-port', action='store', type=int, | |
|
120 | default=None, help=( | |
|
121 | "Allows to set the port of the vcsserver. Useful when testing " | |
|
122 | "against an already running server and random ports cause " | |
|
123 | "trouble.")) | |
|
124 | parser.addini( | |
|
125 | 'vcsserver_config_http', | |
|
126 | "Start the HTTP VCSServer with the specified config file.") | |
|
127 | parser.addini( | |
|
128 | 'vcsserver_protocol', | |
|
129 | "Start the VCSServer with HTTP protocol support.") | |
|
130 | ||
|
131 | ||
|
132 | @pytest.hookimpl(tryfirst=True, hookwrapper=True) | |
|
133 | def pytest_runtest_makereport(item, call): | |
|
134 | """ | |
|
135 | Adding the remote traceback if the exception has this information. | |
|
136 | ||
|
137 | VCSServer attaches this information as the attribute `_vcs_server_traceback` | |
|
138 | to the exception instance. | |
|
139 | """ | |
|
140 | outcome = yield | |
|
141 | report = outcome.get_result() | |
|
142 | ||
|
143 | if call.excinfo: | |
|
144 | exc = call.excinfo.value | |
|
145 | vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None) | |
|
146 | ||
|
147 | if vcsserver_traceback and report.outcome == 'failed': | |
|
148 | section = f'VCSServer remote traceback {report.when}' | |
|
149 | report.sections.append((section, vcsserver_traceback)) | |
|
150 | ||
|
151 | ||
|
152 | def pytest_generate_tests(metafunc): | |
|
153 | ||
|
154 | # Support test generation based on --backend parameter | |
|
155 | if 'backend_alias' in metafunc.fixturenames: | |
|
156 | backends = get_backends_from_metafunc(metafunc) | |
|
157 | scope = None | |
|
158 | if not backends: | |
|
159 | pytest.skip("Not enabled for any of selected backends") | |
|
160 | ||
|
161 | metafunc.parametrize('backend_alias', backends, scope=scope) | |
|
162 | ||
|
163 | backend_mark = metafunc.definition.get_closest_marker('backends') | |
|
164 | if backend_mark: | |
|
165 | backends = get_backends_from_metafunc(metafunc) | |
|
166 | if not backends: | |
|
167 | pytest.skip("Not enabled for any of selected backends") |
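
As a hypothetical illustration (not part of the changeset) of how these hooks drive test generation: a test marked with pytest.mark.backends and requesting the backend_alias fixture is parametrized with the intersection of the marker's backends and the ones passed via --backends.

import pytest

@pytest.mark.backends('git', 'hg')
def test_runs_once_per_selected_backend(backend_alias):
    # with the default --backends git,hg,svn this test is generated twice,
    # once with backend_alias='git' and once with backend_alias='hg';
    # running with --backends svn skips it via the pytest.skip call above
    assert backend_alias in ('git', 'hg')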
@@ -0,0 +1,174 b'' | |||
|
1 | ||
|
2 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
|
3 | # | |
|
4 | # This program is free software: you can redistribute it and/or modify | |
|
5 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
6 | # (only), as published by the Free Software Foundation. | |
|
7 | # | |
|
8 | # This program is distributed in the hope that it will be useful, | |
|
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
11 | # GNU General Public License for more details. | |
|
12 | # | |
|
13 | # You should have received a copy of the GNU Affero General Public License | |
|
14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
15 | # | |
|
16 | # This program is dual-licensed. If you wish to learn more about the | |
|
17 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
19 | ||
|
20 | """ | |
|
21 | Test suite for making push/pull operations, on specially modified INI files | |
|
22 | ||
|
23 | .. important:: | |
|
24 | ||
|
25 | You must have git >= 1.8.5 for tests to work fine. With 68b939b git started | |
|
26 | to redirect things to stderr instead of stdout. | |
|
27 | """ | |
|
28 | ||
|
29 | import pytest | |
|
30 | ||
|
31 | from rhodecode.model.auth_token import AuthTokenModel | |
|
32 | from rhodecode.model.db import Repository | |
|
33 | from rhodecode.model.meta import Session | |
|
34 | from rhodecode.tests import (GIT_REPO, HG_REPO) | |
|
35 | ||
|
36 | from rhodecode.tests.vcs_operations import (Command, _check_proper_clone) | |
|
37 | ||
|
38 | ||
|
39 | @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user") | |
|
40 | class TestVCSOperations(object): | |
|
41 | def test_clone_by_auth_token( | |
|
42 | self, rc_web_server, tmpdir, user_util, enable_auth_plugins): | |
|
43 | ||
|
44 | enable_auth_plugins.enable([ | |
|
45 | 'egg:rhodecode-enterprise-ce#token', | |
|
46 | 'egg:rhodecode-enterprise-ce#rhodecode' | |
|
47 | ]) | |
|
48 | ||
|
49 | user = user_util.create_user() | |
|
50 | token = user.auth_tokens[1] | |
|
51 | ||
|
52 | clone_url = rc_web_server.repo_clone_url( | |
|
53 | HG_REPO, user=user.username, passwd=token) | |
|
54 | ||
|
55 | stdout, stderr = Command('/tmp').execute( | |
|
56 | 'hg clone', clone_url, tmpdir.strpath) | |
|
57 | ||
|
58 | _check_proper_clone(stdout, stderr, 'hg') | |
|
59 | ||
|
60 | def test_clone_by_auth_token_expired( | |
|
61 | self, rc_web_server, tmpdir, user_util, enable_auth_plugins): | |
|
62 | enable_auth_plugins.enable([ | |
|
63 | 'egg:rhodecode-enterprise-ce#token', | |
|
64 | 'egg:rhodecode-enterprise-ce#rhodecode' | |
|
65 | ]) | |
|
66 | ||
|
67 | user = user_util.create_user() | |
|
68 | auth_token = AuthTokenModel().create( | |
|
69 | user.user_id, 'test-token', -10, AuthTokenModel.cls.ROLE_VCS) | |
|
70 | token = auth_token.api_key | |
|
71 | ||
|
72 | clone_url = rc_web_server.repo_clone_url( | |
|
73 | HG_REPO, user=user.username, passwd=token) | |
|
74 | ||
|
75 | stdout, stderr = Command('/tmp').execute( | |
|
76 | 'hg clone', clone_url, tmpdir.strpath) | |
|
77 | assert 'abort: authorization failed' in stderr | |
|
78 | ||
|
79 | msg = 'reason: bad or inactive token.' | |
|
80 | rc_web_server.assert_message_in_server_logs(msg) | |
|
81 | ||
|
82 | def test_clone_by_auth_token_bad_role( | |
|
83 | self, rc_web_server, tmpdir, user_util, enable_auth_plugins): | |
|
84 | enable_auth_plugins.enable([ | |
|
85 | 'egg:rhodecode-enterprise-ce#token', | |
|
86 | 'egg:rhodecode-enterprise-ce#rhodecode' | |
|
87 | ]) | |
|
88 | ||
|
89 | user = user_util.create_user() | |
|
90 | auth_token = AuthTokenModel().create( | |
|
91 | user.user_id, 'test-token', -1, AuthTokenModel.cls.ROLE_API) | |
|
92 | token = auth_token.api_key | |
|
93 | ||
|
94 | clone_url = rc_web_server.repo_clone_url( | |
|
95 | HG_REPO, user=user.username, passwd=token) | |
|
96 | ||
|
97 | stdout, stderr = Command('/tmp').execute( | |
|
98 | 'hg clone', clone_url, tmpdir.strpath) | |
|
99 | assert 'abort: authorization failed' in stderr | |
|
100 | ||
|
101 | def test_clone_by_auth_token_user_disabled( | |
|
102 | self, rc_web_server, tmpdir, user_util, enable_auth_plugins): | |
|
103 | enable_auth_plugins.enable([ | |
|
104 | 'egg:rhodecode-enterprise-ce#token', | |
|
105 | 'egg:rhodecode-enterprise-ce#rhodecode' | |
|
106 | ]) | |
|
107 | ||
|
108 | user = user_util.create_user() | |
|
109 | user.active = False | |
|
110 | Session().add(user) | |
|
111 | Session().commit() | |
|
112 | token = user.auth_tokens[1] | |
|
113 | ||
|
114 | clone_url = rc_web_server.repo_clone_url( | |
|
115 | HG_REPO, user=user.username, passwd=token) | |
|
116 | ||
|
117 | stdout, stderr = Command('/tmp').execute( | |
|
118 | 'hg clone', clone_url, tmpdir.strpath) | |
|
119 | assert 'abort: authorization failed' in stderr | |
|
120 | ||
|
121 | msg = 'reason: account not active.' | |
|
122 | rc_web_server.assert_message_in_server_logs(msg) | |
|
123 | ||
|
124 | def test_clone_by_auth_token_with_scope( | |
|
125 | self, rc_web_server, tmpdir, user_util, enable_auth_plugins): | |
|
126 | enable_auth_plugins.enable([ | |
|
127 | 'egg:rhodecode-enterprise-ce#token', | |
|
128 | 'egg:rhodecode-enterprise-ce#rhodecode' | |
|
129 | ]) | |
|
130 | ||
|
131 | user = user_util.create_user() | |
|
132 | auth_token = AuthTokenModel().create( | |
|
133 | user.user_id, 'test-token', -1, AuthTokenModel.cls.ROLE_VCS) | |
|
134 | token = auth_token.api_key | |
|
135 | ||
|
136 | # manually set scope | |
|
137 | auth_token.repo = Repository.get_by_repo_name(HG_REPO) | |
|
138 | Session().add(auth_token) | |
|
139 | Session().commit() | |
|
140 | ||
|
141 | clone_url = rc_web_server.repo_clone_url( | |
|
142 | HG_REPO, user=user.username, passwd=token) | |
|
143 | ||
|
144 | stdout, stderr = Command('/tmp').execute( | |
|
145 | 'hg clone', clone_url, tmpdir.strpath) | |
|
146 | _check_proper_clone(stdout, stderr, 'hg') | |
|
147 | ||
|
148 | def test_clone_by_auth_token_with_wrong_scope( | |
|
149 | self, rc_web_server, tmpdir, user_util, enable_auth_plugins): | |
|
150 | enable_auth_plugins.enable([ | |
|
151 | 'egg:rhodecode-enterprise-ce#token', | |
|
152 | 'egg:rhodecode-enterprise-ce#rhodecode' | |
|
153 | ]) | |
|
154 | ||
|
155 | user = user_util.create_user() | |
|
156 | auth_token = AuthTokenModel().create( | |
|
157 | user.user_id, 'test-token', -1, AuthTokenModel.cls.ROLE_VCS) | |
|
158 | token = auth_token.api_key | |
|
159 | ||
|
160 | # manually set scope | |
|
161 | auth_token.repo = Repository.get_by_repo_name(GIT_REPO) | |
|
162 | Session().add(auth_token) | |
|
163 | Session().commit() | |
|
164 | ||
|
165 | clone_url = rc_web_server.repo_clone_url( | |
|
166 | HG_REPO, user=user.username, passwd=token) | |
|
167 | ||
|
168 | stdout, stderr = Command('/tmp').execute( | |
|
169 | 'hg clone', clone_url, tmpdir.strpath) | |
|
170 | ||
|
171 | assert 'abort: authorization failed' in stderr | |
|
172 | ||
|
173 | msg = 'reason: bad or inactive token.' | |
|
174 | rc_web_server.assert_message_in_server_logs(msg) |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -18,8 +17,14 b'' | |||
|
18 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 19 | |
|
21 | import pytest | |
|
22 | from rhodecode.lib import ext_json | |
|
20 | import pytest # noqa | |
|
21 | ||
|
22 | # keep the imports to have a toplevel conftest.py but still importable from EE edition | |
|
23 | from rhodecode.tests.conftest_common import ( # noqa | |
|
24 | pytest_generate_tests, | |
|
25 | pytest_runtest_makereport, | |
|
26 | pytest_addoption | |
|
27 | ) | |
|
23 | 28 | |
|
24 | 29 | |
|
25 | 30 | pytest_plugins = [ |
@@ -29,121 +34,7 b' pytest_plugins = [' | |||
|
29 | 34 | |
|
30 | 35 | |
|
31 | 36 | def pytest_configure(config): |
|
32 | from rhodecode.config import patches | |
|
33 | ||
|
34 | ||
|
35 | def pytest_addoption(parser): | |
|
36 | ||
|
37 | def _parse_json(value): | |
|
38 | return ext_json.str_json(value) if value else None | |
|
39 | ||
|
40 | def _split_comma(value): | |
|
41 | return value.split(',') | |
|
42 | ||
|
43 | parser.addoption( | |
|
44 | '--keep-tmp-path', action='store_true', | |
|
45 | help="Keep the test temporary directories") | |
|
46 | ||
|
47 | parser.addoption( | |
|
48 | '--backends', action='store', type=_split_comma, | |
|
49 | default=['git', 'hg', 'svn'], | |
|
50 | help="Select which backends to test for backend specific tests.") | |
|
51 | parser.addoption( | |
|
52 | '--dbs', action='store', type=_split_comma, | |
|
53 | default=['sqlite'], | |
|
54 | help="Select which database to test for database specific tests. " | |
|
55 | "Possible options are sqlite,postgres,mysql") | |
|
56 | parser.addoption( | |
|
57 | '--appenlight', '--ae', action='store_true', | |
|
58 | help="Track statistics in appenlight.") | |
|
59 | parser.addoption( | |
|
60 | '--appenlight-api-key', '--ae-key', | |
|
61 | help="API key for Appenlight.") | |
|
62 | parser.addoption( | |
|
63 | '--appenlight-url', '--ae-url', | |
|
64 | default="https://ae.rhodecode.com", | |
|
65 | help="Appenlight service URL, defaults to https://ae.rhodecode.com") | |
|
66 | parser.addoption( | |
|
67 | '--sqlite-connection-string', action='store', | |
|
68 | default='', help="Connection string for the dbs tests with SQLite") | |
|
69 | parser.addoption( | |
|
70 | '--postgres-connection-string', action='store', | |
|
71 | default='', help="Connection string for the dbs tests with Postgres") | |
|
72 | parser.addoption( | |
|
73 | '--mysql-connection-string', action='store', | |
|
74 | default='', help="Connection string for the dbs tests with MySQL") | |
|
75 | parser.addoption( | |
|
76 | '--repeat', type=int, default=100, | |
|
77 | help="Number of repetitions in performance tests.") | |
|
78 | ||
|
79 | parser.addoption( | |
|
80 | '--test-loglevel', dest='test_loglevel', | |
|
81 | help="Set default Logging level for tests, critical(default), error, warn , info, debug") | |
|
82 | group = parser.getgroup('pylons') | |
|
83 | group.addoption( | |
|
84 | '--with-pylons', dest='pyramid_config', | |
|
85 | help="Set up a Pylons environment with the specified config file.") | |
|
86 | group.addoption( | |
|
87 | '--ini-config-override', action='store', type=_parse_json, | |
|
88 | default=None, dest='pyramid_config_override', help=( | |
|
89 | "Overrides the .ini file settings. Should be specified in JSON" | |
|
90 | " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'" | |
|
91 | ) | |
|
92 | ) | |
|
93 | parser.addini( | |
|
94 | 'pyramid_config', | |
|
95 | "Set up a Pyramid environment with the specified config file.") | |
|
96 | ||
|
97 | vcsgroup = parser.getgroup('vcs') | |
|
98 | vcsgroup.addoption( | |
|
99 | '--without-vcsserver', dest='with_vcsserver', action='store_false', | |
|
100 | help="Do not start the VCSServer in a background process.") | |
|
101 | vcsgroup.addoption( | |
|
102 | '--with-vcsserver-http', dest='vcsserver_config_http', | |
|
103 | help="Start the HTTP VCSServer with the specified config file.") | |
|
104 | vcsgroup.addoption( | |
|
105 | '--vcsserver-protocol', dest='vcsserver_protocol', | |
|
106 | help="Start the VCSServer with HTTP protocol support.") | |
|
107 | vcsgroup.addoption( | |
|
108 | '--vcsserver-config-override', action='store', type=_parse_json, | |
|
109 | default=None, dest='vcsserver_config_override', help=( | |
|
110 | "Overrides the .ini file settings for the VCSServer. " | |
|
111 | "Should be specified in JSON " | |
|
112 | "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'" | |
|
113 | ) | |
|
114 | ) | |
|
115 | vcsgroup.addoption( | |
|
116 | '--vcsserver-port', action='store', type=int, | |
|
117 | default=None, help=( | |
|
118 | "Allows to set the port of the vcsserver. Useful when testing " | |
|
119 | "against an already running server and random ports cause " | |
|
120 | "trouble.")) | |
|
121 | parser.addini( | |
|
122 | 'vcsserver_config_http', | |
|
123 | "Start the HTTP VCSServer with the specified config file.") | |
|
124 | parser.addini( | |
|
125 | 'vcsserver_protocol', | |
|
126 | "Start the VCSServer with HTTP protocol support.") | |
|
127 | ||
|
128 | ||
|
129 | @pytest.hookimpl(tryfirst=True, hookwrapper=True) | |
|
130 | def pytest_runtest_makereport(item, call): | |
|
131 | """ | |
|
132 | Adding the remote traceback if the exception has this information. | |
|
133 | ||
|
134 | VCSServer attaches this information as the attribute `_vcs_server_traceback` | |
|
135 | to the exception instance. | |
|
136 | """ | |
|
137 | outcome = yield | |
|
138 | report = outcome.get_result() | |
|
139 | ||
|
140 | if call.excinfo: | |
|
141 | exc = call.excinfo.value | |
|
142 | vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None) | |
|
143 | ||
|
144 | if vcsserver_traceback and report.outcome == 'failed': | |
|
145 | section = f'VCSServer remote traceback {report.when}' | |
|
146 | report.sections.append((section, vcsserver_traceback)) | |
|
37 | from rhodecode.config import patches # noqa | |
|
147 | 38 | |
|
148 | 39 | |
|
149 | 40 | def pytest_collection_modifyitems(session, config, items): |
@@ -152,7 +43,7 b' def pytest_collection_modifyitems(sessio' | |||
|
152 | 43 | i for i in items if getattr(i.obj, '__test__', True)] |
|
153 | 44 | items[:] = remaining |
|
154 | 45 | |
|
155 | # NOTE(marcink): custom test ordering, db tests and vcstests are slowes and should | |
|
46 | # NOTE(marcink): custom test ordering, db tests and vcstests are slowest and should | |
|
156 | 47 | # be executed at the end for faster test feedback |
|
157 | 48 | def sorter(item): |
|
158 | 49 | pos = 0 |
@@ -165,37 +56,3 b' def pytest_collection_modifyitems(sessio' | |||
|
165 | 56 | return pos |
|
166 | 57 | |
|
167 | 58 | items.sort(key=sorter) |
|
168 | ||
|
169 | ||
|
170 | def get_backends_from_metafunc(metafunc): | |
|
171 | requested_backends = set(metafunc.config.getoption('--backends')) | |
|
172 | backend_mark = metafunc.definition.get_closest_marker('backends') | |
|
173 | if backend_mark: | |
|
174 | # Supported backends by this test function, created from | |
|
175 | # pytest.mark.backends | |
|
176 | backends = backend_mark.args | |
|
177 | elif hasattr(metafunc.cls, 'backend_alias'): | |
|
178 | # Support class attribute "backend_alias", this is mainly | |
|
179 | # for legacy reasons for tests not yet using pytest.mark.backends | |
|
180 | backends = [metafunc.cls.backend_alias] | |
|
181 | else: | |
|
182 | backends = metafunc.config.getoption('--backends') | |
|
183 | return requested_backends.intersection(backends) | |
|
184 | ||
|
185 | ||
|
186 | def pytest_generate_tests(metafunc): | |
|
187 | ||
|
188 | # Support test generation based on --backend parameter | |
|
189 | if 'backend_alias' in metafunc.fixturenames: | |
|
190 | backends = get_backends_from_metafunc(metafunc) | |
|
191 | scope = None | |
|
192 | if not backends: | |
|
193 | pytest.skip("Not enabled for any of selected backends") | |
|
194 | ||
|
195 | metafunc.parametrize('backend_alias', backends, scope=scope) | |
|
196 | ||
|
197 | backend_mark = metafunc.definition.get_closest_marker('backends') | |
|
198 | if backend_mark: | |
|
199 | backends = get_backends_from_metafunc(metafunc) | |
|
200 | if not backends: | |
|
201 | pytest.skip("Not enabled for any of selected backends") |
@@ -13,6 +13,8 b' addopts =' | |||
|
13 | 13 | --capture=no |
|
14 | 14 | --show-capture=all |
|
15 | 15 | |
|
16 | # --test-loglevel=INFO, show log-level during execution | |
|
17 | ||
|
16 | 18 | markers = |
|
17 | 19 | vcs_operations: Mark tests depending on a running RhodeCode instance. |
|
18 | 20 | xfail_backends: Mark tests as xfail for given backends. |
@@ -23,15 +23,17 b' import datetime' | |||
|
23 | 23 | |
|
24 | 24 | import pytest |
|
25 | 25 | |
|
26 | from rhodecode.lib.str_utils import safe_str | |
|
26 | 27 | from rhodecode.tests import * |
|
27 | 28 | from rhodecode.tests.fixture import FIXTURES |
|
28 | 29 | from rhodecode.model.db import UserLog |
|
29 | 30 | from rhodecode.model.meta import Session |
|
30 | from rhodecode.lib.utils2 import safe_unicode | |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | def route_path(name, params=None, **kwargs): |
|
34 |
import urllib.request |
|
|
34 | import urllib.request | |
|
35 | import urllib.parse | |
|
36 | import urllib.error | |
|
35 | 37 | from rhodecode.apps._base import ADMIN_PREFIX |
|
36 | 38 | |
|
37 | 39 | base_url = { |
@@ -69,7 +71,7 b' class TestAdminController(object):' | |||
|
69 | 71 | for row in csv.DictReader(f): |
|
70 | 72 | ul = UserLog() |
|
71 | 73 | for k, v in row.items(): |
|
72 |
v = safe_ |
|
|
74 | v = safe_str(v) | |
|
73 | 75 | if k == 'action_date': |
|
74 | 76 | v = strptime(v) |
|
75 | 77 | if k in ['user_id', 'repository_id']: |
@@ -24,7 +24,9 b' from rhodecode.model.settings import Set' | |||
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | def route_path(name, params=None, **kwargs): |
|
27 |
import urllib.request |
|
|
27 | import urllib.request | |
|
28 | import urllib.parse | |
|
29 | import urllib.error | |
|
28 | 30 | from rhodecode.apps._base import ADMIN_PREFIX |
|
29 | 31 | |
|
30 | 32 | base_url = { |
@@ -26,7 +26,9 b' fixture = Fixture()' | |||
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | def route_path(name, params=None, **kwargs): |
|
29 |
import urllib.request |
|
|
29 | import urllib.request | |
|
30 | import urllib.parse | |
|
31 | import urllib.error | |
|
30 | 32 | from rhodecode.apps._base import ADMIN_PREFIX |
|
31 | 33 | |
|
32 | 34 | base_url = { |
@@ -50,7 +52,10 b' class TestAdminMainView(TestController):' | |||
|
50 | 52 | response = self.app.get(route_path('admin_home'), status=200) |
|
51 | 53 | response.mustcontain("Administration area") |
|
52 | 54 | |
|
53 | def test_redirect_pull_request_view(self, view): | |
|
55 | @pytest.mark.parametrize('view', [ | |
|
56 | 'pull_requests_global', | |
|
57 | ]) | |
|
58 | def test_redirect_pull_request_view_global(self, view): | |
|
54 | 59 | self.log_user() |
|
55 | 60 | self.app.get( |
|
56 | 61 | route_path(view, pull_request_id='xxxx'), |
@@ -28,7 +28,9 b' from rhodecode.tests import (' | |||
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | def route_path(name, params=None, **kwargs): |
|
31 |
import urllib.request |
|
|
31 | import urllib.request | |
|
32 | import urllib.parse | |
|
33 | import urllib.error | |
|
32 | 34 | from rhodecode.apps._base import ADMIN_PREFIX |
|
33 | 35 | |
|
34 | 36 | base_url = { |
@@ -17,7 +17,9 b'' | |||
|
17 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | 19 | |
|
20 |
import urllib.request |
|
|
20 | import urllib.request | |
|
21 | import urllib.parse | |
|
22 | import urllib.error | |
|
21 | 23 | |
|
22 | 24 | import mock |
|
23 | 25 | import pytest |
@@ -42,7 +44,9 b' fixture = Fixture()' | |||
|
42 | 44 | |
|
43 | 45 | |
|
44 | 46 | def route_path(name, params=None, **kwargs): |
|
45 |
import urllib.request |
|
|
47 | import urllib.request | |
|
48 | import urllib.parse | |
|
49 | import urllib.error | |
|
46 | 50 | |
|
47 | 51 | base_url = { |
|
48 | 52 | 'repos': ADMIN_PREFIX + '/repos', |
@@ -92,12 +96,14 b' class TestAdminRepos(object):' | |||
|
92 | 96 | assert ['hg', 'git', 'svn'] == [x.get('value') for x in assert_response.get_elements('[name=repo_type]')] |
|
93 | 97 | |
|
94 | 98 | @pytest.mark.parametrize( |
|
95 |
"suffix", [ |
|
|
99 | "suffix", ['', 'xxa'], ids=['', 'non-ascii']) | |
|
96 | 100 | def test_create(self, autologin_user, backend, suffix, csrf_token): |
|
97 | 101 | repo_name_unicode = backend.new_repo_name(suffix=suffix) |
|
98 |
repo_name = repo_name_unicode |
|
|
99 | description_unicode = u'description for newly created repo' + suffix | |
|
100 | description = description_unicode.encode('utf8') | |
|
102 | repo_name = repo_name_unicode | |
|
103 | ||
|
104 | description_unicode = 'description for newly created repo' + suffix | |
|
105 | description = description_unicode | |
|
106 | ||
|
101 | 107 | response = self.app.post( |
|
102 | 108 | route_path('repo_create'), |
|
103 | 109 | fixture._get_repo_create_params( |
@@ -127,20 +133,20 b' class TestAdminRepos(object):' | |||
|
127 | 133 | self.assert_repository_is_created_correctly( |
|
128 | 134 | repo_name, description, backend) |
|
129 | 135 | |
|
130 |
@pytest.mark.parametrize("suffix", [ |
|
|
136 | @pytest.mark.parametrize("suffix", ['', '_Δ ΔΔ'], ids=['', 'non-ascii']) | |
|
131 | 137 | def test_create_in_group( |
|
132 | 138 | self, autologin_user, backend, suffix, csrf_token): |
|
133 | 139 | # create GROUP |
|
134 |
group_name = 'sometest_ |
|
|
140 | group_name = f'sometest_{backend.alias}' | |
|
135 | 141 | gr = RepoGroupModel().create(group_name=group_name, |
|
136 | 142 | group_description='test', |
|
137 | 143 | owner=TEST_USER_ADMIN_LOGIN) |
|
138 | 144 | Session().commit() |
|
139 | 145 | |
|
140 |
repo_name = |
|
|
141 | repo_name_full = RepoGroup.url_sep().join( | |
|
142 | [group_name, repo_name]) | |
|
143 | description = u'description for newly created repo' | |
|
146 | repo_name = f'ingroup{suffix}' | |
|
147 | repo_name_full = RepoGroup.url_sep().join([group_name, repo_name]) | |
|
148 | description = 'description for newly created repo' | |
|
149 | ||
|
144 | 150 | self.app.post( |
|
145 | 151 | route_path('repo_create'), |
|
146 | 152 | fixture._get_repo_create_params( |
@@ -483,17 +489,15 b' class TestAdminRepos(object):' | |||
|
483 | 489 | # repo must not be in filesystem ! |
|
484 | 490 | assert not repo_on_filesystem(repo_name) |
|
485 | 491 | |
|
486 | def assert_repository_is_created_correctly( | |
|
487 | self, repo_name, description, backend): | |
|
488 | repo_name_utf8 = safe_str(repo_name) | |
|
492 | def assert_repository_is_created_correctly(self, repo_name, description, backend): | |
|
493 | url_quoted_repo_name = urllib.parse.quote(repo_name) | |
|
489 | 494 | |
|
490 | 495 | # run the check page that triggers the flash message |
|
491 | 496 | response = self.app.get( |
|
492 |
route_path('repo_creating_check', repo_name= |
|
|
493 |
assert response.json == { |
|
|
497 | route_path('repo_creating_check', repo_name=repo_name)) | |
|
498 | assert response.json == {'result': True} | |
|
494 | 499 | |
|
495 |
flash_msg = |
|
|
496 | urllib.parse.quote(repo_name_utf8), repo_name) | |
|
500 | flash_msg = 'Created repository <a href="/{}">{}</a>'.format(url_quoted_repo_name, repo_name) | |
|
497 | 501 | assert_session_flash(response, flash_msg) |
|
498 | 502 | |
|
499 | 503 | # test if the repo was created in the database |
@@ -504,7 +508,7 b' class TestAdminRepos(object):' | |||
|
504 | 508 | |
|
505 | 509 | # test if the repository is visible in the list ? |
|
506 | 510 | response = self.app.get( |
|
507 |
h.route_path('repo_summary', repo_name= |
|
|
511 | h.route_path('repo_summary', repo_name=repo_name)) | |
|
508 | 512 | response.mustcontain(repo_name) |
|
509 | 513 | response.mustcontain(backend.alias) |
|
510 | 514 |
@@ -33,7 +33,9 b' fixture = Fixture()' | |||
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | def route_path(name, params=None, **kwargs): |
|
36 |
import urllib.request |
|
|
36 | import urllib.request | |
|
37 | import urllib.parse | |
|
38 | import urllib.error | |
|
37 | 39 | |
|
38 | 40 | base_url = { |
|
39 | 41 | 'repo_groups': ADMIN_PREFIX + '/repo_groups', |
@@ -106,7 +108,7 b' class TestAdminRepositoryGroups(object):' | |||
|
106 | 108 | 'hg_repo_Δ Δ', |
|
107 | 109 | ]) |
|
108 | 110 | def test_create(self, autologin_user, repo_group_name, csrf_token): |
|
109 |
repo_group_name_ |
|
|
111 | repo_group_name_non_ascii = repo_group_name | |
|
110 | 112 | description = 'description for newly created repo group' |
|
111 | 113 | |
|
112 | 114 | response = self.app.post( |
@@ -123,14 +125,14 b' class TestAdminRepositoryGroups(object):' | |||
|
123 | 125 | assert_session_flash( |
|
124 | 126 | response, |
|
125 | 127 | 'Created repository group <a href="%s">%s</a>' % ( |
|
126 |
repo_gr_url, repo_group_name_ |
|
|
128 | repo_gr_url, repo_group_name_non_ascii)) | |
|
127 | 129 | |
|
128 | 130 | # # test if the repo group was created in the database |
|
129 | 131 | new_repo_group = RepoGroupModel()._get_repo_group( |
|
130 |
repo_group_name_ |
|
|
132 | repo_group_name_non_ascii) | |
|
131 | 133 | assert new_repo_group is not None |
|
132 | 134 | |
|
133 |
assert new_repo_group.group_name == repo_group_name_ |
|
|
135 | assert new_repo_group.group_name == repo_group_name_non_ascii | |
|
134 | 136 | assert new_repo_group.group_description == description |
|
135 | 137 | |
|
136 | 138 | # test if the repository is visible in the list ? |
@@ -143,7 +145,7 b' class TestAdminRepositoryGroups(object):' | |||
|
143 | 145 | if not is_on_filesystem: |
|
144 | 146 | self.fail('no repo group %s in filesystem' % repo_group_name) |
|
145 | 147 | |
|
146 |
RepoGroupModel().delete(repo_group_name_ |
|
|
148 | RepoGroupModel().delete(repo_group_name_non_ascii) | |
|
147 | 149 | Session().commit() |
|
148 | 150 | |
|
149 | 151 | @pytest.mark.parametrize('repo_group_name', [ |
@@ -159,7 +161,7 b' class TestAdminRepositoryGroups(object):' | |||
|
159 | 161 | |
|
160 | 162 | expected_group_name = '{}/{}'.format( |
|
161 | 163 | parent_group_name, repo_group_name) |
|
162 |
expected_group_name_ |
|
|
164 | expected_group_name_non_ascii = expected_group_name | |
|
163 | 165 | |
|
164 | 166 | try: |
|
165 | 167 | response = self.app.post( |
@@ -175,9 +177,9 b' class TestAdminRepositoryGroups(object):' | |||
|
175 | 177 | u'Created repository group <a href="%s">%s</a>' % ( |
|
176 | 178 | h.route_path('repo_group_home', |
|
177 | 179 | repo_group_name=expected_group_name), |
|
178 |
expected_group_name_ |
|
|
180 | expected_group_name_non_ascii)) | |
|
179 | 181 | finally: |
|
180 |
RepoGroupModel().delete(expected_group_name_ |
|
|
182 | RepoGroupModel().delete(expected_group_name_non_ascii) | |
|
181 | 183 | Session().commit() |
|
182 | 184 | |
|
183 | 185 | def test_user_with_creation_permissions_cannot_create_subgroups( |
@@ -22,19 +22,20 b' import pytest' | |||
|
22 | 22 | |
|
23 | 23 | import rhodecode |
|
24 | 24 | from rhodecode.apps._base import ADMIN_PREFIX |
|
25 |
from rhodecode.lib.utils |
|
|
25 | from rhodecode.lib.hash_utils import md5_safe | |
|
26 | 26 | from rhodecode.model.db import RhodeCodeUi |
|
27 | 27 | from rhodecode.model.meta import Session |
|
28 | 28 | from rhodecode.model.settings import SettingsModel, IssueTrackerSettingsModel |
|
29 | 29 | from rhodecode.tests import assert_session_flash |
|
30 | from rhodecode.tests.utils import AssertResponse | |
|
31 | 30 | |
|
32 | 31 | |
|
33 | 32 | UPDATE_DATA_QUALNAME = 'rhodecode.model.update.UpdateModel.get_update_data' |
|
34 | 33 | |
|
35 | 34 | |
|
36 | 35 | def route_path(name, params=None, **kwargs): |
|
37 |
import urllib.request |
|
|
36 | import urllib.request | |
|
37 | import urllib.parse | |
|
38 | import urllib.error | |
|
38 | 39 | from rhodecode.apps._base import ADMIN_PREFIX |
|
39 | 40 | |
|
40 | 41 | base_url = { |
@@ -233,6 +234,7 b' class TestAdminSettingsGlobal(object):' | |||
|
233 | 234 | route_path('admin_settings_global_update'), params=params) |
|
234 | 235 | |
|
235 | 236 | assert_session_flash(response, 'Updated application settings') |
|
237 | ||
|
236 | 238 | app_settings = SettingsModel().get_all_settings() |
|
237 | 239 | del settings['csrf_token'] |
|
238 | 240 | for key, value in settings.items(): |
@@ -413,8 +415,9 b' class TestAdminSettingsVcs(object):' | |||
|
413 | 415 | |
|
414 | 416 | @pytest.fixture() |
|
415 | 417 | def disable_sql_cache(self, request): |
|
418 | # patch _do_orm_execute so it returns None similar like if we don't use a cached query | |
|
416 | 419 | patcher = mock.patch( |
|
417 |
'rhodecode.lib.caching_query. |
|
|
420 | 'rhodecode.lib.caching_query.ORMCache._do_orm_execute', return_value=None) | |
|
418 | 421 | request.addfinalizer(patcher.stop) |
|
419 | 422 | patcher.start() |
|
420 | 423 | |
@@ -428,8 +431,7 b' class TestAdminSettingsVcs(object):' | |||
|
428 | 431 | @pytest.fixture(scope='class', autouse=True) |
|
429 | 432 | def cleanup_settings(self, request, baseapp): |
|
430 | 433 | ui_id = RhodeCodeUi.ui_id |
|
431 | original_ids = list( | |
|
432 | r.ui_id for r in RhodeCodeUi.query().values(ui_id)) | |
|
434 | original_ids = [r.ui_id for r in RhodeCodeUi.query().with_entities(ui_id)] | |
|
433 | 435 | |
|
434 | 436 | @request.addfinalizer |
|
435 | 437 | def cleanup(): |
@@ -644,9 +646,9 b' class TestAdminSettingsIssueTracker(obje' | |||
|
644 | 646 | } |
|
645 | 647 | self.app.post(post_url, post_data, status=302) |
|
646 | 648 | settings = SettingsModel().get_all_settings() |
|
647 | self.uid = md5(pattern) | |
|
649 | self.uid = md5_safe(pattern) | |
|
648 | 650 | assert settings[self.PATTERN_KEY+self.uid] == pattern |
|
649 | self.another_uid = md5(another_pattern) | |
|
651 | self.another_uid = md5_safe(another_pattern) | |
|
650 | 652 | assert settings[self.PATTERN_KEY+self.another_uid] == another_pattern |
|
651 | 653 | |
|
652 | 654 | @request.addfinalizer |
@@ -654,7 +656,7 b' class TestAdminSettingsIssueTracker(obje' | |||
|
654 | 656 | defaults = SettingsModel().get_all_settings() |
|
655 | 657 | |
|
656 | 658 | entries = [name for name in defaults if ( |
|
657 |
(self.uid in name) or (self.another_uid |
|
|
659 | (self.uid in name) or (self.another_uid in name))] | |
|
658 | 660 | start = len(self.RC_PREFIX) |
|
659 | 661 | for del_key in entries: |
|
660 | 662 | # TODO: anderson: get_by_name needs name without prefix |
@@ -667,7 +669,7 b' class TestAdminSettingsIssueTracker(obje' | |||
|
667 | 669 | self, autologin_user, backend, csrf_token, request): |
|
668 | 670 | |
|
669 | 671 | old_pattern = 'issuetracker_pat1' |
|
670 | old_uid = md5(old_pattern) | |
|
672 | old_uid = md5_safe(old_pattern) | |
|
671 | 673 | |
|
672 | 674 | post_url = route_path('admin_settings_issuetracker_update') |
|
673 | 675 | post_data = { |
@@ -681,7 +683,7 b' class TestAdminSettingsIssueTracker(obje' | |||
|
681 | 683 | self.app.post(post_url, post_data, status=302) |
|
682 | 684 | |
|
683 | 685 | new_pattern = 'issuetracker_pat1_edited' |
|
684 | self.new_uid = md5(new_pattern) | |
|
686 | self.new_uid = md5_safe(new_pattern) | |
|
685 | 687 | |
|
686 | 688 | post_url = route_path('admin_settings_issuetracker_update') |
|
687 | 689 | post_data = { |
@@ -708,7 +710,7 b' class TestAdminSettingsIssueTracker(obje' | |||
|
708 | 710 | self, autologin_user, csrf_token, request, settings_util): |
|
709 | 711 | prefix = 'issuetracker' |
|
710 | 712 | pattern = 'issuetracker_pat' |
|
711 | self.uid = md5(pattern) | |
|
713 | self.uid = md5_safe(pattern) | |
|
712 | 714 | pattern_key = '_'.join([prefix, 'pat', self.uid]) |
|
713 | 715 | rc_pattern_key = '_'.join(['rhodecode', pattern_key]) |
|
714 | 716 | desc_key = '_'.join([prefix, 'desc', self.uid]) |
@@ -742,7 +744,7 b' class TestAdminSettingsIssueTracker(obje' | |||
|
742 | 744 | self, autologin_user, backend, csrf_token, settings_util, xhr_header): |
|
743 | 745 | |
|
744 | 746 | old_pattern = 'issuetracker_pat_deleted' |
|
745 | old_uid = md5(old_pattern) | |
|
747 | old_uid = md5_safe(old_pattern) | |
|
746 | 748 | |
|
747 | 749 | post_url = route_path('admin_settings_issuetracker_update') |
|
748 | 750 | post_data = { |
@@ -30,7 +30,9 b' fixture = Fixture()' | |||
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | def route_path(name, params=None, **kwargs): |
|
33 |
import urllib.request |
|
|
33 | import urllib.request | |
|
34 | import urllib.parse | |
|
35 | import urllib.error | |
|
34 | 36 | from rhodecode.apps._base import ADMIN_PREFIX |
|
35 | 37 | |
|
36 | 38 | base_url = { |
@@ -34,7 +34,9 b' fixture = Fixture()' | |||
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | def route_path(name, params=None, **kwargs): |
|
37 |
import urllib.request |
|
|
37 | import urllib.request | |
|
38 | import urllib.parse | |
|
39 | import urllib.error | |
|
38 | 40 | from rhodecode.apps._base import ADMIN_PREFIX |
|
39 | 41 | |
|
40 | 42 | base_url = { |
@@ -28,7 +28,9 b' fixture = Fixture()' | |||
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | def route_path(name, params=None, **kwargs): |
|
31 |
import urllib.request |
|
|
31 | import urllib.request | |
|
32 | import urllib.parse | |
|
33 | import urllib.error | |
|
32 | 34 | from rhodecode.apps._base import ADMIN_PREFIX |
|
33 | 35 | |
|
34 | 36 | base_url = { |
@@ -27,7 +27,9 b' from rhodecode.apps.file_store import ut' | |||
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | def route_path(name, params=None, **kwargs): |
|
30 |
import urllib.request |
|
|
30 | import urllib.request | |
|
31 | import urllib.parse | |
|
32 | import urllib.error | |
|
31 | 33 | |
|
32 | 34 | base_url = { |
|
33 | 35 | 'upload_file': '/_file_store/upload', |
@@ -59,7 +61,7 b' class TestFileStoreViews(TestController)' | |||
|
59 | 61 | status = 200 |
|
60 | 62 | store = utils.get_file_storage({config_keys.store_path: store_path}) |
|
61 | 63 | filesystem_file = os.path.join(str(tmpdir), fid) |
|
62 |
with open(filesystem_file, 'w |
|
|
64 | with open(filesystem_file, 'wt') as f: | |
|
63 | 65 | f.write(content) |
|
64 | 66 | |
|
65 | 67 | with open(filesystem_file, 'rb') as f: |
@@ -120,7 +122,7 b' class TestFileStoreViews(TestController)' | |||
|
120 | 122 | self.log_user() |
|
121 | 123 | response = self.app.post( |
|
122 | 124 | route_path('upload_file'), |
|
123 | upload_files=[('store_file', 'myfile.txt', 'SOME CONTENT')], | |
|
125 | upload_files=[('store_file', b'myfile.txt', b'SOME CONTENT')], | |
|
124 | 126 | params={'csrf_token': self.csrf_token}, |
|
125 | 127 | status=200) |
|
126 | 128 | |
@@ -134,7 +136,7 b' class TestFileStoreViews(TestController)' | |||
|
134 | 136 | fid = 'example.txt' |
|
135 | 137 | |
|
136 | 138 | filesystem_file = os.path.join(str(tmpdir), fid) |
|
137 |
with open(filesystem_file, 'w |
|
|
139 | with open(filesystem_file, 'wt') as f: | |
|
138 | 140 | f.write(content) |
|
139 | 141 | |
|
140 | 142 | with open(filesystem_file, 'rb') as f: |
@@ -30,7 +30,8 b' from rhodecode.tests import (' | |||
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | def route_path(name, params=None, **kwargs): |
|
33 |
import urllib. |
|
|
33 | import urllib.parse | |
|
34 | import urllib.error | |
|
34 | 35 | from rhodecode.apps._base import ADMIN_PREFIX |
|
35 | 36 | |
|
36 | 37 | base_url = { |
@@ -59,7 +60,7 b' class GistUtility(object):' | |||
|
59 | 60 | self._gist_ids = [] |
|
60 | 61 | |
|
61 | 62 | def __call__( |
|
62 | self, f_name, content='some gist', lifetime=-1, | |
|
63 | self, f_name: bytes, content: bytes = b'some gist', lifetime=-1, | |
|
63 | 64 | description='gist-desc', gist_type='public', |
|
64 | 65 | acl_level=Gist.GIST_PUBLIC, owner=TEST_USER_ADMIN_LOGIN): |
|
65 | 66 | gist_mapping = { |
@@ -94,14 +95,14 b' class TestGistsController(TestController' | |||
|
94 | 95 | def test_index_empty(self, create_gist): |
|
95 | 96 | self.log_user() |
|
96 | 97 | response = self.app.get(route_path('gists_show')) |
|
97 |
response.mustcontain(' |
|
|
98 | response.mustcontain('var gist_data = [];') | |
|
98 | 99 | |
|
99 | 100 | def test_index(self, create_gist): |
|
100 | 101 | self.log_user() |
|
101 | g1 = create_gist('gist1') | |
|
102 | g2 = create_gist('gist2', lifetime=1400) | |
|
103 | g3 = create_gist('gist3', description='gist3-desc') | |
|
104 | g4 = create_gist('gist4', gist_type='private').gist_access_id | |
|
102 | g1 = create_gist(b'gist1') | |
|
103 | g2 = create_gist(b'gist2', lifetime=1400) | |
|
104 | g3 = create_gist(b'gist3', description='gist3-desc') | |
|
105 | g4 = create_gist(b'gist4', gist_type='private').gist_access_id | |
|
105 | 106 | response = self.app.get(route_path('gists_show')) |
|
106 | 107 | |
|
107 | 108 | response.mustcontain(g1.gist_access_id) |
@@ -111,13 +112,12 b' class TestGistsController(TestController' | |||
|
111 | 112 | response.mustcontain(no=[g4]) |
|
112 | 113 | |
|
113 | 114 | # Expiration information should be visible |
|
114 |
expires_tag = |
|
|
115 | h.time_to_utcdatetime(g2.gist_expires)) | |
|
115 | expires_tag = str(h.age_component(h.time_to_utcdatetime(g2.gist_expires))) | |
|
116 | 116 | response.mustcontain(expires_tag.replace('"', '\\"')) |
|
117 | 117 | |
|
118 | 118 | def test_index_private_gists(self, create_gist): |
|
119 | 119 | self.log_user() |
|
120 | gist = create_gist('gist5', gist_type='private') | |
|
120 | gist = create_gist(b'gist5', gist_type='private') | |
|
121 | 121 | response = self.app.get(route_path('gists_show', params=dict(private=1))) |
|
122 | 122 | |
|
123 | 123 | # and privates |
@@ -125,10 +125,10 b' class TestGistsController(TestController' | |||
|
125 | 125 | |
|
126 | 126 | def test_index_show_all(self, create_gist): |
|
127 | 127 | self.log_user() |
|
128 | create_gist('gist1') | |
|
129 | create_gist('gist2', lifetime=1400) | |
|
130 | create_gist('gist3', description='gist3-desc') | |
|
131 | create_gist('gist4', gist_type='private') | |
|
128 | create_gist(b'gist1') | |
|
129 | create_gist(b'gist2', lifetime=1400) | |
|
130 | create_gist(b'gist3', description='gist3-desc') | |
|
131 | create_gist(b'gist4', gist_type='private') | |
|
132 | 132 | |
|
133 | 133 | response = self.app.get(route_path('gists_show', params=dict(all=1))) |
|
134 | 134 | |
@@ -139,9 +139,9 b' class TestGistsController(TestController' | |||
|
139 | 139 | |
|
140 | 140 | def test_index_show_all_hidden_from_regular(self, create_gist): |
|
141 | 141 | self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
142 | create_gist('gist2', gist_type='private') | |
|
143 | create_gist('gist3', gist_type='private') | |
|
144 | create_gist('gist4', gist_type='private') | |
|
142 | create_gist(b'gist2', gist_type='private') | |
|
143 | create_gist(b'gist3', gist_type='private') | |
|
144 | create_gist(b'gist4', gist_type='private') | |
|
145 | 145 | |
|
146 | 146 | response = self.app.get(route_path('gists_show', params=dict(all=1))) |
|
147 | 147 | |
@@ -181,7 +181,7 b' class TestGistsController(TestController' | |||
|
181 | 181 | |
|
182 | 182 | def test_access_expired_gist(self, create_gist): |
|
183 | 183 | self.log_user() |
|
184 | gist = create_gist('never-see-me') | |
|
184 | gist = create_gist(b'never-see-me') | |
|
185 | 185 | gist.gist_expires = 0 # 1970 |
|
186 | 186 | Session().add(gist) |
|
187 | 187 | Session().commit() |
@@ -269,7 +269,7 b' class TestGistsController(TestController' | |||
|
269 | 269 | |
|
270 | 270 | def test_delete(self, create_gist): |
|
271 | 271 | self.log_user() |
|
272 | gist = create_gist('delete-me') | |
|
272 | gist = create_gist(b'delete-me') | |
|
273 | 273 | response = self.app.post( |
|
274 | 274 | route_path('gist_delete', gist_id=gist.gist_id), |
|
275 | 275 | params={'csrf_token': self.csrf_token}) |
@@ -277,7 +277,7 b' class TestGistsController(TestController' | |||
|
277 | 277 | |
|
278 | 278 | def test_delete_normal_user_his_gist(self, create_gist): |
|
279 | 279 | self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
280 | gist = create_gist('delete-me', owner=TEST_USER_REGULAR_LOGIN) | |
|
280 | gist = create_gist(b'delete-me', owner=TEST_USER_REGULAR_LOGIN) | |
|
281 | 281 | |
|
282 | 282 | response = self.app.post( |
|
283 | 283 | route_path('gist_delete', gist_id=gist.gist_id), |
@@ -286,14 +286,14 b' class TestGistsController(TestController' | |||
|
286 | 286 | |
|
287 | 287 | def test_delete_normal_user_not_his_own_gist(self, create_gist): |
|
288 | 288 | self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
289 | gist = create_gist('delete-me-2') | |
|
289 | gist = create_gist(b'delete-me-2') | |
|
290 | 290 | |
|
291 | 291 | self.app.post( |
|
292 | 292 | route_path('gist_delete', gist_id=gist.gist_id), |
|
293 | 293 | params={'csrf_token': self.csrf_token}, status=404) |
|
294 | 294 | |
|
295 | 295 | def test_show(self, create_gist): |
|
296 | gist = create_gist('gist-show-me') | |
|
296 | gist = create_gist(b'gist-show-me') | |
|
297 | 297 | response = self.app.get(route_path('gist_show', gist_id=gist.gist_access_id)) |
|
298 | 298 | |
|
299 | 299 | response.mustcontain('added file: gist-show-me<') |
@@ -308,12 +308,12 b' class TestGistsController(TestController' | |||
|
308 | 308 | def test_show_without_hg(self, create_gist): |
|
309 | 309 | with mock.patch( |
|
310 | 310 | 'rhodecode.lib.vcs.settings.ALIASES', ['git']): |
|
311 | gist = create_gist('gist-show-me-again') | |
|
311 | gist = create_gist(b'gist-show-me-again') | |
|
312 | 312 | self.app.get( |
|
313 | 313 | route_path('gist_show', gist_id=gist.gist_access_id), status=200) |
|
314 | 314 | |
|
315 | 315 | def test_show_acl_private(self, create_gist): |
|
316 | gist = create_gist('gist-show-me-only-when-im-logged-in', | |
|
316 | gist = create_gist(b'gist-show-me-only-when-im-logged-in', | |
|
317 | 317 | acl_level=Gist.ACL_LEVEL_PRIVATE) |
|
318 | 318 | self.app.get( |
|
319 | 319 | route_path('gist_show', gist_id=gist.gist_access_id), status=404) |
@@ -331,7 +331,7 b' class TestGistsController(TestController' | |||
|
331 | 331 | response.mustcontain('gist-desc') |
|
332 | 332 | |
|
333 | 333 | def test_show_as_raw(self, create_gist): |
|
334 | gist = create_gist('gist-show-me', content='GIST CONTENT') | |
|
334 | gist = create_gist(b'gist-show-me', content=b'GIST CONTENT') | |
|
335 | 335 | response = self.app.get( |
|
336 | 336 | route_path('gist_show_formatted', |
|
337 | 337 | gist_id=gist.gist_access_id, revision='tip', |
@@ -339,7 +339,7 b' class TestGistsController(TestController' | |||
|
339 | 339 | assert response.text == 'GIST CONTENT' |
|
340 | 340 | |
|
341 | 341 | def test_show_as_raw_individual_file(self, create_gist): |
|
342 | gist = create_gist('gist-show-me-raw', content='GIST BODY') | |
|
342 | gist = create_gist(b'gist-show-me-raw', content=b'GIST BODY') | |
|
343 | 343 | response = self.app.get( |
|
344 | 344 | route_path('gist_show_formatted_path', |
|
345 | 345 | gist_id=gist.gist_access_id, format='raw', |
@@ -348,24 +348,24 b' class TestGistsController(TestController' | |||
|
348 | 348 | |
|
349 | 349 | def test_edit_page(self, create_gist): |
|
350 | 350 | self.log_user() |
|
351 | gist = create_gist('gist-for-edit', content='GIST EDIT BODY') | |
|
351 | gist = create_gist(b'gist-for-edit', content=b'GIST EDIT BODY') | |
|
352 | 352 | response = self.app.get(route_path('gist_edit', gist_id=gist.gist_access_id)) |
|
353 | 353 | response.mustcontain('GIST EDIT BODY') |
|
354 | 354 | |
|
355 | 355 | def test_edit_page_non_logged_user(self, create_gist): |
|
356 | gist = create_gist('gist-for-edit', content='GIST EDIT BODY') | |
|
356 | gist = create_gist(b'gist-for-edit', content=b'GIST EDIT BODY') | |
|
357 | 357 | self.app.get(route_path('gist_edit', gist_id=gist.gist_access_id), |
|
358 | 358 | status=302) |
|
359 | 359 | |
|
360 | 360 | def test_edit_normal_user_his_gist(self, create_gist): |
|
361 | 361 | self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
362 | gist = create_gist('gist-for-edit', owner=TEST_USER_REGULAR_LOGIN) | |
|
362 | gist = create_gist(b'gist-for-edit', owner=TEST_USER_REGULAR_LOGIN) | |
|
363 | 363 | self.app.get(route_path('gist_edit', gist_id=gist.gist_access_id, |
|
364 | 364 | status=200)) |
|
365 | 365 | |
|
366 | 366 | def test_edit_normal_user_not_his_own_gist(self, create_gist): |
|
367 | 367 | self.log_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
368 | gist = create_gist('delete-me') | |
|
368 | gist = create_gist(b'delete-me') | |
|
369 | 369 | self.app.get(route_path('gist_edit', gist_id=gist.gist_access_id), |
|
370 | 370 | status=404) |
|
371 | 371 | |
@@ -375,7 +375,7 b' class TestGistsController(TestController' | |||
|
375 | 375 | password = 'test' |
|
376 | 376 | user = user_util.create_user( |
|
377 | 377 | firstname=xss_atack_string, password=password) |
|
378 | create_gist('gist', gist_type='public', owner=user.username) | |
|
378 | create_gist(b'gist', gist_type='public', owner=user.username) | |
|
379 | 379 | response = self.app.get(route_path('gists_show')) |
|
380 | 380 | response.mustcontain(xss_escaped_string) |
|
381 | 381 | |
@@ -385,6 +385,6 b' class TestGistsController(TestController' | |||
|
385 | 385 | password = 'test' |
|
386 | 386 | user = user_util.create_user( |
|
387 | 387 | lastname=xss_atack_string, password=password) |
|
388 | create_gist('gist', gist_type='public', owner=user.username) | |
|
388 | create_gist(b'gist', gist_type='public', owner=user.username) | |
|
389 | 389 | response = self.app.get(route_path('gists_show')) |
|
390 | 390 | response.mustcontain(xss_escaped_string) |
@@ -18,18 +18,20 b'' | |||
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | import json | |
|
22 | ||
|
23 | 21 | from . import assert_and_get_repo_list_content |
|
24 | 22 | from rhodecode.tests import TestController |
|
25 | 23 | from rhodecode.tests.fixture import Fixture |
|
26 | 24 | from rhodecode.model.db import Repository |
|
25 | from rhodecode.lib.ext_json import json | |
|
26 | ||
|
27 | 27 | |
|
28 | 28 | fixture = Fixture() |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | def route_path(name, params=None, **kwargs): |
|
32 | import urllib.request
|
32 | import urllib.request | |
|
33 | import urllib.parse | |
|
34 | import urllib.error | |
|
33 | 35 | |
|
34 | 36 | base_url = { |
|
35 | 37 | 'repo_list_data': '/_repos', |
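The `route_path` helpers touched throughout these files previously leaned on the Python 2 era urllib APIs; under Python 3 that functionality is split across `urllib.request`, `urllib.parse` and `urllib.error`, which is why each helper now imports all three. A minimal sketch of how such a helper presumably builds its URLs — only the import change appears in the diff, so the body below is an assumption:

```python
import urllib.parse

def route_path(name, params=None, **kwargs):
    # assumed reconstruction: the lookup table and query-string handling
    # are illustrative, only the imports come from the diff
    base_url = {
        'repo_list_data': '/_repos',
    }[name].format(**kwargs)
    if params:
        base_url = '{}?{}'.format(base_url, urllib.parse.urlencode(params))
    return base_url

assert route_path('repo_list_data', params={'page': 2}) == '/_repos?page=2'
```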
@@ -17,19 +17,19 b'' | |||
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | ||
|
21 | import json | |
|
22 | 20 | import pytest |
|
23 | 21 | |
|
24 | 22 | from rhodecode.tests import TestController |
|
25 | 23 | from rhodecode.tests.fixture import Fixture |
|
26 | ||
|
24 | from rhodecode.lib.ext_json import json | |
|
27 | 25 | |
|
28 | 26 | fixture = Fixture() |
|
29 | 27 | |
|
30 | 28 | |
|
31 | 29 | def route_path(name, params=None, **kwargs): |
|
32 | import urllib.request
|
30 | import urllib.request | |
|
31 | import urllib.parse | |
|
32 | import urllib.error | |
|
33 | 33 | |
|
34 | 34 | base_url = { |
|
35 | 35 | 'user_autocomplete_data': '/_users', |
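The same files drop the stdlib `import json` in favour of `rhodecode.lib.ext_json.json`, so the tests serialize data with the application's own encoder. The sketch below shows the kind of extended encoder such a module typically provides; it is an assumption, not the actual ext_json implementation:

```python
import datetime
import decimal
import json as stdlib_json

class _ExtendedEncoder(stdlib_json.JSONEncoder):
    # illustrative: handle a few types that plain json.dumps would reject
    def default(self, obj):
        if isinstance(obj, (datetime.date, datetime.datetime)):
            return obj.isoformat()
        if isinstance(obj, decimal.Decimal):
            return float(obj)
        return super().default(obj)

def dumps(data, **kwargs):
    return stdlib_json.dumps(data, cls=_ExtendedEncoder, **kwargs)

print(dumps({'when': datetime.date(2020, 1, 1)}))  # {"when": "2020-01-01"}
```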
@@ -1,22 +1,4 b'' | |||
|
1 | 1 | |
|
2 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
|
3 | # | |
|
4 | # This program is free software: you can redistribute it and/or modify | |
|
5 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
6 | # (only), as published by the Free Software Foundation. | |
|
7 | # | |
|
8 | # This program is distributed in the hope that it will be useful, | |
|
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
11 | # GNU General Public License for more details. | |
|
12 | # | |
|
13 | # You should have received a copy of the GNU Affero General Public License | |
|
14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
15 | # | |
|
16 | # This program is dual-licensed. If you wish to learn more about the | |
|
17 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
19 | # -*- coding: utf-8 -*- | |
|
20 | 2 | |
|
21 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
22 | 4 | # |
@@ -36,19 +18,39 b'' | |||
|
36 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
37 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
38 | 20 | |
|
39 | import json | |
|
21 | ||
|
22 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
|
23 | # | |
|
24 | # This program is free software: you can redistribute it and/or modify | |
|
25 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
26 | # (only), as published by the Free Software Foundation. | |
|
27 | # | |
|
28 | # This program is distributed in the hope that it will be useful, | |
|
29 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
30 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
31 | # GNU General Public License for more details. | |
|
32 | # | |
|
33 | # You should have received a copy of the GNU Affero General Public License | |
|
34 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
35 | # | |
|
36 | # This program is dual-licensed. If you wish to learn more about the | |
|
37 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
38 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
40 | 39 | |
|
41 | 40 | import pytest |
|
42 | 41 | |
|
43 | 42 | from rhodecode.tests import TestController |
|
44 | 43 | from rhodecode.tests.fixture import Fixture |
|
44 | from rhodecode.lib.ext_json import json | |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | fixture = Fixture() |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | def route_path(name, params=None, **kwargs): |
|
51 | import urllib.request
|
51 | import urllib.request | |
|
52 | import urllib.parse | |
|
53 | import urllib.error | |
|
52 | 54 | |
|
53 | 55 | base_url = { |
|
54 | 56 | 'user_autocomplete_data': '/_users', |
@@ -163,7 +163,7 b' class TestHomeController(TestController)' | |||
|
163 | 163 | 'show_version', state, 'bool') |
|
164 | 164 | Session().add(sett) |
|
165 | 165 | Session().commit() |
|
166 | SettingsModel().invalidate_settings_cache() | |
|
166 | SettingsModel().invalidate_settings_cache(hard=True) | |
|
167 | 167 | |
|
168 | 168 | response = self.app.get(route_path('home')) |
|
169 | 169 | if state is True: |
@@ -27,7 +27,9 b' from rhodecode.model.db import UserFollo' | |||
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | def route_path(name, params=None, **kwargs): |
|
30 | import urllib.request
|
30 | import urllib.request | |
|
31 | import urllib.parse | |
|
32 | import urllib.error | |
|
31 | 33 | |
|
32 | 34 | base_url = { |
|
33 | 35 | 'journal': ADMIN_PREFIX + '/journal', |
@@ -38,7 +38,9 b" whitelist_view = ['RepoCommitsView:repo_" | |||
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | def route_path(name, params=None, **kwargs): |
|
41 | import urllib.request
|
41 | import urllib.request | |
|
42 | import urllib.parse | |
|
43 | import urllib.error | |
|
42 | 44 | from rhodecode.apps._base import ADMIN_PREFIX |
|
43 | 45 | |
|
44 | 46 | base_url = { |
@@ -160,18 +162,28 b' class TestLoginController(object):' | |||
|
160 | 162 | 'file:///etc/passwd', |
|
161 | 163 | 'ftp://some.ftp.server', |
|
162 | 164 | 'http://other.domain', |
|
163 | '/\r\nX-Forwarded-Host: http://example.org', | |
|
164 | 165 | ], ids=no_newline_id_generator) |
|
165 | 166 | def test_login_bad_came_froms(self, url_came_from): |
|
166 | 167 | _url = '{}?came_from={}'.format(route_path('login'), url_came_from) |
|
167 | 168 | response = self.app.post( |
|
168 | _url, | |
|
169 | {'username': 'test_admin', 'password': 'test12'}) | |
|
169 | _url, {'username': 'test_admin', 'password': 'test12'}, status=302) | |
|
170 | 170 | assert response.status == '302 Found' |
|
171 | 171 | response = response.follow() |
|
172 | 172 | assert response.status == '200 OK' |
|
173 | 173 | assert response.request.path == '/' |
|
174 | 174 | |
|
175 | @pytest.mark.xfail(reason="newline params changed behaviour in python3") | |
|
176 | @pytest.mark.parametrize("url_came_from", [ | |
|
177 | '/\r\nX-Forwarded-Host: \rhttp://example.org', | |
|
178 | ], ids=no_newline_id_generator) | |
|
179 | def test_login_bad_came_froms_404(self, url_came_from): | |
|
180 | _url = '{}?came_from={}'.format(route_path('login'), url_came_from) | |
|
181 | response = self.app.post( | |
|
182 | _url, {'username': 'test_admin', 'password': 'test12'}, status=302) | |
|
183 | ||
|
184 | response = response.follow() | |
|
185 | assert response.status == '404 Not Found' | |
|
186 | ||
|
175 | 187 | def test_login_short_password(self): |
|
176 | 188 | response = self.app.post(route_path('login'), |
|
177 | 189 | {'username': 'test_admin', |
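The new xfail-marked test splits off the `came_from` value that embeds a raw CR/LF. Presumably Python 3's http/WSGI stack no longer lets such a value flow into a redirect header, so this payload now ends in a 404 instead of silently landing on `/`. A small illustration of how the payload travels percent-encoded in the URL; the exact server-side behaviour is an assumption here:

```python
from urllib.parse import quote

# header-injection style payload used by the test
payload = '/\r\nX-Forwarded-Host: http://example.org'

# in the came_from query parameter it is percent-encoded; the CR/LF only becomes
# dangerous if the server reflects it unescaped into a Location header
assert quote(payload) == '/%0D%0AX-Forwarded-Host%3A%20http%3A//example.org'
```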
@@ -184,7 +196,7 b' class TestLoginController(object):' | |||
|
184 | 196 | response = self.app.post( |
|
185 | 197 | route_path('login'), |
|
186 | 198 | {'username': user_regular.username, |
|
187 | 'password':
|
199 | 'password': 'invalid-non-asci\xe4'.encode('utf8')}) | |
|
188 | 200 | |
|
189 | 201 | response.mustcontain('invalid user name') |
|
190 | 202 | response.mustcontain('invalid password') |
@@ -486,6 +498,10 b' class TestLoginController(object):' | |||
|
486 | 498 | auth_token = user_admin.api_key |
|
487 | 499 | |
|
488 | 500 | with fixture.anon_access(False): |
|
501 | # webtest uses linter to check if response is bytes, | |
|
502 | # and we use memoryview here as a wrapper, quick turn-off | |
|
503 | self.app.lint = False | |
|
504 | ||
|
489 | 505 | self.app.get( |
|
490 | 506 | route_path('repo_commit_raw', |
|
491 | 507 | repo_name=HG_REPO, commit_id='tip', |
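Several hunks in this file (and in the commit-view tests further down) prepend the same `self.app.lint = False` snippet. The inline comment gives the reason: WebTest's lint middleware insists that every chunk of the WSGI body iterable is a `bytes` instance, and the raw-diff responses are now wrapped in a `memoryview`. A tiny illustration of the mismatch, assuming that is indeed the check being tripped:

```python
# the raw response chunk is a memoryview wrapper, not a bytes instance
chunk = memoryview(b'diff --git a/README b/README')

assert not isinstance(chunk, bytes)                       # what the linter objects to
assert bytes(chunk) == b'diff --git a/README b/README'    # the payload itself is fine
```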
@@ -511,6 +527,9 b' class TestLoginController(object):' | |||
|
511 | 527 | assert auth_token |
|
512 | 528 | |
|
513 | 529 | with fixture.anon_access(False): |
|
530 | # webtest uses linter to check if response is bytes, | |
|
531 | # and we use memoryview here as a wrapper, quick turn-off | |
|
532 | self.app.lint = False | |
|
514 | 533 | self.app.get( |
|
515 | 534 | route_path('repo_commit_raw', |
|
516 | 535 | repo_name=HG_REPO, commit_id='tip', |
@@ -536,6 +555,10 b' class TestLoginController(object):' | |||
|
536 | 555 | with mock.patch.dict('rhodecode.CONFIG', whitelist): |
|
537 | 556 | |
|
538 | 557 | with fixture.anon_access(False): |
|
558 | # webtest uses linter to check if response is bytes, | |
|
559 | # and we use memoryview here as a wrapper, quick turn-off | |
|
560 | self.app.lint = False | |
|
561 | ||
|
539 | 562 | self.app.get( |
|
540 | 563 | route_path('repo_commit_raw', |
|
541 | 564 | repo_name=HG_REPO, commit_id='tip', |
@@ -552,6 +575,9 b' class TestLoginController(object):' | |||
|
552 | 575 | TEST_USER_ADMIN_LOGIN, 'test') |
|
553 | 576 | Session().commit() |
|
554 | 577 | with fixture.anon_access(False): |
|
578 | # webtest uses linter to check if response is bytes, | |
|
579 | # and we use memoryview here as a wrapper, quick turn-off | |
|
580 | self.app.lint = False | |
|
555 | 581 | self.app.get( |
|
556 | 582 | route_path('repo_commit_raw', |
|
557 | 583 | repo_name=HG_REPO, commit_id='tip', |
@@ -572,6 +598,9 b' class TestLoginController(object):' | |||
|
572 | 598 | Session().add(new_auth_token) |
|
573 | 599 | Session().commit() |
|
574 | 600 | with fixture.anon_access(False): |
|
601 | # webtest uses linter to check if response is bytes, | |
|
602 | # and we use memoryview here as a wrapper, quick turn-off | |
|
603 | self.app.lint = False | |
|
575 | 604 | self.app.get( |
|
576 | 605 | route_path('repo_commit_raw', |
|
577 | 606 | repo_name=HG_REPO, commit_id='tip', |
@@ -30,7 +30,9 b' fixture = Fixture()' | |||
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | def route_path(name, params=None, **kwargs): |
|
33 | import urllib.request
|
33 | import urllib.request | |
|
34 | import urllib.parse | |
|
35 | import urllib.error | |
|
34 | 36 | from rhodecode.apps._base import ADMIN_PREFIX |
|
35 | 37 | |
|
36 | 38 | base_url = { |
@@ -34,7 +34,9 b' fixture = Fixture()' | |||
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | def route_path(name, params=None, **kwargs): |
|
37 | import urllib.request
|
37 | import urllib.request | |
|
38 | import urllib.parse | |
|
39 | import urllib.error | |
|
38 | 40 | from rhodecode.apps._base import ADMIN_PREFIX |
|
39 | 41 | |
|
40 | 42 | base_url = { |
@@ -133,7 +135,8 b' class TestNotificationsController(TestCo' | |||
|
133 | 135 | u2 = User.get(u2.user_id) |
|
134 | 136 | |
|
135 | 137 | # check DB |
|
136 | get_notif = lambda un: [x.notification for x in un] | |
|
138 | def get_notif(un): | |
|
139 | return [x.notification for x in un] | |
|
137 | 140 | assert get_notif(cur_user.notifications) == [notification] |
|
138 | 141 | assert get_notif(u1.notifications) == [notification] |
|
139 | 142 | assert get_notif(u2.notifications) == [notification] |
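The notification helper changes from an assigned lambda to a small `def`; the behaviour is identical, but the `def` form keeps a real `__name__` for tracebacks and satisfies the flake8 E731 rule against assigning lambdas. Side by side:

```python
# before: flagged by flake8 (E731 "do not assign a lambda expression, use a def")
get_notif = lambda un: [x.notification for x in un]

# after: same behaviour, but the function has a proper name in tracebacks
def get_notif(un):
    return [x.notification for x in un]
```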
@@ -28,7 +28,9 b' fixture = Fixture()' | |||
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | def route_path(name, params=None, **kwargs): |
|
31 | import urllib.request
|
31 | import urllib.request | |
|
32 | import urllib.parse | |
|
33 | import urllib.error | |
|
32 | 34 | from rhodecode.apps._base import ADMIN_PREFIX |
|
33 | 35 | |
|
34 | 36 | base_url = { |
@@ -23,7 +23,9 b' from rhodecode.tests import assert_sessi' | |||
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | def route_path(name, params=None, **kwargs): |
|
26 | import urllib.request
|
26 | import urllib.request | |
|
27 | import urllib.parse | |
|
28 | import urllib.error | |
|
27 | 29 | |
|
28 | 30 | base_url = { |
|
29 | 31 | 'edit_repo_group_advanced': |
@@ -23,7 +23,9 b' from rhodecode.tests.utils import permis' | |||
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | def route_path(name, params=None, **kwargs): |
|
26 | import urllib.request
|
26 | import urllib.request | |
|
27 | import urllib.parse | |
|
28 | import urllib.error | |
|
27 | 29 | |
|
28 | 30 | base_url = { |
|
29 | 31 | 'edit_repo_group_perms': |
@@ -23,7 +23,9 b' from rhodecode.tests import assert_sessi' | |||
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | def route_path(name, params=None, **kwargs): |
|
26 | import urllib.request
|
26 | import urllib.request | |
|
27 | import urllib.parse | |
|
28 | import urllib.error | |
|
27 | 29 | |
|
28 | 30 | base_url = { |
|
29 | 31 | 'edit_repo_group': '/{repo_group_name}/_edit', |
@@ -22,7 +22,9 b' from rhodecode.model.db import Repositor' | |||
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | def route_path(name, params=None, **kwargs): |
|
25 | import urllib.request
|
25 | import urllib.request | |
|
26 | import urllib.parse | |
|
27 | import urllib.error | |
|
26 | 28 | |
|
27 | 29 | base_url = { |
|
28 | 30 | 'pullrequest_show_all': '/{repo_name}/pull-request', |
@@ -22,7 +22,9 b' from rhodecode.model.db import Repositor' | |||
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | def route_path(name, params=None, **kwargs): |
|
25 | import urllib.request
|
25 | import urllib.request | |
|
26 | import urllib.parse | |
|
27 | import urllib.error | |
|
26 | 28 | |
|
27 | 29 | base_url = { |
|
28 | 30 | 'bookmarks_home': '/{repo_name}/bookmarks', |
@@ -22,7 +22,9 b' from rhodecode.model.db import Repositor' | |||
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | def route_path(name, params=None, **kwargs): |
|
25 | import urllib.request
|
25 | import urllib.request | |
|
26 | import urllib.parse | |
|
27 | import urllib.error | |
|
26 | 28 | |
|
27 | 29 | base_url = { |
|
28 | 30 | 'branches_home': '/{repo_name}/branches', |
@@ -28,7 +28,9 b' MATCH_HASH = re.compile(r\'<span class="c' | |||
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | def route_path(name, params=None, **kwargs): |
|
31 | import urllib.request
|
31 | import urllib.request | |
|
32 | import urllib.parse | |
|
33 | import urllib.error | |
|
32 | 34 | |
|
33 | 35 | base_url = { |
|
34 | 36 | 'repo_changelog': '/{repo_name}/changelog', |
@@ -43,7 +45,7 b' def route_path(name, params=None, **kwar' | |||
|
43 | 45 | |
|
44 | 46 | |
|
45 | 47 | def assert_commits_on_page(response, indexes): |
|
46 | found_indexes = [int(idx) for idx in MATCH_HASH.findall(response.
|
48 | found_indexes = [int(idx) for idx in MATCH_HASH.findall(response.text)] | |
|
47 | 49 | assert found_indexes == indexes |
|
48 | 50 | |
|
49 | 51 | |
@@ -109,8 +111,7 b' class TestChangelogController(TestContro' | |||
|
109 | 111 | assert expected_warning in response.text |
|
110 | 112 | |
|
111 | 113 | @pytest.mark.xfail_backends("svn", reason="Depends on branch support") |
|
112 | def test_changelog_filtered_by_branch_with_merges( | |
|
113 | self, autologin_user, backend): | |
|
114 | def test_changelog_filtered_by_branch_with_merges(self, autologin_user, backend): | |
|
114 | 115 | |
|
115 | 116 | # Note: The changelog of branch "b" does not contain the commit "a1" |
|
116 | 117 | # although this is a parent of commit "b1". And branch "b" has commits |
@@ -27,7 +27,9 b' from rhodecode.lib import helpers as h' | |||
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | def route_path(name, params=None, **kwargs): |
|
30 | import urllib.request
|
30 | import urllib.request | |
|
31 | import urllib.parse | |
|
32 | import urllib.error | |
|
31 | 33 | |
|
32 | 34 | base_url = { |
|
33 | 35 | 'repo_commit': '/{repo_name}/changeset/{commit_id}', |
@@ -71,7 +73,7 b' class TestRepoCommitCommentsView(TestCon' | |||
|
71 | 73 | self.log_user() |
|
72 | 74 | commit = backend.repo.get_commit('300') |
|
73 | 75 | commit_id = commit.raw_id |
|
74 | text =
|
76 | text = 'CommentOnCommit' | |
|
75 | 77 | |
|
76 | 78 | params = {'text': text, 'csrf_token': self.csrf_token, |
|
77 | 79 | 'comment_type': comment_type} |
@@ -101,7 +103,7 b' class TestRepoCommitCommentsView(TestCon' | |||
|
101 | 103 | author, comment_type, h.show_id(commit), backend.repo_name) |
|
102 | 104 | assert sbj == notification.subject |
|
103 | 105 | |
|
104 | lnk = (
|
106 | lnk = ('/{0}/changeset/{1}#comment-{2}'.format( | |
|
105 | 107 | backend.repo_name, commit_id, comment_id)) |
|
106 | 108 | assert lnk in notification.body |
|
107 | 109 | |
@@ -110,7 +112,7 b' class TestRepoCommitCommentsView(TestCon' | |||
|
110 | 112 | self.log_user() |
|
111 | 113 | commit = backend.repo.get_commit('300') |
|
112 | 114 | commit_id = commit.raw_id |
|
113 | text =
|
115 | text = 'CommentOnCommit' | |
|
114 | 116 | f_path = 'vcs/web/simplevcs/views/repository.py' |
|
115 | 117 | line = 'n1' |
|
116 | 118 | |
@@ -163,7 +165,7 b' class TestRepoCommitCommentsView(TestCon' | |||
|
163 | 165 | |
|
164 | 166 | assert sbj == notification.subject |
|
165 | 167 | |
|
166 | lnk = (
|
168 | lnk = ('/{0}/changeset/{1}#comment-{2}'.format( | |
|
167 | 169 | backend.repo_name, commit_id, comment.comment_id)) |
|
168 | 170 | assert lnk in notification.body |
|
169 | 171 | assert 'on line n1' in notification.body |
@@ -172,7 +174,7 b' class TestRepoCommitCommentsView(TestCon' | |||
|
172 | 174 | self.log_user() |
|
173 | 175 | |
|
174 | 176 | commit_id = backend.repo.get_commit('300').raw_id |
|
175 | text =
|
177 | text = '@test_regular check CommentOnCommit' | |
|
176 | 178 | |
|
177 | 179 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
178 | 180 | self.app.post( |
@@ -193,13 +195,13 b' class TestRepoCommitCommentsView(TestCon' | |||
|
193 | 195 | users = [x.username for x in notification.recipients] |
|
194 | 196 | |
|
195 | 197 | # test_regular gets notification by @mention |
|
196 | assert sorted(users) == [
|
198 | assert sorted(users) == ['test_admin', 'test_regular'] | |
|
197 | 199 | |
|
198 | 200 | def test_create_with_status_change(self, backend): |
|
199 | 201 | self.log_user() |
|
200 | 202 | commit = backend.repo.get_commit('300') |
|
201 | 203 | commit_id = commit.raw_id |
|
202 | text =
|
204 | text = 'CommentOnCommit' | |
|
203 | 205 | f_path = 'vcs/web/simplevcs/views/repository.py' |
|
204 | 206 | line = 'n1' |
|
205 | 207 | |
@@ -233,14 +235,14 b' class TestRepoCommitCommentsView(TestCon' | |||
|
233 | 235 | author, h.show_id(commit), backend.repo_name) |
|
234 | 236 | assert sbj == notification.subject |
|
235 | 237 | |
|
236 | lnk = (
|
238 | lnk = ('/{0}/changeset/{1}#comment-{2}'.format( | |
|
237 | 239 | backend.repo_name, commit_id, comment_id)) |
|
238 | 240 | assert lnk in notification.body |
|
239 | 241 | |
|
240 | 242 | def test_delete(self, backend): |
|
241 | 243 | self.log_user() |
|
242 | 244 | commit_id = backend.repo.get_commit('300').raw_id |
|
243 | text =
|
245 | text = 'CommentOnCommit' | |
|
244 | 246 | |
|
245 | 247 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
246 | 248 | self.app.post( |
@@ -271,7 +273,7 b' class TestRepoCommitCommentsView(TestCon' | |||
|
271 | 273 | def test_edit(self, backend): |
|
272 | 274 | self.log_user() |
|
273 | 275 | commit_id = backend.repo.get_commit('300').raw_id |
|
274 | text =
|
276 | text = 'CommentOnCommit' | |
|
275 | 277 | |
|
276 | 278 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
277 | 279 | self.app.post( |
@@ -304,7 +306,7 b' class TestRepoCommitCommentsView(TestCon' | |||
|
304 | 306 | def test_edit_without_change(self, backend): |
|
305 | 307 | self.log_user() |
|
306 | 308 | commit_id = backend.repo.get_commit('300').raw_id |
|
307 | text =
|
309 | text = 'CommentOnCommit' | |
|
308 | 310 | |
|
309 | 311 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
310 | 312 | self.app.post( |
@@ -336,7 +338,7 b' class TestRepoCommitCommentsView(TestCon' | |||
|
336 | 338 | def test_edit_try_edit_already_edited(self, backend): |
|
337 | 339 | self.log_user() |
|
338 | 340 | commit_id = backend.repo.get_commit('300').raw_id |
|
339 | text =
|
341 | text = 'CommentOnCommit' | |
|
340 | 342 | |
|
341 | 343 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
342 | 344 | self.app.post( |
@@ -390,7 +392,7 b' class TestRepoCommitCommentsView(TestCon' | |||
|
390 | 392 | def test_edit_forbidden_for_immutable_comments(self, backend): |
|
391 | 393 | self.log_user() |
|
392 | 394 | commit_id = backend.repo.get_commit('300').raw_id |
|
393 | text =
|
395 | text = 'CommentOnCommit' | |
|
394 | 396 | |
|
395 | 397 | params = {'text': text, 'csrf_token': self.csrf_token, 'version': '0'} |
|
396 | 398 | self.app.post( |
@@ -429,7 +431,7 b' class TestRepoCommitCommentsView(TestCon' | |||
|
429 | 431 | def test_delete_forbidden_for_immutable_comments(self, backend): |
|
430 | 432 | self.log_user() |
|
431 | 433 | commit_id = backend.repo.get_commit('300').raw_id |
|
432 | text =
|
434 | text = 'CommentOnCommit' | |
|
433 | 435 | |
|
434 | 436 | params = {'text': text, 'csrf_token': self.csrf_token} |
|
435 | 437 | self.app.post( |
@@ -24,7 +24,9 b' from rhodecode.lib.helpers import _short' | |||
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | def route_path(name, params=None, **kwargs): |
|
27 | import urllib.request
|
27 | import urllib.request | |
|
28 | import urllib.parse | |
|
29 | import urllib.error | |
|
28 | 30 | |
|
29 | 31 | base_url = { |
|
30 | 32 | 'repo_commit': '/{repo_name}/changeset/{commit_id}', |
@@ -55,23 +57,31 b' class TestRepoCommitView(object):' | |||
|
55 | 57 | |
|
56 | 58 | def test_show_raw(self, backend): |
|
57 | 59 | commit_id = self.commit_id[backend.alias] |
|
60 | # webtest uses linter to check if response is bytes, | |
|
61 | # and we use memoryview here as a wrapper, quick turn-off | |
|
62 | self.app.lint = False | |
|
63 | ||
|
58 | 64 | response = self.app.get(route_path( |
|
59 | 65 | 'repo_commit_raw', |
|
60 | 66 | repo_name=backend.repo_name, commit_id=commit_id)) |
|
61 | assert response.
|
67 | assert response.body == self.diffs[backend.alias] | |
|
62 | 68 | |
|
63 | 69 | def test_show_raw_patch(self, backend): |
|
64 | 70 | response = self.app.get(route_path( |
|
65 | 71 | 'repo_commit_patch', repo_name=backend.repo_name, |
|
66 | 72 | commit_id=self.commit_id[backend.alias])) |
|
67 | assert response.
|
73 | assert response.body == self.patches[backend.alias] | |
|
68 | 74 | |
|
69 | 75 | def test_commit_download(self, backend): |
|
76 | # webtest uses linter to check if response is bytes, | |
|
77 | # and we use memoryview here as a wrapper, quick turn-off | |
|
78 | self.app.lint = False | |
|
79 | ||
|
70 | 80 | response = self.app.get(route_path( |
|
71 | 81 | 'repo_commit_download', |
|
72 | 82 | repo_name=backend.repo_name, |
|
73 | 83 | commit_id=self.commit_id[backend.alias])) |
|
74 | assert response.
|
84 | assert response.body == self.diffs[backend.alias] | |
|
75 | 85 | |
|
76 | 86 | def test_single_commit_page_different_ops(self, backend): |
|
77 | 87 | commit_id = { |
@@ -257,7 +267,7 b' class TestRepoCommitView(object):' | |||
|
257 | 267 | } |
|
258 | 268 | |
|
259 | 269 | diffs = { |
|
260 | 'hg': r"""diff --git a/README b/README | |
|
270 | 'hg': br"""diff --git a/README b/README | |
|
261 | 271 | new file mode 120000 |
|
262 | 272 | --- /dev/null |
|
263 | 273 | +++ b/README |
@@ -265,7 +275,7 b' new file mode 120000' | |||
|
265 | 275 | +README.rst |
|
266 | 276 | \ No newline at end of file |
|
267 | 277 | """, |
|
268 | 'git': r"""diff --git a/README b/README | |
|
278 | 'git': br"""diff --git a/README b/README | |
|
269 | 279 | new file mode 120000 |
|
270 | 280 | index 0000000..92cacd2 |
|
271 | 281 | --- /dev/null |
@@ -274,7 +284,7 b' index 0000000..92cacd2' | |||
|
274 | 284 | +README.rst |
|
275 | 285 | \ No newline at end of file |
|
276 | 286 | """, |
|
277 | 'svn': """Index: README | |
|
287 | 'svn': b"""Index: README | |
|
278 | 288 | =================================================================== |
|
279 | 289 | diff --git a/README b/README |
|
280 | 290 | new file mode 10644 |
@@ -287,7 +297,7 b' new file mode 10644' | |||
|
287 | 297 | } |
|
288 | 298 | |
|
289 | 299 | patches = { |
|
290 | 'hg': r"""# HG changeset patch | |
|
300 | 'hg': br"""# HG changeset patch | |
|
291 | 301 | # User Marcin Kuzminski <marcin@python-works.com> |
|
292 | 302 | # Date 2014-01-07 12:21:40 |
|
293 | 303 | # Node ID 2062ec7beeeaf9f44a1c25c41479565040b930b2 |
@@ -296,7 +306,7 b' new file mode 10644' | |||
|
296 | 306 | Added a symlink |
|
297 | 307 | |
|
298 | 308 | """ + diffs['hg'], |
|
299 | 'git': r"""From fd627b9e0dd80b47be81af07c4a98518244ed2f7 2014-01-07 12:22:20 | |
|
309 | 'git': br"""From fd627b9e0dd80b47be81af07c4a98518244ed2f7 2014-01-07 12:22:20 | |
|
300 | 310 | From: Marcin Kuzminski <marcin@python-works.com> |
|
301 | 311 | Date: 2014-01-07 12:22:20 |
|
302 | 312 | Subject: [PATCH] Added a symlink |
@@ -304,7 +314,7 b' Subject: [PATCH] Added a symlink' | |||
|
304 | 314 | --- |
|
305 | 315 | |
|
306 | 316 | """ + diffs['git'], |
|
307 | 'svn': r"""# SVN changeset patch | |
|
317 | 'svn': br"""# SVN changeset patch | |
|
308 | 318 | # User marcin |
|
309 | 319 | # Date 2014-09-02 12:25:22.071142 |
|
310 | 320 | # Revision 393 |
@@ -27,7 +27,9 b' from rhodecode.tests.utils import Assert' | |||
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | def route_path(name, params=None, **kwargs): |
|
30 | import urllib.request
|
30 | import urllib.request | |
|
31 | import urllib.parse | |
|
32 | import urllib.error | |
|
31 | 33 | |
|
32 | 34 | base_url = { |
|
33 | 35 | 'repo_compare_select': '/{repo_name}/compare', |
@@ -65,30 +67,30 b' class TestCompareView(object):' | |||
|
65 | 67 | # |
|
66 | 68 | |
|
67 | 69 | fork = backend.create_repo() |
|
70 | origin = backend.create_repo() | |
|
68 | 71 | |
|
69 | 72 | # prepare fork |
|
70 | 73 | commit0 = commit_change( |
|
71 | fork.repo_name, filename='file1', content='A', | |
|
72 | message='A', vcs_type=backend.alias, parent=None, newfile=True) | |
|
74 | fork.repo_name, filename=b'file1', content=b'A', | |
|
75 | message='A - Initial Commit', vcs_type=backend.alias, parent=None, newfile=True) | |
|
73 | 76 | |
|
74 | 77 | commit1 = commit_change( |
|
75 | fork.repo_name, filename='file1', content='B', | |
|
78 | fork.repo_name, filename=b'file1', content=b'B', | |
|
76 | 79 | message='B, child of A', vcs_type=backend.alias, parent=commit0) |
|
77 | 80 | |
|
78 | 81 | commit_change( # commit 2 |
|
79 | fork.repo_name, filename='file1', content='C', | |
|
82 | fork.repo_name, filename=b'file1', content=b'C', | |
|
80 | 83 | message='C, child of B', vcs_type=backend.alias, parent=commit1) |
|
81 | 84 | |
|
82 | 85 | commit3 = commit_change( |
|
83 | fork.repo_name, filename='file1', content='D', | |
|
86 | fork.repo_name, filename=b'file1', content=b'D', | |
|
84 | 87 | message='D, child of A', vcs_type=backend.alias, parent=commit0) |
|
85 | 88 | |
|
86 | 89 | commit4 = commit_change( |
|
87 | fork.repo_name, filename='file1', content='E', | |
|
90 | fork.repo_name, filename=b'file1', content=b'E', | |
|
88 | 91 | message='E, child of D', vcs_type=backend.alias, parent=commit3) |
|
89 | 92 | |
|
90 | 93 | # prepare origin repository, taking just the history up to D |
|
91 | origin = backend.create_repo() | |
|
92 | 94 | |
|
93 | 95 | origin_repo = origin.scm_instance(cache=False) |
|
94 | 96 | origin_repo.config.clear_section('hooks') |
@@ -98,7 +100,7 b' class TestCompareView(object):' | |||
|
98 | 100 | # Verify test fixture setup |
|
99 | 101 | # This does not work for git |
|
100 | 102 | if backend.alias != 'git': |
|
101 | assert 5 == len(fork.scm_instance().commit_ids) | |
|
103 | assert 5 == len(fork.scm_instance(cache=False).commit_ids) | |
|
102 | 104 | assert 2 == len(origin_repo.commit_ids) |
|
103 | 105 | |
|
104 | 106 | # Comparing the revisions |
@@ -108,7 +110,8 b' class TestCompareView(object):' | |||
|
108 | 110 | source_ref_type="rev", source_ref=commit3.raw_id, |
|
109 | 111 | target_ref_type="rev", target_ref=commit4.raw_id, |
|
110 | 112 | params=dict(merge='1', target_repo=fork.repo_name) |
|
111 | )
|
113 | ), | |
|
114 | status=200) | |
|
112 | 115 | |
|
113 | 116 | compare_page = ComparePage(response) |
|
114 | 117 | compare_page.contains_commits([commit4]) |
@@ -119,7 +122,7 b' class TestCompareView(object):' | |||
|
119 | 122 | |
|
120 | 123 | # commit something ! |
|
121 | 124 | commit0 = commit_change( |
|
122 | repo1.repo_name, filename='file1', content='line1\n', | |
|
125 | repo1.repo_name, filename=b'file1', content=b'line1\n', | |
|
123 | 126 | message='commit1', vcs_type=backend.alias, parent=None, |
|
124 | 127 | newfile=True) |
|
125 | 128 | |
@@ -128,11 +131,11 b' class TestCompareView(object):' | |||
|
128 | 131 | |
|
129 | 132 | # add two extra commit into fork |
|
130 | 133 | commit1 = commit_change( |
|
131 | repo2.repo_name, filename='file1', content='line1\nline2\n', | |
|
134 | repo2.repo_name, filename=b'file1', content=b'line1\nline2\n', | |
|
132 | 135 | message='commit2', vcs_type=backend.alias, parent=commit0) |
|
133 | 136 | |
|
134 | 137 | commit2 = commit_change( |
|
135 | repo2.repo_name, filename='file1', content='line1\nline2\nline3\n', | |
|
138 | repo2.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n', | |
|
136 | 139 | message='commit3', vcs_type=backend.alias, parent=commit1) |
|
137 | 140 | |
|
138 | 141 | commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME |
@@ -167,7 +170,7 b' class TestCompareView(object):' | |||
|
167 | 170 | |
|
168 | 171 | # commit something ! |
|
169 | 172 | commit0 = commit_change( |
|
170 | repo1.repo_name, filename='file1', content='line1\n', | |
|
173 | repo1.repo_name, filename=b'file1', content=b'line1\n', | |
|
171 | 174 | message='commit1', vcs_type=backend.alias, parent=None, |
|
172 | 175 | newfile=True) |
|
173 | 176 | |
@@ -176,17 +179,17 b' class TestCompareView(object):' | |||
|
176 | 179 | |
|
177 | 180 | # now commit something to origin repo |
|
178 | 181 | commit_change( |
|
179 | repo1.repo_name, filename='file2', content='line1file2\n', | |
|
182 | repo1.repo_name, filename=b'file2', content=b'line1file2\n', | |
|
180 | 183 | message='commit2', vcs_type=backend.alias, parent=commit0, |
|
181 | 184 | newfile=True) |
|
182 | 185 | |
|
183 | 186 | # add two extra commit into fork |
|
184 | 187 | commit1 = commit_change( |
|
185 | repo2.repo_name, filename='file1', content='line1\nline2\n', | |
|
188 | repo2.repo_name, filename=b'file1', content=b'line1\nline2\n', | |
|
186 | 189 | message='commit2', vcs_type=backend.alias, parent=commit0) |
|
187 | 190 | |
|
188 | 191 | commit2 = commit_change( |
|
189 | repo2.repo_name, filename='file1', content='line1\nline2\nline3\n', | |
|
192 | repo2.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n', | |
|
190 | 193 | message='commit3', vcs_type=backend.alias, parent=commit1) |
|
191 | 194 | |
|
192 | 195 | commit_id1 = repo1.scm_instance().DEFAULT_BRANCH_NAME |
@@ -250,11 +253,11 b' class TestCompareView(object):' | |||
|
250 | 253 | |
|
251 | 254 | # commit something ! |
|
252 | 255 | commit0 = commit_change( |
|
253 | repo1.repo_name, filename='file1', content='line1\n', | |
|
256 | repo1.repo_name, filename=b'file1', content=b'line1\n', | |
|
254 | 257 | message='commit1', vcs_type=backend.alias, parent=None, |
|
255 | 258 | newfile=True) |
|
256 | 259 | commit1 = commit_change( |
|
257 | repo1.repo_name, filename='file1', content='line1\nline2\n', | |
|
260 | repo1.repo_name, filename=b'file1', content=b'line1\nline2\n', | |
|
258 | 261 | message='commit2', vcs_type=backend.alias, parent=commit0) |
|
259 | 262 | |
|
260 | 263 | # fork this repo |
@@ -262,19 +265,16 b' class TestCompareView(object):' | |||
|
262 | 265 | |
|
263 | 266 | # now make commit3-6 |
|
264 | 267 | commit2 = commit_change( |
|
265 | repo1.repo_name, filename='file1', content='line1\nline2\nline3\n', | |
|
268 | repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n', | |
|
266 | 269 | message='commit3', vcs_type=backend.alias, parent=commit1) |
|
267 | 270 | commit3 = commit_change( |
|
268 | repo1.repo_name, filename='file1', | |
|
269 | content='line1\nline2\nline3\nline4\n', message='commit4', | |
|
270 | vcs_type=backend.alias, parent=commit2) | |
|
271 | repo1.repo_name, filename=b'file1',content=b'line1\nline2\nline3\nline4\n', | |
|
272 | message='commit4', vcs_type=backend.alias, parent=commit2) | |
|
271 | 273 | commit4 = commit_change( |
|
272 | repo1.repo_name, filename='file1', | |
|
273 | content='line1\nline2\nline3\nline4\nline5\n', message='commit5', | |
|
274 | vcs_type=backend.alias, parent=commit3) | |
|
274 | repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\nline4\nline5\n', | |
|
275 | message='commit5', vcs_type=backend.alias, parent=commit3) | |
|
275 | 276 | commit_change( # commit 5 |
|
276 | repo1.repo_name, filename='file1', | |
|
277 | content='line1\nline2\nline3\nline4\nline5\nline6\n', | |
|
277 | repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\nline4\nline5\nline6\n', | |
|
278 | 278 | message='commit6', vcs_type=backend.alias, parent=commit4) |
|
279 | 279 | |
|
280 | 280 | response = self.app.get( |
@@ -313,11 +313,11 b' class TestCompareView(object):' | |||
|
313 | 313 | |
|
314 | 314 | # commit something ! |
|
315 | 315 | commit0 = commit_change( |
|
316 | repo1.repo_name, filename='file1', content='line1\n', | |
|
316 | repo1.repo_name, filename=b'file1', content=b'line1\n', | |
|
317 | 317 | message='commit1', vcs_type=backend.alias, parent=None, |
|
318 | 318 | newfile=True) |
|
319 | 319 | commit1 = commit_change( |
|
320 | repo1.repo_name, filename='file1', content='line1\nline2\n', | |
|
320 | repo1.repo_name, filename=b'file1', content=b'line1\nline2\n', | |
|
321 | 321 | message='commit2', vcs_type=backend.alias, parent=commit0) |
|
322 | 322 | |
|
323 | 323 | # fork this repo |
@@ -325,19 +325,19 b' class TestCompareView(object):' | |||
|
325 | 325 | |
|
326 | 326 | # now make commit3-6 |
|
327 | 327 | commit2 = commit_change( |
|
328 | repo1.repo_name, filename='file1', content='line1\nline2\nline3\n', | |
|
328 | repo1.repo_name, filename=b'file1', content=b'line1\nline2\nline3\n', | |
|
329 | 329 | message='commit3', vcs_type=backend.alias, parent=commit1) |
|
330 | 330 | commit3 = commit_change( |
|
331 | repo1.repo_name, filename='file1', | |
|
332 | content='line1\nline2\nline3\nline4\n', message='commit4', | |
|
331 | repo1.repo_name, filename=b'file1', | |
|
332 | content=b'line1\nline2\nline3\nline4\n', message='commit4', | |
|
333 | 333 | vcs_type=backend.alias, parent=commit2) |
|
334 | 334 | commit4 = commit_change( |
|
335 | repo1.repo_name, filename='file1', | |
|
336 | content='line1\nline2\nline3\nline4\nline5\n', message='commit5', | |
|
335 | repo1.repo_name, filename=b'file1', | |
|
336 | content=b'line1\nline2\nline3\nline4\nline5\n', message='commit5', | |
|
337 | 337 | vcs_type=backend.alias, parent=commit3) |
|
338 | 338 | commit5 = commit_change( |
|
339 | repo1.repo_name, filename='file1', | |
|
340 | content='line1\nline2\nline3\nline4\nline5\nline6\n', | |
|
339 | repo1.repo_name, filename=b'file1', | |
|
340 | content=b'line1\nline2\nline3\nline4\nline5\nline6\n', | |
|
341 | 341 | message='commit6', vcs_type=backend.alias, parent=commit4) |
|
342 | 342 | |
|
343 | 343 | response = self.app.get( |
@@ -399,8 +399,8 b' class TestCompareView(object):' | |||
|
399 | 399 | r1_name = repo1.repo_name |
|
400 | 400 | |
|
401 | 401 | commit0 = commit_change( |
|
402 | repo=r1_name, filename='file1', | |
|
403 | content='line1', message='commit1', vcs_type=backend.alias, | |
|
402 | repo=r1_name, filename=b'file1', | |
|
403 | content=b'line1', message='commit1', vcs_type=backend.alias, | |
|
404 | 404 | newfile=True) |
|
405 | 405 | assert repo1.scm_instance().commit_ids == [commit0.raw_id] |
|
406 | 406 | |
@@ -412,20 +412,20 b' class TestCompareView(object):' | |||
|
412 | 412 | r2_name = repo2.repo_name |
|
413 | 413 | |
|
414 | 414 | commit1 = commit_change( |
|
415 | repo=r2_name, filename='file1-fork', | |
|
416 | content='file1-line1-from-fork', message='commit1-fork', | |
|
415 | repo=r2_name, filename=b'file1-fork', | |
|
416 | content=b'file1-line1-from-fork', message='commit1-fork', | |
|
417 | 417 | vcs_type=backend.alias, parent=repo2.scm_instance()[-1], |
|
418 | 418 | newfile=True) |
|
419 | 419 | |
|
420 | 420 | commit2 = commit_change( |
|
421 | repo=r2_name, filename='file2-fork', | |
|
422 | content='file2-line1-from-fork', message='commit2-fork', | |
|
421 | repo=r2_name, filename=b'file2-fork', | |
|
422 | content=b'file2-line1-from-fork', message='commit2-fork', | |
|
423 | 423 | vcs_type=backend.alias, parent=commit1, |
|
424 | 424 | newfile=True) |
|
425 | 425 | |
|
426 | 426 | commit_change( # commit 3 |
|
427 | repo=r2_name, filename='file3-fork', | |
|
428 | content='file3-line1-from-fork', message='commit3-fork', | |
|
427 | repo=r2_name, filename=b'file3-fork', | |
|
428 | content=b'file3-line1-from-fork', message='commit3-fork', | |
|
429 | 429 | vcs_type=backend.alias, parent=commit2, newfile=True) |
|
430 | 430 | |
|
431 | 431 | # compare ! |
@@ -446,8 +446,8 b' class TestCompareView(object):' | |||
|
446 | 446 | response.mustcontain('No commits in this compare') |
|
447 | 447 | |
|
448 | 448 | commit0 = commit_change( |
|
449 | repo=r1_name, filename='file2', | |
|
450 | content='line1-added-after-fork', message='commit2-parent', | |
|
449 | repo=r1_name, filename=b'file2', | |
|
450 | content=b'line1-added-after-fork', message='commit2-parent', | |
|
451 | 451 | vcs_type=backend.alias, parent=None, newfile=True) |
|
452 | 452 | |
|
453 | 453 | # compare ! |
@@ -487,11 +487,10 b' class TestCompareView(object):' | |||
|
487 | 487 | |
|
488 | 488 | def test_errors_when_comparing_unknown_source_repo(self, backend): |
|
489 | 489 | repo = backend.repo |
|
490 | badrepo = 'badrepo' | |
|
491 | 490 | |
|
492 |
|
491 | self.app.get( | |
|
493 | 492 | route_path('repo_compare', |
|
494 | repo_name=badrepo, | |
|
493 | repo_name='badrepo', | |
|
495 | 494 | source_ref_type="rev", source_ref='tip', |
|
496 | 495 | target_ref_type="rev", target_ref='tip', |
|
497 | 496 | params=dict(merge='1', target_repo=repo.repo_name) |
@@ -23,7 +23,9 b' from .test_repo_compare import ComparePa' | |||
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | def route_path(name, params=None, **kwargs): |
|
26 | import urllib.request
|
26 | import urllib.request | |
|
27 | import urllib.parse | |
|
28 | import urllib.error | |
|
27 | 29 | |
|
28 | 30 | base_url = { |
|
29 | 31 | 'repo_compare_select': '/{repo_name}/compare', |
@@ -55,8 +57,8 b' class TestCompareView(object):' | |||
|
55 | 57 | |
|
56 | 58 | # outgoing commits between tags |
|
57 | 59 | commit_indexes = { |
|
58 | 'git': [113] + range(115, 121), | |
|
59 | 'hg': [112] + range(115, 121), | |
|
60 | 'git': [113] + list(range(115, 121)), | |
|
61 | 'hg': [112] + list(range(115, 121)), | |
|
60 | 62 | } |
|
61 | 63 | repo = backend.repo |
|
62 | 64 | commits = (repo.get_commit(commit_idx=idx) |
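The `commit_indexes` fix above is a straightforward Python 3 adjustment: `range()` now returns a lazy range object, so concatenating it to a list raises `TypeError` unless it is materialised first.

```python
# Python 3: list + range is a TypeError, so the range has to be materialised
try:
    [113] + range(115, 121)
except TypeError as exc:
    print(exc)   # can only concatenate list (not "range") to list

indexes = [113] + list(range(115, 121))
assert indexes == [113, 115, 116, 117, 118, 119, 120]
```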
@@ -29,7 +29,9 b' fixture = Fixture()' | |||
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | def route_path(name, params=None, **kwargs): |
|
32 | import urllib.request
|
32 | import urllib.request | |
|
33 | import urllib.parse | |
|
34 | import urllib.error | |
|
33 | 35 | |
|
34 | 36 | base_url = { |
|
35 | 37 | 'repo_compare_select': '/{repo_name}/compare', |
@@ -151,9 +153,9 b' class TestSideBySideDiff(object):' | |||
|
151 | 153 | |
|
152 | 154 | @pytest.mark.xfail(reason='GIT does not handle empty commit compare correct (missing 1 commit)') |
|
153 | 155 | def test_diff_side_by_side_from_0_commit(self, app, backend, backend_stub): |
|
154 | f_path = 'test_sidebyside_file.py' | |
|
155 | commit1_content = 'content-25d7e49c18b159446c\n' | |
|
156 | commit2_content = 'content-603d6c72c46d953420\n' | |
|
156 | f_path = b'test_sidebyside_file.py' | |
|
157 | commit1_content = b'content-25d7e49c18b159446c\n' | |
|
158 | commit2_content = b'content-603d6c72c46d953420\n' | |
|
157 | 159 | repo = backend.create_repo() |
|
158 | 160 | |
|
159 | 161 | commit1 = commit_change( |
@@ -185,9 +187,9 b' class TestSideBySideDiff(object):' | |||
|
185 | 187 | |
|
186 | 188 | @pytest.mark.xfail(reason='GIT does not handle empty commit compare correct (missing 1 commit)') |
|
187 | 189 | def test_diff_side_by_side_from_0_commit_with_file_filter(self, app, backend, backend_stub): |
|
188 | f_path = 'test_sidebyside_file.py' | |
|
189 | commit1_content = 'content-25d7e49c18b159446c\n' | |
|
190 | commit2_content = 'content-603d6c72c46d953420\n' | |
|
190 | f_path = b'test_sidebyside_file.py' | |
|
191 | commit1_content = b'content-25d7e49c18b159446c\n' | |
|
192 | commit2_content = b'content-603d6c72c46d953420\n' | |
|
191 | 193 | repo = backend.create_repo() |
|
192 | 194 | |
|
193 | 195 | commit1 = commit_change( |
@@ -222,7 +224,7 b' class TestSideBySideDiff(object):' | |||
|
222 | 224 | {'message': 'First commit'}, |
|
223 | 225 | {'message': 'Second commit'}, |
|
224 | 226 | {'message': 'Commit with binary', |
|
225 | 'added': [nodes.FileNode('file.empty', content='')]}, | |
|
227 | 'added': [nodes.FileNode(b'file.empty', content=b'')]}, | |
|
226 | 228 | ] |
|
227 | 229 | f_path = 'file.empty' |
|
228 | 230 | repo = backend.create_repo(commits=commits) |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 | | 
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -24,7 +23,9 b' from rhodecode.tests import TestControll' | |||
|
24 | 23 | |
|
25 | 24 | |
|
26 | 25 | def route_path(name, params=None, **kwargs): |
|
27 | import urllib.request
|
26 | import urllib.request | |
|
27 | import urllib.parse | |
|
28 | import urllib.error | |
|
28 | 29 | |
|
29 | 30 | base_url = { |
|
30 | 31 | 'rss_feed_home': '/{repo_name}/feed-rss', |
@@ -23,10 +23,11 b' import mock' | |||
|
23 | 23 | import pytest |
|
24 | 24 | |
|
25 | 25 | from rhodecode.apps.repository.tests.test_repo_compare import ComparePage |
|
26 | from rhodecode.apps.repository.views.repo_files import RepoFilesView | |
|
26 | from rhodecode.apps.repository.views.repo_files import RepoFilesView, get_archive_name, get_path_sha | |
|
27 | 27 | from rhodecode.lib import helpers as h |
|
28 | 28 | from collections import OrderedDict |
|
29 | 29 | from rhodecode.lib.ext_json import json |
|
30 | from rhodecode.lib.str_utils import safe_str | |
|
30 | 31 | from rhodecode.lib.vcs import nodes |
|
31 | 32 | |
|
32 | 33 | from rhodecode.lib.vcs.conf import settings |
@@ -46,7 +47,9 b' def get_node_history(backend_type):' | |||
|
46 | 47 | |
|
47 | 48 | |
|
48 | 49 | def route_path(name, params=None, **kwargs): |
|
49 | import urllib.request
|
50 | import urllib.request | |
|
51 | import urllib.parse | |
|
52 | import urllib.error | |
|
50 | 53 | |
|
51 | 54 | base_url = { |
|
52 | 55 | 'repo_summary': '/{repo_name}', |
@@ -506,7 +509,7 b' class TestRawFileHandling(object):' | |||
|
506 | 509 | |
|
507 | 510 | def test_raw_svg_should_not_be_rendered(self, backend): |
|
508 | 511 | backend.create_repo() |
|
509 | backend.ensure_file("xss.svg") | |
|
512 | backend.ensure_file(b"xss.svg") | |
|
510 | 513 | response = self.app.get( |
|
511 | 514 | route_path('repo_file_raw', |
|
512 | 515 | repo_name=backend.repo_name, |
@@ -523,10 +526,10 b' class TestRepositoryArchival(object):' | |||
|
523 | 526 | backend.enable_downloads() |
|
524 | 527 | commit = backend.repo.get_commit(commit_idx=173) |
|
525 | 528 | for a_type, content_type, extension in settings.ARCHIVE_SPECS: |
|
529 | path_sha = get_path_sha('/') | |
|
530 | filename = get_archive_name(backend.repo_name, commit_sha=commit.short_id, ext=extension, path_sha=path_sha) | |
|
526 | 531 | |
|
527 | short = commit.short_id + extension | |
|
528 | 532 | fname = commit.raw_id + extension |
|
529 | filename = '%s-%s' % (backend.repo_name, short) | |
|
530 | 533 | response = self.app.get( |
|
531 | 534 | route_path('repo_archivefile', |
|
532 | 535 | repo_name=backend.repo_name, |
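The archive tests stop hand-building the expected download name and instead call the same `get_archive_name`/`get_path_sha` helpers the view imports above, so the assertion follows whatever naming scheme the application produces. Those helpers are not shown in this diff; the sketch below is only a guess at their shape:

```python
import hashlib

# Hypothetical stand-ins -- the real helpers live in
# rhodecode.apps.repository.views.repo_files and may differ in detail.
def get_path_sha(at_path: str) -> str:
    # assumed: a short, stable digest of the archived sub-path
    return hashlib.sha1(at_path.encode('utf-8')).hexdigest()[:8]

def get_archive_name(repo_name, commit_sha, ext, path_sha, with_hash=True):
    # assumed layout: repo name, commit, optional path hash, then the extension
    parts = [repo_name, commit_sha]
    if with_hash:
        parts.append(path_sha)
    return '-'.join(parts) + ext

print(get_archive_name('vcs_test_hg', '27cd5cce', ext='.tar.gz',
                       path_sha=get_path_sha('/')))
```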
@@ -545,10 +548,10 b' class TestRepositoryArchival(object):' | |||
|
545 | 548 | backend.enable_downloads() |
|
546 | 549 | commit = backend.repo.get_commit(commit_idx=173) |
|
547 | 550 | for a_type, content_type, extension in settings.ARCHIVE_SPECS: |
|
551 | path_sha = get_path_sha('/') | |
|
552 | filename = get_archive_name(backend.repo_name, commit_sha=commit.short_id, ext=extension, path_sha=path_sha, with_hash=False) | |
|
548 | 553 | |
|
549 | short = 'plain' + extension | |
|
550 | 554 | fname = commit.raw_id + extension |
|
551 | filename = '%s-%s' % (backend.repo_name, short) | |
|
552 | 555 | response = self.app.get( |
|
553 | 556 | route_path('repo_archivefile', |
|
554 | 557 | repo_name=backend.repo_name, |
@@ -622,7 +625,7 b' class TestFilesDiff(object):' | |||
|
622 | 625 | commits = [ |
|
623 | 626 | {'message': 'First commit'}, |
|
624 | 627 | {'message': 'Commit with binary', |
|
625 | 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]}, | |
|
628 | 'added': [nodes.FileNode(b'file.bin', content='\0BINARY\0')]}, | |
|
626 | 629 | ] |
|
627 | 630 | repo = backend.create_repo(commits=commits) |
|
628 | 631 | |
@@ -899,7 +902,7 b' class TestModifyFilesWithWebInterface(ob' | |||
|
899 | 902 | |
|
900 | 903 | def test_edit_file_view_not_on_branch(self, backend): |
|
901 | 904 | repo = backend.create_repo() |
|
902 | backend.ensure_file("vcs/nodes.py") | |
|
905 | backend.ensure_file(b"vcs/nodes.py") | |
|
903 | 906 | |
|
904 | 907 | response = self.app.get( |
|
905 | 908 | route_path('repo_files_edit_file', |
@@ -912,7 +915,7 b' class TestModifyFilesWithWebInterface(ob' | |||
|
912 | 915 | |
|
913 | 916 | def test_edit_file_view_commit_changes(self, backend, csrf_token): |
|
914 | 917 | repo = backend.create_repo() |
|
915 | backend.ensure_file("vcs/nodes.py", content="print 'hello'") | |
|
918 | backend.ensure_file(b"vcs/nodes.py", content=b"print 'hello'") | |
|
916 | 919 | |
|
917 | 920 | response = self.app.post( |
|
918 | 921 | route_path('repo_files_update_file', |
@@ -934,7 +937,7 b' class TestModifyFilesWithWebInterface(ob' | |||
|
934 | 937 | def test_edit_file_view_commit_changes_default_message(self, backend, |
|
935 | 938 | csrf_token): |
|
936 | 939 | repo = backend.create_repo() |
|
937 | backend.ensure_file("vcs/nodes.py", content="print 'hello'") | |
|
940 | backend.ensure_file(b"vcs/nodes.py", content=b"print 'hello'") | |
|
938 | 941 | |
|
939 | 942 | commit_id = ( |
|
940 | 943 | backend.default_branch_name or |
@@ -967,7 +970,7 b' class TestModifyFilesWithWebInterface(ob' | |||
|
967 | 970 | |
|
968 | 971 | def test_delete_file_view_not_on_branch(self, backend): |
|
969 | 972 | repo = backend.create_repo() |
|
970 | backend.ensure_file('vcs/nodes.py') | |
|
973 | backend.ensure_file(b'vcs/nodes.py') | |
|
971 | 974 | |
|
972 | 975 | response = self.app.get( |
|
973 | 976 | route_path('repo_files_remove_file', |
@@ -980,7 +983,7 b' class TestModifyFilesWithWebInterface(ob' | |||
|
980 | 983 | |
|
981 | 984 | def test_delete_file_view_commit_changes(self, backend, csrf_token): |
|
982 | 985 | repo = backend.create_repo() |
|
983 | backend.ensure_file("vcs/nodes.py") | |
|
986 | backend.ensure_file(b"vcs/nodes.py") | |
|
984 | 987 | |
|
985 | 988 | response = self.app.post( |
|
986 | 989 | route_path('repo_files_delete_file', |
@@ -1015,12 +1018,18 b' class TestFilesViewOtherCases(object):' | |||
|
1015 | 1018 | 'repo_files_add_file', |
|
1016 | 1019 | repo_name=repo.repo_name, |
|
1017 | 1020 | commit_id=0, f_path='') |
|
1021 | add_new = f'<a class="alert-link" href="{repo_file_add_url}">add a new file</a>' | |
|
1022 | ||
|
1023 | repo_file_upload_url = route_path( | |
|
1024 | 'repo_files_upload_file', | |
|
1025 | repo_name=repo.repo_name, | |
|
1026 | commit_id=0, f_path='') | |
|
1027 | upload_new = f'<a class="alert-link" href="{repo_file_upload_url}">upload a new file</a>' | |
|
1018 | 1028 | |
|
1019 | 1029 | assert_session_flash( |
|
1020 | 1030 | response, |
|
1021 | 'There are no files yet. <a class="alert-link" ' | |
|
1022 | 'href="{}">Click here to add a new file.</a>' | |
|
1023 | .format(repo_file_add_url)) | |
|
1031 | 'There are no files yet. Click here to %s or %s.' % (add_new, upload_new) | |
|
1032 | ) | |
|
1024 | 1033 | |
|
1025 | 1034 | def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms( |
|
1026 | 1035 | self, backend_stub, autologin_regular_user): |
@@ -1041,12 +1050,12 b' class TestFilesViewOtherCases(object):' | |||
|
1041 | 1050 | assert_session_flash(response, no_=repo_file_add_url) |
|
1042 | 1051 | |
|
1043 | 1052 | @pytest.mark.parametrize('file_node', [ |
|
1044 | 'archive/file.zip', | |
|
1045 | 'diff/my-file.txt', | |
|
1046 | 'render.py', | |
|
1047 | 'render', | |
|
1048 | 'remove_file', | |
|
1049 | 'remove_file/to-delete.txt', | |
|
1053 | b'archive/file.zip', | |
|
1054 | b'diff/my-file.txt', | |
|
1055 | b'render.py', | |
|
1056 | b'render', | |
|
1057 | b'remove_file', | |
|
1058 | b'remove_file/to-delete.txt', | |
|
1050 | 1059 | ]) |
|
1051 | 1060 | def test_file_names_equal_to_routes_parts(self, backend, file_node): |
|
1052 | 1061 | backend.create_repo() |
@@ -1055,7 +1064,7 b' class TestFilesViewOtherCases(object):' | |||
|
1055 | 1064 | self.app.get( |
|
1056 | 1065 | route_path('repo_files', |
|
1057 | 1066 | repo_name=backend.repo_name, |
|
1058 | commit_id='tip', f_path=file_node), | |
|
1067 | commit_id='tip', f_path=safe_str(file_node)), | |
|
1059 | 1068 | status=200) |
|
1060 | 1069 | |
|
1061 | 1070 |
@@ -33,7 +33,9 b' fixture = Fixture()' | |||
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | def route_path(name, params=None, **kwargs): |
|
36 | import urllib.request
|
36 | import urllib.request | |
|
37 | import urllib.parse | |
|
38 | import urllib.error | |
|
37 | 39 | |
|
38 | 40 | base_url = { |
|
39 | 41 | 'repo_summary': '/{repo_name}', |
@@ -19,14 +19,16 b'' | |||
|
19 | 19 | |
|
20 | 20 | import pytest |
|
21 | 21 | |
|
22 | from rhodecode.lib.utils
|
22 | from rhodecode.lib.hash_utils import md5_safe | |
|
23 | 23 | from rhodecode.model.db import Repository |
|
24 | 24 | from rhodecode.model.meta import Session |
|
25 | 25 | from rhodecode.model.settings import SettingsModel, IssueTrackerSettingsModel |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | def route_path(name, params=None, **kwargs): |
|
29 | import urllib.request
|
29 | import urllib.request | |
|
30 | import urllib.parse | |
|
31 | import urllib.error | |
|
30 | 32 | |
|
31 | 33 | base_url = { |
|
32 | 34 | 'repo_summary': '/{repo_name}', |
@@ -69,9 +71,9 b' class TestRepoIssueTracker(object):' | |||
|
69 | 71 | self.app.post(post_url, post_data, status=302) |
|
70 | 72 | self.settings_model = IssueTrackerSettingsModel(repo=backend.repo) |
|
71 | 73 | settings = self.settings_model.get_repo_settings() |
|
72 | self.uid = md5(pattern) | |
|
74 | self.uid = md5_safe(pattern) | |
|
73 | 75 | assert settings[self.uid]['pat'] == pattern |
|
74 | self.another_uid = md5(another_pattern) | |
|
76 | self.another_uid = md5_safe(another_pattern) | |
|
75 | 77 | assert settings[self.another_uid]['pat'] == another_pattern |
|
76 | 78 | |
|
77 | 79 | # test pattern |
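`md5()` from the old utils module is replaced by `md5_safe()` throughout this test. Under Python 3, `hashlib.md5` only accepts bytes, so a "safe" variant presumably encodes str input before hashing. A sketch of that behaviour — an assumption, not the actual `rhodecode.lib.hash_utils` code:

```python
import hashlib

def md5_safe(value) -> str:
    # assumed behaviour: accept str or bytes, encode str first, return the hex digest
    if isinstance(value, str):
        value = value.encode('utf-8')
    return hashlib.md5(value).hexdigest()

# hashlib.md5('issuetracker_pat') would raise TypeError on Python 3,
# while the bytes and encoded-str forms hash identically:
assert md5_safe('issuetracker_pat') == md5_safe(b'issuetracker_pat')
```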
@@ -95,7 +97,7 b' class TestRepoIssueTracker(object):' | |||
|
95 | 97 | entry_key = 'issuetracker_pat_' |
|
96 | 98 | pattern = 'issuetracker_pat2' |
|
97 | 99 | old_pattern = 'issuetracker_pat' |
|
98 | old_uid = md5(old_pattern) | |
|
100 | old_uid = md5_safe(old_pattern) | |
|
99 | 101 | |
|
100 | 102 | sett = SettingsModel(repo=backend.repo).create_or_update_setting( |
|
101 | 103 | entry_key+old_uid, old_pattern, 'unicode') |
@@ -114,7 +116,7 b' class TestRepoIssueTracker(object):' | |||
|
114 | 116 | self.app.post(post_url, post_data, status=302) |
|
115 | 117 | self.settings_model = IssueTrackerSettingsModel(repo=backend.repo) |
|
116 | 118 | settings = self.settings_model.get_repo_settings() |
|
117 | self.uid = md5(pattern) | |
|
119 | self.uid = md5_safe(pattern) | |
|
118 | 120 | assert settings[self.uid]['pat'] == pattern |
|
119 | 121 | with pytest.raises(KeyError): |
|
120 | 122 | key = settings[old_uid] |
@@ -129,7 +131,7 b' class TestRepoIssueTracker(object):' | |||
|
129 | 131 | repo_name = repo.repo_name |
|
130 | 132 | entry_key = 'issuetracker_pat_' |
|
131 | 133 | pattern = 'issuetracker_pat3' |
|
132 | uid = md5(pattern) | |
|
134 | uid = md5_safe(pattern) | |
|
133 | 135 | settings_util.create_repo_rhodecode_setting( |
|
134 | 136 | repo=backend.repo, name=entry_key+uid, |
|
135 | 137 | value=entry_key, type_='unicode', cleanup=False) |
@@ -32,7 +32,9 b' fixture = Fixture()' | |||
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | def route_path(name, params=None, **kwargs): |
|
35 | import urllib.request
|
35 | import urllib.request | |
|
36 | import urllib.parse | |
|
37 | import urllib.error | |
|
36 | 38 | |
|
37 | 39 | base_url = { |
|
38 | 40 | 'edit_repo_maintenance': '/{repo_name}/settings/maintenance', |
@@ -23,7 +23,9 b' from rhodecode.tests.utils import permis' | |||
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | def route_path(name, params=None, **kwargs): |
|
26 | import urllib.request
|
26 | import urllib.request | |
|
27 | import urllib.parse | |
|
28 | import urllib.error | |
|
27 | 29 | |
|
28 | 30 | base_url = { |
|
29 | 31 | 'edit_repo_perms': '/{repo_name}/settings/permissions' |
@@ -36,7 +36,9 b' from rhodecode.tests import (' | |||
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | def route_path(name, params=None, **kwargs): |
|
39 | import urllib.request
|
39 | import urllib.request | |
|
40 | import urllib.parse | |
|
41 | import urllib.error | |
|
40 | 42 | |
|
41 | 43 | base_url = { |
|
42 | 44 | 'repo_changelog': '/{repo_name}/changelog', |
@@ -119,21 +121,21 b' class TestPullrequestsView(object):' | |||
|
119 | 121 | def test_show_versions_of_pr(self, backend, csrf_token): |
|
120 | 122 | commits = [ |
|
121 | 123 | {'message': 'initial-commit', |
|
122 | 'added': [FileNode('test-file.txt', 'LINE1\n')]}, | |
|
124 | 'added': [FileNode(b'test-file.txt', b'LINE1\n')]}, | |
|
123 | 125 | |
|
124 | 126 | {'message': 'commit-1', |
|
125 | 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\n')]}, | |
|
127 | 'changed': [FileNode(b'test-file.txt', b'LINE1\nLINE2\n')]}, | |
|
126 | 128 | # Above is the initial version of PR that changes a single line |
|
127 | 129 | |
|
128 | 130 | # from now on we'll add 3x commit adding a nother line on each step |
|
129 | 131 | {'message': 'commit-2', |
|
130 | 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\n')]}, | |
|
132 | 'changed': [FileNode(b'test-file.txt', b'LINE1\nLINE2\nLINE3\n')]}, | |
|
131 | 133 | |
|
132 | 134 | {'message': 'commit-3', |
|
133 | 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\n')]}, | |
|
135 | 'changed': [FileNode(b'test-file.txt', b'LINE1\nLINE2\nLINE3\nLINE4\n')]}, | |
|
134 | 136 | |
|
135 | 137 | {'message': 'commit-4', |
|
136 | 'changed': [FileNode('test-file.txt', 'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]}, | |
|
138 | 'changed': [FileNode(b'test-file.txt', b'LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n')]}, | |
|
137 | 139 | ] |
|
138 | 140 | |
|
139 | 141 | commit_ids = backend.create_master_repo(commits) |
@@ -404,8 +406,8 b' class TestPullrequestsView(object):' | |||
|
404 | 406 | |
|
405 | 407 | response = self.app.post( |
|
406 | 408 | route_path('pullrequest_update', |
|
407 | repo_name=pull_request.target_repo.repo_name, | |
|
408 | pull_request_id=pull_request_id), | |
|
409 | repo_name=pull_request.target_repo.repo_name, | |
|
410 | pull_request_id=pull_request_id), | |
|
409 | 411 | params={ |
|
410 | 412 | 'edit_pull_request': 'true', |
|
411 | 413 | 'title': 'New title', |
@@ -413,21 +415,21 b' class TestPullrequestsView(object):' | |||
|
413 | 415 | 'csrf_token': csrf_token}) |
|
414 | 416 | |
|
415 | 417 | assert_session_flash( |
|
416 | response, | |
|
418 | response, 'Pull request title & description updated.', | |
|
417 | 419 | category='success') |
|
418 | 420 | |
|
419 | 421 | pull_request = PullRequest.get(pull_request_id) |
|
420 | 422 | assert pull_request.title == 'New title' |
|
421 | 423 | assert pull_request.description == 'New description' |
|
422 | 424 | |
|
423 | def test_edit_title_description(self, pr_util, csrf_token): | |
|
425 | def test_edit_title_description_special(self, pr_util, csrf_token): | |
|
424 | 426 | pull_request = pr_util.create_pull_request() |
|
425 | 427 | pull_request_id = pull_request.pull_request_id |
|
426 | 428 | |
|
427 | 429 | response = self.app.post( |
|
428 | 430 | route_path('pullrequest_update', |
|
429 | repo_name=pull_request.target_repo.repo_name, | |
|
430 | pull_request_id=pull_request_id), | |
|
431 | repo_name=pull_request.target_repo.repo_name, | |
|
432 | pull_request_id=pull_request_id), | |
|
431 | 433 | params={ |
|
432 | 434 | 'edit_pull_request': 'true', |
|
433 | 435 | 'title': 'New title {} {2} {foo}', |
@@ -435,7 +437,7 b' class TestPullrequestsView(object):' | |||
|
435 | 437 | 'csrf_token': csrf_token}) |
|
436 | 438 | |
|
437 | 439 | assert_session_flash( |
|
438 | response, | |
|
440 | response, 'Pull request title & description updated.', | |
|
439 | 441 | category='success') |
|
440 | 442 | |
|
441 | 443 | pull_request = PullRequest.get(pull_request_id) |
@@ -456,7 +458,7 b' class TestPullrequestsView(object):' | |||
|
456 | 458 | 'description': 'New description', |
|
457 | 459 | 'csrf_token': csrf_token}, status=200) |
|
458 | 460 | assert_session_flash( |
|
459 | response, | |
|
461 | response, 'Cannot update closed pull requests.', | |
|
460 | 462 | category='error') |
|
461 | 463 | |
|
462 | 464 | def test_update_invalid_source_reference(self, pr_util, csrf_token): |
@@ -483,7 +485,7 b' class TestPullrequestsView(object):' | |||
|
483 | 485 | from rhodecode.lib.vcs.backends.base import MergeFailureReason |
|
484 | 486 | pull_request = pr_util.create_pull_request( |
|
485 | 487 | approved=True, mergeable=True) |
|
486 | unicode_reference = | |
|
488 | unicode_reference = 'branch:invalid-branch:invalid-commit-id' | |
|
487 | 489 | pull_request.target_ref = unicode_reference |
|
488 | 490 | Session().add(pull_request) |
|
489 | 491 | Session().commit() |
@@ -687,7 +689,7 b' class TestPullrequestsView(object):' | |||
|
687 | 689 | ChangesetComment.comment_id == comment_id).first().text |
|
688 | 690 | assert test_text == text_form_db |
|
689 | 691 | |
|
690 | def test_comment_and_comment_edit(self, pr_util, csrf_token, xhr_header): | |
|
692 | def test_comment_and_comment_edit_special(self, pr_util, csrf_token, xhr_header): | |
|
691 | 693 | pull_request = pr_util.create_pull_request() |
|
692 | 694 | target_scm = pull_request.target_repo.scm_instance() |
|
693 | 695 | target_scm_name = target_scm.name |
@@ -867,13 +869,12 b' class TestPullrequestsView(object):' | |||
|
867 | 869 | # notifications properly with the new PR |
|
868 | 870 | commits = [ |
|
869 | 871 | {'message': 'ancestor', |
|
870 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, | |
|
872 | 'added': [FileNode(b'file_A', content=b'content_of_ancestor')]}, | |
|
871 | 873 | {'message': 'change', |
|
872 | 'added': [FileNode('file_a', content='content_of_change')]}, | |
|
874 | 'added': [FileNode(b'file_a', content=b'content_of_change')]}, | |
|
873 | 875 | {'message': 'change-child'}, |
|
874 | 876 | {'message': 'ancestor-child', 'parents': ['ancestor'], |
|
875 | 'added': [ | |
|
876 | FileNode('file_B', content='content_of_ancestor_child')]}, | |
|
877 | 'added': [ FileNode(b'file_B', content=b'content_of_ancestor_child')]}, | |
|
877 | 878 | {'message': 'ancestor-child-2'}, |
|
878 | 879 | ] |
|
879 | 880 | commit_ids = backend.create_master_repo(commits) |
@@ -935,13 +936,13 b' class TestPullrequestsView(object):' | |||
|
935 | 936 | def test_create_pull_request_stores_ancestor_commit_id(self, backend, csrf_token): |
|
936 | 937 | commits = [ |
|
937 | 938 | {'message': 'ancestor', |
|
938 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, | |
|
939 | 'added': [FileNode(b'file_A', content=b'content_of_ancestor')]}, | |
|
939 | 940 | {'message': 'change', |
|
940 | 'added': [FileNode('file_a', content='content_of_change')]}, | |
|
941 | 'added': [FileNode(b'file_a', content=b'content_of_change')]}, | |
|
941 | 942 | {'message': 'change-child'}, |
|
942 | 943 | {'message': 'ancestor-child', 'parents': ['ancestor'], |
|
943 | 944 | 'added': [ |
|
944 | FileNode('file_B', content='content_of_ancestor_child')]}, | |
|
945 | FileNode(b'file_B', content=b'content_of_ancestor_child')]}, | |
|
945 | 946 | {'message': 'ancestor-child-2'}, |
|
946 | 947 | ] |
|
947 | 948 | commit_ids = backend.create_master_repo(commits) |
@@ -1021,9 +1022,9 b' class TestPullrequestsView(object):' | |||
|
1021 | 1022 | actions = [log.action for log in user_logs] |
|
1022 | 1023 | pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request) |
|
1023 | 1024 | expected_actions = [ |
|
1024 |
|
1025 |
|
1026 |
|
1025 | 'repo.pull_request.close', | |
|
1026 | 'repo.pull_request.merge', | |
|
1027 | 'repo.pull_request.comment.create' | |
|
1027 | 1028 | ] |
|
1028 | 1029 | assert actions == expected_actions |
|
1029 | 1030 | |
@@ -1046,8 +1047,8 b' class TestPullrequestsView(object):' | |||
|
1046 | 1047 | |
|
1047 | 1048 | response = self.app.post( |
|
1048 | 1049 | route_path('pullrequest_merge', |
|
1049 | repo_name=pull_request.target_repo.scm_instance().name, | |
|
1050 | pull_request_id=pull_request.pull_request_id), | |
|
1050 | repo_name=pull_request.target_repo.scm_instance().name, | |
|
1051 | pull_request_id=pull_request.pull_request_id), | |
|
1051 | 1052 | params={'csrf_token': csrf_token}).follow() |
|
1052 | 1053 | |
|
1053 | 1054 | assert response.status_int == 200 |
@@ -1121,8 +1122,8 b' class TestPullrequestsView(object):' | |||
|
1121 | 1122 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) |
|
1122 | 1123 | |
|
1123 | 1124 | pull_request.revisions = [commit_ids['change']] |
|
1124 | pull_request.title = | |
|
1125 | pull_request.description = | |
|
1125 | pull_request.title = "Test" | |
|
1126 | pull_request.description = "Description" | |
|
1126 | 1127 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1127 | 1128 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1128 | 1129 | Session().add(pull_request) |
@@ -1175,8 +1176,8 b' class TestPullrequestsView(object):' | |||
|
1175 | 1176 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) |
|
1176 | 1177 | |
|
1177 | 1178 | pull_request.revisions = [commit_ids['change']] |
|
1178 | pull_request.title = | |
|
1179 | pull_request.description = | |
|
1179 | pull_request.title = "Test" | |
|
1180 | pull_request.description = "Description" | |
|
1180 | 1181 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1181 | 1182 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1182 | 1183 | |
@@ -1242,8 +1243,8 b' class TestPullrequestsView(object):' | |||
|
1242 | 1243 | commit_ids['feat-commit-1'], |
|
1243 | 1244 | commit_ids['feat-commit-2'] |
|
1244 | 1245 | ] |
|
1245 | pull_request.title = | |
|
1246 | pull_request.description = | |
|
1246 | pull_request.title = "Test" | |
|
1247 | pull_request.description = "Description" | |
|
1247 | 1248 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1248 | 1249 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1249 | 1250 | Session().add(pull_request) |
@@ -1292,8 +1293,8 b' class TestPullrequestsView(object):' | |||
|
1292 | 1293 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
1293 | 1294 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) |
|
1294 | 1295 | pull_request.revisions = [commit_ids['change']] |
|
1295 | pull_request.title = | |
|
1296 | pull_request.description = | |
|
1296 | pull_request.title = "Test" | |
|
1297 | pull_request.description = "Description" | |
|
1297 | 1298 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1298 | 1299 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1299 | 1300 | Session().add(pull_request) |
@@ -1340,8 +1341,8 b' class TestPullrequestsView(object):' | |||
|
1340 | 1341 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
1341 | 1342 | branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature']) |
|
1342 | 1343 | pull_request.revisions = [commit_ids['new-feature']] |
|
1343 | pull_request.title = | |
|
1344 | pull_request.description = | |
|
1344 | pull_request.title = "Test" | |
|
1345 | pull_request.description = "Description" | |
|
1345 | 1346 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1346 | 1347 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
1347 | 1348 | Session().add(pull_request) |
@@ -32,7 +32,9 b' fixture = Fixture()' | |||
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | def route_path(name, params=None, **kwargs): |
|
35 | import urllib.request | |
|
35 | import urllib.request | |
|
36 | import urllib.parse | |
|
37 | import urllib.error | |
|
36 | 38 | |
|
37 | 39 | base_url = { |
|
38 | 40 | 'edit_repo': '/{repo_name}/settings', |
@@ -19,7 +19,7 b'' | |||
|
19 | 19 | |
|
20 | 20 | import pytest |
|
21 | 21 | |
|
22 | from rhodecode.lib.utils | |
|
22 | from rhodecode.lib.str_utils import safe_str | |
|
23 | 23 | from rhodecode.model.db import Repository |
|
24 | 24 | from rhodecode.model.repo import RepoModel |
|
25 | 25 | from rhodecode.tests import ( |
@@ -31,7 +31,9 b' fixture = Fixture()' | |||
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | def route_path(name, params=None, **kwargs): |
|
34 | import urllib.request | |
|
34 | import urllib.request | |
|
35 | import urllib.parse | |
|
36 | import urllib.error | |
|
35 | 37 | |
|
36 | 38 | base_url = { |
|
37 | 39 | 'repo_summary_explicit': '/{repo_name}/summary', |
@@ -40,7 +40,9 b' fixture = Fixture()' | |||
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | def route_path(name, params=None, **kwargs): |
|
43 | import urllib.request | |
|
43 | import urllib.request | |
|
44 | import urllib.parse | |
|
45 | import urllib.error | |
|
44 | 46 | |
|
45 | 47 | base_url = { |
|
46 | 48 | 'repo_summary': '/{repo_name}', |
@@ -276,8 +278,8 b' class TestRepoLocation(object):' | |||
|
276 | 278 | response = self.app.get( |
|
277 | 279 | route_path('repo_summary', repo_name=safe_str(repo_name)), status=302) |
|
278 | 280 | |
|
279 | msg = 'The repository ` | |
|
280 | 'Please check if it exist, or is not damaged.' | |
|
281 | msg = f'The repository `{repo_name}` cannot be loaded in filesystem. ' \ | |
|
282 | f'Please check if it exist, or is not damaged.' | |
|
281 | 283 | assert_session_flash(response, msg) |
|
282 | 284 | |
|
283 | 285 | @pytest.mark.parametrize("suffix", [u'', u'Δ ΔΕ'], ids=['', 'non-ascii']) |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -23,7 +22,9 b' from rhodecode.model.db import Repositor' | |||
|
23 | 22 | |
|
24 | 23 | |
|
25 | 24 | def route_path(name, params=None, **kwargs): |
|
26 | import urllib.request | |
|
25 | import urllib.request | |
|
26 | import urllib.parse | |
|
27 | import urllib.error | |
|
27 | 28 | |
|
28 | 29 | base_url = { |
|
29 | 30 | 'tags_home': '/{repo_name}/tags', |
@@ -37,7 +37,9 b' fixture = Fixture()' | |||
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | def route_path(name, params=None, **kwargs): |
|
40 | import urllib.request | |
|
40 | import urllib.request | |
|
41 | import urllib.parse | |
|
42 | import urllib.error | |
|
41 | 43 | |
|
42 | 44 | base_url = { |
|
43 | 45 | 'repo_summary': '/{repo_name}', |
@@ -27,7 +27,9 b' from rhodecode.tests.utils import Assert' | |||
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | def route_path(name, params=None, **kwargs): |
|
30 | import urllib.request | |
|
30 | import urllib.request | |
|
31 | import urllib.parse | |
|
32 | import urllib.error | |
|
31 | 33 | |
|
32 | 34 | base_url = { |
|
33 | 35 | 'edit_repo': '/{repo_name}/settings', |
@@ -179,6 +179,7 b' class TestSearchController(TestControlle' | |||
|
179 | 179 | def test_filters_are_not_applied_for_admin_user(self): |
|
180 | 180 | self.log_user() |
|
181 | 181 | with mock.patch('whoosh.searching.Searcher.search') as search_mock: |
|
182 | ||
|
182 | 183 | self.app.get(route_path('search'), |
|
183 | 184 | {'q': 'test query', 'type': 'commit'}) |
|
184 | 185 | assert search_mock.call_count == 1 |
@@ -35,7 +35,7 b' def dummy_conf_file(tmpdir):' | |||
|
35 | 35 | conf.set('app:main', 'ssh.executable.svn', '/usr/bin/svnserve') |
|
36 | 36 | |
|
37 | 37 | f_path = os.path.join(str(tmpdir), 'ssh_wrapper_test.ini') |
|
38 | with open(f_path, 'w | |
|
38 | with open(f_path, 'wt') as f: | |
|
39 | 39 | conf.write(f) |
|
40 | 40 | |
|
41 | 41 | return os.path.join(f_path) |
@@ -18,7 +18,6 b'' | |||
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | import json | |
|
22 | 21 | import os |
|
23 | 22 | |
|
24 | 23 | import mock |
@@ -26,7 +25,7 b' import pytest' | |||
|
26 | 25 | |
|
27 | 26 | from rhodecode.apps.ssh_support.lib.backends.git import GitServer |
|
28 | 27 | from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user |
|
29 | ||
|
28 | from rhodecode.lib.ext_json import json | |
|
30 | 29 | |
|
31 | 30 | class GitServerCreator(object): |
|
32 | 31 | root = '/tmp/repo/path/' |
@@ -39,7 +39,7 b' class TestModDavSvnConfig(object):' | |||
|
39 | 39 | def get_repo_group_mocks(cls, count=1): |
|
40 | 40 | repo_groups = [] |
|
41 | 41 | for num in range(0, count): |
|
42 | full_path = | |
|
42 | full_path = f'/path/to/RepâGrâúp-°¡ {num}' | |
|
43 | 43 | repo_group_mock = mock.MagicMock() |
|
44 | 44 | repo_group_mock.full_path = full_path |
|
45 | 45 | repo_group_mock.full_path_splitted = full_path.split('/') |
@@ -78,7 +78,7 b' class TestModDavSvnConfig(object):' | |||
|
78 | 78 | def test_render_mod_dav_svn_config_with_alternative_template(self, tmpdir): |
|
79 | 79 | repo_groups = self.get_repo_group_mocks(count=10) |
|
80 | 80 | test_file_path = os.path.join(str(tmpdir), 'example.mako') |
|
81 | with open(test_file_path, 'w | |
|
81 | with open(test_file_path, 'wt') as f: | |
|
82 | 82 | f.write('TEST_EXAMPLE\n') |
|
83 | 83 | |
|
84 | 84 | generated_config = utils._render_mod_dav_svn_config( |
@@ -107,11 +107,11 b' class TestModDavSvnConfig(object):' | |||
|
107 | 107 | |
|
108 | 108 | # Assert that correct configuration directive is present. |
|
109 | 109 | if list_parent_path: |
|
110 | assert not re.search('SVNListParentPath\s+Off', generated_config) | |
|
111 | assert re.search('SVNListParentPath\s+On', generated_config) | |
|
110 | assert not re.search(r'SVNListParentPath\s+Off', generated_config) | |
|
111 | assert re.search(r'SVNListParentPath\s+On', generated_config) | |
|
112 | 112 | else: |
|
113 | assert re.search('SVNListParentPath\s+Off', generated_config) | |
|
114 | assert not re.search('SVNListParentPath\s+On', generated_config) | |
|
113 | assert re.search(r'SVNListParentPath\s+Off', generated_config) | |
|
114 | assert not re.search(r'SVNListParentPath\s+On', generated_config) | |
|
115 | 115 | |
|
116 | 116 | if use_ssl: |
|
117 | 117 | assert 'RequestHeader edit Destination ^https: http: early' \ |
@@ -29,7 +29,9 b' fixture = Fixture()' | |||
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | def route_path(name, params=None, **kwargs): |
|
32 | import urllib.request | |
|
32 | import urllib.request | |
|
33 | import urllib.parse | |
|
34 | import urllib.error | |
|
33 | 35 | from rhodecode.apps._base import ADMIN_PREFIX |
|
34 | 36 | |
|
35 | 37 | base_url = { |
@@ -23,7 +23,9 b' from rhodecode.tests.utils import permis' | |||
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | def route_path(name, params=None, **kwargs): |
|
26 | import urllib.request | |
|
26 | import urllib.request | |
|
27 | import urllib.parse | |
|
28 | import urllib.error | |
|
27 | 29 | from rhodecode.apps._base import ADMIN_PREFIX |
|
28 | 30 | |
|
29 | 31 | base_url = { |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -214,11 +213,13 b' def assert_session_flash(response, msg=N' | |||
|
214 | 213 | msg = f'msg `{no_}` found in session flash.' |
|
215 | 214 | pytest.fail(safe_str(msg)) |
|
216 | 215 | else: |
|
216 | ||
|
217 | 217 | if msg not in message_text: |
|
218 | 218 | fail_msg = f'msg `{msg}` not found in ' \ |
|
219 | 219 | f'session flash: got `{message_text}` (type:{type(message_text)}) instead' |
|
220 | 220 | |
|
221 | 221 | pytest.fail(safe_str(fail_msg)) |
|
222 | ||
|
222 | 223 | if category: |
|
223 | 224 | assert category == message.category |
|
224 | 225 |
@@ -1,4 +1,4 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
@@ -118,7 +118,7 b' class TestSanitizeVcsSettings(object):' | |||
|
118 | 118 | _string_funcs = [ |
|
119 | 119 | ('vcs.svn.compatible_version', ''), |
|
120 | 120 | ('vcs.hooks.protocol', 'http'), |
|
121 | ('vcs.hooks.host', ' | |
|
121 | ('vcs.hooks.host', '*'), | |
|
122 | 122 | ('vcs.scm_app_implementation', 'http'), |
|
123 | 123 | ('vcs.server', ''), |
|
124 | 124 | ('vcs.server.protocol', 'http'), |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -20,13 +19,13 b'' | |||
|
20 | 19 | |
|
21 | 20 | from subprocess import Popen, PIPE |
|
22 | 21 | import os |
|
23 | import shutil | |
|
24 | 22 | import sys |
|
25 | 23 | import tempfile |
|
26 | 24 | |
|
27 | 25 | import pytest |
|
28 | 26 | from sqlalchemy.engine import url |
|
29 | 27 | |
|
28 | from rhodecode.lib.str_utils import safe_str, safe_bytes | |
|
30 | 29 | from rhodecode.tests.fixture import TestINI |
|
31 | 30 | |
|
32 | 31 | |
@@ -145,17 +144,19 b' class DBBackend(object):' | |||
|
145 | 144 | _env.update(env) |
|
146 | 145 | self.p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, env=_env) |
|
147 | 146 | self.stdout, self.stderr = self.p.communicate() |
|
148 | sys.stdout.write('COMMAND:'+command+'\n') | |
|
149 | sys.stdout.write( | |
|
147 | stdout_str = safe_str(self.stdout) | |
|
148 | sys.stdout.write(f'COMMAND:{command}\n') | |
|
149 | sys.stdout.write(stdout_str) | |
|
150 | 150 | return self.stdout, self.stderr |
|
151 | 151 | |
|
152 | 152 | def assert_returncode_success(self): |
|
153 | from rich import print as pprint | |
|
153 | 154 | if not self.p.returncode == 0: |
|
154 | print(self.stderr) | |
|
155 | raise AssertionError('non 0 retcode:{ | |
|
155 | pprint(safe_str(self.stderr)) | |
|
156 | raise AssertionError(f'non 0 retcode:{self.p.returncode}') | |
|
156 | 157 | |
|
157 | 158 | def assert_correct_output(self, stdout, version): |
|
158 | assert 'UPGRADE FOR STEP | |
|
159 | assert b'UPGRADE FOR STEP %b COMPLETED' % safe_bytes(version) in stdout | |
|
159 | 160 | |
|
160 | 161 | def setup_rhodecode_db(self, ini_params=None, env=None): |
|
161 | 162 | if not ini_params: |
@@ -233,11 +234,11 b' class SQLiteDBBackend(DBBackend):' | |||
|
233 | 234 | def import_dump(self, dumpname): |
|
234 | 235 | dump = os.path.join(self.fixture_store, dumpname) |
|
235 | 236 | target = os.path.join(self._basetemp, '{0.db_name}.sqlite'.format(self)) |
|
236 | return self.execute('cp -v {} {}' | |
|
237 | return self.execute(f'cp -v {dump} {target}') | |
|
237 | 238 | |
|
238 | 239 | def teardown_db(self): |
|
239 | return self.execute("rm -rf {}.sqlite".format( | |
|
240 | os.path.join(self._basetemp, self.db_name))) | |
|
240 | target_db = os.path.join(self._basetemp, self.db_name) | |
|
241 | return self.execute(f"rm -rf {target_db}.sqlite") | |
|
241 | 242 | |
|
242 | 243 | |
|
243 | 244 | class MySQLDBBackend(DBBackend): |
@@ -273,21 +274,15 b' class PostgresDBBackend(DBBackend):' | |||
|
273 | 274 | def setup_db(self): |
|
274 | 275 | # dump schema for tests |
|
275 | 276 | # pg_dump -U postgres -h localhost $TEST_DB_NAME |
|
276 | self._db_url = [{'app:main': { | |
|
277 | 'sqlalchemy.db1.url': | |
|
278 | self.connection_string}}] | |
|
279 | return self.execute("PGPASSWORD={} psql -U {} -h localhost " | |
|
280 | "-c 'create database '{}';'".format( | |
|
281 | self.password, self.user, self.db_name)) | |
|
277 | self._db_url = [{'app:main': {'sqlalchemy.db1.url': self.connection_string}}] | |
|
278 | cmd = f"PGPASSWORD={self.password} psql -U {self.user} -h localhost -c 'create database '{self.db_name}';'" | |
|
279 | return self.execute(cmd) | |
|
282 | 280 | |
|
283 | 281 | def teardown_db(self): |
|
284 | return self.execute("PGPASSWORD={} psql -U {} -h localhost " | |
|
285 | "-c 'drop database if exists '{}';'".format( | |
|
286 | self.password, self.user, self.db_name)) | |
|
282 | cmd = f"PGPASSWORD={self.password} psql -U {self.user} -h localhost -c 'drop database if exists '{self.db_name}';'" | |
|
283 | return self.execute(cmd) | |
|
287 | 284 | |
|
288 | 285 | def import_dump(self, dumpname): |
|
289 | 286 | dump = os.path.join(self.fixture_store, dumpname) |
|
290 | return self.execute( | |
|
291 | "PGPASSWORD={} psql -U {} -h localhost -d {} -1 " | |
|
292 | "-f {}".format( | |
|
293 | self.password, self.user, self.db_name, dump)) | |
|
287 | cmd = f"PGPASSWORD={self.password} psql -U {self.user} -h localhost -d {self.db_name} -1 -f {dump}" | |
|
288 | return self.execute(cmd) |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -148,7 +147,7 b' class Fixture(object):' | |||
|
148 | 147 | plugin = self._get_plugin() |
|
149 | 148 | plugin.create_or_update_setting('auth_restriction', auth_restriction) |
|
150 | 149 | Session().commit() |
|
151 | SettingsModel().invalidate_settings_cache() | |
|
150 | SettingsModel().invalidate_settings_cache(hard=True) | |
|
152 | 151 | |
|
153 | 152 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
154 | 153 | |
@@ -156,7 +155,7 b' class Fixture(object):' | |||
|
156 | 155 | plugin.create_or_update_setting( |
|
157 | 156 | 'auth_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE) |
|
158 | 157 | Session().commit() |
|
159 | SettingsModel().invalidate_settings_cache() | |
|
158 | SettingsModel().invalidate_settings_cache(hard=True) | |
|
160 | 159 | |
|
161 | 160 | return context() |
|
162 | 161 | |
@@ -181,14 +180,14 b' class Fixture(object):' | |||
|
181 | 180 | plugin = self._get_plugin() |
|
182 | 181 | plugin.create_or_update_setting('scope_restriction', scope_restriction) |
|
183 | 182 | Session().commit() |
|
184 | SettingsModel().invalidate_settings_cache() | |
|
183 | SettingsModel().invalidate_settings_cache(hard=True) | |
|
185 | 184 | |
|
186 | 185 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
187 | 186 | plugin = self._get_plugin() |
|
188 | 187 | plugin.create_or_update_setting( |
|
189 | 188 | 'scope_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL) |
|
190 | 189 | Session().commit() |
|
191 | SettingsModel().invalidate_settings_cache() | |
|
190 | SettingsModel().invalidate_settings_cache(hard=True) | |
|
192 | 191 | |
|
193 | 192 | return context() |
|
194 | 193 | |
@@ -399,7 +398,7 b' class Fixture(object):' | |||
|
399 | 398 | 'gist_type': GistModel.cls.GIST_PUBLIC, |
|
400 | 399 | 'lifetime': -1, |
|
401 | 400 | 'acl_level': Gist.ACL_LEVEL_PUBLIC, |
|
402 | 'gist_mapping': {'filename1.txt': {'content': 'hello world'},} | |
|
401 | 'gist_mapping': {b'filename1.txt': {'content': b'hello world'},} | |
|
403 | 402 | } |
|
404 | 403 | form_data.update(kwargs) |
|
405 | 404 | gist = GistModel().create( |
@@ -420,7 +419,7 b' class Fixture(object):' | |||
|
420 | 419 | Session().commit() |
|
421 | 420 | |
|
422 | 421 | def load_resource(self, resource_name, strip=False): |
|
423 | with open(os.path.join(FIXTURES, resource_name)) as f: | |
|
422 | with open(os.path.join(FIXTURES, resource_name), 'rb') as f: | |
|
424 | 423 | source = f.read() |
|
425 | 424 | if strip: |
|
426 | 425 | source = source.strip() |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -30,7 +29,7 b' def vcsserver(request, vcsserver_port, v' | |||
|
30 | 29 | """ |
|
31 | 30 | Session scope VCSServer. |
|
32 | 31 | |
|
33 | Tests wich need the VCSServer have to rely on this fixture in order | |
|
32 | Tests which need the VCSServer have to rely on this fixture in order | |
|
34 | 33 | to ensure it will be running. |
|
35 | 34 | |
|
36 | 35 | For specific needs, the fixture vcsserver_factory can be used. It allows to |
@@ -58,7 +57,7 b' def vcsserver_factory(tmpdir_factory):' | |||
|
58 | 57 | """ |
|
59 | 58 | |
|
60 | 59 | def factory(request, overrides=(), vcsserver_port=None, |
|
61 | log_file=None): | |
|
60 | log_file=None, workers='2'): | |
|
62 | 61 | |
|
63 | 62 | if vcsserver_port is None: |
|
64 | 63 | vcsserver_port = get_available_port() |
@@ -74,7 +73,7 b' def vcsserver_factory(tmpdir_factory):' | |||
|
74 | 73 | basetemp=tmpdir_factory.getbasetemp().strpath, |
|
75 | 74 | prefix='test_vcs_') |
|
76 | 75 | |
|
77 | server = RcVCSServer(config_file, log_file) | |
|
76 | server = RcVCSServer(config_file, log_file, workers) | |
|
78 | 77 | server.start() |
|
79 | 78 | |
|
80 | 79 | @request.addfinalizer |
@@ -100,7 +99,8 b' def ini_config(request, tmpdir_factory, ' | |||
|
100 | 99 | overrides = [ |
|
101 | 100 | {'server:main': {'port': rcserver_port}}, |
|
102 | 101 | {'app:main': { |
|
103 | 'vcs.server': 'localhost:%s' % vcsserver_port, | |
|
102 | 'cache_dir': '%(here)s/rc_data', | |
|
103 | 'vcs.server': f'localhost:{vcsserver_port}', | |
|
104 | 104 | # johbo: We will always start the VCSServer on our own based on the |
|
105 | 105 | # fixtures of the test cases. For the test run it must always be |
|
106 | 106 | # off in the INI file. |
@@ -109,7 +109,7 b' def ini_config(request, tmpdir_factory, ' | |||
|
109 | 109 | 'vcs.server.protocol': 'http', |
|
110 | 110 | 'vcs.scm_app_implementation': 'http', |
|
111 | 111 | 'vcs.hooks.protocol': 'http', |
|
112 | 'vcs.hooks.host': ' | |
|
112 | 'vcs.hooks.host': '*', | |
|
113 | 113 | }}, |
|
114 | 114 | |
|
115 | 115 | {'handler_console': { |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -57,6 +56,7 b' from rhodecode.model.integration import ' | |||
|
57 | 56 | from rhodecode.integrations import integration_type_registry |
|
58 | 57 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
59 | 58 | from rhodecode.lib.utils import repo2db_mapper |
|
59 | from rhodecode.lib.str_utils import safe_bytes | |
|
60 | 60 | from rhodecode.lib.hash_utils import sha1_safe |
|
61 | 61 | from rhodecode.lib.vcs.backends import get_backend |
|
62 | 62 | from rhodecode.lib.vcs.nodes import FileNode |
@@ -540,7 +540,7 b' class Backend(object):' | |||
|
540 | 540 | |
|
541 | 541 | def create_repo( |
|
542 | 542 | self, commits=None, number_of_commits=0, heads=None, |
|
543 | name_suffix= | |
|
543 | name_suffix='', bare=False, **kwargs): | |
|
544 | 544 | """ |
|
545 | 545 | Create a repository and record it for later cleanup. |
|
546 | 546 | |
@@ -585,14 +585,14 b' class Backend(object):' | |||
|
585 | 585 | self._cleanup_repos.append(self.repo_name) |
|
586 | 586 | return repo |
|
587 | 587 | |
|
588 | def new_repo_name(self, suffix= | |
|
588 | def new_repo_name(self, suffix=''): | |
|
589 | 589 | self.repo_name = self._next_repo_name() + suffix |
|
590 | 590 | self._cleanup_repos.append(self.repo_name) |
|
591 | 591 | return self.repo_name |
|
592 | 592 | |
|
593 | 593 | def _next_repo_name(self): |
|
594 | 594 | return u"%s_%s" % ( |
|
595 | self.invalid_repo_name.sub( | |
|
595 | self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos)) | |
|
596 | 596 | |
|
597 | 597 | def ensure_file(self, filename, content='Test content\n'): |
|
598 | 598 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
@@ -634,14 +634,98 b' class Backend(object):' | |||
|
634 | 634 | repo.set_refs(ref_name, refs[ref_name]) |
|
635 | 635 | |
|
636 | 636 | |
|
637 | def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo): | |
|
637 | class VcsBackend(object): | |
|
638 | """ | |
|
639 | Represents the test configuration for one supported vcs backend. | |
|
640 | """ | |
|
641 | ||
|
642 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') | |
|
643 | ||
|
644 | def __init__(self, alias, repo_path, test_name, test_repo_container): | |
|
645 | self.alias = alias | |
|
646 | self._repo_path = repo_path | |
|
647 | self._cleanup_repos = [] | |
|
648 | self._test_name = test_name | |
|
649 | self._test_repo_container = test_repo_container | |
|
650 | ||
|
651 | def __getitem__(self, key): | |
|
652 | return self._test_repo_container(key, self.alias).scm_instance() | |
|
653 | ||
|
654 | def __repr__(self): | |
|
655 | return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})' | |
|
656 | ||
|
657 | @property | |
|
658 | def repo(self): | |
|
659 | """ | |
|
660 | Returns the "current" repository. This is the vcs_test repo of the last | |
|
661 | repo which has been created. | |
|
662 | """ | |
|
663 | Repository = get_backend(self.alias) | |
|
664 | return Repository(self._repo_path) | |
|
665 | ||
|
666 | @property | |
|
667 | def backend(self): | |
|
668 | """ | |
|
669 | Returns the backend implementation class. | |
|
670 | """ | |
|
671 | return get_backend(self.alias) | |
|
672 | ||
|
673 | def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None, | |
|
674 | bare=False): | |
|
675 | repo_name = self._next_repo_name() | |
|
676 | self._repo_path = get_new_dir(repo_name) | |
|
677 | repo_class = get_backend(self.alias) | |
|
678 | src_url = None | |
|
679 | if _clone_repo: | |
|
680 | src_url = _clone_repo.path | |
|
681 | repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare) | |
|
682 | self._cleanup_repos.append(repo) | |
|
683 | ||
|
684 | commits = commits or [ | |
|
685 | {'message': 'Commit %s of %s' % (x, repo_name)} | |
|
686 | for x in range(number_of_commits)] | |
|
687 | _add_commits_to_repo(repo, commits) | |
|
688 | return repo | |
|
689 | ||
|
690 | def clone_repo(self, repo): | |
|
691 | return self.create_repo(_clone_repo=repo) | |
|
692 | ||
|
693 | def cleanup(self): | |
|
694 | for repo in self._cleanup_repos: | |
|
695 | shutil.rmtree(repo.path) | |
|
696 | ||
|
697 | def new_repo_path(self): | |
|
698 | repo_name = self._next_repo_name() | |
|
699 | self._repo_path = get_new_dir(repo_name) | |
|
700 | return self._repo_path | |
|
701 | ||
|
702 | def _next_repo_name(self): | |
|
703 | ||
|
704 | return "{}_{}".format( | |
|
705 | self.invalid_repo_name.sub('_', self._test_name), | |
|
706 | len(self._cleanup_repos) | |
|
707 | ) | |
|
708 | ||
|
709 | def add_file(self, repo, filename, content='Test content\n'): | |
|
710 | imc = repo.in_memory_commit | |
|
711 | imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content))) | |
|
712 | imc.commit( | |
|
713 | message='Automatic commit from vcsbackend fixture', | |
|
714 | author='Automatic <automatic@rhodecode.com>') | |
|
715 | ||
|
716 | def ensure_file(self, filename, content='Test content\n'): | |
|
717 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" | |
|
718 | self.add_file(self.repo, filename, content) | |
|
719 | ||
|
720 | ||
|
721 | def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend: | |
|
638 | 722 | if backend_alias not in request.config.getoption('--backends'): |
|
639 | 723 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
640 | 724 | |
|
641 | 725 | utils.check_xfail_backends(request.node, backend_alias) |
|
642 | 726 | utils.check_skip_backends(request.node, backend_alias) |
|
643 | 727 | |
|
644 | repo_name = 'vcs_test_ | |
|
728 | repo_name = f'vcs_test_{backend_alias}' | |
|
645 | 729 | repo_path = os.path.join(tests_tmp_path, repo_name) |
|
646 | 730 | backend = VcsBackend( |
|
647 | 731 | alias=backend_alias, |
@@ -691,85 +775,6 b' def vcsbackend_stub(vcsbackend_git):' | |||
|
691 | 775 | return vcsbackend_git |
|
692 | 776 | |
|
693 | 777 | |
|
694 | class VcsBackend(object): | |
|
695 | """ | |
|
696 | Represents the test configuration for one supported vcs backend. | |
|
697 | """ | |
|
698 | ||
|
699 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') | |
|
700 | ||
|
701 | def __init__(self, alias, repo_path, test_name, test_repo_container): | |
|
702 | self.alias = alias | |
|
703 | self._repo_path = repo_path | |
|
704 | self._cleanup_repos = [] | |
|
705 | self._test_name = test_name | |
|
706 | self._test_repo_container = test_repo_container | |
|
707 | ||
|
708 | def __getitem__(self, key): | |
|
709 | return self._test_repo_container(key, self.alias).scm_instance() | |
|
710 | ||
|
711 | @property | |
|
712 | def repo(self): | |
|
713 | """ | |
|
714 | Returns the "current" repository. This is the vcs_test repo of the last | |
|
715 | repo which has been created. | |
|
716 | """ | |
|
717 | Repository = get_backend(self.alias) | |
|
718 | return Repository(self._repo_path) | |
|
719 | ||
|
720 | @property | |
|
721 | def backend(self): | |
|
722 | """ | |
|
723 | Returns the backend implementation class. | |
|
724 | """ | |
|
725 | return get_backend(self.alias) | |
|
726 | ||
|
727 | def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None, | |
|
728 | bare=False): | |
|
729 | repo_name = self._next_repo_name() | |
|
730 | self._repo_path = get_new_dir(repo_name) | |
|
731 | repo_class = get_backend(self.alias) | |
|
732 | src_url = None | |
|
733 | if _clone_repo: | |
|
734 | src_url = _clone_repo.path | |
|
735 | repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare) | |
|
736 | self._cleanup_repos.append(repo) | |
|
737 | ||
|
738 | commits = commits or [ | |
|
739 | {'message': 'Commit %s of %s' % (x, repo_name)} | |
|
740 | for x in range(number_of_commits)] | |
|
741 | _add_commits_to_repo(repo, commits) | |
|
742 | return repo | |
|
743 | ||
|
744 | def clone_repo(self, repo): | |
|
745 | return self.create_repo(_clone_repo=repo) | |
|
746 | ||
|
747 | def cleanup(self): | |
|
748 | for repo in self._cleanup_repos: | |
|
749 | shutil.rmtree(repo.path) | |
|
750 | ||
|
751 | def new_repo_path(self): | |
|
752 | repo_name = self._next_repo_name() | |
|
753 | self._repo_path = get_new_dir(repo_name) | |
|
754 | return self._repo_path | |
|
755 | ||
|
756 | def _next_repo_name(self): | |
|
757 | return "%s_%s" % ( | |
|
758 | self.invalid_repo_name.sub('_', self._test_name), | |
|
759 | len(self._cleanup_repos)) | |
|
760 | ||
|
761 | def add_file(self, repo, filename, content='Test content\n'): | |
|
762 | imc = repo.in_memory_commit | |
|
763 | imc.add(FileNode(filename, content=content)) | |
|
764 | imc.commit( | |
|
765 | message=u'Automatic commit from vcsbackend fixture', | |
|
766 | author=u'Automatic <automatic@rhodecode.com>') | |
|
767 | ||
|
768 | def ensure_file(self, filename, content='Test content\n'): | |
|
769 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" | |
|
770 | self.add_file(self.repo, filename, content) | |
|
771 | ||
|
772 | ||
|
773 | 778 | def _add_commits_to_repo(vcs_repo, commits): |
|
774 | 779 | commit_ids = {} |
|
775 | 780 | if not commits: |
@@ -782,11 +787,11 b' def _add_commits_to_repo(vcs_repo, commi' | |||
|
782 | 787 | message = str(commit.get('message', 'Commit %s' % idx)) |
|
783 | 788 | |
|
784 | 789 | for node in commit.get('added', []): |
|
785 | imc.add(FileNode(node.path, content=node.content)) | |
|
790 | imc.add(FileNode(safe_bytes(node.path), content=node.content)) | |
|
786 | 791 | for node in commit.get('changed', []): |
|
787 | imc.change(FileNode(node.path, content=node.content)) | |
|
792 | imc.change(FileNode(safe_bytes(node.path), content=node.content)) | |
|
788 | 793 | for node in commit.get('removed', []): |
|
789 | imc.remove(FileNode(node.path)) | |
|
794 | imc.remove(FileNode(safe_bytes(node.path))) | |
|
790 | 795 | |
|
791 | 796 | parents = [ |
|
792 | 797 | vcs_repo.get_commit(commit_id=commit_ids[p]) |
@@ -794,7 +799,7 b' def _add_commits_to_repo(vcs_repo, commi' | |||
|
794 | 799 | |
|
795 | 800 | operations = ('added', 'changed', 'removed') |
|
796 | 801 | if not any((commit.get(o) for o in operations)): |
|
797 | imc.add(FileNode('file_% | |
|
802 | imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message))) | |
|
798 | 803 | |
|
799 | 804 | commit = imc.commit( |
|
800 | 805 | message=message, |
@@ -877,7 +882,7 b' class PRTestUtility(object):' | |||
|
877 | 882 | def create_pull_request( |
|
878 | 883 | self, commits=None, target_head=None, source_head=None, |
|
879 | 884 | revisions=None, approved=False, author=None, mergeable=False, |
|
880 | enable_notifications=True, name_suffix= | |
|
885 | enable_notifications=True, name_suffix='', reviewers=None, observers=None, | |
|
881 | 886 | title=u"Test", description=u"Description"): |
|
882 | 887 | self.set_mergeable(mergeable) |
|
883 | 888 | if not enable_notifications: |
@@ -1002,7 +1007,7 b' class PRTestUtility(object):' | |||
|
1002 | 1007 | return comment |
|
1003 | 1008 | |
|
1004 | 1009 | def create_inline_comment( |
|
1005 | self, linked_to=None, line_no= | |
|
1010 | self, linked_to=None, line_no='n1', file_path='file_1'): | |
|
1006 | 1011 | comment = CommentsModel().create( |
|
1007 | 1012 | text=u"Test comment", |
|
1008 | 1013 | repo=self.target_repository.repo_name, |
@@ -245,8 +245,8 b' index e34033e29fa9b3d3366b723beab129cee7' | |||
|
245 | 245 | + 'author': 'Joe Doe <joe.doe@example.com>', |
|
246 | 246 | + 'date': datetime.datetime(2010, 1, 1, 20), |
|
247 | 247 | + 'added': [ |
|
248 | + FileNode('foobar', content='foobar'), | |
|
249 | + FileNode('foobar2', content='foobar2'), | |
|
248 | + FileNode(b'foobar', content='foobar'), | |
|
249 | + FileNode(b'foobar2', content='foobar2'), | |
|
250 | 250 | + ], |
|
251 | 251 | + }, |
|
252 | 252 | + { |
@@ -254,10 +254,10 b' index e34033e29fa9b3d3366b723beab129cee7' | |||
|
254 | 254 | + 'author': 'Jane Doe <jane.doe@example.com>', |
|
255 | 255 | + 'date': datetime.datetime(2010, 1, 1, 21), |
|
256 | 256 | + 'added': [ |
|
257 | + FileNode('foobar3', content='foobar3'), | |
|
257 | + FileNode(b'foobar3', content='foobar3'), | |
|
258 | 258 | + ], |
|
259 | 259 | + 'changed': [ |
|
260 | + FileNode('foobar', 'FOOBAR'), | |
|
260 | + FileNode(b'foobar', 'FOOBAR'), | |
|
261 | 261 | + ], |
|
262 | 262 | + }, |
|
263 | 263 | + { |
@@ -265,9 +265,9 b' index e34033e29fa9b3d3366b723beab129cee7' | |||
|
265 | 265 | + 'author': 'Jane Doe <jane.doe@example.com>', |
|
266 | 266 | + 'date': datetime.datetime(2010, 1, 1, 22), |
|
267 | 267 | + 'changed': [ |
|
268 | + FileNode('foobar3', content='FOOBAR\nFOOBAR\nFOOBAR\n'), | |
|
268 | + FileNode(b'foobar3', content='FOOBAR\nFOOBAR\nFOOBAR\n'), | |
|
269 | 269 | + ], |
|
270 | + 'removed': [FileNode('foobar')], | |
|
270 | + 'removed': [FileNode(b'foobar')], | |
|
271 | 271 | + }, |
|
272 | 272 | + ] |
|
273 | 273 | + return commits |
@@ -1,4 +1,4 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
@@ -25,7 +25,9 b' from rhodecode.tests.fixture import Fixt' | |||
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | def route_path(name, params=None, **kwargs): |
|
28 | import urllib.request | |
|
28 | import urllib.request | |
|
29 | import urllib.parse | |
|
30 | import urllib.error | |
|
29 | 31 | from rhodecode.apps._base import ADMIN_PREFIX |
|
30 | 32 | |
|
31 | 33 | base_url = { |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -18,11 +17,10 b'' | |||
|
18 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 19 | |
|
21 | import base64 | |
|
22 | ||
|
23 | 20 | import mock |
|
24 | 21 | import pytest |
|
25 | 22 | |
|
23 | from rhodecode.lib.str_utils import base64_to_str | |
|
26 | 24 | from rhodecode.lib.utils2 import AttributeDict |
|
27 | 25 | from rhodecode.tests.utils import CustomTestApp |
|
28 | 26 | |
@@ -32,7 +30,7 b' from rhodecode.lib.middleware import sim' | |||
|
32 | 30 | from rhodecode.lib.middleware.https_fixup import HttpsFixup |
|
33 | 31 | from rhodecode.lib.middleware.utils import scm_app_http |
|
34 | 32 | from rhodecode.model.db import User, _hash_key |
|
35 | from rhodecode.model.meta import Session | |
|
33 | from rhodecode.model.meta import Session, cache as db_cache | |
|
36 | 34 | from rhodecode.tests import ( |
|
37 | 35 | HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS) |
|
38 | 36 | from rhodecode.tests.lib.middleware import mock_scm_app |
@@ -75,10 +73,13 b' class StubVCSController(simplevcs.Simple' | |||
|
75 | 73 | |
|
76 | 74 | @pytest.fixture() |
|
77 | 75 | def vcscontroller(baseapp, config_stub, request_stub): |
|
76 | from rhodecode.config.middleware import ce_auth_resources | |
|
77 | ||
|
78 | 78 | config_stub.testing_securitypolicy() |
|
79 | 79 | config_stub.include('rhodecode.authentication') |
|
80 | config_stub.include('rhodecode.authentication.plugins.auth_rhodecode') | |
|
81 | config_stub.include('rhodecode.authentication.plugins.auth_token') | |
|
80 | ||
|
81 | for resource in ce_auth_resources: | |
|
82 | config_stub.include(resource) | |
|
82 | 83 | |
|
83 | 84 | controller = StubVCSController( |
|
84 | 85 | baseapp.config.get_settings(), request_stub.registry) |
@@ -98,27 +99,45 b' def _remove_default_user_from_query_cach' | |||
|
98 | 99 | user = User.get_default_user(cache=True) |
|
99 | 100 | query = Session().query(User).filter(User.username == user.username) |
|
100 | 101 | query = query.options( |
|
101 | FromCache("sql_cache_short", "get_user_ | |
|
102 | query.invalidate() | |
|
102 | FromCache("sql_cache_short", f"get_user_{_hash_key(user.username)}")) | |
|
103 | ||
|
104 | db_cache.invalidate( | |
|
105 | query, {}, | |
|
106 | FromCache("sql_cache_short", f"get_user_{_hash_key(user.username)}")) | |
|
107 | ||
|
103 | 108 | Session().expire(user) |
|
104 | 109 | |
|
105 | 110 | |
|
106 | 111 | def test_handles_exceptions_during_permissions_checks( |
|
107 | vcscontroller, disable_anonymous_user): | |
|
108 | user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS) | |
|
109 | auth_password = base64.encodestring(user_and_pass).strip() | |
|
112 | vcscontroller, disable_anonymous_user, enable_auth_plugins, test_user_factory): | |
|
113 | ||
|
114 | test_password = 'qweqwe' | |
|
115 | test_user = test_user_factory(password=test_password, extern_type='headers', extern_name='headers') | |
|
116 | test_username = test_user.username | |
|
117 | ||
|
118 | enable_auth_plugins.enable([ | |
|
119 | 'egg:rhodecode-enterprise-ce#headers', | |
|
120 | 'egg:rhodecode-enterprise-ce#token', | |
|
121 | 'egg:rhodecode-enterprise-ce#rhodecode'], | |
|
122 | override={ | |
|
123 | 'egg:rhodecode-enterprise-ce#headers': {'auth_headers_header': 'REMOTE_USER'} | |
|
124 | }) | |
|
125 | ||
|
126 | user_and_pass = f'{test_username}:{test_password}' | |
|
127 | auth_password = base64_to_str(user_and_pass) | |
|
128 | ||
|
110 | 129 | extra_environ = { |
|
111 | 130 | 'AUTH_TYPE': 'Basic', |
|
112 | 'HTTP_AUTHORIZATION': 'Basic | |
|
113 | 'REMOTE_USER': | |
|
131 | 'HTTP_AUTHORIZATION': f'Basic {auth_password}', | |
|
132 | 'REMOTE_USER': test_username, | |
|
114 | 133 | } |
|
115 | 134 | |
|
116 | # Verify that things are hooked up correctly | |
|
135 | # Verify that things are hooked up correctly, we pass user with headers bound auth, and headers filled in | |
|
117 | 136 | vcscontroller.get('/', status=200, extra_environ=extra_environ) |
|
118 | 137 | |
|
119 | 138 | # Simulate trouble during permission checks |
|
120 | 139 | with mock.patch('rhodecode.model.db.User.get_by_username', |
|
121 | side_effect=Exception) as get_user: | |
|
140 | side_effect=Exception('permission_error_test')) as get_user: | |
|
122 | 141 | # Verify that a correct 500 is returned and check that the expected |
|
123 | 142 | # code path was hit. |
|
124 | 143 | vcscontroller.get('/', status=500, extra_environ=extra_environ) |
@@ -230,7 +249,7 b' class TestShadowRepoExposure(object):' | |||
|
230 | 249 | controller.is_shadow_repo = True |
|
231 | 250 | controller._action = 'pull' |
|
232 | 251 | controller._is_shadow_repo_dir = True |
|
233 | controller.stub_response_body = 'dummy body value' | |
|
252 | controller.stub_response_body = (b'dummy body value',) | |
|
234 | 253 | controller._get_default_cache_ttl = mock.Mock( |
|
235 | 254 | return_value=(False, 0)) |
|
236 | 255 | |
@@ -242,10 +261,10 b' class TestShadowRepoExposure(object):' | |||
|
242 | 261 | } |
|
243 | 262 | |
|
244 | 263 | response = controller(environ_stub, mock.Mock()) |
|
245 | response_body = ''.join(response) | |
|
264 | response_body = b''.join(response) | |
|
246 | 265 | |
|
247 | 266 | # Assert that we got the response from the wsgi app. |
|
248 | assert response_body == controller.stub_response_body | |
|
267 | assert response_body == b''.join(controller.stub_response_body) | |
|
249 | 268 | |
|
250 | 269 | def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub): |
|
251 | 270 | """ |
@@ -258,7 +277,7 b' class TestShadowRepoExposure(object):' | |||
|
258 | 277 | controller.is_shadow_repo = True |
|
259 | 278 | controller._action = 'pull' |
|
260 | 279 | controller._is_shadow_repo_dir = False |
|
261 | controller.stub_response_body = 'dummy body value' | |
|
280 | controller.stub_response_body = (b'dummy body value',) | |
|
262 | 281 | environ_stub = { |
|
263 | 282 | 'HTTP_HOST': 'test.example.com', |
|
264 | 283 | 'HTTP_ACCEPT': 'application/mercurial', |
@@ -267,10 +286,10 b' class TestShadowRepoExposure(object):' | |||
|
267 | 286 | } |
|
268 | 287 | |
|
269 | 288 | response = controller(environ_stub, mock.Mock()) |
|
270 | response_body = ''.join(response) | |
|
289 | response_body = b''.join(response) | |
|
271 | 290 | |
|
272 | 291 | # Assert that we got the response from the wsgi app. |
|
273 | assert '404 Not Found' in response_body | |
|
292 | assert b'404 Not Found' in response_body | |
|
274 | 293 | |
|
275 | 294 | def test_push_on_shadow_repo_raises(self, baseapp, request_stub): |
|
276 | 295 | """ |
@@ -281,7 +300,7 b' class TestShadowRepoExposure(object):' | |||
|
281 | 300 | controller._check_ssl = mock.Mock() |
|
282 | 301 | controller.is_shadow_repo = True |
|
283 | 302 | controller._action = 'push' |
|
284 | controller.stub_response_body = 'dummy body value' | |
|
303 | controller.stub_response_body = (b'dummy body value',) | |
|
285 | 304 | environ_stub = { |
|
286 | 305 | 'HTTP_HOST': 'test.example.com', |
|
287 | 306 | 'HTTP_ACCEPT': 'application/mercurial', |
@@ -290,11 +309,11 b' class TestShadowRepoExposure(object):' | |||
|
290 | 309 | } |
|
291 | 310 | |
|
292 | 311 | response = controller(environ_stub, mock.Mock()) |
|
293 | response_body = ''.join(response) | |
|
312 | response_body = b''.join(response) | |
|
294 | 313 | |
|
295 | 314 | assert response_body != controller.stub_response_body |
|
296 | 315 | # Assert that a 406 error is returned. |
|
297 | assert '406 Not Acceptable' in response_body | |
|
316 | assert b'406 Not Acceptable' in response_body | |
|
298 | 317 | |
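The stub_response_body and b''.join changes in these hunks reflect that a WSGI response body is an iterable of bytes under Python 3, so the tests join with b'' and compare against bytes literals. A minimal, self-contained sketch of that pattern (the names here are illustrative, not the controller's real API):

    # WSGI-style app returning an iterable of bytes
    def fake_app(environ, start_response):
        start_response('406 Not Acceptable', [('Content-Type', 'text/plain')])
        return (b'406 Not Acceptable',)

    body = b''.join(fake_app({}, lambda status, headers: None))
    assert b'406 Not Acceptable' in body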
|
299 | 318 | def test_set_repo_names_no_shadow(self, baseapp, request_stub): |
|
300 | 319 | """ |
@@ -1,4 +1,4 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
@@ -55,20 +55,20 b' def data():' | |||
|
55 | 55 | |
|
56 | 56 | def test_reuse_app_no_data(repeat, vcsserver_http_echo_app): |
|
57 | 57 | app = vcs_http_app(vcsserver_http_echo_app) |
|
58 | for x in range(repeat / 10): | |
|
58 | for x in range(repeat // 10): | |
|
59 | 59 | response = app.post('/') |
|
60 | 60 | assert response.status_code == 200 |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | def test_reuse_app_with_data(data, repeat, vcsserver_http_echo_app): |
|
64 | 64 | app = vcs_http_app(vcsserver_http_echo_app) |
|
65 | for x in range(repeat / 10): | |
|
65 | for x in range(repeat // 10): | |
|
66 | 66 | response = app.post('/', params=data) |
|
67 | 67 | assert response.status_code == 200 |
|
68 | 68 | |
|
69 | 69 | |
|
70 | 70 | def test_create_app_per_request_no_data(repeat, vcsserver_http_echo_app): |
|
71 | for x in range(repeat / 10): | |
|
71 | for x in range(repeat // 10): | |
|
72 | 72 | app = vcs_http_app(vcsserver_http_echo_app) |
|
73 | 73 | response = app.post('/') |
|
74 | 74 | assert response.status_code == 200 |
@@ -76,7 +76,7 b' def test_create_app_per_request_no_data(' | |||
|
76 | 76 | |
|
77 | 77 | def test_create_app_per_request_with_data( |
|
78 | 78 | data, repeat, vcsserver_http_echo_app): |
|
79 | for x in range(repeat / 10): | |
|
79 | for x in range(repeat // 10): | |
|
80 | 80 | app = vcs_http_app(vcsserver_http_echo_app) |
|
81 | 81 | response = app.post('/', params=data) |
|
82 | 82 | assert response.status_code == 200 |
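The repeat / 10 -> repeat // 10 changes above are required because true division returns a float in Python 3 and range() only accepts integers. A quick illustration:

    repeat = 100
    assert isinstance(repeat / 10, float)   # 10.0 - would break range()
    assert isinstance(repeat // 10, int)    # 10
    for x in range(repeat // 10):           # range(10.0) would raise TypeError
        pass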
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -82,7 +81,7 b' def test_remote_app_caller():' | |||
|
82 | 81 | ('a1', 'a2', 'a3', 'a4', None)) |
|
83 | 82 | # Note: RemoteAppCaller is expected to return a tuple like the |
|
84 | 83 | # following one |
|
85 | return (['content'], '200 OK', [('Content-Type', 'text/plain')]) | |
|
84 | return ([b'content'], '200 OK', [('Content-Type', 'text/plain')]) | |
|
86 | 85 | |
|
87 | 86 | wrapper_app = wsgi_app_caller_client.RemoteAppCaller( |
|
88 | 87 | RemoteAppCallerMock(), 'a1', 'a2', arg3='a3', arg4='a4') |
@@ -1,5 +1,4 b'' | |||
|
1 | 1 | import collections |
|
2 | # -*- coding: utf-8 -*- | |
|
3 | 2 | |
|
4 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
5 | 4 | # |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -19,13 +18,13 b'' | |||
|
19 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 19 | |
|
21 | 20 | import os |
|
22 | from hashlib import sha1 | |
|
23 | 21 | |
|
24 | 22 | import pytest |
|
25 | 23 | from mock import patch |
|
26 | 24 | |
|
27 | 25 | from rhodecode.lib import auth |
|
28 | from rhodecode.lib.utils | |
|
26 | from rhodecode.lib.str_utils import safe_bytes | |
|
27 | from rhodecode.lib.hash_utils import md5_safe, sha1 | |
|
29 | 28 | from rhodecode.model.auth_token import AuthTokenModel |
|
30 | 29 | from rhodecode.model.db import Session, User |
|
31 | 30 | from rhodecode.model.repo import RepoModel |
@@ -638,7 +637,7 b' def test_auth_user_get_cookie_store_for_' | |||
|
638 | 637 | expected_data = { |
|
639 | 638 | 'username': user.username, |
|
640 | 639 | 'user_id': user.user_id, |
|
641 | 'password': md5(user.password), | |
|
640 | 'password': md5_safe(user.password), | |
|
642 | 641 | 'is_authenticated': False |
|
643 | 642 | } |
|
644 | 643 | assert auth_user.get_cookie_store() == expected_data |
@@ -650,7 +649,7 b' def test_auth_user_get_cookie_store_for_' | |||
|
650 | 649 | expected_data = { |
|
651 | 650 | 'username': User.DEFAULT_USER, |
|
652 | 651 | 'user_id': default_user.user_id, |
|
653 | 'password': md5(default_user.password), | |
|
652 | 'password': md5_safe(default_user.password), | |
|
654 | 653 | 'is_authenticated': True |
|
655 | 654 | } |
|
656 | 655 | assert auth_user.get_cookie_store() == expected_data |
@@ -678,10 +677,10 b' def get_permissions(user, **kwargs):' | |||
|
678 | 677 | |
|
679 | 678 | class TestGenerateAuthToken(object): |
|
680 | 679 | def test_salt_is_used_when_specified(self): |
|
681 | salt = 'abcde' | |
|
680 | salt = b'abcde' | |
|
682 | 681 | user_name = 'test_user' |
|
683 | 682 | result = auth.generate_auth_token(user_name, salt) |
|
684 | expected_result = sha1(user_name + salt) | |
|
683 | expected_result = sha1(safe_bytes(user_name) + salt) | |
|
685 | 684 | assert result == expected_result |
|
686 | 685 | |
|
687 | 686 | def test_salt_is_geneated_when_not_specified(self): |
@@ -690,7 +689,8 b' class TestGenerateAuthToken(object):' | |||
|
690 | 689 | with patch.object(auth, 'os') as os_mock: |
|
691 | 690 | os_mock.urandom.return_value = random_salt |
|
692 | 691 | result = auth.generate_auth_token(user_name) |
|
693 | expected_result = sha1(user_name + random_salt).hexdigest() | |
|
692 | ||
|
693 | expected_result = sha1(safe_bytes(user_name) + random_salt) | |
|
694 | 694 | assert result == expected_result |
|
695 | 695 | |
|
696 | 696 |
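The safe_bytes changes in these tests exist because sha1 hashing rejects str input under Python 3, so the expected token is built from bytes before hashing. A rough sketch of the idea (safe_bytes is imported from rhodecode.lib.str_utils in the diff; the stand-in below is only an assumption about its behaviour):

    import hashlib

    def safe_bytes(value, encoding='utf-8'):
        # Illustrative stand-in: encode str, pass bytes through unchanged.
        return value.encode(encoding) if isinstance(value, str) else value

    user_name, salt = 'test_user', b'abcde'
    expected = hashlib.sha1(safe_bytes(user_name) + salt).hexdigest()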
@@ -1,4 +1,4 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
@@ -67,12 +67,12 b' class TestCaches(object):' | |||
|
67 | 67 | def test_cache_keygen(self, example_input, example_namespace): |
|
68 | 68 | def func_wrapped(): |
|
69 | 69 | return 1 |
|
70 | func = rc_cache.utils.key_generator(None, example_namespace, func_wrapped) | |
|
70 | func = rc_cache.utils.custom_key_generator(None, example_namespace, func_wrapped) | |
|
71 | 71 | key = func(*example_input) |
|
72 | 72 | assert key |
|
73 | 73 | |
|
74 | 74 | def test_store_value_in_cache(self): |
|
75 | cache_region = rc_cache.get_or_create_region('cache_perms') | |
|
75 | cache_region = rc_cache.get_or_create_region('cache_perms', 'test_cache') | |
|
76 | 76 | # make sure we empty the cache now |
|
77 | 77 | cache_region.delete_multi(cache_region.backend.list_keys()) |
|
78 | 78 | |
@@ -88,7 +88,7 b' class TestCaches(object):' | |||
|
88 | 88 | assert len(set(cache_region.backend.list_keys())) == 10 |
|
89 | 89 | |
|
90 | 90 | def test_store_and_get_value_from_region(self): |
|
91 | cache_region = rc_cache.get_or_create_region('cache_perms') | |
|
91 | cache_region = rc_cache.get_or_create_region('cache_perms', 'test_cache') | |
|
92 | 92 | # make sure we empty the cache now |
|
93 | 93 | for key in cache_region.backend.list_keys(): |
|
94 | 94 | cache_region.delete(key) |
@@ -1,4 +1,4 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
@@ -40,33 +40,34 b' class TestTokenizeString(object):' | |||
|
40 | 40 | def test_tokenize_as_python(self): |
|
41 | 41 | lexer = get_lexer_by_name('python') |
|
42 | 42 | tokens = list(tokenize_string(self.python_code, lexer)) |
|
43 | expected_tokens = [ | |
|
44 | ('w', '\n'), | |
|
45 | ('', ' '), | |
|
46 | ('kn', 'import'), | |
|
47 | ('', ' '), | |
|
48 | ('nn', 'this'), | |
|
49 | ('w', '\n'), | |
|
50 | ('w', '\n'), | |
|
51 | ('', ' '), | |
|
52 | ('n', 'var'), | |
|
53 | ('', ' '), | |
|
54 | ('o', '='), | |
|
55 | ('', ' '), | |
|
56 | ('mi', '6'), | |
|
57 | ('w', '\n'), | |
|
58 | ('', ' '), | |
|
59 | ('nb', 'print'), | |
|
60 | ('p', '('), | |
|
61 | ('s2', '"'), | |
|
62 | ('s2', 'this'), | |
|
63 | ('s2', '"'), | |
|
64 | ('p', ')'), | |
|
65 | ('w', '\n'), | |
|
66 | ('w', '\n'), | |
|
67 | ('', ' ') | |
|
68 | ] | |
|
43 | 69 | |
|
44 | assert tokens == | |
|
45 | ('', u'\n'), | |
|
46 | ('', u' '), | |
|
47 | ('kn', u'import'), | |
|
48 | ('', u' '), | |
|
49 | ('nn', u'this'), | |
|
50 | ('', u'\n'), | |
|
51 | ('', u'\n'), | |
|
52 | ('', u' '), | |
|
53 | ('n', u'var'), | |
|
54 | ('', u' '), | |
|
55 | ('o', u'='), | |
|
56 | ('', u' '), | |
|
57 | ('mi', u'6'), | |
|
58 | ('', u'\n'), | |
|
59 | ('', u' '), | |
|
60 | ('k', u'print'), | |
|
61 | ('p', u'('), | |
|
62 | ('s2', u'"'), | |
|
63 | ('s2', u'this'), | |
|
64 | ('s2', u'"'), | |
|
65 | ('p', u')'), | |
|
66 | ('', u'\n'), | |
|
67 | ('', u'\n'), | |
|
68 | ('', u' ') | |
|
69 | ] | |
|
70 | assert tokens == expected_tokens | |
|
70 | 71 | |
|
71 | 72 | def test_tokenize_as_text(self): |
|
72 | 73 | lexer = get_lexer_by_name('text') |
@@ -74,7 +75,7 b' class TestTokenizeString(object):' | |||
|
74 | 75 | |
|
75 | 76 | assert tokens == [ |
|
76 | 77 | ('', |
|
77 | u'\n import this\n\n var = 6\n print("this")\n\n ') |

78 | '\n import this\n\n var = 6\n print("this")\n\n ') | |
|
78 | 79 | ] |
|
79 | 80 | |
|
80 | 81 | |
@@ -86,9 +87,9 b' class TestSplitTokenStream(object):' | |||
|
86 | 87 | lines = list(split_token_stream(tokens, content)) |
|
87 | 88 | |
|
88 | 89 | assert lines == [ |
|
89 | [('type1', u'some')], |

90 | [('type1', u'text'), ('type2', u'more')], |

91 | [('type2', u'')], |

90 | [('type1', 'some')], | |
|
91 | [('type1', 'text'), ('type2', 'more')], | |
|
92 | [('type2', '')], | |
|
92 | 93 | ] |
|
93 | 94 | |
|
94 | 95 | def test_split_token_stream_single(self): |
@@ -126,7 +127,7 b' class TestSplitTokenStream(object):' | |||
|
126 | 127 | |
|
127 | 128 | def test_no_tokens_by_content(self): |
|
128 | 129 | tokens = [] |
|
129 | content = u'\ufeff' |

130 | content = '\ufeff' | |
|
130 | 131 | lines = list(split_token_stream(tokens, content)) |
|
131 | 132 | assert lines == [ |
|
132 | 133 | [('', content)], |
@@ -134,15 +135,15 b' class TestSplitTokenStream(object):' | |||
|
134 | 135 | |
|
135 | 136 | def test_no_tokens_by_valid_content(self): |
|
136 | 137 | from pygments.lexers.css import CssLexer |
|
137 | content = u'\ufeff table.dataTable' |

138 | content = '\ufeff table.dataTable' | |
|
138 | 139 | tokens = tokenize_string(content, CssLexer()) |
|
139 | 140 | |
|
140 | 141 | lines = list(split_token_stream(tokens, content)) |
|
141 | 142 | assert lines == [ |
|
142 | [('', u' '), |

143 | ('nt', u'table'), |

144 | ('p', u'.'), |

145 | ('nc', u'dataTable')], |

143 | [('w', ' '), | |
|
144 | ('nt', 'table'), | |
|
145 | ('p', '.'), | |
|
146 | ('nc', 'dataTable')], | |
|
146 | 147 | ] |
|
147 | 148 | |
|
148 | 149 | |
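
For orientation, the short class codes asserted above ('kn', 'nn', 'mi', 'w', ...) are pygments' standard short names for token types. A hedged sketch of a tokenize_string-style helper built directly on pygments (the real helper may differ in details such as error handling):

from pygments import lex
from pygments.lexers import get_lexer_by_name
from pygments.token import STANDARD_TYPES

def tokenize_string(content, lexer):
    # map full pygments token types to the short css-style codes used above
    for token_type, value in lex(content, lexer):
        yield STANDARD_TYPES.get(token_type, ''), value

tokens = list(tokenize_string('import this\n', get_lexer_by_name('python')))
# roughly: [('kn', 'import'), ('', ' '), ('nn', 'this'), ('w', '\n')]
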
@@ -248,59 +249,59 b' class TestRenderTokenStream(object):' | |||
|
248 | 249 | '', |
|
249 | 250 | ), |
|
250 | 251 | ( |
|
251 | [('', '', u'')], |

252 | [('', '', '')], | |
|
252 | 253 | '<span></span>', |
|
253 | 254 | ), |
|
254 | 255 | ( |
|
255 | [('', '', u'text')], |

256 | [('', '', 'text')], | |
|
256 | 257 | '<span>text</span>', |
|
257 | 258 | ), |
|
258 | 259 | ( |
|
259 | [('A', '', u'')], |

260 | [('A', '', '')], | |
|
260 | 261 | '<span class="A"></span>', |
|
261 | 262 | ), |
|
262 | 263 | ( |
|
263 | [('A', '', u'hello')], |

264 | [('A', '', 'hello')], | |
|
264 | 265 | '<span class="A">hello</span>', |
|
265 | 266 | ), |
|
266 | 267 | ( |
|
267 | [('A', '', u'hel'), ('A', '', u'lo')], |

268 | [('A', '', 'hel'), ('A', '', 'lo')], | |
|
268 | 269 | '<span class="A">hello</span>', |
|
269 | 270 | ), |
|
270 | 271 | ( |
|
271 | [('A', '', u'two\n'), ('A', '', u'lines')], |

272 | [('A', '', 'two\n'), ('A', '', 'lines')], | |
|
272 | 273 | '<span class="A">two\nlines</span>', |
|
273 | 274 | ), |
|
274 | 275 | ( |
|
275 | [('A', '', u'\nthree\n'), ('A', '', u'lines')], |

276 | [('A', '', '\nthree\n'), ('A', '', 'lines')], | |
|
276 | 277 | '<span class="A">\nthree\nlines</span>', |
|
277 | 278 | ), |
|
278 | 279 | ( |
|
279 | [('', '', u'\n'), ('A', '', u'line')], |

280 | [('', '', '\n'), ('A', '', 'line')], | |
|
280 | 281 | '<span>\n</span><span class="A">line</span>', |
|
281 | 282 | ), |
|
282 | 283 | ( |
|
283 | [('', 'ins', u'\n'), ('A', '', u'line')], |

284 | [('', 'ins', '\n'), ('A', '', 'line')], | |
|
284 | 285 | '<span><ins>\n</ins></span><span class="A">line</span>', |
|
285 | 286 | ), |
|
286 | 287 | ( |
|
287 | [('A', '', u'hel'), ('A', 'ins', u'lo')], |

288 | [('A', '', 'hel'), ('A', 'ins', 'lo')], | |
|
288 | 289 | '<span class="A">hel<ins>lo</ins></span>', |
|
289 | 290 | ), |
|
290 | 291 | ( |
|
291 | [('A', '', u'hel'), ('A', 'ins', u'l'), ('A', 'ins', u'o')], |

292 | [('A', '', 'hel'), ('A', 'ins', 'l'), ('A', 'ins', 'o')], | |
|
292 | 293 | '<span class="A">hel<ins>lo</ins></span>', |
|
293 | 294 | ), |
|
294 | 295 | ( |
|
295 | [('A', '', u'hel'), ('A', 'ins', u'l'), ('A', 'del', u'o')], |

296 | [('A', '', 'hel'), ('A', 'ins', 'l'), ('A', 'del', 'o')], | |
|
296 | 297 | '<span class="A">hel<ins>l</ins><del>o</del></span>', |
|
297 | 298 | ), |
|
298 | 299 | ( |
|
299 | [('A', '', u'hel'), ('B', '', u'lo')], |

300 | [('A', '', 'hel'), ('B', '', 'lo')], | |
|
300 | 301 | '<span class="A">hel</span><span class="B">lo</span>', |
|
301 | 302 | ), |
|
302 | 303 | ( |
|
303 | [('A', '', u'hel'), ('B', 'ins', u'lo')], |

304 | [('A', '', 'hel'), ('B', 'ins', 'lo')], | |
|
304 | 305 | '<span class="A">hel</span><span class="B"><ins>lo</ins></span>', |
|
305 | 306 | ), |
|
306 | 307 | ], ids=no_newline_id_generator) |
@@ -310,23 +311,23 b' class TestRenderTokenStream(object):' | |||
|
310 | 311 | |
|
311 | 312 | @pytest.mark.parametrize('tokenstream,output', [ |
|
312 | 313 | ( |
|
313 | [('A', u'hel'), ('A', u'lo')], |

314 | [('A', 'hel'), ('A', 'lo')], | |
|
314 | 315 | '<span class="A">hello</span>', |
|
315 | 316 | ), |
|
316 | 317 | ( |
|
317 | [('A', u'hel'), ('A', u'l'), ('A', u'o')], |

318 | [('A', 'hel'), ('A', 'l'), ('A', 'o')], | |
|
318 | 319 | '<span class="A">hello</span>', |
|
319 | 320 | ), |
|
320 | 321 | ( |
|
321 | [('A', u'hel'), ('A', u'l'), ('A', u'o')], |

322 | [('A', 'hel'), ('A', 'l'), ('A', 'o')], | |
|
322 | 323 | '<span class="A">hello</span>', |
|
323 | 324 | ), |
|
324 | 325 | ( |
|
325 | [('A', u'hel'), ('B', u'lo')], |

326 | [('A', 'hel'), ('B', 'lo')], | |
|
326 | 327 | '<span class="A">hel</span><span class="B">lo</span>', |
|
327 | 328 | ), |
|
328 | 329 | ( |
|
329 | [('A', u'hel'), ('B', u'lo')], |

330 | [('A', 'hel'), ('B', 'lo')], | |
|
330 | 331 | '<span class="A">hel</span><span class="B">lo</span>', |
|
331 | 332 | ), |
|
332 | 333 | ]) |
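
The parametrized cases above pin down the rendering contract: adjacent tokens that share a CSS class collapse into one <span>, and an 'ins'/'del' action wraps that part of the text. A simplified approximation of that behaviour (the real render_tokenstream also merges consecutive ins/del runs and handles more edge cases):

import itertools
from html import escape

def render_tokenstream(tokens):
    # tokens are (css_class, action, text) triples as in the first table above
    out = []
    for css_class, group in itertools.groupby(tokens, lambda t: t[0]):
        out.append(f'<span class="{css_class}">' if css_class else '<span>')
        for _cls, action, text in group:
            text = escape(text, quote=False)
            out.append(f'<{action}>{text}</{action}>' if action else text)
        out.append('</span>')
    return ''.join(out)

assert render_tokenstream([('A', '', 'hel'), ('A', 'ins', 'lo')]) == \
    '<span class="A">hel<ins>lo</ins></span>'
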
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -28,6 +27,7 b' from rhodecode.lib.diffs import (' | |||
|
28 | 27 | DiffProcessor, |
|
29 | 28 | NEW_FILENODE, DEL_FILENODE, MOD_FILENODE, RENAMED_FILENODE, |
|
30 | 29 | CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE) |
|
30 | from rhodecode.lib.str_utils import safe_bytes | |
|
31 | 31 | from rhodecode.lib.utils2 import AttributeDict |
|
32 | 32 | from rhodecode.lib.vcs.backends.git import GitCommit |
|
33 | 33 | from rhodecode.tests.fixture import Fixture, no_newline_id_generator |
@@ -38,134 +38,6 b' from rhodecode.lib.vcs.backends.svn.repo' | |||
|
38 | 38 | fixture = Fixture() |
|
39 | 39 | |
|
40 | 40 | |
|
41 | def test_diffprocessor_as_html_with_comments(): | |
|
42 | raw_diff = textwrap.dedent(''' | |
|
43 | diff --git a/setup.py b/setup.py | |
|
44 | index 5b36422..cfd698e 100755 | |
|
45 | --- a/setup.py | |
|
46 | +++ b/setup.py | |
|
47 | @@ -2,7 +2,7 @@ | |
|
48 | #!/usr/bin/python | |
|
49 | # Setup file for X | |
|
50 | # Copyright (C) No one | |
|
51 | - | |
|
52 | +x | |
|
53 | try: | |
|
54 | from setuptools import setup, Extension | |
|
55 | except ImportError: | |
|
56 | ''') | |
|
57 | diff = GitDiff(raw_diff) | |
|
58 | processor = DiffProcessor(diff) | |
|
59 | processor.prepare() | |
|
60 | ||
|
61 | # Note that the cell with the context in line 5 (in the html) has the | |
|
62 | # no-comment class, which will prevent the add comment icon to be displayed. | |
|
63 | expected_html = textwrap.dedent(''' | |
|
64 | <table class="code-difftable"> | |
|
65 | <tr class="line context"> | |
|
66 | <td class="add-comment-line"><span class="add-comment-content"></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td> | |
|
67 | <td class="lineno old">...</td> | |
|
68 | <td class="lineno new">...</td> | |
|
69 | <td class="code no-comment"> | |
|
70 | <pre>@@ -2,7 +2,7 @@ | |
|
71 | </pre> | |
|
72 | </td> | |
|
73 | </tr> | |
|
74 | <tr class="line unmod"> | |
|
75 | <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td> | |
|
76 | <td id="setuppy_o2" class="lineno old"><a href="#setuppy_o2" class="tooltip" | |
|
77 | title="Click to select line">2</a></td> | |
|
78 | <td id="setuppy_n2" class="lineno new"><a href="#setuppy_n2" class="tooltip" | |
|
79 | title="Click to select line">2</a></td> | |
|
80 | <td class="code"> | |
|
81 | <pre>#!/usr/bin/python | |
|
82 | </pre> | |
|
83 | </td> | |
|
84 | </tr> | |
|
85 | <tr class="line unmod"> | |
|
86 | <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td> | |
|
87 | <td id="setuppy_o3" class="lineno old"><a href="#setuppy_o3" class="tooltip" | |
|
88 | title="Click to select line">3</a></td> | |
|
89 | <td id="setuppy_n3" class="lineno new"><a href="#setuppy_n3" class="tooltip" | |
|
90 | title="Click to select line">3</a></td> | |
|
91 | <td class="code"> | |
|
92 | <pre># Setup file for X | |
|
93 | </pre> | |
|
94 | </td> | |
|
95 | </tr> | |
|
96 | <tr class="line unmod"> | |
|
97 | <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td> | |
|
98 | <td id="setuppy_o4" class="lineno old"><a href="#setuppy_o4" class="tooltip" | |
|
99 | title="Click to select line">4</a></td> | |
|
100 | <td id="setuppy_n4" class="lineno new"><a href="#setuppy_n4" class="tooltip" | |
|
101 | title="Click to select line">4</a></td> | |
|
102 | <td class="code"> | |
|
103 | <pre># Copyright (C) No one | |
|
104 | </pre> | |
|
105 | </td> | |
|
106 | </tr> | |
|
107 | <tr class="line del"> | |
|
108 | <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td> | |
|
109 | <td id="setuppy_o5" class="lineno old"><a href="#setuppy_o5" class="tooltip" | |
|
110 | title="Click to select line">5</a></td> | |
|
111 | <td class="lineno new"><a href="#setuppy_n" class="tooltip" | |
|
112 | title="Click to select line"></a></td> | |
|
113 | <td class="code"> | |
|
114 | <pre> | |
|
115 | </pre> | |
|
116 | </td> | |
|
117 | </tr> | |
|
118 | <tr class="line add"> | |
|
119 | <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td> | |
|
120 | <td class="lineno old"><a href="#setuppy_o" class="tooltip" | |
|
121 | title="Click to select line"></a></td> | |
|
122 | <td id="setuppy_n5" class="lineno new"><a href="#setuppy_n5" class="tooltip" | |
|
123 | title="Click to select line">5</a></td> | |
|
124 | <td class="code"> | |
|
125 | <pre><ins>x</ins> | |
|
126 | </pre> | |
|
127 | </td> | |
|
128 | </tr> | |
|
129 | <tr class="line unmod"> | |
|
130 | <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td> | |
|
131 | <td id="setuppy_o6" class="lineno old"><a href="#setuppy_o6" class="tooltip" | |
|
132 | title="Click to select line">6</a></td> | |
|
133 | <td id="setuppy_n6" class="lineno new"><a href="#setuppy_n6" class="tooltip" | |
|
134 | title="Click to select line">6</a></td> | |
|
135 | <td class="code"> | |
|
136 | <pre>try: | |
|
137 | </pre> | |
|
138 | </td> | |
|
139 | </tr> | |
|
140 | <tr class="line unmod"> | |
|
141 | <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td> | |
|
142 | <td id="setuppy_o7" class="lineno old"><a href="#setuppy_o7" class="tooltip" | |
|
143 | title="Click to select line">7</a></td> | |
|
144 | <td id="setuppy_n7" class="lineno new"><a href="#setuppy_n7" class="tooltip" | |
|
145 | title="Click to select line">7</a></td> | |
|
146 | <td class="code"> | |
|
147 | <pre> from setuptools import setup, Extension | |
|
148 | </pre> | |
|
149 | </td> | |
|
150 | </tr> | |
|
151 | <tr class="line unmod"> | |
|
152 | <td class="add-comment-line"><span class="add-comment-content"><a href="#"><span class="icon-comment-add"></span></a></span></td><td class="comment-toggle tooltip" title="Toggle Comment Thread"><i class="icon-comment"></i></td> | |
|
153 | <td id="setuppy_o8" class="lineno old"><a href="#setuppy_o8" class="tooltip" | |
|
154 | title="Click to select line">8</a></td> | |
|
155 | <td id="setuppy_n8" class="lineno new"><a href="#setuppy_n8" class="tooltip" | |
|
156 | title="Click to select line">8</a></td> | |
|
157 | <td class="code"> | |
|
158 | <pre>except ImportError: | |
|
159 | </pre> | |
|
160 | </td> | |
|
161 | </tr> | |
|
162 | </table> | |
|
163 | ''').strip() | |
|
164 | html = processor.as_html(enable_comments=True).replace('\t', ' ') | |
|
165 | ||
|
166 | assert html == expected_html | |
|
167 | ||
|
168 | ||
|
169 | 41 | class TestMixedFilenameEncodings(object): |
|
170 | 42 | |
|
171 | 43 | @pytest.fixture(scope="class") |
@@ -176,7 +48,7 b' class TestMixedFilenameEncodings(object)' | |||
|
176 | 48 | @pytest.fixture() |
|
177 | 49 | def processor(self, raw_diff): |
|
178 | 50 | diff = MercurialDiff(raw_diff) |
|
179 | processor = DiffProcessor(diff) | |
|
51 | processor = DiffProcessor(diff, diff_format='newdiff') | |
|
180 | 52 | return processor |
|
181 | 53 | |
|
182 | 54 | def test_filenames_are_decoded_to_unicode(self, processor): |
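
The fixture above reflects the new entry point: DiffProcessor is constructed with an explicit diff_format='newdiff'. A compact usage sketch based only on calls visible in this test module (the inline diff is illustrative and may not exercise every code path):

from rhodecode.lib.diffs import DiffProcessor
from rhodecode.lib.vcs.backends.git.diff import GitDiff

raw_diff = (b'diff --git a/setup.py b/setup.py\n'
            b'--- a/setup.py\n'
            b'+++ b/setup.py\n'
            b'@@ -1,1 +1,1 @@\n'
            b'-old line\n'
            b'+new line\n')

diff = GitDiff(raw_diff)                               # backend-specific wrapper
processor = DiffProcessor(diff, diff_format='newdiff')
prepared = processor.prepare()

# each prepared entry carries filename/operation/stats, which is what the
# DIFF_FIXTURES assertions below compare against
data = [(x['filename'], x['operation'], x['stats']) for x in prepared]
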
@@ -207,6 +79,8 b' DIFF_FIXTURES = [' | |||
|
207 | 79 | {'added': 0, |
|
208 | 80 | 'deleted': 0, |
|
209 | 81 | 'binary': True, |
|
82 | 'old_mode': '', | |
|
83 | 'new_mode': '100755', | |
|
210 | 84 | 'ops': {NEW_FILENODE: 'new file 100755', |
|
211 | 85 | BIN_FILENODE: 'binary diff hidden'}}), |
|
212 | 86 | ]), |
@@ -216,6 +90,8 b' DIFF_FIXTURES = [' | |||
|
216 | 90 | {'added': 0, |
|
217 | 91 | 'deleted': 0, |
|
218 | 92 | 'binary': True, |
|
93 | 'old_mode': '', | |
|
94 | 'new_mode': '', | |
|
219 | 95 | 'ops': {MOD_FILENODE: 'modified file', |
|
220 | 96 | BIN_FILENODE: 'binary diff hidden'}}), |
|
221 | 97 | ]), |
@@ -225,6 +101,9 b' DIFF_FIXTURES = [' | |||
|
225 | 101 | {'added': 3, |
|
226 | 102 | 'deleted': 0, |
|
227 | 103 | 'binary': False, |
|
104 | 'old_mode': '100755', | |
|
105 | 'new_mode': '100644', | |
|
106 | 'renamed': ('README.rst', 'README'), | |
|
228 | 107 | 'ops': {MOD_FILENODE: 'modified file', |
|
229 | 108 | RENAMED_FILENODE: 'file renamed from README.rst to README', |
|
230 | 109 | CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}), |
@@ -235,6 +114,8 b' DIFF_FIXTURES = [' | |||
|
235 | 114 | {'added': 2, |
|
236 | 115 | 'deleted': 1, |
|
237 | 116 | 'binary': False, |
|
117 | 'old_mode': '', | |
|
118 | 'new_mode': '', | |
|
238 | 119 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
239 | 120 | ]), |
|
240 | 121 | ('hg', |
@@ -243,6 +124,9 b' DIFF_FIXTURES = [' | |||
|
243 | 124 | {'added': 3, |
|
244 | 125 | 'deleted': 0, |
|
245 | 126 | 'binary': False, |
|
127 | 'old_mode': '', | |
|
128 | 'new_mode': '', | |
|
129 | 'renamed': ('README', 'README.rst'), | |
|
246 | 130 | 'ops': {MOD_FILENODE: 'modified file', |
|
247 | 131 | RENAMED_FILENODE: 'file renamed from README to README.rst'}}), |
|
248 | 132 | ]), |
@@ -252,6 +136,8 b' DIFF_FIXTURES = [' | |||
|
252 | 136 | {'added': 0, |
|
253 | 137 | 'deleted': 0, |
|
254 | 138 | 'binary': True, |
|
139 | 'old_mode': '', | |
|
140 | 'new_mode': '', | |
|
255 | 141 | 'ops': {DEL_FILENODE: 'deleted file', |
|
256 | 142 | BIN_FILENODE: 'binary diff hidden'}}), |
|
257 | 143 | ]), |
@@ -261,6 +147,8 b' DIFF_FIXTURES = [' | |||
|
261 | 147 | {'added': 0, |
|
262 | 148 | 'deleted': 0, |
|
263 | 149 | 'binary': True, |
|
150 | 'old_mode': '100644', | |
|
151 | 'new_mode': '100755', | |
|
264 | 152 | 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755', |
|
265 | 153 | BIN_FILENODE: 'binary diff hidden'}}), |
|
266 | 154 | ]), |
@@ -270,6 +158,8 b' DIFF_FIXTURES = [' | |||
|
270 | 158 | {'added': 0, |
|
271 | 159 | 'deleted': 0, |
|
272 | 160 | 'binary': True, |
|
161 | 'old_mode': '100755', | |
|
162 | 'new_mode': '100644', | |
|
273 | 163 | 'ops': {CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}), |
|
274 | 164 | ]), |
|
275 | 165 | ('hg', |
@@ -278,6 +168,9 b' DIFF_FIXTURES = [' | |||
|
278 | 168 | {'added': 0, |
|
279 | 169 | 'deleted': 0, |
|
280 | 170 | 'binary': True, |
|
171 | 'old_mode': '', | |
|
172 | 'new_mode': '', | |
|
173 | 'renamed': ('file', 'file_renamed'), | |
|
281 | 174 | 'ops': {RENAMED_FILENODE: 'file renamed from file to file_renamed'}}), |
|
282 | 175 | ]), |
|
283 | 176 | ('hg', |
@@ -286,6 +179,9 b' DIFF_FIXTURES = [' | |||
|
286 | 179 | {'added': 0, |
|
287 | 180 | 'deleted': 0, |
|
288 | 181 | 'binary': True, |
|
182 | 'old_mode': '100644', | |
|
183 | 'new_mode': '100755', | |
|
184 | 'renamed': ('README.rst', 'README'), | |
|
289 | 185 | 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755', |
|
290 | 186 | RENAMED_FILENODE: 'file renamed from README.rst to README'}}), |
|
291 | 187 | ]), |
@@ -295,43 +191,59 b' DIFF_FIXTURES = [' | |||
|
295 | 191 | {'added': 0, |
|
296 | 192 | 'deleted': 0, |
|
297 | 193 | 'binary': True, |
|
194 | 'new_mode': '100644', | |
|
195 | 'old_mode': '', | |
|
298 | 196 | 'ops': {NEW_FILENODE: 'new file 100644', |
|
299 | 197 | BIN_FILENODE: 'binary diff hidden'}}), |
|
300 | 198 | ('js/jquery/hashgrid.js', 'A', |
|
301 | 199 | {'added': 340, |
|
302 | 200 | 'deleted': 0, |
|
303 | 201 | 'binary': False, |
|
202 | 'new_mode': '100755', | |
|
203 | 'old_mode': '', | |
|
304 | 204 | 'ops': {NEW_FILENODE: 'new file 100755'}}), |
|
305 | 205 | ('index.html', 'M', |
|
306 | 206 | {'added': 3, |
|
307 | 207 | 'deleted': 2, |
|
308 | 208 | 'binary': False, |
|
209 | 'new_mode': '', | |
|
210 | 'old_mode': '', | |
|
309 | 211 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
310 | 212 | ('less/docs.less', 'M', |
|
311 | 213 | {'added': 34, |
|
312 | 214 | 'deleted': 0, |
|
313 | 215 | 'binary': False, |
|
216 | 'new_mode': '', | |
|
217 | 'old_mode': '', | |
|
314 | 218 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
315 | 219 | ('less/scaffolding.less', 'M', |
|
316 | 220 | {'added': 1, |
|
317 | 221 | 'deleted': 3, |
|
318 | 222 | 'binary': False, |
|
223 | 'new_mode': '', | |
|
224 | 'old_mode': '', | |
|
319 | 225 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
320 | 226 | ('readme.markdown', 'M', |
|
321 | 227 | {'added': 1, |
|
322 | 228 | 'deleted': 10, |
|
323 | 229 | 'binary': False, |
|
230 | 'new_mode': '', | |
|
231 | 'old_mode': '', | |
|
324 | 232 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
325 | 233 | ('img/baseline-20px.png', 'D', |
|
326 | 234 | {'added': 0, |
|
327 | 235 | 'deleted': 0, |
|
328 | 236 | 'binary': True, |
|
237 | 'new_mode': '', | |
|
238 | 'old_mode': '', | |
|
329 | 239 | 'ops': {DEL_FILENODE: 'deleted file', |
|
330 | 240 | BIN_FILENODE: 'binary diff hidden'}}), |
|
331 | 241 | ('js/global.js', 'D', |
|
332 | 242 | {'added': 0, |
|
333 | 243 | 'deleted': 75, |
|
334 | 244 | 'binary': False, |
|
245 | 'new_mode': '', | |
|
246 | 'old_mode': '', | |
|
335 | 247 | 'ops': {DEL_FILENODE: 'deleted file'}}) |
|
336 | 248 | ]), |
|
337 | 249 | ('git', |
@@ -340,6 +252,8 b' DIFF_FIXTURES = [' | |||
|
340 | 252 | {'added': 0, |
|
341 | 253 | 'deleted': 0, |
|
342 | 254 | 'binary': True, |
|
255 | 'old_mode': '100644', | |
|
256 | 'new_mode': '100755', | |
|
343 | 257 | 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}) |
|
344 | 258 | ]), |
|
345 | 259 | ('git', |
@@ -348,6 +262,8 b' DIFF_FIXTURES = [' | |||
|
348 | 262 | {'added': 1, |
|
349 | 263 | 'deleted': 0, |
|
350 | 264 | 'binary': False, |
|
265 | 'old_mode': '', | |
|
266 | 'new_mode': '100644', | |
|
351 | 267 | 'ops': {MOD_FILENODE: 'modified file'}}) |
|
352 | 268 | ]), |
|
353 | 269 | ('git', |
@@ -356,6 +272,9 b' DIFF_FIXTURES = [' | |||
|
356 | 272 | {'added': 0, |
|
357 | 273 | 'deleted': 0, |
|
358 | 274 | 'binary': True, |
|
275 | 'old_mode': '', | |
|
276 | 'new_mode': '', | |
|
277 | 'renamed': ('work-horus.xls', 'file.xls'), | |
|
359 | 278 | 'ops': { |
|
360 | 279 | RENAMED_FILENODE: 'file renamed from work-horus.xls to file.xls'}}) |
|
361 | 280 | ]), |
@@ -365,6 +284,8 b' DIFF_FIXTURES = [' | |||
|
365 | 284 | {'added': 0, |
|
366 | 285 | 'deleted': 0, |
|
367 | 286 | 'binary': True, |
|
287 | 'old_mode': '', | |
|
288 | 'new_mode': '', | |
|
368 | 289 | 'ops': {MOD_FILENODE: 'modified file', |
|
369 | 290 | BIN_FILENODE: 'binary diff hidden'}}) |
|
370 | 291 | ]), |
@@ -374,43 +295,59 b' DIFF_FIXTURES = [' | |||
|
374 | 295 | {'added': 0, |
|
375 | 296 | 'deleted': 0, |
|
376 | 297 | 'binary': True, |
|
298 | 'old_mode': '', | |
|
299 | 'new_mode': '100644', | |
|
377 | 300 | 'ops': {NEW_FILENODE: 'new file 100644', |
|
378 | 301 | BIN_FILENODE: 'binary diff hidden'}}), |
|
379 | 302 | ('js/jquery/hashgrid.js', 'A', |
|
380 | 303 | {'added': 340, |
|
381 | 304 | 'deleted': 0, |
|
382 | 305 | 'binary': False, |
|
306 | 'old_mode': '', | |
|
307 | 'new_mode': '100755', | |
|
383 | 308 | 'ops': {NEW_FILENODE: 'new file 100755'}}), |
|
384 | 309 | ('index.html', 'M', |
|
385 | 310 | {'added': 3, |
|
386 | 311 | 'deleted': 2, |
|
387 | 312 | 'binary': False, |
|
313 | 'old_mode': '', | |
|
314 | 'new_mode': '100644', | |
|
388 | 315 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
389 | 316 | ('less/docs.less', 'M', |
|
390 | 317 | {'added': 34, |
|
391 | 318 | 'deleted': 0, |
|
392 | 319 | 'binary': False, |
|
320 | 'old_mode': '', | |
|
321 | 'new_mode': '100644', | |
|
393 | 322 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
394 | 323 | ('less/scaffolding.less', 'M', |
|
395 | 324 | {'added': 1, |
|
396 | 325 | 'deleted': 3, |
|
397 | 326 | 'binary': False, |
|
327 | 'old_mode': '', | |
|
328 | 'new_mode': '100644', | |
|
398 | 329 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
399 | 330 | ('readme.markdown', 'M', |
|
400 | 331 | {'added': 1, |
|
401 | 332 | 'deleted': 10, |
|
402 | 333 | 'binary': False, |
|
334 | 'old_mode': '', | |
|
335 | 'new_mode': '100644', | |
|
403 | 336 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
404 | 337 | ('img/baseline-20px.png', 'D', |
|
405 | 338 | {'added': 0, |
|
406 | 339 | 'deleted': 0, |
|
407 | 340 | 'binary': True, |
|
341 | 'old_mode': '', | |
|
342 | 'new_mode': '', | |
|
408 | 343 | 'ops': {DEL_FILENODE: 'deleted file', |
|
409 | 344 | BIN_FILENODE: 'binary diff hidden'}}), |
|
410 | 345 | ('js/global.js', 'D', |
|
411 | 346 | {'added': 0, |
|
412 | 347 | 'deleted': 75, |
|
413 | 348 | 'binary': False, |
|
349 | 'old_mode': '', | |
|
350 | 'new_mode': '', | |
|
414 | 351 | 'ops': {DEL_FILENODE: 'deleted file'}}), |
|
415 | 352 | ]), |
|
416 | 353 | ('hg', |
@@ -419,26 +356,36 b' DIFF_FIXTURES = [' | |||
|
419 | 356 | {'added': 18, |
|
420 | 357 | 'deleted': 2, |
|
421 | 358 | 'binary': False, |
|
359 | 'old_mode': '', | |
|
360 | 'new_mode': '100644', | |
|
422 | 361 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
423 | 362 | ('vcs/backends/git/repository.py', 'M', |
|
424 | 363 | {'added': 46, |
|
425 | 364 | 'deleted': 15, |
|
426 | 365 | 'binary': False, |
|
366 | 'old_mode': '', | |
|
367 | 'new_mode': '100644', | |
|
427 | 368 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
428 | 369 | ('vcs/backends/hg.py', 'M', |
|
429 | 370 | {'added': 22, |
|
430 | 371 | 'deleted': 3, |
|
431 | 372 | 'binary': False, |
|
373 | 'old_mode': '', | |
|
374 | 'new_mode': '100644', | |
|
432 | 375 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
433 | 376 | ('vcs/tests/test_git.py', 'M', |
|
434 | 377 | {'added': 5, |
|
435 | 378 | 'deleted': 5, |
|
436 | 379 | 'binary': False, |
|
380 | 'old_mode': '', | |
|
381 | 'new_mode': '100644', | |
|
437 | 382 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
438 | 383 | ('vcs/tests/test_repository.py', 'M', |
|
439 | 384 | {'added': 174, |
|
440 | 385 | 'deleted': 2, |
|
441 | 386 | 'binary': False, |
|
387 | 'old_mode': '', | |
|
388 | 'new_mode': '100644', | |
|
442 | 389 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
443 | 390 | ]), |
|
444 | 391 | ('hg', |
@@ -447,6 +394,9 b' DIFF_FIXTURES = [' | |||
|
447 | 394 | {'added': 0, |
|
448 | 395 | 'deleted': 0, |
|
449 | 396 | 'binary': True, |
|
397 | 'old_mode': '', | |
|
398 | 'new_mode': '', | |
|
399 | 'copied': ('file1', 'file2'), | |
|
450 | 400 | 'ops': {COPIED_FILENODE: 'file copied from file1 to file2'}}), |
|
451 | 401 | ]), |
|
452 | 402 | ('hg', |
@@ -455,6 +405,9 b' DIFF_FIXTURES = [' | |||
|
455 | 405 | {'added': 1, |
|
456 | 406 | 'deleted': 0, |
|
457 | 407 | 'binary': False, |
|
408 | 'old_mode': '', | |
|
409 | 'new_mode': '', | |
|
410 | 'copied': ('file2', 'file3'), | |
|
458 | 411 | 'ops': {COPIED_FILENODE: 'file copied from file2 to file3', |
|
459 | 412 | MOD_FILENODE: 'modified file'}}), |
|
460 | 413 | ]), |
@@ -464,6 +417,9 b' DIFF_FIXTURES = [' | |||
|
464 | 417 | {'added': 0, |
|
465 | 418 | 'deleted': 0, |
|
466 | 419 | 'binary': True, |
|
420 | 'old_mode': '100644', | |
|
421 | 'new_mode': '100755', | |
|
422 | 'copied': ('file3', 'file4'), | |
|
467 | 423 | 'ops': {COPIED_FILENODE: 'file copied from file3 to file4', |
|
468 | 424 | CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}), |
|
469 | 425 | ]), |
@@ -473,6 +429,9 b' DIFF_FIXTURES = [' | |||
|
473 | 429 | {'added': 2, |
|
474 | 430 | 'deleted': 1, |
|
475 | 431 | 'binary': False, |
|
432 | 'old_mode': '100755', | |
|
433 | 'new_mode': '100644', | |
|
434 | 'copied': ('file4', 'file5'), | |
|
476 | 435 | 'ops': {COPIED_FILENODE: 'file copied from file4 to file5', |
|
477 | 436 | CHMOD_FILENODE: 'modified file chmod 100755 => 100644', |
|
478 | 437 | MOD_FILENODE: 'modified file'}})]), |
@@ -484,6 +443,9 b' DIFF_FIXTURES = [' | |||
|
484 | 443 | {'added': 0, |
|
485 | 444 | 'deleted': 0, |
|
486 | 445 | 'binary': True, |
|
446 | 'old_mode': '', | |
|
447 | 'new_mode': '', | |
|
448 | 'renamed': ('file_with_ spaces.txt', 'file_with_ two spaces.txt'), | |
|
487 | 449 | 'ops': { |
|
488 | 450 | RENAMED_FILENODE: ( |
|
489 | 451 | 'file renamed from file_with_ spaces.txt to file_with_ ' |
@@ -495,10 +457,12 b' DIFF_FIXTURES = [' | |||
|
495 | 457 | {'added': 0, |
|
496 | 458 | 'deleted': 0, |
|
497 | 459 | 'binary': True, |
|
460 | 'old_mode': '', | |
|
461 | 'new_mode': '', | |
|
462 | 'renamed': ('file_ with update.txt', 'file_changed _.txt'), | |
|
498 | 463 | 'ops': { |
|
499 | 464 | RENAMED_FILENODE: ( |
|
500 | 'file renamed from file_ with update.txt to file_changed' | |
|
501 | ' _.txt')} | |
|
465 | 'file renamed from file_ with update.txt to file_changed _.txt')} | |
|
502 | 466 | }), ]), |
|
503 | 467 | ('hg', |
|
504 | 468 | 'hg_diff_copy_file_with_spaces.diff', |
@@ -506,6 +470,9 b' DIFF_FIXTURES = [' | |||
|
506 | 470 | {'added': 0, |
|
507 | 471 | 'deleted': 0, |
|
508 | 472 | 'binary': True, |
|
473 | 'old_mode': '', | |
|
474 | 'new_mode': '', | |
|
475 | 'copied': ('file_changed_without_spaces.txt', 'file_copied_ with spaces.txt'), | |
|
509 | 476 | 'ops': { |
|
510 | 477 | COPIED_FILENODE: ( |
|
511 | 478 | 'file copied from file_changed_without_spaces.txt to' |
@@ -520,6 +487,8 b' DIFF_FIXTURES = [' | |||
|
520 | 487 | {'added': 0, |
|
521 | 488 | 'deleted': 0, |
|
522 | 489 | 'binary': True, |
|
490 | 'old_mode': '', | |
|
491 | 'new_mode': '100644', | |
|
523 | 492 | 'ops': {NEW_FILENODE: 'new file 100644', |
|
524 | 493 | BIN_FILENODE: 'binary diff hidden'} |
|
525 | 494 | }), |
@@ -530,6 +499,8 b' DIFF_FIXTURES = [' | |||
|
530 | 499 | {'added': 0, |
|
531 | 500 | 'deleted': 0, |
|
532 | 501 | 'binary': True, |
|
502 | 'old_mode': '', | |
|
503 | 'new_mode': '100644', | |
|
533 | 504 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
534 | 505 | }), |
|
535 | 506 | ]), |
@@ -540,6 +511,8 b' DIFF_FIXTURES = [' | |||
|
540 | 511 | {'added': 0, |
|
541 | 512 | 'deleted': 0, |
|
542 | 513 | 'binary': False, |
|
514 | 'old_mode': '', | |
|
515 | 'new_mode': '10644', | |
|
543 | 516 | 'ops': {NEW_FILENODE: 'new file 10644', |
|
544 | 517 | #TODO(Marcink): depends on binary detection on svn patches |
|
545 | 518 | # BIN_FILENODE: 'binary diff hidden' |
@@ -553,6 +526,8 b' DIFF_FIXTURES = [' | |||
|
553 | 526 | {'added': 0, |
|
554 | 527 | 'deleted': 0, |
|
555 | 528 | 'binary': False, |
|
529 | 'old_mode': '', | |
|
530 | 'new_mode': '', | |
|
556 | 531 | 'ops': {MOD_FILENODE: 'modified file', |
|
557 | 532 | #TODO(Marcink): depends on binary detection on svn patches |
|
558 | 533 | # BIN_FILENODE: 'binary diff hidden' |
@@ -562,48 +537,64 b' DIFF_FIXTURES = [' | |||
|
562 | 537 | {'added': 89, |
|
563 | 538 | 'deleted': 34, |
|
564 | 539 | 'binary': False, |
|
540 | 'old_mode': '', | |
|
541 | 'new_mode': '', | |
|
565 | 542 | 'ops': {MOD_FILENODE: 'modified file'} |
|
566 | 543 | }), |
|
567 | 544 | ('trunk/doc/source/en/tsvn_ch04.xml', 'M', |
|
568 | 545 | {'added': 66, |
|
569 | 546 | 'deleted': 21, |
|
570 | 547 | 'binary': False, |
|
548 | 'old_mode': '', | |
|
549 | 'new_mode': '', | |
|
571 | 550 | 'ops': {MOD_FILENODE: 'modified file'} |
|
572 | 551 | }), |
|
573 | 552 | ('trunk/src/Changelog.txt', 'M', |
|
574 | 553 | {'added': 2, |
|
575 | 554 | 'deleted': 0, |
|
576 | 555 | 'binary': False, |
|
556 | 'old_mode': '', | |
|
557 | 'new_mode': '', | |
|
577 | 558 | 'ops': {MOD_FILENODE: 'modified file'} |
|
578 | 559 | }), |
|
579 | 560 | ('trunk/src/Resources/TortoiseProcENG.rc', 'M', |
|
580 | 561 | {'added': 19, |
|
581 | 562 | 'deleted': 13, |
|
582 | 563 | 'binary': False, |
|
564 | 'old_mode': '', | |
|
565 | 'new_mode': '', | |
|
583 | 566 | 'ops': {MOD_FILENODE: 'modified file'} |
|
584 | 567 | }), |
|
585 | 568 | ('trunk/src/TortoiseProc/SetOverlayPage.cpp', 'M', |
|
586 | 569 | {'added': 16, |
|
587 | 570 | 'deleted': 1, |
|
588 | 571 | 'binary': False, |
|
572 | 'old_mode': '', | |
|
573 | 'new_mode': '', | |
|
589 | 574 | 'ops': {MOD_FILENODE: 'modified file'} |
|
590 | 575 | }), |
|
591 | 576 | ('trunk/src/TortoiseProc/SetOverlayPage.h', 'M', |
|
592 | 577 | {'added': 3, |
|
593 | 578 | 'deleted': 0, |
|
594 | 579 | 'binary': False, |
|
580 | 'old_mode': '', | |
|
581 | 'new_mode': '', | |
|
595 | 582 | 'ops': {MOD_FILENODE: 'modified file'} |
|
596 | 583 | }), |
|
597 | 584 | ('trunk/src/TortoiseProc/resource.h', 'M', |
|
598 | 585 | {'added': 2, |
|
599 | 586 | 'deleted': 0, |
|
600 | 587 | 'binary': False, |
|
588 | 'old_mode': '', | |
|
589 | 'new_mode': '', | |
|
601 | 590 | 'ops': {MOD_FILENODE: 'modified file'} |
|
602 | 591 | }), |
|
603 | 592 | ('trunk/src/TortoiseShell/ShellCache.h', 'M', |
|
604 | 593 | {'added': 50, |
|
605 | 594 | 'deleted': 1, |
|
606 | 595 | 'binary': False, |
|
596 | 'old_mode': '', | |
|
597 | 'new_mode': '', | |
|
607 | 598 | 'ops': {MOD_FILENODE: 'modified file'} |
|
608 | 599 | }), |
|
609 | 600 | ]), |
@@ -621,6 +612,8 b' DIFF_FIXTURES_WITH_CONTENT = [' | |||
|
621 | 612 | 'added': 1, |
|
622 | 613 | 'deleted': 0, |
|
623 | 614 | 'binary': False, |
|
615 | 'old_mode': '', | |
|
616 | 'new_mode': '100644', | |
|
624 | 617 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
625 | 618 | }, |
|
626 | 619 | '@@ -0,0 +1 @@\n+test_content b\n' # diff |
@@ -637,6 +630,8 b' DIFF_FIXTURES_WITH_CONTENT = [' | |||
|
637 | 630 | 'added': 1, |
|
638 | 631 | 'deleted': 0, |
|
639 | 632 | 'binary': False, |
|
633 | 'old_mode': '', | |
|
634 | 'new_mode': '100644', | |
|
640 | 635 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
641 | 636 | }, |
|
642 | 637 | '@@ -0,0 +1 @@\n+test_content b\n' # diff |
@@ -648,6 +643,8 b' DIFF_FIXTURES_WITH_CONTENT = [' | |||
|
648 | 643 | 'added': 1, |
|
649 | 644 | 'deleted': 0, |
|
650 | 645 | 'binary': False, |
|
646 | 'old_mode': '', | |
|
647 | 'new_mode': '100644', | |
|
651 | 648 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
652 | 649 | }, |
|
653 | 650 | '@@ -0,0 +1 @@\n+test_content c\n' # diff |
@@ -664,6 +661,8 b' DIFF_FIXTURES_WITH_CONTENT = [' | |||
|
664 | 661 | 'added': 1, |
|
665 | 662 | 'deleted': 0, |
|
666 | 663 | 'binary': False, |
|
664 | 'old_mode': '', | |
|
665 | 'new_mode': '100644', | |
|
667 | 666 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
668 | 667 | }, |
|
669 | 668 | '@@ -0,0 +1 @@\n+test_content b\n\n' # diff |
@@ -675,6 +674,8 b' DIFF_FIXTURES_WITH_CONTENT = [' | |||
|
675 | 674 | 'added': 1, |
|
676 | 675 | 'deleted': 0, |
|
677 | 676 | 'binary': False, |
|
677 | 'old_mode': '', | |
|
678 | 'new_mode': '100644', | |
|
678 | 679 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
679 | 680 | }, |
|
680 | 681 | '@@ -0,0 +1 @@\n+test_content c\n' # diff |
@@ -691,6 +692,8 b' DIFF_FIXTURES_WITH_CONTENT = [' | |||
|
691 | 692 | 'added': 1, |
|
692 | 693 | 'deleted': 0, |
|
693 | 694 | 'binary': False, |
|
695 | 'old_mode': '', | |
|
696 | 'new_mode': '100644', | |
|
694 | 697 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
695 | 698 | }, |
|
696 | 699 | '@@ -0,0 +1,1 @@\n+file\n' # diff |
@@ -702,6 +705,8 b' DIFF_FIXTURES_WITH_CONTENT = [' | |||
|
702 | 705 | 'added': 1, |
|
703 | 706 | 'deleted': 0, |
|
704 | 707 | 'binary': False, |
|
708 | 'old_mode': '', | |
|
709 | 'new_mode': '100644', | |
|
705 | 710 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
706 | 711 | }, |
|
707 | 712 | '@@ -0,0 +1,1 @@\n+another line\n' # diff |
@@ -713,6 +718,8 b' DIFF_FIXTURES_WITH_CONTENT = [' | |||
|
713 | 718 | 'added': 1, |
|
714 | 719 | 'deleted': 0, |
|
715 | 720 | 'binary': False, |
|
721 | 'old_mode': '', | |
|
722 | 'new_mode': '100644', | |
|
716 | 723 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
717 | 724 | }, |
|
718 | 725 | '@@ -0,0 +1,1 @@\n+newline\n' # diff |
@@ -724,6 +731,8 b' DIFF_FIXTURES_WITH_CONTENT = [' | |||
|
724 | 731 | 'added': 1, |
|
725 | 732 | 'deleted': 0, |
|
726 | 733 | 'binary': False, |
|
734 | 'old_mode': '', | |
|
735 | 'new_mode': '100644', | |
|
727 | 736 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
728 | 737 | }, |
|
729 | 738 | '@@ -0,0 +1,1 @@\n+fil4\n\\ No newline at end of file' # diff |
@@ -741,28 +750,46 b' diff_class = {' | |||
|
741 | 750 | } |
|
742 | 751 | |
|
743 | 752 | |
|
744 | @pytest.fixture(params=DIFF_FIXTURES) | |
|
745 | def diff_fixture(request): | |
|
746 | vcs, diff_fixture, expected = request.param | |
|
747 | diff_txt = fixture.load_resource(diff_fixture) | |
|
748 | diff = diff_class[vcs](diff_txt) | |
|
749 | return diff, expected | |
|
753 | @pytest.mark.parametrize('vcs_type, diff_file, expected_data', DIFF_FIXTURES) | |
|
754 | def test_diff_lib(vcs_type, diff_file, expected_data): | |
|
755 | diff_txt = fixture.load_resource(diff_file) | |
|
756 | diff = diff_class[vcs_type](diff_txt) | |
|
750 | 757 | |
|
751 | ||
|
752 | def test_diff_lib(diff_fixture): | |
|
753 | diff, expected_data = diff_fixture | |
|
754 | diff_proc = DiffProcessor(diff) | |
|
758 | diff_proc = DiffProcessor(diff, diff_format='newdiff') | |
|
755 | 759 | diff_proc_d = diff_proc.prepare() |
|
756 | data = [(x['filename'], x['operation'], x['stats']) |

760 | data = [(x['filename'], x['operation'], x['stats']) | |
|
761 | for x in diff_proc_d] | |
|
757 | 762 | assert expected_data == data |
|
758 | 763 | |
|
759 | 764 | |
|
760 | @pytest.fixture(params=DIFF_FIXTURES_WITH_CONTENT) |

761 | def diff_fixture_w_content(request): | |
|
762 | vcs, diff_fixture, expected = request.param | |
|
763 | diff_txt = fixture.load_resource(diff_fixture) | |
|
764 | diff = diff_class[vcs](diff_txt) | |
|
765 | return diff, expected | |
|
765 | @pytest.mark.parametrize('vcs_type, diff_file, expected_data', DIFF_FIXTURES_WITH_CONTENT) | |
|
766 | def test_diff_lib_newlines(vcs_type, diff_file, expected_data): | |
|
767 | diff_txt = fixture.load_resource(diff_file) | |
|
768 | diff = diff_class[vcs_type](diff_txt) | |
|
769 | ||
|
770 | diff_proc = DiffProcessor(diff, diff_format='newdiff') | |
|
771 | diff_proc_d = diff_proc.prepare() | |
|
772 | data = [(x['filename'], x['operation'], x['stats'], x['raw_diff']) | |
|
773 | for x in diff_proc_d] | |
|
774 | assert expected_data == data | |
|
775 | ||
|
776 | ||
|
777 | @pytest.mark.parametrize('input_str', [ | |
|
778 | b'', | |
|
779 | b'\n', | |
|
780 | b'\n\n', | |
|
781 | b'First\n+second', | |
|
782 | b'First\n+second\n', | |
|
783 | ||
|
784 | b'\n\n\n Multi \n\n\n', | |
|
785 | b'\n\n\n Multi beginning', | |
|
786 | b'Multi end \n\n\n', | |
|
787 | b'Multi end', | |
|
788 | b'@@ -0,0 +1 @@\n+test_content \n\n b\n' | |
|
789 | ], ids=no_newline_id_generator) | |
|
790 | def test_splitlines(input_str): | |
|
791 | result = DiffProcessor.diff_splitter(input_str) | |
|
792 | assert list(result) == input_str.splitlines(True) | |
|
766 | 793 | |
|
767 | 794 | |
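
The refactor above swaps a params-based fixture for direct pytest.mark.parametrize decoration, so each diff fixture shows up as its own test id. A small, self-contained sketch of the same pattern (the case data here is made up for illustration):

import pytest

CASES = [
    ('git', 'example_git.diff', 12),
    ('hg', 'example_hg.diff', 3),
]

# old style: the case hides behind request.param inside a fixture
@pytest.fixture(params=CASES)
def diff_case(request):
    return request.param

# new style, as in the change above: parametrize the test directly
@pytest.mark.parametrize('vcs_type, diff_file, expected_added', CASES)
def test_added_lines(vcs_type, diff_file, expected_added):
    assert vcs_type in ('git', 'hg')
    assert expected_added >= 0
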
|
768 | 795 | def test_diff_over_limit(request): |
@@ -772,9 +799,9 b' def test_diff_over_limit(request):' | |||
|
772 | 799 | |
|
773 | 800 | raw_diff = fixture.load_resource('large_diff.diff') |
|
774 | 801 | vcs_diff = GitDiff(raw_diff) |
|
775 | diff_processor = DiffProcessor( | |
|
776 | vcs_diff, diff_limit=diff_limit, file_limit=file_limit, |

777 | show_full_diff=False) | |
|
802 | diff_processor = DiffProcessor(vcs_diff, diff_format='newdiff', | |
|
803 | diff_limit=diff_limit, file_limit=file_limit, | |
|
804 | show_full_diff=False) | |
|
778 | 805 | |
|
779 | 806 | _parsed = diff_processor.prepare() |
|
780 | 807 | |
@@ -796,30 +823,3 b' def test_diff_over_limit(request):' | |||
|
796 | 823 | |
|
797 | 824 | assert diffset.files[1].patch['filename'] == 'README.md' |
|
798 | 825 | assert diffset.files[1].limited_diff is False |
|
799 | ||
|
800 | ||
|
801 | def test_diff_lib_newlines(diff_fixture_w_content): | |
|
802 | diff, expected_data = diff_fixture_w_content | |
|
803 | diff_proc = DiffProcessor(diff) | |
|
804 | diff_proc_d = diff_proc.prepare() | |
|
805 | data = [(x['filename'], x['operation'], x['stats'], x['raw_diff']) | |
|
806 | for x in diff_proc_d] | |
|
807 | assert expected_data == data | |
|
808 | ||
|
809 | ||
|
810 | @pytest.mark.parametrize('input_str', [ | |
|
811 | '', | |
|
812 | '\n', | |
|
813 | '\n\n', | |
|
814 | 'First\n+second', | |
|
815 | 'First\n+second\n', | |
|
816 | ||
|
817 | '\n\n\n Multi \n\n\n', | |
|
818 | '\n\n\n Multi beginning', | |
|
819 | 'Multi end \n\n\n', | |
|
820 | 'Multi end', | |
|
821 | '@@ -0,0 +1 @@\n+test_content \n\n b\n' | |
|
822 | ], ids=no_newline_id_generator) | |
|
823 | def test_splitlines(input_str): | |
|
824 | result = DiffProcessor.diff_splitter(input_str) | |
|
825 | assert list(result) == input_str.splitlines(True) |
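
test_splitlines now feeds bytes into DiffProcessor.diff_splitter and compares against bytes.splitlines(True). A minimal sketch of the behaviour those cases assert; the real splitter may do more than this:

def diff_splitter(buffer: bytes):
    # keepends=True so every yielded line still carries its newline
    yield from buffer.splitlines(keepends=True)

assert list(diff_splitter(b'First\n+second\n')) == [b'First\n', b'+second\n']
assert list(diff_splitter(b'')) == []
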
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -27,6 +26,7 b' import textwrap' | |||
|
27 | 26 | import pytest |
|
28 | 27 | |
|
29 | 28 | from rhodecode.lib import diffs |
|
29 | from rhodecode.lib.str_utils import safe_bytes | |
|
30 | 30 | from rhodecode.lib.vcs.backends.git.diff import GitDiff |
|
31 | 31 | |
|
32 | 32 | |
@@ -40,13 +40,13 b' def test_context_of_an_old_line_number(d' | |||
|
40 | 40 | context = diff_processor.get_context_of_line( |
|
41 | 41 | path='file.txt', diff_line=diffs.DiffLineNumber(old=7, new=None)) |
|
42 | 42 | expected_context = [ |
|
43 | ('unmod', 'line04\n'), | |
|
44 | ('unmod', 'line05\n'), | |
|
45 | ('unmod', 'line06\n'), | |
|
46 | ('unmod', 'line07\n'), | |
|
47 | ('add', 'line07a Add after line07\n'), | |
|
48 | ('unmod', 'line08\n'), | |
|
49 | ('unmod', 'line09\n'), | |
|
43 | ('unmod', b'line04\n'), | |
|
44 | ('unmod', b'line05\n'), | |
|
45 | ('unmod', b'line06\n'), | |
|
46 | ('unmod', b'line07\n'), | |
|
47 | ('add', b'line07a Add after line07\n'), | |
|
48 | ('unmod', b'line08\n'), | |
|
49 | ('unmod', b'line09\n'), | |
|
50 | 50 | ] |
|
51 | 51 | assert context == expected_context |
|
52 | 52 | |
@@ -55,13 +55,13 b' def test_context_of_a_new_line_number(di' | |||
|
55 | 55 | context = diff_processor.get_context_of_line( |
|
56 | 56 | path='file.txt', diff_line=diffs.DiffLineNumber(old=None, new=8)) |
|
57 | 57 | expected_context = [ |
|
58 | ('unmod', 'line05\n'), | |
|
59 | ('unmod', 'line06\n'), | |
|
60 | ('unmod', 'line07\n'), | |
|
61 | ('add', 'line07a Add after line07\n'), | |
|
62 | ('unmod', 'line08\n'), | |
|
63 | ('unmod', 'line09\n'), | |
|
64 | ('unmod', 'line10\n'), | |
|
58 | ('unmod', b'line05\n'), | |
|
59 | ('unmod', b'line06\n'), | |
|
60 | ('unmod', b'line07\n'), | |
|
61 | ('add', b'line07a Add after line07\n'), | |
|
62 | ('unmod', b'line08\n'), | |
|
63 | ('unmod', b'line09\n'), | |
|
64 | ('unmod', b'line10\n'), | |
|
65 | 65 | ] |
|
66 | 66 | assert context == expected_context |
|
67 | 67 | |
@@ -72,11 +72,11 b' def test_context_of_an_invisible_line_be' | |||
|
72 | 72 | context = diff_processor.get_context_of_line( |
|
73 | 73 | path='file.txt', diff_line=diffs.DiffLineNumber(old=None, new=3)) |
|
74 | 74 | expected_context = [ |
|
75 | ('unmod', 'line02\n'), | |
|
76 | ('unmod', 'line03\n'), | |
|
77 | ('unmod', 'line04\n'), | |
|
78 | ('unmod', 'line05\n'), | |
|
79 | ('unmod', 'line06\n'), | |
|
75 | ('unmod', b'line02\n'), | |
|
76 | ('unmod', b'line03\n'), | |
|
77 | ('unmod', b'line04\n'), | |
|
78 | ('unmod', b'line05\n'), | |
|
79 | ('unmod', b'line06\n'), | |
|
80 | 80 | ] |
|
81 | 81 | assert context == expected_context |
|
82 | 82 | |
@@ -87,11 +87,11 b' def test_context_of_an_invisible_line_en' | |||
|
87 | 87 | context = diff_processor.get_context_of_line( |
|
88 | 88 | path='file.txt', diff_line=diffs.DiffLineNumber(old=12, new=None)) |
|
89 | 89 | expected_context = [ |
|
90 | ('unmod', 'line09\n'), | |
|
91 | ('unmod', 'line10\n'), | |
|
92 | ('unmod', 'line11\n'), | |
|
93 | ('unmod', 'line12\n'), | |
|
94 | ('unmod', 'line13\n'), | |
|
90 | ('unmod', b'line09\n'), | |
|
91 | ('unmod', b'line10\n'), | |
|
92 | ('unmod', b'line11\n'), | |
|
93 | ('unmod', b'line12\n'), | |
|
94 | ('unmod', b'line13\n'), | |
|
95 | 95 | ] |
|
96 | 96 | assert context == expected_context |
|
97 | 97 | |
@@ -101,11 +101,11 b' def test_context_of_an_incomplete_hunk_i' | |||
|
101 | 101 | context = diff_processor.get_context_of_line( |
|
102 | 102 | path='file.txt', diff_line=diffs.DiffLineNumber(old=None, new=2)) |
|
103 | 103 | expected_context = [ |
|
104 | ('unmod', 'line01\n'), | |
|
105 | ('add', 'line01a Add line after line01\n'), | |
|
106 | ('unmod', 'line02\n'), | |
|
107 | ('unmod', 'line03\n'), | |
|
108 | ('unmod', 'line04\n'), | |
|
104 | ('unmod', b'line01\n'), | |
|
105 | ('add', b'line01a Add line after line01\n'), | |
|
106 | ('unmod', b'line02\n'), | |
|
107 | ('unmod', b'line03\n'), | |
|
108 | ('unmod', b'line04\n'), | |
|
109 | 109 | ] |
|
110 | 110 | assert context == expected_context |
|
111 | 111 | |
@@ -115,11 +115,11 b' def test_context_of_an_incomplete_hunk_i' | |||
|
115 | 115 | context = diff_processor.get_context_of_line( |
|
116 | 116 | path='file.txt', diff_line=diffs.DiffLineNumber(old=None, new=80)) |
|
117 | 117 | expected_context = [ |
|
118 | ('unmod', 'line36\n'), | |
|
119 | ('unmod', 'line37\n'), | |
|
120 | ('unmod', 'line38\n'), | |
|
121 | ('add', 'line38a Add line after line38\n'), | |
|
122 | ('unmod', 'line39\n'), | |
|
118 | ('unmod', b'line36\n'), | |
|
119 | ('unmod', b'line37\n'), | |
|
120 | ('unmod', b'line38\n'), | |
|
121 | ('add', b'line38a Add line after line38\n'), | |
|
122 | ('unmod', b'line39\n'), | |
|
123 | 123 | ] |
|
124 | 124 | assert context == expected_context |
|
125 | 125 | |
@@ -131,7 +131,7 b' def test_context_of_an_incomplete_hunk_i' | |||
|
131 | 131 | def test_appends_newline_for_each_context_line(diff_processor): |
|
132 | 132 | context = diff_processor.get_context_of_line( |
|
133 | 133 | path='file_b', diff_line=diffs.DiffLineNumber(old=None, new=1)) |
|
134 | assert context == [('add', 'test_content\n')] | |
|
134 | assert context == [('add', b'test_content\n')] | |
|
135 | 135 | |
|
136 | 136 | |
|
137 | 137 | def test_context_of_a_missing_line_raises(diff_processor): |
@@ -151,13 +151,13 b' def test_context_of_a_missing_file_raise' | |||
|
151 | 151 | |
|
152 | 152 | def test_find_context_with_full_context(diff_processor): |
|
153 | 153 | context_of_line_7 = [ |
|
154 | ('unmod', 'line05\n'), | |
|
155 | ('unmod', 'line06\n'), | |
|
156 | ('unmod', 'line07\n'), | |
|
157 | ('add', 'line07a Add after line07\n'), | |
|
158 | ('unmod', 'line08\n'), | |
|
159 | ('unmod', 'line09\n'), | |
|
160 | ('unmod', 'line10\n'), | |
|
154 | ('unmod', b'line05\n'), | |
|
155 | ('unmod', b'line06\n'), | |
|
156 | ('unmod', b'line07\n'), | |
|
157 | ('add', b'line07a Add after line07\n'), | |
|
158 | ('unmod', b'line08\n'), | |
|
159 | ('unmod', b'line09\n'), | |
|
160 | ('unmod', b'line10\n'), | |
|
161 | 161 | ] |
|
162 | 162 | found_line = diff_processor.find_context( |
|
163 | 163 | 'file.txt', context_of_line_7, offset=3) |
@@ -167,13 +167,13 b' def test_find_context_with_full_context(' | |||
|
167 | 167 | @pytest.mark.parametrize('diff_fixture', ['change-duplicated.diff']) |
|
168 | 168 | def test_find_context_multiple_times(diff_processor): |
|
169 | 169 | context = [ |
|
170 | ('unmod', 'line04\n'), | |
|
171 | ('unmod', 'line05\n'), | |
|
172 | ('unmod', 'line06\n'), | |
|
173 | ('add', 'line06a add line\n'), | |
|
174 | ('unmod', 'line07\n'), | |
|
175 | ('unmod', 'line08\n'), | |
|
176 | ('unmod', 'line09\n'), | |
|
170 | ('unmod', b'line04\n'), | |
|
171 | ('unmod', b'line05\n'), | |
|
172 | ('unmod', b'line06\n'), | |
|
173 | ('add', b'line06a add line\n'), | |
|
174 | ('unmod', b'line07\n'), | |
|
175 | ('unmod', b'line08\n'), | |
|
176 | ('unmod', b'line09\n'), | |
|
177 | 177 | ] |
|
178 | 178 | found_line = diff_processor.find_context('file.txt', context, offset=3) |
|
179 | 179 | assert found_line == [ |
@@ -185,13 +185,13 b' def test_find_context_multiple_times(dif' | |||
|
185 | 185 | @pytest.mark.parametrize('offset', [20, -20, -1, 7]) |
|
186 | 186 | def test_find_context_offset_param_raises(diff_processor, offset): |
|
187 | 187 | context_of_line_7 = [ |
|
188 | ('unmod', 'line04\n'), | |
|
189 | ('unmod', 'line05\n'), | |
|
190 | ('unmod', 'line06\n'), | |
|
191 | ('unmod', 'line07\n'), | |
|
192 | ('add', 'line07a Add after line07\n'), | |
|
193 | ('unmod', 'line08\n'), | |
|
194 | ('unmod', 'line09\n'), | |
|
188 | ('unmod', b'line04\n'), | |
|
189 | ('unmod', b'line05\n'), | |
|
190 | ('unmod', b'line06\n'), | |
|
191 | ('unmod', b'line07\n'), | |
|
192 | ('add', b'line07a Add after line07\n'), | |
|
193 | ('unmod', b'line08\n'), | |
|
194 | ('unmod', b'line09\n'), | |
|
195 | 195 | ] |
|
196 | 196 | with pytest.raises(ValueError): |
|
197 | 197 | diff_processor.find_context( |
@@ -200,10 +200,10 b' def test_find_context_offset_param_raise' | |||
|
200 | 200 | |
|
201 | 201 | def test_find_context_beginning_of_chunk(diff_processor): |
|
202 | 202 | context_of_first_line = [ |
|
203 | ('unmod', 'line02\n'), | |
|
204 | ('unmod', 'line03\n'), | |
|
205 | ('unmod', 'line04\n'), | |
|
206 | ('unmod', 'line05\n'), | |
|
203 | ('unmod', b'line02\n'), | |
|
204 | ('unmod', b'line03\n'), | |
|
205 | ('unmod', b'line04\n'), | |
|
206 | ('unmod', b'line05\n'), | |
|
207 | 207 | ] |
|
208 | 208 | found_line = diff_processor.find_context( |
|
209 | 209 | 'file.txt', context_of_first_line, offset=0) |
@@ -213,13 +213,13 b' def test_find_context_beginning_of_chunk' | |||
|
213 | 213 | @pytest.mark.parametrize('diff_fixture', ['change-in-beginning.diff']) |
|
214 | 214 | def test_find_context_beginning_of_file(diff_processor): |
|
215 | 215 | context_of_first_line = [ |
|
216 | ('add', 'line01a Add line after line01\n'), | |
|
217 | ('unmod', 'line02\n'), | |
|
218 | ('unmod', 'line03\n'), | |
|
219 | ('unmod', 'line04\n'), | |
|
220 | ('unmod', 'line05\n'), | |
|
221 | ('unmod', 'line06\n'), | |
|
222 | ('unmod', 'line07\n'), | |
|
216 | ('add', b'line01a Add line after line01\n'), | |
|
217 | ('unmod', b'line02\n'), | |
|
218 | ('unmod', b'line03\n'), | |
|
219 | ('unmod', b'line04\n'), | |
|
220 | ('unmod', b'line05\n'), | |
|
221 | ('unmod', b'line06\n'), | |
|
222 | ('unmod', b'line07\n'), | |
|
223 | 223 | ] |
|
224 | 224 | found_line = diff_processor.find_context( |
|
225 | 225 | 'file.txt', context_of_first_line, offset=3) |
@@ -228,10 +228,10 b' def test_find_context_beginning_of_file(' | |||
|
228 | 228 | |
|
229 | 229 | def test_find_context_end_of_chunk(diff_processor): |
|
230 | 230 | context_of_last_line = [ |
|
231 | ('unmod', 'line10\n'), | |
|
232 | ('unmod', 'line11\n'), | |
|
233 | ('unmod', 'line12\n'), | |
|
234 | ('unmod', 'line13\n'), | |
|
231 | ('unmod', b'line10\n'), | |
|
232 | ('unmod', b'line11\n'), | |
|
233 | ('unmod', b'line12\n'), | |
|
234 | ('unmod', b'line13\n'), | |
|
235 | 235 | ] |
|
236 | 236 | found_line = diff_processor.find_context( |
|
237 | 237 | 'file.txt', context_of_last_line, offset=3) |
@@ -242,7 +242,7 b' def test_find_context_end_of_chunk(diff_' | |||
|
242 | 242 | def diff_processor(request, diff_fixture): |
|
243 | 243 | raw_diff = diffs_store[diff_fixture] |
|
244 | 244 | diff = GitDiff(raw_diff) |
|
245 | processor = diffs.DiffProcessor(diff) | |
|
245 | processor = diffs.DiffProcessor(diff, diff_format='newdiff') | |
|
246 | 246 | processor.prepare() |
|
247 | 247 | return processor |
|
248 | 248 | |
@@ -252,7 +252,7 b' def diff_fixture():' | |||
|
252 | 252 | return 'default.diff' |
|
253 | 253 | |
|
254 | 254 | |
|
255 | diff_default = textwrap.dedent(""" | |
|
255 | diff_default: bytes = safe_bytes(textwrap.dedent(""" | |
|
256 | 256 | diff --git a/file.txt b/file.txt |
|
257 | 257 | index 76e4f2e..6f8738f 100644 |
|
258 | 258 | --- a/file.txt |
@@ -271,10 +271,10 b' diff_default = textwrap.dedent("""' | |||
|
271 | 271 | line11 |
|
272 | 272 | line12 |
|
273 | 273 | line13 |
|
274 | """) | |
|
274 | """)) | |
|
275 | 275 | |
|
276 | 276 | |
|
277 | diff_beginning = textwrap.dedent(""" | |
|
277 | diff_beginning: bytes = safe_bytes(textwrap.dedent(""" | |
|
278 | 278 | diff --git a/file.txt b/file.txt |
|
279 | 279 | index 76e4f2e..47d39f4 100644 |
|
280 | 280 | --- a/file.txt |
@@ -288,10 +288,10 b' diff_beginning = textwrap.dedent("""' | |||
|
288 | 288 | line05 |
|
289 | 289 | line06 |
|
290 | 290 | line07 |
|
291 | """) | |
|
291 | """)) | |
|
292 | 292 | |
|
293 | 293 | |
|
294 | diff_end = textwrap.dedent(""" | |
|
294 | diff_end: bytes = safe_bytes(textwrap.dedent(""" | |
|
295 | 295 | diff --git a/file.txt b/file.txt |
|
296 | 296 | index 76e4f2e..b1304db 100644 |
|
297 | 297 | --- a/file.txt |
@@ -305,10 +305,10 b' diff_end = textwrap.dedent("""' | |||
|
305 | 305 | line38 |
|
306 | 306 | +line38a Add line after line38 |
|
307 | 307 | line39 |
|
308 | """) | |
|
308 | """)) | |
|
309 | 309 | |
|
310 | 310 | |
|
311 | diff_duplicated_change = textwrap.dedent(""" | |
|
311 | diff_duplicated_change: bytes = safe_bytes(textwrap.dedent(""" | |
|
312 | 312 | diff --git a/file.txt b/file.txt |
|
313 | 313 | index 76e4f2e..55c2781 100644 |
|
314 | 314 | --- a/file.txt |
@@ -341,10 +341,10 b' diff_duplicated_change = textwrap.dedent' | |||
|
341 | 341 | line10 |
|
342 | 342 | line11 |
|
343 | 343 | line12 |
|
344 | """) | |
|
344 | """)) | |
|
345 | 345 | |
|
346 | 346 | |
|
347 | diff_single_line = textwrap.dedent(""" | |
|
347 | diff_single_line: bytes = safe_bytes(textwrap.dedent(""" | |
|
348 | 348 | diff --git a/file_b b/file_b |
|
349 | 349 | new file mode 100644 |
|
350 | 350 | index 00000000..915e94ff |
@@ -352,10 +352,10 b' diff_single_line = textwrap.dedent("""' | |||
|
352 | 352 | +++ b/file_b |
|
353 | 353 | @@ -0,0 +1 @@ |
|
354 | 354 | +test_content |
|
355 | """) | |
|
355 | """)) | |
|
356 | 356 | |
|
357 | 357 | |
|
358 | diff_single_line_two_files = textwrap.dedent(""" | |
|
358 | diff_single_line_two_files: bytes = safe_bytes(textwrap.dedent(""" | |
|
359 | 359 | diff --git a/file_b b/file_b |
|
360 | 360 | new file mode 100644 |
|
361 | 361 | index 00000000..915e94ff |
@@ -370,7 +370,7 b' diff_single_line_two_files = textwrap.de' | |||
|
370 | 370 | +++ b/file_c |
|
371 | 371 | @@ -0,0 +1 @@ |
|
372 | 372 | +test_content |
|
373 | """) | |
|
373 | """)) | |
|
374 | 374 | |
|
375 | 375 | |
|
376 | 376 | diffs_store = { |
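
The module-level fixtures above now wrap the dedented diff text in safe_bytes, because the processor and the (kind, line) context tuples it returns are bytes end to end. A hedged sketch of the same fixture shape, with a simplified stand-in for safe_bytes:

import textwrap

def safe_bytes(value, encoding='utf-8'):
    # simplified stand-in for rhodecode.lib.str_utils.safe_bytes
    return value if isinstance(value, bytes) else value.encode(encoding)

diff_example: bytes = safe_bytes(textwrap.dedent("""
    diff --git a/file.txt b/file.txt
    --- a/file.txt
    +++ b/file.txt
    @@ -1,1 +1,2 @@
     line01
    +line01a Add line after line01
"""))

assert isinstance(diff_example, bytes)
assert b'+line01a' in diff_example
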
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -83,23 +82,35 b' def hook_extras(user_regular, repo_stub)' | |||
|
83 | 82 | return extras |
|
84 | 83 | |
|
85 | 84 | |
|
85 | class ExtensionMock(mock.Mock): | |
|
86 | ||
|
87 | def __repr__(self): | |
|
88 | return f'ExtensionMock({self._mock_name})' | |
|
89 | ||
|
90 | @property | |
|
91 | def output(self): | |
|
92 | return 'MOCK' | |
|
93 | ||
|
94 | @property | |
|
95 | def status(self): | |
|
96 | return 0 | |
|
97 | ||
|
98 | ||
|
86 | 99 | @pytest.mark.parametrize('func, extension, event', [ |
|
87 | 100 | (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'), |
|
88 |
(hooks_base.post_push, 'post_pu |
|
|
101 | (hooks_base.post_push, 'post_push_extension', 'RepoPushEvent'), | |
|
89 | 102 | (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'), |
|
90 |
(hooks_base.post_pull, 'post_pu |
|
|
103 | (hooks_base.post_pull, 'post_pull_extension', 'RepoPullEvent'), | |
|
91 | 104 | ]) |
|
92 | 105 | def test_hooks_propagate(func, extension, event, hook_extras): |
|
93 | 106 | """ |
|
94 | 107 | Tests that our hook code propagates to rhodecode extensions and triggers |
|
95 | 108 | the appropriate event. |
|
96 | 109 | """ |
|
97 | class ExtensionMock(mock.Mock): | |
|
98 | @property | |
|
99 | def output(self): | |
|
100 | return 'MOCK' | |
|
101 | 110 | |
|
102 | 111 | extension_mock = ExtensionMock() |
|
112 | extension_mock._mock_name = extension | |
|
113 | ||
|
103 | 114 | events_mock = mock.Mock() |
|
104 | 115 | patches = { |
|
105 | 116 | 'Repository': mock.Mock(), |
@@ -115,15 +126,15 b' def test_hooks_propagate(func, extension' | |||
|
115 | 126 | func(hook_extras) |
|
116 | 127 | |
|
117 | 128 | # Assert that extensions are called and event was fired. |
|
118 | extension_mock.called_once() | |
|
129 | extension_mock.assert_called_once() | |
|
119 | 130 | assert_called_with_mock(events_mock.trigger, event) |
|
120 | 131 | |
|
121 | 132 | |
|
122 | 133 | @pytest.mark.parametrize('func, extension, event', [ |
|
123 | 134 | (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'), |
|
124 |
(hooks_base.post_push, 'post_pu |
|
|
135 | (hooks_base.post_push, 'post_push_extension', 'RepoPushEvent'), | |
|
125 | 136 | (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'), |
|
126 |
(hooks_base.post_pull, 'post_pu |
|
|
137 | (hooks_base.post_pull, 'post_pull_extension', 'RepoPullEvent'), | |
|
127 | 138 | ]) |
|
128 | 139 | def test_hooks_propagates_not_on_shadow(func, extension, event, hook_extras): |
|
129 | 140 | """ |
@@ -131,7 +142,10 b' def test_hooks_propagates_not_on_shadow(' | |||
|
131 | 142 | internal hooks code but not external ones like rhodecode extensions or |
|
132 | 143 | trigger an event. |
|
133 | 144 | """ |
|
134 | extension_mock = mock.Mock() | |
|
145 | ||
|
146 | extension_mock = ExtensionMock() | |
|
147 | extension_mock._mock_name = extension | |
|
148 | ||
|
135 | 149 | events_mock = mock.Mock() |
|
136 | 150 | patches = { |
|
137 | 151 | 'Repository': mock.Mock(), |
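
Two details of the hooks tests are worth spelling out: ExtensionMock gives the mocked extension callback real output/status values, and the old extension_mock.called_once() was replaced with assert_called_once(), since called_once() merely creates a new mock attribute and never fails. A small sketch, assuming nothing beyond the standard library:

from unittest import mock

class ExtensionMock(mock.Mock):
    # fixed values so code under test can read the callback result like a real hook
    @property
    def output(self):
        return 'MOCK'

    @property
    def status(self):
        return 0

ext = ExtensionMock()
ext({'some': 'extras'})
ext.assert_called_once()   # the gotcha: plain .called_once() would never fail
assert ext.status == 0
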
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -18,15 +17,19 b'' | |||
|
18 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 19 | |
|
21 | import json | |
|
22 | 20 | import logging |
|
23 | 21 | import io |
|
24 | 22 | |
|
25 | 23 | import mock |
|
24 | import msgpack | |
|
26 | 25 | import pytest |
|
27 | 26 | |
|
28 | 27 | from rhodecode.lib import hooks_daemon |
|
28 | from rhodecode.lib.str_utils import safe_bytes | |
|
29 | 29 | from rhodecode.tests.utils import assert_message_in_log |
|
30 | from rhodecode.lib.ext_json import json | |
|
31 | ||
|
32 | test_proto = hooks_daemon.HooksHttpHandler.MSGPACK_HOOKS_PROTO | |
|
30 | 33 | |
|
31 | 34 | |
|
32 | 35 | class TestDummyHooksCallbackDaemon(object): |
@@ -73,7 +76,10 b' class TestHooksHttpHandler(object):' | |||
|
73 | 76 | hooks_daemon.Hooks, data['method'], create=True, return_value=1) |
|
74 | 77 | |
|
75 | 78 | with hooks_patcher as hooks_mock: |
|
76 |
|
|
|
79 | handler = hooks_daemon.HooksHttpHandler | |
|
80 | handler.DEFAULT_HOOKS_PROTO = test_proto | |
|
81 | handler.wbufsize = 10240 | |
|
82 | MockServer(handler, request) | |
|
77 | 83 | |
|
78 | 84 | hooks_mock.assert_called_once_with(data['extras']) |
|
79 | 85 | |
@@ -84,33 +90,51 b' class TestHooksHttpHandler(object):' | |||
|
84 | 90 | 'first': 'one', |
|
85 | 91 | 'second': 2 |
|
86 | 92 | } |
|
93 | extras = {} | |
|
94 | ||
|
95 | # patching our _read to return test method and proto used | |
|
87 | 96 | read_patcher = mock.patch.object( |
|
88 | 97 | hooks_daemon.HooksHttpHandler, '_read_request', |
|
89 | return_value=(rpc_method, extras)) |

98 | return_value=(test_proto, rpc_method, extras)) | |
|
99 | ||
|
100 | # patch Hooks instance to return hook_result data on 'test' call | |
|
90 | 101 | hooks_patcher = mock.patch.object( |
|
91 | 102 | hooks_daemon.Hooks, rpc_method, create=True, |
|
92 | 103 | return_value=hook_result) |
|
93 | 104 | |
|
94 | 105 | with read_patcher, hooks_patcher: |
|
95 |
|
|
|
106 | handler = hooks_daemon.HooksHttpHandler | |
|
107 | handler.DEFAULT_HOOKS_PROTO = test_proto | |
|
108 | handler.wbufsize = 10240 | |
|
109 | server = MockServer(handler, request) | |
|
96 | 110 | |
|
97 | expected_result =
|
|
98 | assert server.request.output_stream.buflist[-1] == expected_result | |
|
111 | expected_result = hooks_daemon.HooksHttpHandler.serialize_data(hook_result) | |
|
112 | ||
|
113 | server.request.output_stream.seek(0) | |
|
114 | assert server.request.output_stream.readlines()[-1] == expected_result | |
|
99 | 115 | |
|
100 | 116 | def test_exception_is_returned_in_response(self): |
|
101 | 117 | request = self._generate_post_request({}) |
|
102 | 118 | rpc_method = 'test' |
|
119 | ||
|
103 | 120 | read_patcher = mock.patch.object( |
|
104 | 121 | hooks_daemon.HooksHttpHandler, '_read_request', |
|
105 | return_value=(rpc_method, {})) | |
|
122 | return_value=(test_proto, rpc_method, {})) | |
|
123 | ||
|
106 | 124 | hooks_patcher = mock.patch.object( |
|
107 | 125 | hooks_daemon.Hooks, rpc_method, create=True, |
|
108 | 126 | side_effect=Exception('Test exception')) |
|
109 | 127 | |
|
110 | 128 | with read_patcher, hooks_patcher: |
|
111 |
|
|
|
129 | handler = hooks_daemon.HooksHttpHandler | |
|
130 | handler.DEFAULT_HOOKS_PROTO = test_proto | |
|
131 | handler.wbufsize = 10240 | |
|
132 | server = MockServer(handler, request) | |
|
112 | 133 | |
|
113 |
|
|
|
134 | server.request.output_stream.seek(0) | |
|
135 | data = server.request.output_stream.readlines() | |
|
136 | msgpack_data = b''.join(data[5:]) | |
|
137 | org_exc = hooks_daemon.HooksHttpHandler.deserialize_data(msgpack_data) | |
|
114 | 138 | expected_result = { |
|
115 | 139 | 'exception': 'Exception', |
|
116 | 140 | 'exception_traceback': org_exc['exception_traceback'], |
@@ -125,17 +149,22 b' class TestHooksHttpHandler(object):' | |||
|
125 | 149 | fake_date = '1/Nov/2015 00:00:00' |
|
126 | 150 | date_patcher = mock.patch.object( |
|
127 | 151 | handler, 'log_date_time_string', return_value=fake_date) |
|
152 | ||
|
128 | 153 | with date_patcher, caplog.at_level(logging.DEBUG): |
|
129 | 154 | handler.log_message('Some message %d, %s', 123, 'string') |
|
130 | 155 | |
|
131 | expected_message = "HOOKS: {} - - [{}] Some message 123, string"
|
|
156 | expected_message = f"HOOKS: {ip_port} - - [{fake_date}] Some message 123, string" | |
|
132 | 157 | assert_message_in_log( |
|
133 | 158 | caplog.records, expected_message, |
|
134 | 159 | levelno=logging.DEBUG, module='hooks_daemon') |
|
135 | 160 | |
|
136 | def _generate_post_request(self, data): | |
|
137 | payload = json.dumps(data) | |
|
138 | return 'POST / HTTP/1.0\nContent-Length: {}\n\n{}'.format( | |
|
161 | def _generate_post_request(self, data, proto=test_proto): | |
|
162 | if proto == hooks_daemon.HooksHttpHandler.MSGPACK_HOOKS_PROTO: | |
|
163 | payload = msgpack.packb(data) | |
|
164 | else: | |
|
165 | payload = json.dumps(data) | |
|
166 | ||
|
167 | return b'POST / HTTP/1.0\nContent-Length: %d\n\n%b' % ( | |
|
139 | 168 | len(payload), payload) |
|
140 | 169 | |
|
141 | 170 | |
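
For reference, a minimal self-contained sketch of the dual-protocol payload building performed by the _generate_post_request helper above; msgpack.packb, json.dumps and the bare HTTP/1.0 preamble come from the diff, while the helper name and its defaults here are illustrative only:

    import json
    import msgpack

    def build_post_request(data, use_msgpack=True):
        # binary msgpack payload by default, JSON as the fallback protocol
        if use_msgpack:
            payload = msgpack.packb(data)
        else:
            payload = json.dumps(data).encode('utf-8')
        # bare HTTP/1.0 request: request line, Content-Length header, blank line, body
        return b'POST / HTTP/1.0\nContent-Length: %d\n\n%b' % (len(payload), payload)

    print(build_post_request({'method': 'test', 'extras': {}}))
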
@@ -190,9 +219,9 b' class TestHttpHooksCallbackDaemon(object' | |||
|
190 | 219 | assert daemon._daemon == tcp_server |
|
191 | 220 | |
|
192 | 221 | _, port = tcp_server.server_address |
|
193 | expected_uri = '{}:{}'.format('127.0.0.1', port) | |
|
194 | msg =
|
|
195 | '
|
|
222 | ||
|
223 | msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \ | |
|
224 | f"hook object: <class 'rhodecode.lib.hooks_daemon.HooksHttpHandler'>" | |
|
196 | 225 | assert_message_in_log( |
|
197 | 226 | caplog.records, msg, levelno=logging.DEBUG, module='hooks_daemon') |
|
198 | 227 | |
@@ -205,8 +234,8 b' class TestHttpHooksCallbackDaemon(object' | |||
|
205 | 234 | expected_uri = '{}:{}'.format('127.0.0.1', port) |
|
206 | 235 | assert daemon.hooks_uri == expected_uri |
|
207 | 236 | |
|
208 | msg =
|
|
209 | '
|
|
237 | msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \ | |
|
238 | f"hook object: <class 'rhodecode.lib.hooks_daemon.HooksHttpHandler'>" | |
|
210 | 239 | assert_message_in_log( |
|
211 | 240 | caplog.records, msg, |
|
212 | 241 | levelno=logging.DEBUG, module='hooks_daemon') |
@@ -318,16 +347,19 b' class TestPrepareHooksDaemon(object):' | |||
|
318 | 347 | |
|
319 | 348 | |
|
320 | 349 | class MockRequest(object): |
|
350 | ||
|
321 | 351 | def __init__(self, request): |
|
322 | 352 | self.request = request |
|
323 | self.input_stream = io.
|
|
324 | self.output_stream = io.StringIO() | |
|
353 | self.input_stream = io.BytesIO(safe_bytes(self.request)) | |
|
354 | self.output_stream = io.BytesIO()  # make it un-closable for testing investigation | |
|
355 | self.output_stream.close = lambda: None | |
|
325 | 356 | |
|
326 | 357 | def makefile(self, mode, *args, **kwargs): |
|
327 | 358 | return self.output_stream if mode == 'wb' else self.input_stream |
|
328 | 359 | |
|
329 | 360 | |
|
330 | 361 | class MockServer(object): |
|
362 | ||
|
331 | 363 | def __init__(self, handler_cls, request): |
|
332 | 364 | ip_port = ('0.0.0.0', 8888) |
|
333 | 365 | self.request = MockRequest(request) |
@@ -339,4 +371,5 b' class MockServer(object):' | |||
|
339 | 371 | def tcp_server(): |
|
340 | 372 | server = mock.Mock() |
|
341 | 373 | server.server_address = ('127.0.0.1', 8881) |
|
374 | server.wbufsize = 1024 | |
|
342 | 375 | return server |
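
A rough sketch of the in-memory request pattern that MockRequest/MockServer above rely on, assuming a handler in the style of http.server.BaseHTTPRequestHandler; _FakeSocket and EchoHandler are made-up names for illustration, and wbufsize is raised for the same reason as in the diff (so the handler writes through makefile('wb') instead of a real socket writer):

    import io
    from http.server import BaseHTTPRequestHandler

    class _FakeSocket(object):
        # stands in for a socket: the handler reads the raw request from one
        # in-memory buffer and writes its response into another
        def __init__(self, raw_request):
            self._in = io.BytesIO(raw_request)
            self._out = io.BytesIO()
            self._out.close = lambda: None  # keep the response readable afterwards

        def makefile(self, mode, *args, **kwargs):
            return self._out if mode == 'wb' else self._in

    class EchoHandler(BaseHTTPRequestHandler):
        wbufsize = 10240  # buffered writes, as in the test handler setup above

        def do_POST(self):
            length = int(self.headers.get('Content-Length', 0))
            body = self.rfile.read(length)
            self.send_response(200)
            self.end_headers()
            self.wfile.write(body)

    sock = _FakeSocket(b'POST / HTTP/1.0\r\nContent-Length: 2\r\n\r\nhi')
    EchoHandler(sock, ('127.0.0.1', 0), None)  # handling happens inside __init__
    sock._out.seek(0)
    print(sock._out.read())  # full HTTP response, ending with the echoed b'hi'
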
@@ -1,4 +1,4 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
@@ -81,7 +81,7 b' def test_mutation_types_with_nullable(en' | |||
|
81 | 81 | |
|
82 | 82 | obj_nulls = DummyModel(name='nulls') |
|
83 | 83 | obj_stuff = DummyModel( |
|
84 | name='stuff', json_list=[1,2,3], json_dict={'a': 5}, json_obj=9) | |
|
84 | name='stuff', json_list=[1, 2, 3], json_dict={'a': 5}, json_obj=9) | |
|
85 | 85 | |
|
86 | 86 | session.add(obj_nulls) |
|
87 | 87 | session.add(obj_stuff) |
@@ -90,7 +90,7 b' def test_mutation_types_with_nullable(en' | |||
|
90 | 90 | |
|
91 | 91 | assert engine.execute( |
|
92 | 92 | "select * from some_table where name = 'nulls';").first() == ( |
|
93 | (
|
|
93 | ('nulls', None, None, None) | |
|
94 | 94 | ) |
|
95 | 95 | ret_nulls = session.query(DummyModel).get('nulls') |
|
96 | 96 | assert ret_nulls.json_list == [] |
@@ -99,7 +99,7 b' def test_mutation_types_with_nullable(en' | |||
|
99 | 99 | |
|
100 | 100 | assert engine.execute( |
|
101 | 101 | "select * from some_table where name = 'stuff';").first() == ( |
|
102 | (
|
|
102 | ('stuff', '[1,2,3]', '{"a":5}', '9') | |
|
103 | 103 | ) |
|
104 | 104 | ret_stuff = session.query(DummyModel).get('stuff') |
|
105 | 105 | assert ret_stuff.json_list == [1, 2, 3] |
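
The raw column values asserted above are just the JSON encoding of the mutable values; assuming the column type serializes with compact separators, the round trip looks like this:

    import json

    # '[1,2,3]' and '{"a":5}' are what the assertions above expect on disk
    assert json.dumps([1, 2, 3], separators=(',', ':')) == '[1,2,3]'
    assert json.dumps({'a': 5}, separators=(',', ':')) == '{"a":5}'
    assert json.loads('[1,2,3]') == [1, 2, 3]
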
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -142,7 +141,8 b' def test_age(age_args, expected, kw, bas' | |||
|
142 | 141 | from rhodecode.lib.utils2 import age |
|
143 | 142 | from dateutil import relativedelta |
|
144 | 143 | n = datetime.datetime(year=2012, month=5, day=17) |
|
145 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) | |
|
144 | def delt(*args, **kwargs): | |
|
145 | return relativedelta.relativedelta(*args, **kwargs) | |
|
146 | 146 | |
|
147 | 147 | def translate(elem): |
|
148 | 148 | return elem.interpolate() |
@@ -174,7 +174,8 b' def test_age_in_future(age_args, expecte' | |||
|
174 | 174 | from rhodecode.lib.utils2 import age |
|
175 | 175 | from dateutil import relativedelta |
|
176 | 176 | n = datetime.datetime(year=2012, month=5, day=17) |
|
177 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) | |
|
177 | def delt(*args, **kwargs): | |
|
178 | return relativedelta.relativedelta(*args, **kwargs) | |
|
178 | 179 | |
|
179 | 180 | def translate(elem): |
|
180 | 181 | return elem.interpolate() |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -69,7 +68,7 b' def test_markdown_inline_html():' | |||
|
69 | 68 | xss_md = '\n'.join(['> <a name="n"', |
|
70 | 69 | '> onload="javascript:alert()" href="https://rhodecode.com">link</a>']) |
|
71 | 70 | rendered_html = MarkupRenderer.markdown(xss_md) |
|
72 | assert '<a href="https://rhodecode.com
|
|
71 | assert '<a name="n" href="https://rhodecode.com">link</a>' in rendered_html | |
|
73 | 72 | |
|
74 | 73 | |
|
75 | 74 | def test_markdown_bleach_renders_correct(): |
@@ -301,7 +300,10 b' console.log(s);' | |||
|
301 | 300 | |
|
302 | 301 | ```python |
|
303 | 302 | s = "Python syntax highlighting" |
|
304 | print
|
|
303 | print(s) | |
|
304 | ||
|
305 | class Orm(object): | |
|
306 | pass | |
|
305 | 307 | ``` |
|
306 | 308 | |
|
307 | 309 | ``` |
@@ -317,7 +319,10 b' alert(s);' | |||
|
317 | 319 | |
|
318 | 320 | ```python |
|
319 | 321 | s = "Python syntax highlighting" |
|
320 | print
|
|
322 | print(s) | |
|
323 | ||
|
324 | class Orm(object): | |
|
325 | pass | |
|
321 | 326 | ``` |
|
322 | 327 | |
|
323 | 328 | ``` |
@@ -1,4 +1,4 b'' | |||
|
1 | import py.test | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | from rhodecode.lib.system_info import get_system_info |
|
4 | 4 | |
@@ -6,10 +6,3 b' from rhodecode.lib.system_info import ge' | |||
|
6 | 6 | def test_system_info(app): |
|
7 | 7 | info = get_system_info({}) |
|
8 | 8 | assert info['load']['value']['15_min'] != 'NOT AVAILABLE' |
|
9 | ||
|
10 | ||
|
11 | def test_system_info_without_psutil(monkeypatch, app): | |
|
12 | import rhodecode.lib.system_info | |
|
13 | monkeypatch.setattr(rhodecode.lib.system_info, 'psutil', None) | |
|
14 | info = get_system_info({}) | |
|
15 | assert info['load']['value']['15_min'] == 'NOT AVAILABLE' |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -18,7 +17,6 b'' | |||
|
18 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 19 | |
|
21 | import json | |
|
22 | 20 | import multiprocessing |
|
23 | 21 | import os |
|
24 | 22 | |
@@ -28,16 +26,16 b' import pytest' | |||
|
28 | 26 | |
|
29 | 27 | from rhodecode.lib import caching_query |
|
30 | 28 | from rhodecode.lib import utils |
|
31 | from rhodecode.lib.utils
|
|
29 | from rhodecode.lib.str_utils import safe_bytes | |
|
32 | 30 | from rhodecode.model import settings |
|
33 | 31 | from rhodecode.model import db |
|
34 | 32 | from rhodecode.model import meta |
|
35 | 33 | from rhodecode.model.repo import RepoModel |
|
36 | 34 | from rhodecode.model.repo_group import RepoGroupModel |
|
37 | from rhodecode.model.scm import ScmModel | |
|
38 | 35 | from rhodecode.model.settings import UiSetting, SettingsModel |
|
39 | 36 | from rhodecode.tests.fixture import Fixture |
|
40 | ||
|
37 | from rhodecode_tools.lib.hash_utils import md5_safe | |
|
38 | from rhodecode.lib.ext_json import json | |
|
41 | 39 | |
|
42 | 40 | fixture = Fixture() |
|
43 | 41 | |
@@ -67,11 +65,17 b' def disable_hooks(request, hooks):' | |||
|
67 | 65 | # Invalidate cache |
|
68 | 66 | ui_settings = session.query(db.RhodeCodeUi).options( |
|
69 | 67 | caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings')) |
|
70 | ui_settings.invalidate() | |
|
68 | ||
|
69 | meta.cache.invalidate( | |
|
70 | ui_settings, {}, | |
|
71 | caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings')) | |
|
71 | 72 | |
|
72 | 73 | ui_settings = session.query(db.RhodeCodeUi).options( |
|
73 | 74 | caching_query.FromCache('sql_cache_short', 'get_hook_settings')) |
|
74 | ui_settings.invalidate() | |
|
75 | ||
|
76 | meta.cache.invalidate( | |
|
77 | ui_settings, {}, | |
|
78 | caching_query.FromCache('sql_cache_short', 'get_hook_settings')) | |
|
75 | 79 | |
|
76 | 80 | @request.addfinalizer |
|
77 | 81 | def rollback(): |
@@ -110,7 +114,7 b' def test_make_db_config_hg_hooks(baseapp' | |||
|
110 | 114 | config = utils.make_db_config() |
|
111 | 115 | hooks = extract_hooks(config) |
|
112 | 116 | |
|
113 | assert set(hooks.
|
|
117 | assert set(hooks.keys()).intersection(HG_HOOKS) == set(expected_hooks) | |
|
114 | 118 | |
|
115 | 119 | |
|
116 | 120 | @pytest.mark.parametrize('disabled_hooks,expected_hooks', [ |
@@ -180,11 +184,19 b' def _stub_git_repo(repo_path):' | |||
|
180 | 184 | repo_path.ensure('.git', dir=True) |
|
181 | 185 | |
|
182 | 186 | |
|
183 | @pytest.mark.parametrize('str_class', [str, bytes], ids=['str', 'bytes']) | |
|
184 | def test_get_dirpaths_returns_all_paths(tmpdir, str_class): | |
|
187 | def test_get_dirpaths_returns_all_paths_on_str(tmpdir): | |
|
185 | 188 | tmpdir.ensure('test-file') |
|
186 | dirpaths = utils._get_dirpaths(str_class(tmpdir)) | |
|
187 | assert dirpaths == ['test-file'] | |
|
189 | tmpdir.ensure('test-file-1') | |
|
190 | tmp_path = str(tmpdir) | |
|
191 | dirpaths = utils.get_dirpaths(tmp_path) | |
|
192 | assert list(sorted(dirpaths)) == ['test-file', 'test-file-1'] | |
|
193 | ||
|
194 | ||
|
195 | def test_get_dirpaths_returns_all_paths_on_bytes(tmpdir): | |
|
196 | tmpdir.ensure('test-file-bytes') | |
|
197 | tmp_path = str(tmpdir) | |
|
198 | dirpaths = utils.get_dirpaths(safe_bytes(tmp_path)) | |
|
199 | assert list(sorted(dirpaths)) == [b'test-file-bytes'] | |
|
188 | 200 | |
|
189 | 201 | |
|
190 | 202 | def test_get_dirpaths_returns_all_paths_bytes( |
@@ -192,7 +204,7 b' def test_get_dirpaths_returns_all_paths_' | |||
|
192 | 204 | if platform_encodes_filenames: |
|
193 | 205 | pytest.skip("This platform seems to encode filenames.") |
|
194 | 206 | tmpdir.ensure('repo-a-umlaut-\xe4') |
|
195 | dirpaths = utils.
|
|
207 | dirpaths = utils.get_dirpaths(str(tmpdir)) | |
|
196 | 208 | assert dirpaths == ['repo-a-umlaut-\xe4'] |
|
197 | 209 | |
|
198 | 210 | |
@@ -201,8 +213,8 b' def test_get_dirpaths_skips_paths_it_can' | |||
|
201 | 213 | if platform_encodes_filenames: |
|
202 | 214 | pytest.skip("This platform seems to encode filenames.") |
|
203 | 215 | path_with_latin1 = 'repo-a-umlaut-\xe4' |
|
204 | tmpdir.ensure(path_with_latin1) | |
|
205 | dirpaths = utils.
|
|
216 | tmp_path = str(tmpdir.ensure(path_with_latin1)) | |
|
217 | dirpaths = utils.get_dirpaths(tmp_path) | |
|
206 | 218 | assert dirpaths == [] |
|
207 | 219 | |
|
208 | 220 | |
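
The str/bytes split in the two tests above mirrors standard os.listdir() semantics (get_dirpaths is presumably a thin wrapper over it): a str path yields str entries, a bytes path yields bytes entries. A quick, stand-alone illustration:

    import os
    import tempfile

    tmp = tempfile.mkdtemp()
    open(os.path.join(tmp, 'test-file'), 'w').close()

    print(os.listdir(tmp))               # ['test-file']   -- str path, str entries
    print(os.listdir(os.fsencode(tmp)))  # [b'test-file']  -- bytes path, bytes entries
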
@@ -266,7 +278,8 b' def test_repo2db_mapper_installs_hooks_f' | |||
|
266 | 278 | |
|
267 | 279 | |
|
268 | 280 | class TestPasswordChanged(object): |
|
269 | def setup(self): | |
|
281 | ||
|
282 | def setup_method(self): | |
|
270 | 283 | self.session = { |
|
271 | 284 | 'rhodecode_user': { |
|
272 | 285 | 'password': '0cc175b9c0f1b6a831c399e269772661' |
@@ -282,7 +295,7 b' class TestPasswordChanged(object):' | |||
|
282 | 295 | assert result is False |
|
283 | 296 | |
|
284 | 297 | def test_returns_false_if_password_was_not_changed(self): |
|
285 | self.session['rhodecode_user']['password'] = md5( | |
|
298 | self.session['rhodecode_user']['password'] = md5_safe( | |
|
286 | 299 | self.auth_user.password) |
|
287 | 300 | result = utils.password_changed(self.auth_user, self.session) |
|
288 | 301 | assert result is False |
@@ -406,7 +419,7 b' class TestConfigDataFromDb(object):' | |||
|
406 | 419 | |
|
407 | 420 | class TestIsDirWritable(object): |
|
408 | 421 | def test_returns_false_when_not_writable(self): |
|
409 | with mock.patch('
|
|
422 | with mock.patch('builtins.open', side_effect=OSError): | |
|
410 | 423 | assert not utils._is_dir_writable('/stub-path') |
|
411 | 424 | |
|
412 | 425 | def test_returns_true_when_writable(self, tmpdir): |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -18,10 +17,12 b'' | |||
|
18 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 19 | |
|
21 | import json | |
|
20 | ||
|
22 | 21 | |
|
23 | 22 | import requests |
|
24 | 23 | |
|
24 | from rhodecode.lib.ext_json import sjson as json | |
|
25 | ||
|
25 | 26 | |
|
26 | 27 | class ApiError(Exception): |
|
27 | 28 | """Error when accessing the API.""" |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -35,7 +34,9 b' import subprocess' | |||
|
35 | 34 | import sys |
|
36 | 35 | import time |
|
37 | 36 | import traceback |
|
38 | import urllib.request
|
|
37 | import urllib.request | |
|
38 | import urllib.parse | |
|
39 | import urllib.error | |
|
39 | 40 | |
|
40 | 41 | PROFILING_INTERVAL = 5 |
|
41 | 42 | RC_WEBSITE = "http://localhost:5001/" |
@@ -1,4 +1,4 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
@@ -60,9 +60,9 b' class TestGistSchema(object):' | |||
|
60 | 60 | lifetime_options=[1, 2, 3] |
|
61 | 61 | ) |
|
62 | 62 | nodes = [{ |
|
63 | 'filename': 'foobar', | |
|
64 | 'filename_org': 'foobar', | |
|
65 | 'content': 'content', | |
|
63 | 'filename': b'foobar', | |
|
64 | 'filename_org': b'foobar', | |
|
65 | 'content': b'content', | |
|
66 | 66 | 'mimetype': 'xx' |
|
67 | 67 | }] |
|
68 | 68 | schema_data = schema.deserialize(dict( |
@@ -80,9 +80,9 b' class TestGistSchema(object):' | |||
|
80 | 80 | convert_nodes=True |
|
81 | 81 | ) |
|
82 | 82 | nodes = [{ |
|
83 | 'filename': 'foobar', | |
|
83 | 'filename': b'foobar', | |
|
84 | 84 | 'filename_org': None, |
|
85 | 'content': 'content', | |
|
85 | 'content': b'content', | |
|
86 | 86 | 'mimetype': 'xx' |
|
87 | 87 | }] |
|
88 | 88 | schema_data = schema.deserialize(dict( |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -116,7 +115,8 b' def assert_inline_comments_order(query):' | |||
|
116 | 115 | """ |
|
117 | 116 | Sorting by ID will make sure that the latest comments are at the bottom. |
|
118 | 117 | """ |
|
119 | order_by = query._order_by | |
|
118 | ||
|
119 | order_by = query._order_by_clauses | |
|
120 | 120 | assert order_by |
|
121 | 121 | assert len(order_by) == 1 |
|
122 | 122 | assert str(order_by[0]) == 'changeset_comments.comment_id ASC' |
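
The _order_by -> _order_by_clauses change tracks a rename of a private SQLAlchemy Query attribute. A version-independent alternative is to compile the statement and check the rendered SQL; the model below is a hypothetical stand-in that only mirrors the table and column names used in the assertion above:

    from sqlalchemy import Column, Integer, asc, select
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class ChangesetComment(Base):
        __tablename__ = 'changeset_comments'
        comment_id = Column(Integer, primary_key=True)

    stmt = select(ChangesetComment).order_by(asc(ChangesetComment.comment_id))
    # the compiled SQL exposes the ORDER BY clause without touching private attributes
    assert 'ORDER BY changeset_comments.comment_id ASC' in str(stmt)
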
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -33,15 +32,14 b' class TestModelReprImplementation(object' | |||
|
33 | 32 | |
|
34 | 33 | def test_repr_without_id(self, DBModel, klass, id_attr): |
|
35 | 34 | instance = DBModel() |
|
36 | expected_repr = '<DB:
|
|
35 | expected_repr = f'<DB:{klass} at {id(instance)}>' | |
|
37 | 36 | assert repr(instance) == expected_repr |
|
38 | 37 | |
|
39 | 38 | def test_repr_with_id(self, DBModel, klass, id_attr): |
|
40 | 39 | test_id = random.randint(1, 10) |
|
41 | 40 | instance = DBModel() |
|
42 | 41 | setattr(instance, id_attr, test_id) |
|
43 | expected_repr =
|
|
44 | '<DB:%s #%d>' % (klass, test_id)) | |
|
42 | expected_repr = f'<DB:{klass} #{test_id}>' | |
|
45 | 43 | assert repr(instance) == expected_repr |
|
46 | 44 | |
|
47 | 45 |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -35,8 +34,8 b' class TestGistModel(object):' | |||
|
35 | 34 | return_value=repo.scm_instance()) |
|
36 | 35 | with create_repo_patch as create_repo_mock: |
|
37 | 36 | gist_mapping = { |
|
38 | 'filename.txt': { | |
|
39 | 'content': 'Test content' | |
|
37 | b'filename.txt': { | |
|
38 | 'content': b'Test content' | |
|
40 | 39 | } |
|
41 | 40 | } |
|
42 | 41 | model.create('Test description', owner, gist_mapping) |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -41,20 +40,20 b' class TestNotifications(object):' | |||
|
41 | 40 | def create_users(self, request, app): |
|
42 | 41 | Session.remove() |
|
43 | 42 | self.u1 = UserModel().create_or_update( |
|
44 | username=
|
|
45 | email=
|
|
43 | username='u1', password='qweqwe', | |
|
44 | email='u1@rhodecode.org', firstname='u1', lastname='u1') | |
|
46 | 45 | Session().commit() |
|
47 | 46 | self.u1 = self.u1.user_id |
|
48 | 47 | |
|
49 | 48 | self.u2 = UserModel().create_or_update( |
|
50 | username=
|
|
51 | email=
|
|
49 | username='u2', password='qweqwe', | |
|
50 | email='u2@rhodecode.org', firstname='u2', lastname='u2') | |
|
52 | 51 | Session().commit() |
|
53 | 52 | self.u2 = self.u2.user_id |
|
54 | 53 | |
|
55 | 54 | self.u3 = UserModel().create_or_update( |
|
56 | username=
|
|
57 | email=
|
|
55 | username='u3', password='qweqwe', | |
|
56 | email='u3@rhodecode.org', firstname='u3', lastname='u3') | |
|
58 | 57 | Session().commit() |
|
59 | 58 | self.u3 = self.u3.user_id |
|
60 | 59 | self.destroy_users.add('u1') |
@@ -73,8 +72,8 b' class TestNotifications(object):' | |||
|
73 | 72 | def test_create_notification(self): |
|
74 | 73 | usrs = [self.u1, self.u2] |
|
75 | 74 | notification = NotificationModel().create( |
|
76 | created_by=self.u1, notification_subject=
|
|
77 | notification_body=
|
|
75 | created_by=self.u1, notification_subject='subj', | |
|
76 | notification_body='hi there', recipients=usrs) | |
|
78 | 77 | Session().commit() |
|
79 | 78 | u1 = User.get(self.u1) |
|
80 | 79 | u2 = User.get(self.u2) |
@@ -93,32 +92,31 b' class TestNotifications(object):' | |||
|
93 | 92 | def test_create_notification_fails_for_invalid_recipients(self): |
|
94 | 93 | with pytest.raises(Exception): |
|
95 | 94 | NotificationModel().create( |
|
96 | created_by=self.u1, notification_subject=
|
|
97 | notification_body=
|
|
95 | created_by=self.u1, notification_subject='subj', | |
|
96 | notification_body='hi there', recipients=['bad_user_id']) | |
|
98 | 97 | |
|
99 | 98 | with pytest.raises(Exception): |
|
100 | 99 | NotificationModel().create( |
|
101 | created_by=self.u1, notification_subject=
|
|
102 | notification_body=
|
|
100 | created_by=self.u1, notification_subject='subj', | |
|
101 | notification_body='hi there', recipients=[]) | |
|
103 | 102 | |
|
104 | 103 | def test_user_notifications(self): |
|
105 | 104 | notification1 = NotificationModel().create( |
|
106 | created_by=self.u1, notification_subject=
|
|
107 | notification_body=
|
|
105 | created_by=self.u1, notification_subject='subj', | |
|
106 | notification_body='hi there1', recipients=[self.u3]) | |
|
108 | 107 | Session().commit() |
|
109 | 108 | notification2 = NotificationModel().create( |
|
110 | created_by=self.u1, notification_subject=
|
|
111 | notification_body=
|
|
109 | created_by=self.u1, notification_subject='subj', | |
|
110 | notification_body='hi there2', recipients=[self.u3]) | |
|
112 | 111 | Session().commit() |
|
113 | 112 | u3 = Session().query(User).get(self.u3) |
|
114 | 113 | |
|
115 | assert
|
|
116 | sorted([notification2, notification1]) | |
|
114 | assert [x.notification for x in u3.notifications] == [notification2, notification1] | |
|
117 | 115 | |
|
118 | 116 | def test_delete_notifications(self): |
|
119 | 117 | notification = NotificationModel().create( |
|
120 | created_by=self.u1, notification_subject=
|
|
121 | notification_body=
|
|
118 | created_by=self.u1, notification_subject='title', | |
|
119 | notification_body='hi there3', | |
|
122 | 120 | recipients=[self.u3, self.u1, self.u2]) |
|
123 | 121 | Session().commit() |
|
124 | 122 | notifications = Notification.query().all() |
@@ -136,8 +134,8 b' class TestNotifications(object):' | |||
|
136 | 134 | |
|
137 | 135 | def test_delete_association(self): |
|
138 | 136 | notification = NotificationModel().create( |
|
139 | created_by=self.u1, notification_subject=
|
|
140 | notification_body=
|
|
137 | created_by=self.u1, notification_subject='title', | |
|
138 | notification_body='hi there3', | |
|
141 | 139 | recipients=[self.u3, self.u1, self.u2]) |
|
142 | 140 | Session().commit() |
|
143 | 141 | |
@@ -180,8 +178,8 b' class TestNotifications(object):' | |||
|
180 | 178 | |
|
181 | 179 | def test_notification_counter(self): |
|
182 | 180 | NotificationModel().create( |
|
183 | created_by=self.u1, notification_subject=
|
|
184 | notification_body=
|
|
181 | created_by=self.u1, notification_subject='title', | |
|
182 | notification_body='hi there_delete', recipients=[self.u3, self.u1]) | |
|
185 | 183 | Session().commit() |
|
186 | 184 | |
|
187 | 185 | # creator has it's own notification marked as read |
@@ -190,8 +188,8 b' class TestNotifications(object):' | |||
|
190 | 188 | assert NotificationModel().get_unread_cnt_for_user(self.u3) == 1 |
|
191 | 189 | |
|
192 | 190 | NotificationModel().create( |
|
193 | created_by=self.u1, notification_subject=
|
|
194 | notification_body=
|
|
191 | created_by=self.u1, notification_subject='title', | |
|
192 | notification_body='hi there3', | |
|
195 | 193 | recipients=[self.u3, self.u1, self.u2]) |
|
196 | 194 | Session().commit() |
|
197 | 195 | # creator has it's own notification marked as read |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -23,7 +22,6 b' import pytest' | |||
|
23 | 22 | import textwrap |
|
24 | 23 | |
|
25 | 24 | import rhodecode |
|
26 | from rhodecode.lib.utils2 import safe_unicode | |
|
27 | 25 | from rhodecode.lib.vcs.backends import get_backend |
|
28 | 26 | from rhodecode.lib.vcs.backends.base import ( |
|
29 | 27 | MergeResponse, MergeFailureReason, Reference) |
@@ -34,7 +32,7 b' from rhodecode.model.db import PullReque' | |||
|
34 | 32 | from rhodecode.model.pull_request import PullRequestModel |
|
35 | 33 | from rhodecode.model.user import UserModel |
|
36 | 34 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN |
|
37 | ||
|
35 | from rhodecode.lib.str_utils import safe_str | |
|
38 | 36 | |
|
39 | 37 | pytestmark = [ |
|
40 | 38 | pytest.mark.backends("git", "hg"), |
@@ -299,10 +297,10 b' class TestPullRequestModel(object):' | |||
|
299 | 297 | u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}' |
|
300 | 298 | u'\n\n {pr_title}'.format( |
|
301 | 299 | pr_id=pull_request.pull_request_id, |
|
302 | source_repo=safe_
|
|
300 | source_repo=safe_str( | |
|
303 | 301 | pull_request.source_repo.scm_instance().name), |
|
304 | 302 | source_ref_name=pull_request.source_ref_parts.name, |
|
305 | pr_title=safe_
|
|
303 | pr_title=safe_str(pull_request.title) | |
|
306 | 304 | ) |
|
307 | 305 | ) |
|
308 | 306 | self.merge_mock.assert_called_with( |
@@ -343,10 +341,10 b' class TestPullRequestModel(object):' | |||
|
343 | 341 | u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}' |
|
344 | 342 | u'\n\n {pr_title}'.format( |
|
345 | 343 | pr_id=pull_request.pull_request_id, |
|
346 | source_repo=safe_
|
|
344 | source_repo=safe_str( | |
|
347 | 345 | pull_request.source_repo.scm_instance().name), |
|
348 | 346 | source_ref_name=pull_request.source_ref_parts.name, |
|
349 | pr_title=safe_
|
|
347 | pr_title=safe_str(pull_request.title) | |
|
350 | 348 | ) |
|
351 | 349 | ) |
|
352 | 350 | self.merge_mock.assert_called_with( |
@@ -382,10 +380,10 b' class TestPullRequestModel(object):' | |||
|
382 | 380 | u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}' |
|
383 | 381 | u'\n\n {pr_title}'.format( |
|
384 | 382 | pr_id=pull_request.pull_request_id, |
|
385 | source_repo=safe_
|
|
383 | source_repo=safe_str( | |
|
386 | 384 | pull_request.source_repo.scm_instance().name), |
|
387 | 385 | source_ref_name=pull_request.source_ref_parts.name, |
|
388 | pr_title=safe_
|
|
386 | pr_title=safe_str(pull_request.title) | |
|
389 | 387 | ) |
|
390 | 388 | ) |
|
391 | 389 | self.merge_mock.assert_called_with( |
@@ -423,7 +421,7 b' class TestPullRequestModel(object):' | |||
|
423 | 421 | diff = PullRequestModel()._get_diff_from_pr_or_version( |
|
424 | 422 | source_repo, source_ref_id, target_ref_id, |
|
425 | 423 | hide_whitespace_changes=False, diff_context=6) |
|
426 | assert 'file_1' in diff.raw | |
|
424 | assert b'file_1' in diff.raw.tobytes() | |
|
427 | 425 | |
|
428 | 426 | def test_generate_title_returns_unicode(self): |
|
429 | 427 | title = PullRequestModel().generate_pullrequest_title( |
@@ -431,7 +429,7 b' class TestPullRequestModel(object):' | |||
|
431 | 429 | source_ref='source-ref-dummy', |
|
432 | 430 | target='target-dummy', |
|
433 | 431 | ) |
|
434 | assert type(title) ==
|
|
432 | assert type(title) == str | |
|
435 | 433 | |
|
436 | 434 | @pytest.mark.parametrize('title, has_wip', [ |
|
437 | 435 | ('hello', False), |
@@ -607,8 +605,8 b' class TestUpdateCommentHandling(object):' | |||
|
607 | 605 | def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util): |
|
608 | 606 | commits = [ |
|
609 | 607 | {'message': 'a'}, |
|
610 | {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]}, | |
|
611 | {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]}, | |
|
608 | {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]}, | |
|
609 | {'message': 'c', 'added': [FileNode(b'file_c', b'test_content\n')]}, | |
|
612 | 610 | ] |
|
613 | 611 | pull_request = pr_util.create_pull_request( |
|
614 | 612 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
@@ -618,13 +616,12 b' class TestUpdateCommentHandling(object):' | |||
|
618 | 616 | assert_inline_comments(pull_request, visible=1, outdated=0) |
|
619 | 617 | |
|
620 | 618 | def test_comment_stays_unflagged_on_change_above(self, pr_util): |
|
621 | original_content = ''.join( | |
|
622 | ['line {}\n'.format(x) for x in range(1, 11)]) | |
|
623 | updated_content = 'new_line_at_top\n' + original_content | |
|
619 | original_content = b''.join((b'line %d\n' % x for x in range(1, 11))) | |
|
620 | updated_content = b'new_line_at_top\n' + original_content | |
|
624 | 621 | commits = [ |
|
625 | 622 | {'message': 'a'}, |
|
626 | {'message': 'b', 'added': [FileNode('file_b', original_content)]}, | |
|
627 | {'message': 'c', 'changed': [FileNode('file_b', updated_content)]}, | |
|
623 | {'message': 'b', 'added': [FileNode(b'file_b', original_content)]}, | |
|
624 | {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]}, | |
|
628 | 625 | ] |
|
629 | 626 | pull_request = pr_util.create_pull_request( |
|
630 | 627 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
@@ -638,12 +635,12 b' class TestUpdateCommentHandling(object):' | |||
|
638 | 635 | assert comment.line_no == u'n9' |
|
639 | 636 | |
|
640 | 637 | def test_comment_stays_unflagged_on_change_below(self, pr_util): |
|
641 | original_content = ''.join(['line
|
|
642 | updated_content = original_content + 'new_line_at_end\n' | |
|
638 | original_content = b''.join([b'line %d\n' % x for x in range(10)]) | |
|
639 | updated_content = original_content + b'new_line_at_end\n' | |
|
643 | 640 | commits = [ |
|
644 | 641 | {'message': 'a'}, |
|
645 | {'message': 'b', 'added': [FileNode('file_b', original_content)]}, | |
|
646 | {'message': 'c', 'changed': [FileNode('file_b', updated_content)]}, | |
|
642 | {'message': 'b', 'added': [FileNode(b'file_b', original_content)]}, | |
|
643 | {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]}, | |
|
647 | 644 | ] |
|
648 | 645 | pull_request = pr_util.create_pull_request( |
|
649 | 646 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
@@ -654,17 +651,17 b' class TestUpdateCommentHandling(object):' | |||
|
654 | 651 | |
|
655 | 652 | @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9']) |
|
656 | 653 | def test_comment_flagged_on_change_around_context(self, pr_util, line_no): |
|
657 | base_lines = ['line
|
|
654 | base_lines = [b'line %d\n' % x for x in range(1, 13)] | |
|
658 | 655 | change_lines = list(base_lines) |
|
659 | change_lines.insert(6, 'line 6a added\n') | |
|
656 | change_lines.insert(6, b'line 6a added\n') | |
|
660 | 657 | |
|
661 | 658 | # Changes on the last line of sight |
|
662 | 659 | update_lines = list(change_lines) |
|
663 | update_lines[0] = 'line 1 changed\n' | |
|
664 | update_lines[-1] = 'line 12 changed\n' | |
|
660 | update_lines[0] = b'line 1 changed\n' | |
|
661 | update_lines[-1] = b'line 12 changed\n' | |
|
665 | 662 | |
|
666 | 663 | def file_b(lines): |
|
667 | return FileNode('file_b', ''.join(lines)) | |
|
664 | return FileNode(b'file_b', b''.join(lines)) | |
|
668 | 665 | |
|
669 | 666 | commits = [ |
|
670 | 667 | {'message': 'a', 'added': [file_b(base_lines)]}, |
@@ -681,14 +678,14 b' class TestUpdateCommentHandling(object):' | |||
|
681 | 678 | assert_inline_comments(pull_request, visible=0, outdated=1) |
|
682 | 679 | |
|
683 | 680 | @pytest.mark.parametrize("change, content", [ |
|
684 | ('changed', 'changed\n'), | |
|
685 | ('removed', ''), | |
|
686 | ], ids=['changed', 'removed']) | |
|
681 | ('changed', b'changed\n'), | |
|
682 | ('removed', b''), | |
|
683 | ], ids=['changed', b'removed']) | |
|
687 | 684 | def test_comment_flagged_on_change(self, pr_util, change, content): |
|
688 | 685 | commits = [ |
|
689 | 686 | {'message': 'a'}, |
|
690 | {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]}, | |
|
691 | {'message': 'c', change: [FileNode('file_b', content)]}, | |
|
687 | {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]}, | |
|
688 | {'message': 'c', change: [FileNode(b'file_b', content)]}, | |
|
692 | 689 | ] |
|
693 | 690 | pull_request = pr_util.create_pull_request( |
|
694 | 691 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
@@ -706,9 +703,9 b' class TestUpdateChangedFiles(object):' | |||
|
706 | 703 | commits = [ |
|
707 | 704 | {'message': 'a'}, |
|
708 | 705 | {'message': 'b', |
|
709 | 'added': [FileNode('file_b', 'test_content b\n')]}, | |
|
706 | 'added': [FileNode(b'file_b', b'test_content b\n')]}, | |
|
710 | 707 | {'message': 'c', |
|
711 | 'added': [FileNode('file_c', 'test_content c\n')]}, | |
|
708 | 'added': [FileNode(b'file_c', b'test_content c\n')]}, | |
|
712 | 709 | ] |
|
713 | 710 | # open a PR from a to b, adding file_b |
|
714 | 711 | pull_request = pr_util.create_pull_request( |
@@ -728,11 +725,11 b' class TestUpdateChangedFiles(object):' | |||
|
728 | 725 | commits = [ |
|
729 | 726 | {'message': 'a'}, |
|
730 | 727 | {'message': 'b', |
|
731 | 'added': [FileNode('file_b', 'test_content b\n')]}, | |
|
728 | 'added': [FileNode(b'file_b', b'test_content b\n')]}, | |
|
732 | 729 | {'message': 'c', |
|
733 | 'changed': [FileNode('file_b', 'test_content b modified\n')]}, | |
|
730 | 'changed': [FileNode(b'file_b', b'test_content b modified\n')]}, | |
|
734 | 731 | {'message': 'd', |
|
735 | 'changed': [FileNode('file_b', 'test_content b\n')]}, | |
|
732 | 'changed': [FileNode(b'file_b', b'test_content b\n')]}, | |
|
736 | 733 | ] |
|
737 | 734 | # open a PR from a to b, adding file_b |
|
738 | 735 | pull_request = pr_util.create_pull_request( |
@@ -762,13 +759,13 b' class TestUpdateChangedFiles(object):' | |||
|
762 | 759 | commits = [ |
|
763 | 760 | {'message': 'a'}, |
|
764 | 761 | {'message': 'b', 'added': [ |
|
765 | FileNode('file_a', 'test_content a\n'), | |
|
766 | FileNode('file_b', 'test_content b\n'), | |
|
767 | FileNode('file_c', 'test_content c\n')]}, | |
|
762 | FileNode(b'file_a', b'test_content a\n'), | |
|
763 | FileNode(b'file_b', b'test_content b\n'), | |
|
764 | FileNode(b'file_c', b'test_content c\n')]}, | |
|
768 | 765 | {'message': 'c', 'changed': [ |
|
769 | FileNode('file_a', 'test_content a changed\n'), | |
|
770 | FileNode('file_b', 'test_content b changed\n'), | |
|
771 | FileNode('file_c', 'test_content c changed\n')]}, | |
|
766 | FileNode(b'file_a', b'test_content a changed\n'), | |
|
767 | FileNode(b'file_b', b'test_content b changed\n'), | |
|
768 | FileNode(b'file_c', b'test_content c changed\n')]}, | |
|
772 | 769 | ] |
|
773 | 770 | # open a PR from a to b, changing 3 files |
|
774 | 771 | pull_request = pr_util.create_pull_request( |
@@ -787,13 +784,13 b' class TestUpdateChangedFiles(object):' | |||
|
787 | 784 | commits = [ |
|
788 | 785 | {'message': 'a'}, |
|
789 | 786 | {'message': 'b', 'added': [ |
|
790 | FileNode('file_a', 'test_content a\n'), | |
|
791 | FileNode('file_b', 'test_content b\n'), | |
|
792 | FileNode('file_c', 'test_content c\n')]}, | |
|
787 | FileNode(b'file_a', b'test_content a\n'), | |
|
788 | FileNode(b'file_b', b'test_content b\n'), | |
|
789 | FileNode(b'file_c', b'test_content c\n')]}, | |
|
793 | 790 | {'message': 'c', 'removed': [ |
|
794 | FileNode('file_a', 'test_content a changed\n'), | |
|
795 | FileNode('file_b', 'test_content b changed\n'), | |
|
796 | FileNode('file_c', 'test_content c changed\n')]}, | |
|
791 | FileNode(b'file_a', b'test_content a changed\n'), | |
|
792 | FileNode(b'file_b', b'test_content b changed\n'), | |
|
793 | FileNode(b'file_c', b'test_content c changed\n')]}, | |
|
797 | 794 | ] |
|
798 | 795 | # open a PR from a to b, removing 3 files |
|
799 | 796 | pull_request = pr_util.create_pull_request( |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -47,8 +46,8 b' class TestGetDiffForPrOrVersion(object):' | |||
|
47 | 46 | def _prepare_pull_request(self, pr_util): |
|
48 | 47 | commits = [ |
|
49 | 48 | {'message': 'a'}, |
|
50 | {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]}, | |
|
51 | {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]}, | |
|
49 | {'message': 'b', 'added': [FileNode(b'file_b', 'test_content\n')]}, | |
|
50 | {'message': 'c', 'added': [FileNode(b'file_c', 'test_content\n')]}, | |
|
52 | 51 | ] |
|
53 | 52 | pull_request = pr_util.create_pull_request( |
|
54 | 53 | commits=commits, target_head='a', source_head='c', |
@@ -62,7 +61,7 b' class TestGetDiffForPrOrVersion(object):' | |||
|
62 | 61 | diff = PullRequestModel()._get_diff_from_pr_or_version( |
|
63 | 62 | source_repo, source_ref_id, target_ref_id, |
|
64 | 63 | hide_whitespace_changes=False, diff_context=6) |
|
65 | assert 'file_b' in diff.raw | |
|
64 | assert b'file_b' in diff.raw.tobytes() | |
|
66 | 65 | |
|
67 | 66 | def assert_commit_cannot_be_accessed( |
|
68 | 67 | self, removed_commit_id, pull_request): |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -40,40 +39,39 b' class CommitUtility:' | |||
|
40 | 39 | def __init__(self, vcsbackend): |
|
41 | 40 | self.vcsbackend = vcsbackend |
|
42 | 41 | |
|
43 | def commit_with_files(self, filenames): | |
|
42 | def commit_with_files(self, filenames: list[bytes]): | |
|
44 | 43 | commits = [ |
|
45 | 44 | {'message': 'Adding all requested files', |
|
46 | 45 | 'added': [ |
|
47 | nodes.FileNode(filename, content='') | |
|
48 | for filename in filenames | |
|
46 | nodes.FileNode(filename, content=b'') for filename in filenames | |
|
49 | 47 | ]}] |
|
50 | 48 | repo = self.vcsbackend.create_repo(commits=commits) |
|
51 | 49 | return repo.get_commit() |
|
52 | 50 | |
|
53 | 51 | |
|
54 | 52 | def test_no_matching_file_returns_none(commit_util): |
|
55 | commit = commit_util.commit_with_files(['LIESMICH']) | |
|
53 | commit = commit_util.commit_with_files([b'LIESMICH']) | |
|
56 | 54 | finder = ReadmeFinder(default_renderer='rst') |
|
57 | 55 | filenode = finder.search(commit) |
|
58 | 56 | assert filenode is None |
|
59 | 57 | |
|
60 | 58 | |
|
61 | 59 | def test_matching_file_returns_the_file_name(commit_util): |
|
62 | commit = commit_util.commit_with_files(['README']) | |
|
60 | commit = commit_util.commit_with_files([b'README']) | |
|
63 | 61 | finder = ReadmeFinder(default_renderer='rst') |
|
64 | 62 | filenode = finder.search(commit) |
|
65 | 63 | assert filenode.path == 'README' |
|
66 | 64 | |
|
67 | 65 | |
|
68 | 66 | def test_matching_file_with_extension(commit_util): |
|
69 | commit = commit_util.commit_with_files(['README.rst']) | |
|
67 | commit = commit_util.commit_with_files([b'README.rst']) | |
|
70 | 68 | finder = ReadmeFinder(default_renderer='rst') |
|
71 | 69 | filenode = finder.search(commit) |
|
72 | 70 | assert filenode.path == 'README.rst' |
|
73 | 71 | |
|
74 | 72 | |
|
75 | 73 | def test_prefers_readme_without_extension(commit_util): |
|
76 | commit = commit_util.commit_with_files(['README.rst', 'Readme']) | |
|
74 | commit = commit_util.commit_with_files([b'README.rst', b'Readme']) | |
|
77 | 75 | finder = ReadmeFinder() |
|
78 | 76 | filenode = finder.search(commit) |
|
79 | 77 | assert filenode.path == 'Readme' |
@@ -84,23 +82,21 b' def test_prefers_readme_without_extensio' | |||
|
84 | 82 | ('markdown', 'readme.md'), |
|
85 | 83 | ]) |
|
86 | 84 | def test_prefers_renderer_extensions(commit_util, renderer, expected): |
|
87 | commit = commit_util.commit_with_files( | |
|
88 | ['readme.rst', 'readme.md', 'readme.txt']) | |
|
85 | commit = commit_util.commit_with_files([b'readme.rst', b'readme.md', b'readme.txt']) | |
|
89 | 86 | finder = ReadmeFinder(default_renderer=renderer) |
|
90 | 87 | filenode = finder.search(commit) |
|
91 | 88 | assert filenode.path == expected |
|
92 | 89 | |
|
93 | 90 | |
|
94 | 91 | def test_finds_readme_in_subdirectory(commit_util): |
|
95 | commit = commit_util.commit_with_files(['doc/README.rst', 'LIESMICH']) | |
|
92 | commit = commit_util.commit_with_files([b'doc/README.rst', b'LIESMICH']) | |
|
96 | 93 | finder = ReadmeFinder() |
|
97 | 94 | filenode = finder.search(commit) |
|
98 | 95 | assert filenode.path == 'doc/README.rst' |
|
99 | 96 | |
|
100 | 97 | |
|
101 | 98 | def test_prefers_subdirectory_with_priority(commit_util): |
|
102 | commit = commit_util.commit_with_files( | |
|
103 | ['Doc/Readme.rst', 'Docs/Readme.rst']) | |
|
99 | commit = commit_util.commit_with_files([b'Doc/Readme.rst', b'Docs/Readme.rst']) | |
|
104 | 100 | finder = ReadmeFinder() |
|
105 | 101 | filenode = finder.search(commit) |
|
106 | 102 | assert filenode.path == 'Doc/Readme.rst' |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -168,7 +167,7 b' def assert_contains_only_unicode(structu' | |||
|
168 | 167 | @pytest.mark.backends("hg", "git") |
|
169 | 168 | def test_get_non_unicode_reference(backend): |
|
170 | 169 | model = scm.ScmModel() |
|
171 | non_unicode_list = ["Adını".
|
|
170 | non_unicode_list = ["Adını".encode("cp1254")] | |
|
172 | 171 | |
|
173 | 172 | def scm_instance(): |
|
174 | 173 | return Mock( |
@@ -179,11 +178,11 b' def test_get_non_unicode_reference(backe' | |||
|
179 | 178 | choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo) |
|
180 | 179 | if backend.alias == 'hg': |
|
181 | 180 | valid_choices = [ |
|
182 | 'rev:tip',
|
|
183 |
|
|
|
181 | 'rev:tip', 'branch:Ad\xc4\xb1n\xc4\xb1', | |
|
182 | 'book:Ad\xc4\xb1n\xc4\xb1', 'tag:Ad\xc4\xb1n\xc4\xb1'] | |
|
184 | 183 | else: |
|
185 | 184 | valid_choices = [ |
|
186 | 'rev:tip',
|
|
187 |
|
|
|
185 | 'rev:tip', 'branch:Ad\xc4\xb1n\xc4\xb1', | |
|
186 | 'tag:Ad\xc4\xb1n\xc4\xb1'] | |
|
188 | 187 | |
|
189 | 188 | assert choices == valid_choices |
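
The cp1254 fixture exists because such ref names are not valid UTF-8; a short, codec-only illustration (independent of the model code) of why a plain UTF-8 decode cannot handle them:

    raw = "Adını".encode("cp1254")         # b'Ad\xfdn\xfd' -- Turkish dotless i as 0xfd
    print(raw.decode("cp1254"))             # 'Adını', round-trips with the right codec
    print(raw.decode("utf-8", "replace"))   # 0xfd is not valid UTF-8, so replacement chars appear
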
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -21,7 +20,6 b'' | |||
|
21 | 20 | import pytest |
|
22 | 21 | import mock |
|
23 | 22 | |
|
24 | from rhodecode.lib.utils2 import safe_unicode | |
|
25 | 23 | from rhodecode.model.db import ( |
|
26 | 24 | true, User, UserGroup, UserGroupMember, UserEmailMap, Permission, UserIpMap) |
|
27 | 25 | from rhodecode.model.meta import Session |
@@ -30,6 +28,8 b' from rhodecode.model.user_group import U' | |||
|
30 | 28 | from rhodecode.model.repo import RepoModel |
|
31 | 29 | from rhodecode.model.repo_group import RepoGroupModel |
|
32 | 30 | from rhodecode.tests.fixture import Fixture |
|
31 | from rhodecode.lib.str_utils import safe_str | |
|
32 | ||
|
33 | 33 | |
|
34 | 34 | fixture = Fixture() |
|
35 | 35 | |
@@ -69,7 +69,7 b' class TestGetUsers(object):' | |||
|
69 | 69 | fake_users = [u for u in users if u['last_name'].startswith('Fake')] |
|
70 | 70 | assert len(fake_users) == 2 |
|
71 | 71 | for user in fake_users: |
|
72 | assert user['last_name'] == safe_
|
|
72 | assert user['last_name'] == safe_str('Fake ünicode user') | |
|
73 | 73 | |
|
74 | 74 | def test_returns_user_filtered_by_first_name(self, backend, user_util): |
|
75 | 75 | created_users = [] |
@@ -88,7 +88,7 b' class TestGetUsers(object):' | |||
|
88 | 88 | fake_users = [u for u in users if u['last_name'].startswith('Fake')] |
|
89 | 89 | assert len(fake_users) == 2 |
|
90 | 90 | for user in fake_users: |
|
91 | assert user['first_name'] == safe_
|
|
91 | assert user['first_name'] == safe_str('Fake ünicode user') | |
|
92 | 92 | |
|
93 | 93 | def test_returns_user_filtered_by_username(self, backend, user_util): |
|
94 | 94 | created_users = [] |
@@ -122,10 +122,10 b' class TestGetUsers(object):' | |||
|
122 | 122 | @pytest.fixture() |
|
123 | 123 | def test_user(request, baseapp): |
|
124 | 124 | usr = UserModel().create_or_update( |
|
125 | username=
|
|
126 | password=
|
|
127 | email=
|
|
128 | firstname=
|
|
125 | username='test_user', | |
|
126 | password='qweqwe', | |
|
127 | email='main_email@rhodecode.org', | |
|
128 | firstname='u1', lastname=u'u1') | |
|
129 | 129 | Session().commit() |
|
130 | 130 | assert User.get_by_username(u'test_user') == usr |
|
131 | 131 |
@@ -62,6 +62,9 b' use = egg:gunicorn#main' | |||
|
62 | 62 | ; The maximum number of simultaneous clients per worker. Valid only for gevent |
|
63 | 63 | #worker_connections = 10 |
|
64 | 64 | |
|
65 | ; The maximum number of pending connections worker will queue to handle | |
|
66 | #backlog = 64 | |
|
67 | ||
|
65 | 68 | ; Max number of requests that worker will handle before being gracefully restarted. |
|
66 | 69 | ; Prevents memory leaks, jitter adds variability so not all workers are restarted at once. |
|
67 | 70 | #max_requests = 1000 |
@@ -76,6 +79,40 b' use = egg:gunicorn#main' | |||
|
76 | 79 | ; 0 for unlimited |
|
77 | 80 | #limit_request_line = 0 |
|
78 | 81 | |
|
82 | ; Limit the number of HTTP headers fields in a request. | |
|
83 | ; By default this value is 100 and can't be larger than 32768. | |
|
84 | #limit_request_fields = 32768 | |
|
85 | ||
|
86 | ; Limit the allowed size of an HTTP request header field. | |
|
87 | ; Value is a positive number or 0. | |
|
88 | ; Setting it to 0 will allow unlimited header field sizes. | |
|
89 | #limit_request_field_size = 0 | |
|
90 | ||
|
91 | ; Timeout for graceful workers restart. | |
|
92 | ; After receiving a restart signal, workers have this much time to finish | |
|
93 | ; serving requests. Workers still alive after the timeout (starting from the | |
|
94 | ; receipt of the restart signal) are force killed. | |
|
95 | ; Examples: 1800 (30min), 3600 (1hr), 7200 (2hr), 43200 (12h) | |
|
96 | #graceful_timeout = 3600 | |
|
97 | ||
|
98 | # The number of seconds to wait for requests on a Keep-Alive connection. | |
|
99 | # Generally set in the 1-5 seconds range. | |
|
100 | #keepalive = 2 | |
|
101 | ||
|
102 | ; Maximum memory usage that each worker can use before it will receive a | |
|
103 | ; graceful restart signal 0 = memory monitoring is disabled | |
|
104 | ; Examples: 268435456 (256MB), 536870912 (512MB) | |
|
105 | ; 1073741824 (1GB), 2147483648 (2GB), 4294967296 (4GB) | |
|
106 | #memory_max_usage = 0 | |
|
107 | ||
|
108 | ; How often in seconds to check for memory usage for each gunicorn worker | |
|
109 | #memory_usage_check_interval = 60 | |
|
110 | ||
|
111 | ; Threshold value for which we don't recycle worker if GarbageCollection | |
|
112 | ; frees up enough resources. Before each restart we try to run GC on worker | |
|
113 | ; in case we get enough free memory after that, restart will not happen. | |
|
114 | #memory_usage_recovery_threshold = 0.8 | |
|
115 | ||
|
79 | 116 | |
|
80 | 117 | ; Prefix middleware for RhodeCode. |
|
81 | 118 | ; recommended when using proxy setup. |
@@ -108,19 +145,9 b' use = egg:rhodecode-enterprise-ce' | |||
|
108 | 145 | ; enable proxy prefix middleware, defined above |
|
109 | 146 | #filter-with = proxy-prefix |
|
110 | 147 | |
|
111 | ||
|
112 | ## RHODECODE PLUGINS ## | |
|
113 | rhodecode.includes = rhodecode.api | |
|
114 | ||
|
115 | # api prefix url | |
|
116 | rhodecode.api.url = /_admin/api | |
|
117 | ||
|
118 | ||
|
119 | ## END RHODECODE PLUGINS ## | |
|
120 | ||
|
121 | ## encryption key used to encrypt social plugin tokens, | |
|
122 | ## remote_urls with credentials etc, if not set it defaults to | |
|
123 | ## `beaker.session.secret` | |
|
148 | ; encryption key used to encrypt social plugin tokens, | |
|
149 | ; remote_urls with credentials etc, if not set it defaults to | |
|
150 | ; `beaker.session.secret` | |
|
124 | 151 | #rhodecode.encrypted_values.secret = |
|
125 | 152 | |
|
126 | 153 | ; decryption strict mode (enabled by default). It controls if decryption raises |
@@ -146,12 +173,6 b' lang = en' | |||
|
146 | 173 | ; Settings this to true could lead to very long startup time. |
|
147 | 174 | startup.import_repos = true |
|
148 | 175 | |
|
149 | ; Uncomment and set this path to use archive download cache. | |
|
150 | ; Once enabled, generated archives will be cached at this location | |
|
151 | ; and served from the cache during subsequent requests for the same archive of | |
|
152 | ; the repository. | |
|
153 | #archive_cache_dir = /tmp/tarballcache | |
|
154 | ||
|
155 | 176 | ; URL at which the application is running. This is used for Bootstrapping |
|
156 | 177 | ; requests in context when no web request is available. Used in ishell, or |
|
157 | 178 | ; SSH calls. Set this for events to receive proper url for SSH calls. |
@@ -160,8 +181,14 b' app.base_url = http://rhodecode.local' | |||
|
160 | 181 | ; Unique application ID. Should be a random unique string for security. |
|
161 | 182 | app_instance_uuid = rc-production |
|
162 | 183 | |
|
163 |
|
|
|
184 | ; Cut off limit for large diffs (size in bytes). If overall diff size on | |
|
185 | ; commit, or pull request exceeds this limit this diff will be displayed | |
|
186 | ; partially. E.g 512000 == 512Kb | |
|
164 | 187 | cut_off_limit_diff = 1024000 |
|
188 | ||
|
189 | ; Cut off limit for large files inside diffs (size in bytes). Each individual | |
|
190 | ; file inside diff which exceeds this limit will be displayed partially. | |
|
191 | ; E.g 128000 == 128Kb | |
|
165 | 192 | cut_off_limit_file = 256000 |
|
166 | 193 | |
|
167 | 194 | ; Use cached version of vcs repositories everywhere. Recommended to be `true` |
@@ -259,16 +286,20 b' allow_repo_location_change = true' | |||
|
259 | 286 | ; allows to setup custom hooks in settings page |
|
260 | 287 | allow_custom_hooks_settings = true |
|
261 | 288 | |
|
262 | ## generated license token, goto license page in RhodeCode settings to obtain | |
|
263 | ## new token | |
|
289 | ; Generated license token required for EE edition license. | |
|
290 | ; New generated token value can be found in Admin > settings > license page. | |
|
264 | 291 | license_token = abra-cada-bra1-rce3 |
|
265 | 292 | |
|
266 | ## supervisor connection uri, for managing supervisor and logs. | |
|
293 | ; This flag hides sensitive information on the license page such as token, and license data | |
|
294 | license.hide_license_info = false | |
|
295 | ||
|
296 | ; supervisor connection uri, for managing supervisor and logs. | |
|
267 | 297 | supervisor.uri = |
|
268 | ## supervisord group name/id we only want this RC instance to handle | |
|
298 | ||
|
299 | ; supervisord group name/id we only want this RC instance to handle | |
|
269 | 300 | supervisor.group_id = dev |
|
270 | 301 | |
|
271 |
|
|
|
302 | ; Display extended labs settings | |
|
272 | 303 | labs_settings_active = true |
|
273 | 304 | |
|
274 | 305 | ; Custom exception store path, defaults to TMPDIR |
@@ -295,6 +326,20 b' file_store.backend = local' | |||
|
295 | 326 | ; path to store the uploaded binaries |
|
296 | 327 | file_store.storage_path = %(here)s/data/file_store |
|
297 | 328 | |
|
329 | ; Uncomment and set this path to control settings for archive download cache. | |
|
330 | ; Generated repo archives will be cached at this location | |
|
331 | ; and served from the cache during subsequent requests for the same archive of | |
|
332 | ; the repository. This path is important to be shared across filesystems and with | |
|
333 | ; RhodeCode and vcsserver | |
|
334 | ||
|
335 | ; Default is $cache_dir/archive_cache if not set | |
|
336 | archive_cache.store_dir = /tmp/rc-test-data/archive_cache | |
|
337 | ||
|
338 | ; The limit in GB sets how much data we cache before recycling last used, defaults to 10 gb | |
|
339 | archive_cache.cache_size_gb = 10 | |
|
340 | ||
|
341 | ; By default cache uses sharding technique, this specifies how many shards are there | |
|
342 | archive_cache.cache_shards = 10 | |
|
298 | 343 | |
|
299 | 344 | ; ############# |
|
300 | 345 | ; CELERY CONFIG |
@@ -325,35 +370,108 b' celery.task_always_eager = false' | |||
|
325 | 370 | |
|
326 | 371 | ; Default cache dir for caches. Putting this into a ramdisk can boost performance. |
|
327 | 372 | ; eg. /tmpfs/data_ramdisk, however this directory might require large amount of space |
|
328 | cache_dir = %(here)s/data | |
|
329 | ||
|
330 | ## locking and default file storage for Beaker. Putting this into a ramdisk | |
|
331 | ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data | |
|
332 | beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data | |
|
333 | beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock | |
|
334 | ||
|
335 | beaker.cache.regions = long_term | |
|
336 | ||
|
337 | beaker.cache.long_term.type = memory | |
|
338 | beaker.cache.long_term.expire = 36000 | |
|
339 | beaker.cache.long_term.key_length = 256 | |
|
373 | cache_dir = %(here)s/rc-test-data | |
|
340 | 374 | |
|
341 | ||
|
342 | ##################################### | |
|
343 | ### DOGPILE CACHE #### | |
|
344 | ##################################### | |
|
345 | ||
|
346 | ## permission tree cache settings | |
|
347 | rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace | |
|
348 | rc_cache.cache_perms.expiration_time = 0 | |
|
349 | rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1 | |
|
350 | ||
|
351 | ||
|
352 | ## cache settings for SQL queries | |
|
375 | ; ********************************************* | |
|
376 | ; `sql_cache_short` cache for heavy SQL queries | |
|
377 | ; Only supported backend is `memory_lru` | |
|
378 | ; ********************************************* | |
|
353 | 379 | rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru |
|
354 | 380 | rc_cache.sql_cache_short.expiration_time = 0 |
|
355 | 381 | |
|
356 | 382 | |
|
383 | ; ***************************************************** | |
|
384 | ; `cache_repo_longterm` cache for repo object instances | |
|
385 | ; Only supported backend is `memory_lru` | |
|
386 | ; ***************************************************** | |
|
387 | rc_cache.cache_repo_longterm.backend = dogpile.cache.rc.memory_lru | |
|
388 | ; by default we use 30 Days, cache is still invalidated on push | |
|
389 | rc_cache.cache_repo_longterm.expiration_time = 2592000 | |
|
390 | ; max items in LRU cache, set to smaller number to save memory, and expire last used caches | |
|
391 | rc_cache.cache_repo_longterm.max_size = 10000 | |
|
392 | ||
|
393 | ||
|
394 | ; ********************************************* | |
|
395 | ; `cache_general` cache for general purpose use | |
|
396 | ; for simplicity use rc.file_namespace backend, | |
|
397 | ; for performance and scale use rc.redis | |
|
398 | ; ********************************************* | |
|
399 | rc_cache.cache_general.backend = dogpile.cache.rc.file_namespace | |
|
400 | rc_cache.cache_general.expiration_time = 43200 | |
|
401 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set | |
|
402 | rc_cache.cache_general.arguments.filename = %(here)s/cache-backend/cache_general_db | |
|
403 | ||
|
404 | ; alternative `cache_general` redis backend with distributed lock | |
|
405 | #rc_cache.cache_general.backend = dogpile.cache.rc.redis | |
|
406 | #rc_cache.cache_general.expiration_time = 300 | |
|
407 | ||
|
408 | ; redis_expiration_time needs to be greater than expiration_time |
|
409 | #rc_cache.cache_general.arguments.redis_expiration_time = 7200 | |
|
410 | ||
|
411 | #rc_cache.cache_general.arguments.host = localhost | |
|
412 | #rc_cache.cache_general.arguments.port = 6379 | |
|
413 | #rc_cache.cache_general.arguments.db = 0 | |
|
414 | #rc_cache.cache_general.arguments.socket_timeout = 30 | |
|
415 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends | |
|
416 | #rc_cache.cache_general.arguments.distributed_lock = true | |
|
417 | ||
|
418 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen | |
|
419 | #rc_cache.cache_general.arguments.lock_auto_renewal = true | |
|
420 | ||
|
421 | ; ************************************************* | |
|
422 | ; `cache_perms` cache for permission tree, auth TTL | |
|
423 | ; for simplicity use rc.file_namespace backend, | |
|
424 | ; for performance and scale use rc.redis | |
|
425 | ; ************************************************* | |
|
426 | rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace | |
|
427 | rc_cache.cache_perms.expiration_time = 0 | |
|
428 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set | |
|
429 | rc_cache.cache_perms.arguments.filename = %(here)s/cache-backend/cache_perms_db | |
|
430 | ||
|
431 | ; alternative `cache_perms` redis backend with distributed lock | |
|
432 | #rc_cache.cache_perms.backend = dogpile.cache.rc.redis | |
|
433 | #rc_cache.cache_perms.expiration_time = 300 | |
|
434 | ||
|
435 | ; redis_expiration_time needs to be greater than expiration_time |
|
436 | #rc_cache.cache_perms.arguments.redis_expiration_time = 7200 | |
|
437 | ||
|
438 | #rc_cache.cache_perms.arguments.host = localhost | |
|
439 | #rc_cache.cache_perms.arguments.port = 6379 | |
|
440 | #rc_cache.cache_perms.arguments.db = 0 | |
|
441 | #rc_cache.cache_perms.arguments.socket_timeout = 30 | |
|
442 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends | |
|
443 | #rc_cache.cache_perms.arguments.distributed_lock = true | |
|
444 | ||
|
445 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen | |
|
446 | #rc_cache.cache_perms.arguments.lock_auto_renewal = true | |
|
447 | ||
|
448 | ; *************************************************** | |
|
449 | ; `cache_repo` cache for file tree, Readme, RSS FEEDS | |
|
450 | ; for simplicity use rc.file_namespace backend, | |
|
451 | ; for performance and scale use rc.redis | |
|
452 | ; *************************************************** | |
|
453 | rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace | |
|
454 | rc_cache.cache_repo.expiration_time = 2592000 | |
|
455 | ; file cache store path. Defaults to `cache_dir =` value or tempdir if both values are not set | |
|
456 | rc_cache.cache_repo.arguments.filename = %(here)s/cache-backend/cache_repo_db | |
|
457 | ||
|
458 | ; alternative `cache_repo` redis backend with distributed lock | |
|
459 | #rc_cache.cache_repo.backend = dogpile.cache.rc.redis | |
|
460 | #rc_cache.cache_repo.expiration_time = 2592000 | |
|
461 | ||
|
462 | ; redis_expiration_time needs to be greater than expiration_time |
|
463 | #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400 | |
|
464 | ||
|
465 | #rc_cache.cache_repo.arguments.host = localhost | |
|
466 | #rc_cache.cache_repo.arguments.port = 6379 | |
|
467 | #rc_cache.cache_repo.arguments.db = 1 | |
|
468 | #rc_cache.cache_repo.arguments.socket_timeout = 30 | |
|
469 | ; more Redis options: https://dogpilecache.sqlalchemy.org/en/latest/api.html#redis-backends | |
|
470 | #rc_cache.cache_repo.arguments.distributed_lock = true | |
|
471 | ||
|
472 | ; auto-renew lock to prevent stale locks, slower but safer. Use only if problems happen | |
|
473 | #rc_cache.cache_repo.arguments.lock_auto_renewal = true | |
|
474 | ||
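Note: the rc_cache.* keys above are dogpile.cache region settings; the `dogpile.cache.rc.*` backends appear to be RhodeCode-specific wrappers around the stock dogpile backends. As a rough, non-authoritative sketch of what the commented-out Redis variant maps to (this uses the plain `dogpile.cache.redis` backend and placeholder connection values, not values taken from this config):

    from dogpile.cache import make_region

    # Redis-backed region roughly mirroring the #rc_cache.cache_general.* redis settings
    cache_general = make_region().configure(
        'dogpile.cache.redis',
        expiration_time=300,                 # rc_cache.cache_general.expiration_time
        arguments={
            'host': 'localhost',
            'port': 6379,
            'db': 0,
            'socket_timeout': 30,
            'redis_expiration_time': 7200,   # keep this greater than expiration_time
            'distributed_lock': True,
        },
    )

    @cache_general.cache_on_arguments()
    def expensive_lookup(key):
        return key.upper()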
|
357 | 475 | ; ############## |
|
358 | 476 | ; BEAKER SESSION |
|
359 | 477 | ; ############## |
@@ -362,7 +480,7 b' rc_cache.sql_cache_short.expiration_time' | |||
|
362 | 480 | ; types are file, ext:redis, ext:database, ext:memcached, and memory (default if not specified). |
|
363 | 481 | ; Fastest ones are Redis and ext:database |
|
364 | 482 | beaker.session.type = file |
|
365 | beaker.session.data_dir = %(here)s/rc/data/sessions |
|
|
483 | beaker.session.data_dir = %(here)s/rc-tests/data/sessions | |
|
366 | 484 | |
|
367 | 485 | ; Redis based sessions |
|
368 | 486 | #beaker.session.type = ext:redis |
@@ -378,7 +496,7 b' beaker.session.data_dir = %(here)s/rc/da' | |||
|
378 | 496 | |
|
379 | 497 | beaker.session.key = rhodecode |
|
380 | 498 | beaker.session.secret = test-rc-uytcxaz |
|
381 | beaker.session.lock_dir = %(here)s/
|
|
499 | beaker.session.lock_dir = %(here)s/data/sessions/lock | |
|
382 | 500 | |
|
383 | 501 | ; Secure encrypted cookie. Requires AES and AES python libraries |
|
384 | 502 | ; you must disable beaker.session.secret to use this |
@@ -476,7 +594,8 b' sqlalchemy.db1.pool_recycle = 3600' | |||
|
476 | 594 | ; VCS CONFIG |
|
477 | 595 | ; ########## |
|
478 | 596 | vcs.server.enable = true |
|
479 | vcs.server = localhost:9901 | |
|
597 | #vcs.server = localhost:9901 | |
|
598 | vcs.server = vcsserver:10010 | |
|
480 | 599 | |
|
481 | 600 | ; Web server connectivity protocol, responsible for web based VCS operations |
|
482 | 601 | ; Available protocols are: |
@@ -491,8 +610,9 b' vcs.scm_app_implementation = http' | |||
|
491 | 610 | ; `http` - use http-rpc backend (default) |
|
492 | 611 | vcs.hooks.protocol = http |
|
493 | 612 | |
|
494 | ; Host on which this instance is listening for hooks. |
|
|
495 | ; this should be adjusted. | |
|
613 | ; Host on which this instance is listening for hooks. vcsserver will call this host to pull/push hooks so it should be | |
|
614 | ; accessible via network. | |
|
615 | ; Use vcs.hooks.host = "*" to bind to current hostname (for Docker) | |
|
496 | 616 | vcs.hooks.host = * |
|
497 | 617 | |
|
498 | 618 | ; Start VCSServer with this instance as a subprocess, useful for development |
@@ -617,7 +737,7 b' custom.conf = 1' | |||
|
617 | 737 | ; ##################### |
|
618 | 738 | |
|
619 | 739 | [loggers] |
|
620 | keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper | |
|
740 | keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper, dogpile | |
|
621 | 741 | |
|
622 | 742 | [handlers] |
|
623 | 743 | keys = console, console_sql |
@@ -651,6 +771,12 b' handlers =' | |||
|
651 | 771 | qualname = beaker.container |
|
652 | 772 | propagate = 1 |
|
653 | 773 | |
|
774 | [logger_dogpile] | |
|
775 | level = INFO | |
|
776 | handlers = console | |
|
777 | qualname = dogpile | |
|
778 | propagate = 1 | |
|
779 | ||
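The new [logger_dogpile] section is the ini equivalent of wiring that logger up in code; a minimal sketch, assuming the same INFO level and a stderr console handler:

    import logging
    import sys

    console = logging.StreamHandler(sys.stderr)    # the `console` handler
    dogpile_log = logging.getLogger('dogpile')     # qualname = dogpile
    dogpile_log.setLevel(logging.INFO)             # level = INFO
    dogpile_log.addHandler(console)
    dogpile_log.propagate = True                   # propagate = 1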
|
654 | 780 | [logger_rhodecode] |
|
655 | 781 | level = DEBUG |
|
656 | 782 | handlers = |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -28,8 +27,12 b' watch -n1 ./rhodecode/tests/mem_watch' | |||
|
28 | 27 | |
|
29 | 28 | |
|
30 | 29 | import cookielib |
|
31 | import urllib.request |
|
|
32 | import urllib.request, urllib.error, urllib.parse | |
|
30 | import urllib.request | |
|
31 | import urllib.parse | |
|
32 | import urllib.error | |
|
33 | import urllib.request | |
|
34 | import urllib.error | |
|
35 | import urllib.parse | |
|
33 | 36 | import time |
|
34 | 37 | import os |
|
35 | 38 | import sys |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -58,7 +57,7 b' def assert_no_running_instance(url):' | |||
|
58 | 57 | class ServerBase(object): |
|
59 | 58 | _args = [] |
|
60 | 59 | log_file_name = 'NOT_DEFINED.log' |
|
61 | status_url_tmpl = 'http://{host}:{port}' | |
|
60 | status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' | |
|
62 | 61 | |
|
63 | 62 | def __init__(self, config_file, log_file): |
|
64 | 63 | self.config_file = config_file |
@@ -99,6 +98,10 b' class ServerBase(object):' | |||
|
99 | 98 | with open(self.log_file) as f: |
|
100 | 99 | return f.read() |
|
101 | 100 | |
|
101 | def assert_message_in_server_logs(self, message): | |
|
102 | server_logs = self.get_rc_log() | |
|
103 | assert message in server_logs | |
|
104 | ||
|
102 | 105 | def wait_until_ready(self, timeout=30): |
|
103 | 106 | host = self._config['host'] |
|
104 | 107 | port = self._config['port'] |
@@ -140,10 +143,15 b' class RcVCSServer(ServerBase):' | |||
|
140 | 143 | log_file_name = 'rc-vcsserver.log' |
|
141 | 144 | status_url_tmpl = 'http://{host}:{port}/status' |
|
142 | 145 | |
|
143 | def __init__(self, config_file, log_file=None): | |
|
146 | def __init__(self, config_file, log_file=None, workers='2'): | |
|
144 | 147 | super(RcVCSServer, self).__init__(config_file, log_file) |
|
145 | 148 | self._args = [ |
|
146 | 'gunicorn',
|
|
149 | 'gunicorn', | |
|
150 | '--bind', self.bind_addr, | |
|
151 | '--worker-class', 'gevent', | |
|
152 | '--backlog', '16', | |
|
153 | '--timeout', '300', | |
|
154 | '--workers', workers, | |
|
147 | 155 | '--paste', self.config_file] |
|
148 | 156 | |
|
149 | 157 | def start(self): |
@@ -172,10 +180,15 b' class RcWebServer(ServerBase):' | |||
|
172 | 180 | log_file_name = 'rc-web.log' |
|
173 | 181 | status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' |
|
174 | 182 | |
|
175 | def __init__(self, config_file, log_file=None): | |
|
183 | def __init__(self, config_file, log_file=None, workers='1'): | |
|
176 | 184 | super(RcWebServer, self).__init__(config_file, log_file) |
|
177 | 185 | self._args = [ |
|
178 | 'gunicorn', '--bind', self.bind_addr, '--worker-class', 'gevent', | |
|
186 | 'gunicorn', | |
|
187 | '--bind', self.bind_addr, | |
|
188 | '--worker-class', 'gevent', | |
|
189 | '--backlog', '16', | |
|
190 | '--timeout', '300', | |
|
191 | '--workers', workers, | |
|
179 | 192 | '--paste', self.config_file] |
|
180 | 193 | |
|
181 | 194 | def start(self): |
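For reference, the argument lists assembled above for RcVCSServer and RcWebServer amount to launching gunicorn with an explicit gevent worker setup. A hedged sketch of the equivalent spawn, assuming ServerBase eventually hands self._args to a process launcher (the bind address and ini path are placeholders, not values from this diff):

    import subprocess

    proc = subprocess.Popen([
        'gunicorn',
        '--bind', '127.0.0.1:10010',
        '--worker-class', 'gevent',
        '--backlog', '16',
        '--timeout', '300',
        '--workers', '2',
        '--paste', 'vcsserver.ini',
    ])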
@@ -204,5 +217,5 b' class RcWebServer(ServerBase):' | |||
|
204 | 217 | 'cloned_repo': repo_name, |
|
205 | 218 | } |
|
206 | 219 | params.update(**kwargs) |
|
207 | _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params | |
|
220 | _url = f"http://{params['user']}:{params['passwd']}@{params['host']}/{params['cloned_repo']}" | |
|
208 | 221 | return _url |
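To illustrate the f-string above with made-up credentials (none of these values come from this changeset):

    params = {'user': 'test_admin', 'passwd': 'test12',
              'host': '127.0.0.1:5000', 'cloned_repo': 'vcs_test_git'}
    _url = f"http://{params['user']}:{params['passwd']}@{params['host']}/{params['cloned_repo']}"
    # -> 'http://test_admin:test12@127.0.0.1:5000/vcs_test_git'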
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -78,28 +77,25 b' class CustomTestResponse(TestResponse):' | |||
|
78 | 77 | else: |
|
79 | 78 | no = [] |
|
80 | 79 | if kw: |
|
81 | raise TypeError( | |
|
82 | "The only keyword argument allowed is 'no' got %s" % kw) | |
|
80 | raise TypeError(f"The only keyword argument allowed is 'no' got {kw}") | |
|
83 | 81 | |
|
84 | 82 | f = self._save_output(str(self)) |
|
85 | 83 | |
|
86 | 84 | for s in strings: |
|
87 | 85 | if s not in self: |
|
88 | print_stderr("Actual response (no
|
|
89 | print_stderr("body output saved as `
|
|
86 | print_stderr(f"Actual response (no {s!r}):") | |
|
87 | print_stderr(f"body output saved as `{f}`") | |
|
90 | 88 | if print_body: |
|
91 | 89 | print_stderr(str(self)) |
|
92 | raise IndexError( | |
|
93 | "Body does not contain string %r, body output saved as %s" % (s, f)) | |
|
90 | raise IndexError(f"Body does not contain string {s!r}, body output saved as {f}") | |
|
94 | 91 | |
|
95 | 92 | for no_s in no: |
|
96 | 93 | if no_s in self: |
|
97 | print_stderr("Actual response (has
|
|
98 | print_stderr("body output saved as `
|
|
94 | print_stderr(f"Actual response (has {no_s!r})") | |
|
95 | print_stderr(f"body output saved as `{f}`") | |
|
99 | 96 | if print_body: |
|
100 | 97 | print_stderr(str(self)) |
|
101 | raise IndexError( | |
|
102 | "Body contains bad string %r, body output saved as %s" % (no_s, f)) | |
|
98 | raise IndexError(f"Body contains bad string {no_s!r}, body output saved as {f}") | |
|
103 | 99 | |
|
104 | 100 | def assert_response(self): |
|
105 | 101 | return AssertResponse(self) |
@@ -166,6 +162,10 b' class CustomTestApp(TestApp):' | |||
|
166 | 162 | def _pyramid_settings(self): |
|
167 | 163 | return self._pyramid_registry.settings |
|
168 | 164 | |
|
165 | def do_request(self, req, status=None, expect_errors=None): | |
|
166 | # you can put custom code here | |
|
167 | return super().do_request(req, status, expect_errors) | |
|
168 | ||
|
169 | 169 | |
|
170 | 170 | def set_anonymous_access(enabled): |
|
171 | 171 | """(Dis)allows anonymous access depending on parameter `enabled`""" |
@@ -406,12 +406,12 b' def wait_for_url(url, timeout=10):' | |||
|
406 | 406 | pytest.fail(f"Timeout while waiting for URL {url}") |
|
407 | 407 | |
|
408 | 408 | |
|
409 | def is_url_reachable(url: str, log_exc: bool =
|
|
409 | def is_url_reachable(url: str, log_exc: bool = False) -> bool: | |
|
410 | 410 | try: |
|
411 | 411 | urllib.request.urlopen(url) |
|
412 | 412 | except urllib.error.URLError: |
|
413 | 413 | if log_exc: |
|
414 | log.exception('URL `{}` reach error'
|
|
414 | log.exception(f'URL `{url}` reach error') | |
|
415 | 415 | return False |
|
416 | 416 | return True |
|
417 | 417 | |
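A minimal usage sketch of the updated helper, as it could be called from within this module; the URL is a placeholder, not taken from the tests:

    if not is_url_reachable('http://127.0.0.1:10010/status', log_exc=True):
        pytest.fail('status endpoint is not reachable')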
@@ -425,7 +425,7 b' def repo_on_filesystem(repo_name):' | |||
|
425 | 425 | |
|
426 | 426 | |
|
427 | 427 | def commit_change( |
|
428 | repo, filename, content, message, vcs_type, parent=None, newfile=False): | |
|
428 | repo, filename: bytes, content: bytes, message, vcs_type, parent=None, newfile=False): | |
|
429 | 429 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN |
|
430 | 430 | |
|
431 | 431 | repo = Repository.get_by_repo_name(repo) |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -24,6 +23,7 b' import datetime' | |||
|
24 | 23 | |
|
25 | 24 | import pytest |
|
26 | 25 | |
|
26 | from rhodecode.lib.str_utils import safe_bytes | |
|
27 | 27 | from rhodecode.lib.vcs.backends import get_backend |
|
28 | 28 | from rhodecode.lib.vcs.backends.base import Config |
|
29 | 29 | from rhodecode.lib.vcs.nodes import FileNode |
@@ -137,17 +137,25 b' def _add_commits_to_repo(repo, commits):' | |||
|
137 | 137 | |
|
138 | 138 | for commit in commits: |
|
139 | 139 | for node in commit.get('added', []): |
|
140 | imc.add(FileNode(node.path, content=node.content)) | |
|
140 | if not isinstance(node, FileNode): | |
|
141 | node = FileNode(safe_bytes(node.path), content=node.content) | |
|
142 | imc.add(node) | |
|
143 | ||
|
141 | 144 | for node in commit.get('changed', []): |
|
142 | imc.change(FileNode(node.path, content=node.content)) | |
|
145 | if not isinstance(node, FileNode): | |
|
146 | node = FileNode(safe_bytes(node.path), content=node.content) | |
|
147 | imc.change(node) | |
|
148 | ||
|
143 | 149 | for node in commit.get('removed', []): |
|
144 | imc.remove(FileNode(node.path)) | |
|
150 | imc.remove(FileNode(safe_bytes(node.path))) | |
|
145 | 151 | |
|
146 | 152 | tip = imc.commit( |
|
147 | 153 | message=str(commit['message']), |
|
148 | 154 | author=str(commit['author']), |
|
149 | 155 | date=commit['date'], |
|
150 | branch=commit.get('branch')
|
|
156 | branch=commit.get('branch') | |
|
157 | ) | |
|
158 | ||
|
151 | 159 | return tip |
|
152 | 160 | |
|
153 | 161 | |
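The rewritten helper above follows the convention used throughout this changeset: FileNode takes a bytes path (via safe_bytes() for str input) and bytes content. A minimal sketch of the pattern, with an illustrative file name:

    from rhodecode.lib.str_utils import safe_bytes
    from rhodecode.lib.vcs.nodes import FileNode

    node = FileNode(safe_bytes('docs/index.txt'), content=b'Documentation\n')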
@@ -167,7 +175,7 b' def vcs_repo(request, backend_alias):' | |||
|
167 | 175 | @pytest.fixture() |
|
168 | 176 | def generate_repo_with_commits(vcs_repo): |
|
169 | 177 | """ |
|
170 | Creates a fabric to generate N comits with some file nodes on a randomly | |
|
178 | Creates a fabric to generate N commits with some file nodes on a randomly | |
|
171 | 179 | generated repository |
|
172 | 180 | """ |
|
173 | 181 | |
@@ -179,11 +187,11 b' def generate_repo_with_commits(vcs_repo)' | |||
|
179 | 187 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
180 | 188 | 'date': start_date + datetime.timedelta(hours=12 * x), |
|
181 | 189 | 'added': [ |
|
182 | FileNode('file_%d.txt' % x, content='Foobar %d' % x), | |
|
190 | FileNode(b'file_%d.txt' % x, content=b'Foobar %d' % x), | |
|
183 | 191 | ], |
|
184 | 192 | 'modified': [ |
|
185 | FileNode('file_%d.txt' % x, | |
|
186 | content='Foobar %d modified' % (x-1)), | |
|
193 | FileNode(b'file_%d.txt' % x, | |
|
194 | content=b'Foobar %d modified' % (x-1)), | |
|
187 | 195 | ] |
|
188 | 196 | } |
|
189 | 197 | |
@@ -229,24 +237,24 b' class BackendTestMixin(object):' | |||
|
229 | 237 | def _get_commits(cls): |
|
230 | 238 | commits = [ |
|
231 | 239 | { |
|
232 | 'message':
|
|
233 | 'author':
|
|
240 | 'message': 'Initial commit', | |
|
241 | 'author': 'Joe Doe <joe.doe@example.com>', | |
|
234 | 242 | 'date': datetime.datetime(2010, 1, 1, 20), |
|
235 | 243 | 'added': [ |
|
236 | FileNode('foobar', content='Foobar'), | |
|
237 | FileNode('foobar2', content='Foobar II'), | |
|
238 | FileNode('foo/bar/baz', content='baz here!'), | |
|
244 | FileNode(b'foobar', content=b'Foobar'), | |
|
245 | FileNode(b'foobar2', content=b'Foobar II'), | |
|
246 | FileNode(b'foo/bar/baz', content=b'baz here!'), | |
|
239 | 247 | ], |
|
240 | 248 | }, |
|
241 | 249 | { |
|
242 | 'message':
|
|
243 | 'author':
|
|
250 | 'message': 'Changes...', | |
|
251 | 'author': 'Jane Doe <jane.doe@example.com>', | |
|
244 | 252 | 'date': datetime.datetime(2010, 1, 1, 21), |
|
245 | 253 | 'added': [ |
|
246 | FileNode('some/new.txt', content='news...'), | |
|
254 | FileNode(b'some/new.txt', content=b'news...'), | |
|
247 | 255 | ], |
|
248 | 256 | 'changed': [ |
|
249 | FileNode('foobar', 'Foobar I'), | |
|
257 | FileNode(b'foobar', b'Foobar I'), | |
|
250 | 258 | ], |
|
251 | 259 | 'removed': [], |
|
252 | 260 | }, |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -22,30 +21,29 b' import datetime' | |||
|
22 | 21 | import os |
|
23 | 22 | import shutil |
|
24 | 23 | import tarfile |
|
25 | import tempfile | |
|
26 | 24 | import zipfile |
|
27 | 25 | import io |
|
28 | 26 | |
|
29 | 27 | import mock |
|
30 | 28 | import pytest |
|
31 | 29 | |
|
30 | import rhodecode | |
|
31 | from rhodecode.lib.rc_cache.archive_cache import get_archival_config | |
|
32 | from rhodecode.lib.str_utils import ascii_bytes | |
|
32 | 33 | from rhodecode.lib.vcs.backends import base |
|
33 | 34 | from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError, VCSError |
|
34 | 35 | from rhodecode.lib.vcs.nodes import FileNode |
|
35 | 36 | from rhodecode.tests.vcs.conftest import BackendTestMixin |
|
36 | 37 | |
|
37 | 38 | |
|
39 | @pytest.fixture() | |
|
40 | def d_cache_config(): | |
|
41 | return get_archival_config(config=rhodecode.CONFIG) | |
|
42 | ||
|
43 | ||
|
38 | 44 | @pytest.mark.usefixtures("vcs_repository_support") |
|
39 | 45 | class TestArchives(BackendTestMixin): |
|
40 | 46 | |
|
41 | @pytest.fixture(autouse=True) | |
|
42 | def tempfile(self, request): | |
|
43 | self.temp_file = tempfile.mkstemp()[1] | |
|
44 | ||
|
45 | @request.addfinalizer | |
|
46 | def cleanup(): | |
|
47 | os.remove(self.temp_file) | |
|
48 | ||
|
49 | 47 | @classmethod |
|
50 | 48 | def _get_commits(cls): |
|
51 | 49 | start_date = datetime.datetime(2010, 1, 1, 20) |
@@ -54,9 +52,9 b' class TestArchives(BackendTestMixin):' | |||
|
54 | 52 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
55 | 53 | 'date': start_date + datetime.timedelta(hours=12), |
|
56 | 54 | 'added': [ |
|
57 | FileNode('executable_0o100755', '
|
|
58 | FileNode('executable_0o100500', '
|
|
59 | FileNode('not_executable', '
|
|
55 | FileNode(b'executable_0o100755', b'mode_755', mode=0o100755), | |
|
56 | FileNode(b'executable_0o100500', b'mode_500', mode=0o100500), | |
|
57 | FileNode(b'not_executable', b'mode_644', mode=0o100644), | |
|
60 | 58 | ], |
|
61 | 59 | } |
|
62 | 60 | for x in range(5): |
@@ -65,22 +63,27 b' class TestArchives(BackendTestMixin):' | |||
|
65 | 63 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
66 | 64 | 'date': start_date + datetime.timedelta(hours=12 * x), |
|
67 | 65 | 'added': [ |
|
68 | FileNode('%d/file_%d.txt' % (x, x), content='Foobar %d' % x), | |
|
66 | FileNode(b'%d/file_%d.txt' % (x, x), content=b'Foobar %d' % x), | |
|
69 | 67 | ], |
|
70 | 68 | } |
|
71 | 69 | |
|
72 | 70 | @pytest.mark.parametrize('compressor', ['gz', 'bz2']) |
|
73 | def test_archive_tar(self, compressor): | |
|
74 | self.tip.archive_repo( | |
|
75 | self.temp_file, kind='t{}'.format(compressor), archive_dir_name='repo') | |
|
76 | out_dir = tempfile.mkdtemp() | |
|
77 | out_file = tarfile.open(self.temp_file, 'r|{}'.format(compressor)) | |
|
71 | def test_archive_tar(self, compressor, tmpdir, tmp_path, d_cache_config): | |
|
72 | ||
|
73 | archive_node = tmp_path / 'archive-node' | |
|
74 | archive_node.touch() | |
|
75 | ||
|
76 | archive_lnk = self.tip.archive_repo( | |
|
77 | str(archive_node), kind=f't{compressor}', archive_dir_name='repo', cache_config=d_cache_config) | |
|
78 | ||
|
79 | out_dir = tmpdir | |
|
80 | out_file = tarfile.open(str(archive_lnk), f'r|{compressor}') | |
|
78 | 81 | out_file.extractall(out_dir) |
|
79 | 82 | out_file.close() |
|
80 | 83 | |
|
81 | 84 | for x in range(5): |
|
82 | 85 | node_path = '%d/file_%d.txt' % (x, x) |
|
83 | with open(os.path.join(out_dir, 'repo/' + node_path)) as f: | |
|
86 | with open(os.path.join(out_dir, 'repo/' + node_path), 'rb') as f: | |
|
84 | 87 | file_content = f.read() |
|
85 | 88 | assert file_content == self.tip.get_node(node_path).content |
|
86 | 89 | |
@@ -88,53 +91,72 b' class TestArchives(BackendTestMixin):' | |||
|
88 | 91 | |
|
89 | 92 | @pytest.mark.parametrize('compressor', ['gz', 'bz2']) |
|
90 | 93 | def test_archive_tar_symlink(self, compressor): |
|
91 | return False | |
|
94 | pytest.skip('Not supported') | |
|
92 | 95 | |
|
93 | 96 | @pytest.mark.parametrize('compressor', ['gz', 'bz2']) |
|
94 | def test_archive_tar_file_modes(self, compressor): | |
|
95 | self.tip.archive_repo( | |
|
96 | self.temp_file, kind='t{}'.format(compressor), archive_dir_name='repo') | |
|
97 | out_dir = tempfile.mkdtemp() | |
|
98 | out_file = tarfile.open(self.temp_file, 'r|{}'.format(compressor)) | |
|
97 | def test_archive_tar_file_modes(self, compressor, tmpdir, tmp_path, d_cache_config): | |
|
98 | archive_node = tmp_path / 'archive-node' | |
|
99 | archive_node.touch() | |
|
100 | ||
|
101 | archive_lnk = self.tip.archive_repo( | |
|
102 | str(archive_node), kind='t{}'.format(compressor), archive_dir_name='repo', cache_config=d_cache_config) | |
|
103 | ||
|
104 | out_dir = tmpdir | |
|
105 | out_file = tarfile.open(str(archive_lnk), 'r|{}'.format(compressor)) | |
|
99 | 106 | out_file.extractall(out_dir) |
|
100 | 107 | out_file.close() |
|
101 | dest = lambda inp: os.path.join(out_dir, 'repo/' + inp) | |
|
102 | 108 | |
|
103 | assert oct(os.stat(dest('not_executable')).st_mode) == '0100644' | |
|
109 | def dest(inp): | |
|
110 | return os.path.join(out_dir, "repo/" + inp) | |
|
111 | ||
|
112 | assert oct(os.stat(dest('not_executable')).st_mode) == '0o100644' | |
|
104 | 113 | |
|
105 | def test_archive_zip(self): | |
|
106 | self.tip.archive_repo(self.temp_file, kind='zip', archive_dir_name='repo') | |
|
107 | out = zipfile.ZipFile(self.temp_file) | |
|
114 | def test_archive_zip(self, tmp_path, d_cache_config): | |
|
115 | archive_node = tmp_path / 'archive-node' | |
|
116 | archive_node.touch() | |
|
117 | ||
|
118 | archive_lnk = self.tip.archive_repo(str(archive_node), kind='zip', | |
|
119 | archive_dir_name='repo', cache_config=d_cache_config) | |
|
120 | zip_file = zipfile.ZipFile(str(archive_lnk)) | |
|
108 | 121 | |
|
109 | 122 | for x in range(5): |
|
110 | 123 | node_path = '%d/file_%d.txt' % (x, x) |
|
111 | decompressed = io.StringIO() | |
|
112 | decompressed.write(out.read('repo/' + node_path)) | |
|
124 | data = zip_file.read(f'repo/{node_path}') | |
|
125 | ||
|
126 | decompressed = io.BytesIO() | |
|
127 | decompressed.write(data) | |
|
113 | 128 | assert decompressed.getvalue() == \ |
|
114 | 129 | self.tip.get_node(node_path).content |
|
115 | 130 | decompressed.close() |
|
116 | 131 | |
|
117 | def test_archive_zip_with_metadata(self): | |
|
118 | self.tip.archive_repo(self.temp_file, kind='zip', | |
|
119 | archive_dir_name='repo', write_metadata=True) | |
|
132 | def test_archive_zip_with_metadata(self, tmp_path, d_cache_config): | |
|
133 | archive_node = tmp_path / 'archive-node' | |
|
134 | archive_node.touch() | |
|
120 | 135 | |
|
121 | out = zipfile.ZipFile(self.temp_file) | |
|
122 | metafile = out.read('repo/.archival.txt') | |
|
136 | archive_lnk = self.tip.archive_repo(str(archive_node), kind='zip', | |
|
137 | archive_dir_name='repo', write_metadata=True, cache_config=d_cache_config) | |
|
123 | 138 | |
|
124 | raw_id = self.tip.raw_id | |
|
125 | assert 'commit_id:%s' % raw_id in metafile | |
|
139 | zip_file = zipfile.ZipFile(str(archive_lnk)) | |
|
140 | metafile = zip_file.read('repo/.archival.txt') | |
|
141 | ||
|
142 | raw_id = ascii_bytes(self.tip.raw_id) | |
|
143 | assert b'commit_id:%b' % raw_id in metafile | |
|
126 | 144 | |
|
127 | 145 | for x in range(5): |
|
128 | 146 | node_path = '%d/file_%d.txt' % (x, x) |
|
129 | decompressed = io.StringIO() | |
|
130 | decompressed.write(out.read('repo/' + node_path)) | |
|
147 | data = zip_file.read(f'repo/{node_path}') | |
|
148 | decompressed = io.BytesIO() | |
|
149 | decompressed.write(data) | |
|
131 | 150 | assert decompressed.getvalue() == \ |
|
132 | 151 | self.tip.get_node(node_path).content |
|
133 | 152 | decompressed.close() |
|
134 | 153 | |
|
135 | def test_archive_wrong_kind(self): | |
|
154 | def test_archive_wrong_kind(self, tmp_path, d_cache_config): | |
|
155 | archive_node = tmp_path / 'archive-node' | |
|
156 | archive_node.touch() | |
|
157 | ||
|
136 | 158 | with pytest.raises(ImproperArchiveTypeError): |
|
137 | self.tip.archive_repo(s
|
|
159 | self.tip.archive_repo(str(archive_node), kind='wrong kind', cache_config=d_cache_config) | |
|
138 | 160 | |
|
139 | 161 | |
|
140 | 162 | @pytest.fixture() |
@@ -144,15 +166,14 b' def base_commit():' | |||
|
144 | 166 | """ |
|
145 | 167 | commit = base.BaseCommit() |
|
146 | 168 | commit.repository = mock.Mock() |
|
147 | commit.repository.name =
|
|
169 | commit.repository.name = 'fake_repo' | |
|
148 | 170 | commit.short_id = 'fake_id' |
|
149 | 171 | return commit |
|
150 | 172 | |
|
151 | 173 | |
|
152 | @pytest.mark.parametrize("prefix", [u"unicode-prefix", u"ΓnΓ―cΓΆdΓ«"]) | |
|
153 | def test_validate_archive_prefix_enforces_bytes_as_prefix(prefix, base_commit): | |
|
154 | with pytest.raises(ValueError): | |
|
155 | base_commit._validate_archive_prefix(prefix) | |
|
174 | def test_validate_archive_prefix_enforces_non_ascii_as_prefix(base_commit): | |
|
175 | with pytest.raises(VCSError): | |
|
176 | base_commit._validate_archive_prefix("ΓnΓ―cΓΆdΓ«") | |
|
156 | 177 | |
|
157 | 178 | |
|
158 | 179 | def test_validate_archive_prefix_empty_prefix(base_commit): |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -54,9 +53,9 b' class TestBranches(BackendTestMixin):' | |||
|
54 | 53 | # This check must not be removed to ensure the 'branches' LazyProperty |
|
55 | 54 | # gets hit *before* the new 'foobar' branch got created: |
|
56 | 55 | assert 'foobar' not in self.repo.branches |
|
57 | self.imc.add(
|
|
58 | 'docs/index.txt', | |
|
59 | content='Documentation\n')) | |
|
56 | self.imc.add( | |
|
57 | FileNode(b'docs/index.txt', content=b'Documentation\n') | |
|
58 | ) | |
|
60 | 59 | foobar_tip = self.imc.commit( |
|
61 | 60 | message=u'New branch: foobar', |
|
62 | 61 | author=u'joe <joe@rhodecode.com>', |
@@ -68,9 +67,10 b' class TestBranches(BackendTestMixin):' | |||
|
68 | 67 | @pytest.mark.backends("git", "hg") |
|
69 | 68 | def test_new_head(self): |
|
70 | 69 | tip = self.repo.get_commit() |
|
71 | self.imc.add(
|
|
72 | 'docs/index.txt', | |
|
73 | content='Documentation\n')
|
|
70 | self.imc.add( | |
|
71 | FileNode(b'docs/index.txt', | |
|
72 | content=b'Documentation\n') | |
|
73 | ) | |
|
74 | 74 | foobar_tip = self.imc.commit( |
|
75 | 75 | message=u'New branch: foobar', |
|
76 | 76 | author=u'joe <joe@rhodecode.com>', |
@@ -78,8 +78,8 b' class TestBranches(BackendTestMixin):' | |||
|
78 | 78 | parents=[tip], |
|
79 | 79 | ) |
|
80 | 80 | self.imc.change(FileNode( |
|
81 | 'docs/index.txt', | |
|
82 | content='Documentation\nand more...\n')) | |
|
81 | b'docs/index.txt', | |
|
82 | content=b'Documentation\nand more...\n')) | |
|
83 | 83 | newtip = self.imc.commit( |
|
84 | 84 | message=u'At default branch', |
|
85 | 85 | author=u'joe <joe@rhodecode.com>', |
@@ -99,7 +99,7 b' class TestBranches(BackendTestMixin):' | |||
|
99 | 99 | |
|
100 | 100 | @pytest.mark.backends("git", "hg") |
|
101 | 101 | def test_branch_with_slash_in_name(self): |
|
102 | self.imc.add(FileNode('extrafile', content='Some data\n')) | |
|
102 | self.imc.add(FileNode(b'extrafile', content=b'Some data\n')) | |
|
103 | 103 | self.imc.commit( |
|
104 | 104 | u'Branch with a slash!', author=u'joe <joe@rhodecode.com>', |
|
105 | 105 | branch='issue/123') |
@@ -107,11 +107,11 b' class TestBranches(BackendTestMixin):' | |||
|
107 | 107 | |
|
108 | 108 | @pytest.mark.backends("git", "hg") |
|
109 | 109 | def test_branch_with_slash_in_name_and_similar_without(self): |
|
110 | self.imc.add(FileNode('extrafile', content='Some data\n')) | |
|
110 | self.imc.add(FileNode(b'extrafile', content=b'Some data\n')) | |
|
111 | 111 | self.imc.commit( |
|
112 | 112 | u'Branch with a slash!', author=u'joe <joe@rhodecode.com>', |
|
113 | 113 | branch='issue/123') |
|
114 | self.imc.add(FileNode('extrafile II', content='Some data\n')) | |
|
114 | self.imc.add(FileNode(b'extrafile II', content=b'Some data\n')) | |
|
115 | 115 | self.imc.commit( |
|
116 | 116 | u'Branch without a slash...', author=u'joe <joe@rhodecode.com>', |
|
117 | 117 | branch='123') |
@@ -138,10 +138,10 b' class TestSvnBranches(object):' | |||
|
138 | 138 | 'branches/argparse', |
|
139 | 139 | 'trunk', |
|
140 | 140 | ] |
|
141 | assert repo.branches.keys() == expected_branches | |
|
141 | assert list(repo.branches.keys()) == expected_branches | |
|
142 | 142 | |
|
143 | 143 | def test_discovers_ordered_tags(self, vcsbackend_svn): |
|
144 | 144 | repo = vcsbackend_svn['svn-simple-layout'] |
|
145 | 145 | expected_tags = [ |
|
146 | 146 | 'tags/v0.1', 'tags/v0.2', 'tags/v0.3', 'tags/v0.5'] |
|
147 | assert repo.tags.keys() == expected_tags | |
|
147 | assert list(repo.tags.keys()) == expected_tags |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -23,6 +22,7 b' import time' | |||
|
23 | 22 | |
|
24 | 23 | import pytest |
|
25 | 24 | |
|
25 | from rhodecode.lib.str_utils import safe_bytes | |
|
26 | 26 | from rhodecode.lib.vcs.backends.base import ( |
|
27 | 27 | CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit) |
|
28 | 28 | from rhodecode.lib.vcs.exceptions import ( |
@@ -62,7 +62,8 b' class TestCommitsInNonEmptyRepo(BackendT' | |||
|
62 | 62 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
63 | 63 | 'date': start_date + datetime.timedelta(hours=12 * x), |
|
64 | 64 | 'added': [ |
|
65 | FileNode('file_%d.txt' % x,
|
|
65 | FileNode(b'file_%d.txt' % x, | |
|
66 | content=b'Foobar %d' % x), | |
|
66 | 67 | ], |
|
67 | 68 | } |
|
68 | 69 | |
@@ -72,42 +73,39 b' class TestCommitsInNonEmptyRepo(BackendT' | |||
|
72 | 73 | |
|
73 | 74 | @pytest.mark.backends("git", "hg") |
|
74 | 75 | def test_new_branch(self): |
|
75 | self.imc.add(FileNode('docs/index.txt', | |
|
76 | content='Documentation\n')) | |
|
76 | self.imc.add(FileNode(b'docs/index.txt', content=b'Documentation\n')) | |
|
77 | 77 | foobar_tip = self.imc.commit( |
|
78 | message=
|
|
79 | author=
|
|
78 | message='New branch: foobar', | |
|
79 | author='joe <joe@rhodecode.com>', | |
|
80 | 80 | branch='foobar', |
|
81 | 81 | ) |
|
82 | 82 | assert 'foobar' in self.repo.branches |
|
83 | 83 | assert foobar_tip.branch == 'foobar' |
|
84 | 84 | # 'foobar' should be the only branch that contains the new commit |
|
85 | branch = self.repo.branches.values() | |
|
85 | branch = list(self.repo.branches.values()) | |
|
86 | 86 | assert branch[0] != branch[1] |
|
87 | 87 | |
|
88 | 88 | @pytest.mark.backends("git", "hg") |
|
89 | 89 | def test_new_head_in_default_branch(self): |
|
90 | 90 | tip = self.repo.get_commit() |
|
91 | self.imc.add(FileNode('docs/index.txt', | |
|
92 | content='Documentation\n')) | |
|
91 | self.imc.add(FileNode(b'docs/index.txt', content=b'Documentation\n')) | |
|
93 | 92 | foobar_tip = self.imc.commit( |
|
94 | message=
|
|
95 | author=
|
|
93 | message='New branch: foobar', | |
|
94 | author='joe <joe@rhodecode.com>', | |
|
96 | 95 | branch='foobar', |
|
97 | 96 | parents=[tip], |
|
98 | 97 | ) |
|
99 | self.imc.change(FileNode('docs/index.txt', | |
|
100 | content='Documentation\nand more...\n')) | |
|
98 | self.imc.change(FileNode(b'docs/index.txt', content=b'Documentation\nand more...\n')) | |
|
101 | 99 | newtip = self.imc.commit( |
|
102 | message=
|
|
103 | author=
|
|
100 | message='At default branch', | |
|
101 | author='joe <joe@rhodecode.com>', | |
|
104 | 102 | branch=foobar_tip.branch, |
|
105 | 103 | parents=[foobar_tip], |
|
106 | 104 | ) |
|
107 | 105 | |
|
108 | 106 | newest_tip = self.imc.commit( |
|
109 | message=
|
|
110 | author=
|
|
107 | message='Merged with %s' % foobar_tip.raw_id, | |
|
108 | author='joe <joe@rhodecode.com>', | |
|
111 | 109 | branch=self.backend_class.DEFAULT_BRANCH_NAME, |
|
112 | 110 | parents=[newtip, foobar_tip], |
|
113 | 111 | ) |
@@ -131,31 +129,31 b' class TestCommitsInNonEmptyRepo(BackendT' | |||
|
131 | 129 | TEST_BRANCH = 'docs' |
|
132 | 130 | org_tip = self.repo.get_commit() |
|
133 | 131 | |
|
134 | self.imc.add(FileNode('readme.txt', content='Document\n')) | |
|
132 | self.imc.add(FileNode(b'readme.txt', content=b'Document\n')) | |
|
135 | 133 | initial = self.imc.commit( |
|
136 | message=
|
|
137 | author=
|
|
134 | message='Initial commit', | |
|
135 | author='joe <joe@rhodecode.com>', | |
|
138 | 136 | parents=[org_tip], |
|
139 | 137 | branch=DEFAULT_BRANCH,) |
|
140 | 138 | |
|
141 | self.imc.add(FileNode('newdoc.txt', content='foobar\n')) | |
|
139 | self.imc.add(FileNode(b'newdoc.txt', content=b'foobar\n')) | |
|
142 | 140 | docs_branch_commit1 = self.imc.commit( |
|
143 | message=
|
|
144 | author=
|
|
141 | message='New branch: docs', | |
|
142 | author='joe <joe@rhodecode.com>', | |
|
145 | 143 | parents=[initial], |
|
146 | 144 | branch=TEST_BRANCH,) |
|
147 | 145 | |
|
148 | self.imc.add(FileNode('newdoc2.txt', content='foobar2\n')) | |
|
146 | self.imc.add(FileNode(b'newdoc2.txt', content=b'foobar2\n')) | |
|
149 | 147 | docs_branch_commit2 = self.imc.commit( |
|
150 | message=
|
|
151 | author=
|
|
148 | message='New branch: docs2', | |
|
149 | author='joe <joe@rhodecode.com>', | |
|
152 | 150 | parents=[docs_branch_commit1], |
|
153 | 151 | branch=TEST_BRANCH,) |
|
154 | 152 | |
|
155 | self.imc.add(FileNode('newfile', content='hello world\n')) | |
|
153 | self.imc.add(FileNode(b'newfile', content=b'hello world\n')) | |
|
156 | 154 | self.imc.commit( |
|
157 | message=
|
|
158 | author=
|
|
155 | message='Back in default branch', | |
|
156 | author='joe <joe@rhodecode.com>', | |
|
159 | 157 | parents=[initial], |
|
160 | 158 | branch=DEFAULT_BRANCH,) |
|
161 | 159 | |
@@ -210,11 +208,12 b' class TestCommits(BackendTestMixin):' | |||
|
210 | 208 | start_date = datetime.datetime(2010, 1, 1, 20) |
|
211 | 209 | for x in range(5): |
|
212 | 210 | yield { |
|
213 | 'message':
|
|
214 | 'author':
|
|
211 | 'message': 'Commit %d' % x, | |
|
212 | 'author': 'Joe Doe <joe.doe@example.com>', | |
|
215 | 213 | 'date': start_date + datetime.timedelta(hours=12 * x), |
|
216 | 214 | 'added': [ |
|
217 | FileNode('file_%d.txt' % x,
|
|
215 | FileNode(b'file_%d.txt' % x, | |
|
216 | content=b'Foobar %d' % x) | |
|
218 | 217 | ], |
|
219 | 218 | } |
|
220 | 219 | |
@@ -269,24 +268,24 b' class TestCommits(BackendTestMixin):' | |||
|
269 | 268 | |
|
270 | 269 | def test_author(self): |
|
271 | 270 | tip = self.repo.get_commit() |
|
272 | assert_text_equal(tip.author,
|
|
271 | assert_text_equal(tip.author, 'Joe Doe <joe.doe@example.com>') | |
|
273 | 272 | |
|
274 | 273 | def test_author_name(self): |
|
275 | 274 | tip = self.repo.get_commit() |
|
276 | assert_text_equal(tip.author_name,
|
|
275 | assert_text_equal(tip.author_name, 'Joe Doe') | |
|
277 | 276 | |
|
278 | 277 | def test_author_email(self): |
|
279 | 278 | tip = self.repo.get_commit() |
|
280 | assert_text_equal(tip.author_email,
|
|
279 | assert_text_equal(tip.author_email, 'joe.doe@example.com') | |
|
281 | 280 | |
|
282 | 281 | def test_message(self): |
|
283 | 282 | tip = self.repo.get_commit() |
|
284 | assert_text_equal(tip.message,
|
|
283 | assert_text_equal(tip.message, 'Commit 4') | |
|
285 | 284 | |
|
286 | 285 | def test_diff(self): |
|
287 | 286 | tip = self.repo.get_commit() |
|
288 | 287 | diff = tip.diff() |
|
289 | assert "+Foobar 4" in diff.raw | |
|
288 | assert b"+Foobar 4" in diff.raw.tobytes() | |
|
290 | 289 | |
|
291 | 290 | def test_prev(self): |
|
292 | 291 | tip = self.repo.get_commit() |
@@ -490,8 +489,8 b' class TestCommits(BackendTestMixin):' | |||
|
490 | 489 | assert commit2 == commit2 |
|
491 | 490 | assert commit1 != commit2 |
|
492 | 491 | assert commit2 != commit1 |
|
493 | assert commit1
|
|
494 | assert None
|
|
492 | assert commit1 is not None | |
|
493 | assert commit2 is not None | |
|
495 | 494 | assert 1 != commit1 |
|
496 | 495 | assert 'string' != commit1 |
|
497 | 496 | |
@@ -514,53 +513,49 b' class TestCommitsChanges(BackendTestMixi' | |||
|
514 | 513 | def _get_commits(cls): |
|
515 | 514 | return [ |
|
516 | 515 | { |
|
517 | 'message':
|
|
518 | 'author':
|
|
516 | 'message': 'Initial', | |
|
517 | 'author': 'Joe Doe <joe.doe@example.com>', | |
|
519 | 518 | 'date': datetime.datetime(2010, 1, 1, 20), |
|
520 | 519 | 'added': [ |
|
521 | FileNode('foo/bar', content='foo'), | |
|
522 | FileNode('foo/baΕ', content='foo'), | |
|
523 | FileNode('foobar', content='foo'), | |
|
524 | FileNode('qwe', content='foo'), | |
|
520 | FileNode(b'foo/bar', content=b'foo'), | |
|
521 | FileNode(safe_bytes('foo/baΕ'), content=b'foo'), | |
|
522 | FileNode(b'foobar', content=b'foo'), | |
|
523 | FileNode(b'qwe', content=b'foo'), | |
|
525 | 524 | ], |
|
526 | 525 | }, |
|
527 | 526 | { |
|
528 | 'message':
|
|
529 | 'author':
|
|
527 | 'message': 'Massive changes', | |
|
528 | 'author': 'Joe Doe <joe.doe@example.com>', | |
|
530 | 529 | 'date': datetime.datetime(2010, 1, 1, 22), |
|
531 | 'added': [FileNode('fallout', content='War never changes')], | |
|
530 | 'added': [FileNode(b'fallout', content=b'War never changes')], | |
|
532 | 531 | 'changed': [ |
|
533 | FileNode('foo/bar', content='baz'), | |
|
534 | FileNode('foobar', content='baz'), | |
|
532 | FileNode(b'foo/bar', content=b'baz'), | |
|
533 | FileNode(b'foobar', content=b'baz'), | |
|
535 | 534 | ], |
|
536 | 'removed': [FileNode('qwe')], | |
|
535 | 'removed': [FileNode(b'qwe')], | |
|
537 | 536 | }, |
|
538 | 537 | ] |
|
539 | 538 | |
|
540 | 539 | def test_initial_commit(self, local_dt_to_utc): |
|
541 | 540 | commit = self.repo.get_commit(commit_idx=0) |
|
542 | assert set(commit.added) ==
|
|
541 | assert set(commit.added) == { | |
|
543 | 542 | commit.get_node('foo/bar'), |
|
544 | 543 | commit.get_node('foo/baΕ'), |
|
545 | 544 | commit.get_node('foobar'), |
|
546 | commit.get_node('qwe')
|
|
547 |
|
|
|
545 | commit.get_node('qwe') | |
|
546 | } | |
|
548 | 547 | assert set(commit.changed) == set() |
|
549 | 548 | assert set(commit.removed) == set() |
|
550 | assert set(commit.affected_files) ==
|
|
551 | ['foo/bar', 'foo/baΕ', 'foobar', 'qwe']) | |
|
549 | assert set(commit.affected_files) == {'foo/bar', 'foo/baΕ', 'foobar', 'qwe'} | |
|
552 | 550 | assert commit.date == local_dt_to_utc( |
|
553 | 551 | datetime.datetime(2010, 1, 1, 20, 0)) |
|
554 | 552 | |
|
555 | 553 | def test_head_added(self): |
|
556 | 554 | commit = self.repo.get_commit() |
|
557 | 555 | assert isinstance(commit.added, AddedFileNodesGenerator) |
|
558 | assert set(commit.added) ==
|
|
556 | assert set(commit.added) == {commit.get_node('fallout')} | |
|
559 | 557 | assert isinstance(commit.changed, ChangedFileNodesGenerator) |
|
560 | assert set(commit.changed) ==
|
|
561 | commit.get_node('foo/bar'), | |
|
562 | commit.get_node('foobar'), | |
|
563 | ]) | |
|
558 | assert set(commit.changed) == {commit.get_node('foo/bar'), commit.get_node('foobar')} | |
|
564 | 559 | assert isinstance(commit.removed, RemovedFileNodesGenerator) |
|
565 | 560 | assert len(commit.removed) == 1 |
|
566 | 561 | assert list(commit.removed)[0].path == 'qwe' |
@@ -572,7 +567,7 b' class TestCommitsChanges(BackendTestMixi' | |||
|
572 | 567 | def test_get_filemode_non_ascii(self): |
|
573 | 568 | commit = self.repo.get_commit() |
|
574 | 569 | assert FILEMODE_DEFAULT == commit.get_file_mode('foo/baΕ') |
|
575 | assert FILEMODE_DEFAULT == commit.get_file_mode(
|
|
570 | assert FILEMODE_DEFAULT == commit.get_file_mode('foo/baΕ') | |
|
576 | 571 | |
|
577 | 572 | def test_get_path_history(self): |
|
578 | 573 | commit = self.repo.get_commit() |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -34,6 +33,7 b' class TestGetDiffValidation:' | |||
|
34 | 33 | |
|
35 | 34 | def test_raises_if_commits_not_of_this_repository(self, vcsbackend): |
|
36 | 35 | repo = vcsbackend.repo |
|
36 | ||
|
37 | 37 | target_repo = vcsbackend.create_repo(number_of_commits=1) |
|
38 | 38 | repo_commit = repo[0] |
|
39 | 39 | wrong_commit = target_repo[0] |
@@ -81,8 +81,8 b' class TestRepositoryGetDiff(BackendTestM' | |||
|
81 | 81 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
82 | 82 | 'date': datetime.datetime(2010, 1, 1, 20), |
|
83 | 83 | 'added': [ |
|
84 | FileNode('foobar', content='foobar'), | |
|
85 | FileNode('foobar2', content='foobar2'), | |
|
84 | FileNode(b'foobar', content=b'foobar'), | |
|
85 | FileNode(b'foobar2', content=b'foobar2'), | |
|
86 | 86 | ], |
|
87 | 87 | }, |
|
88 | 88 | { |
@@ -90,10 +90,10 b' class TestRepositoryGetDiff(BackendTestM' | |||
|
90 | 90 | 'author': 'Jane Doe <jane.doe@example.com>', |
|
91 | 91 | 'date': datetime.datetime(2010, 1, 1, 21), |
|
92 | 92 | 'added': [ |
|
93 | FileNode('foobar3', content='foobar3'), | |
|
93 | FileNode(b'foobar3', content=b'foobar3'), | |
|
94 | 94 | ], |
|
95 | 95 | 'changed': [ |
|
96 | FileNode('foobar', 'FOOBAR'), | |
|
96 | FileNode(b'foobar', b'FOOBAR'), | |
|
97 | 97 | ], |
|
98 | 98 | }, |
|
99 | 99 | { |
@@ -101,16 +101,16 b' class TestRepositoryGetDiff(BackendTestM' | |||
|
101 | 101 | 'author': 'Jane Doe <jane.doe@example.com>', |
|
102 | 102 | 'date': datetime.datetime(2010, 1, 1, 22), |
|
103 | 103 | 'changed': [ |
|
104 | FileNode('foobar3', content='FOOBAR\nFOOBAR\nFOOBAR\n'), | |
|
104 | FileNode(b'foobar3', content=b'FOOBAR\nFOOBAR\nFOOBAR\n'), | |
|
105 | 105 | ], |
|
106 | 'removed': [FileNode('foobar')], | |
|
106 | 'removed': [FileNode(b'foobar')], | |
|
107 | 107 | }, |
|
108 | 108 | { |
|
109 | 109 | 'message': 'Whitespace changes', |
|
110 | 110 | 'author': 'Jane Doe <jane.doe@example.com>', |
|
111 | 111 | 'date': datetime.datetime(2010, 1, 1, 23), |
|
112 | 112 | 'changed': [ |
|
113 | FileNode('foobar3', content='FOOBAR \nFOOBAR\nFOOBAR\n'), | |
|
113 | FileNode(b'foobar3', content=b'FOOBAR \nFOOBAR\nFOOBAR\n'), | |
|
114 | 114 | ], |
|
115 | 115 | }, |
|
116 | 116 | ] |
@@ -119,39 +119,39 b' class TestRepositoryGetDiff(BackendTestM' | |||
|
119 | 119 | def test_initial_commit_diff(self): |
|
120 | 120 | initial_commit = self.repo[0] |
|
121 | 121 | diff = self.repo.get_diff(self.repo.EMPTY_COMMIT, initial_commit) |
|
122 | assert diff.raw == self.first_commit_diffs[self.repo.alias] | |
|
122 | assert diff.raw.tobytes() == self.first_commit_diffs[self.repo.alias] | |
|
123 | 123 | |
|
124 | 124 | def test_second_commit_diff(self): |
|
125 | 125 | diff = self.repo.get_diff(self.repo[0], self.repo[1]) |
|
126 | assert diff.raw == self.second_commit_diffs[self.repo.alias] | |
|
126 | assert diff.raw.tobytes() == self.second_commit_diffs[self.repo.alias] | |
|
127 | 127 | |
|
128 | 128 | def test_third_commit_diff(self): |
|
129 | 129 | diff = self.repo.get_diff(self.repo[1], self.repo[2]) |
|
130 | assert diff.raw == self.third_commit_diffs[self.repo.alias] | |
|
130 | assert diff.raw.tobytes() == self.third_commit_diffs[self.repo.alias] | |
|
131 | 131 | |
|
132 | 132 | def test_ignore_whitespace(self): |
|
133 | 133 | diff = self.repo.get_diff( |
|
134 | 134 | self.repo[2], self.repo[3], ignore_whitespace=True) |
|
135 | assert '@@' not in diff.raw | |
|
135 | assert b'@@' not in diff.raw.tobytes() | |
|
136 | 136 | |
|
137 | 137 | def test_only_one_file(self): |
|
138 | 138 | diff = self.repo.get_diff( |
|
139 | 139 | self.repo.EMPTY_COMMIT, self.repo[0], path='foobar') |
|
140 | assert 'foobar2' not in diff.raw | |
|
140 | assert b'foobar2' not in diff.raw.tobytes() | |
|
141 | 141 | |
|
142 | 142 | def test_context_parameter(self): |
|
143 | 143 | first_commit = self.repo.get_commit(commit_idx=0) |
|
144 | 144 | diff = self.repo.get_diff( |
|
145 | 145 | self.repo.EMPTY_COMMIT, first_commit, context=2) |
|
146 | assert diff.raw == self.first_commit_diffs[self.repo.alias] | |
|
146 | assert diff.raw.tobytes() == self.first_commit_diffs[self.repo.alias] | |
|
147 | 147 | |
|
148 | 148 | def test_context_only_one_file(self): |
|
149 | 149 | diff = self.repo.get_diff( |
|
150 | 150 | self.repo.EMPTY_COMMIT, self.repo[0], path='foobar', context=2) |
|
151 | assert diff.raw == self.first_commit_one_file[self.repo.alias] | |
|
151 | assert diff.raw.tobytes() == self.first_commit_one_file[self.repo.alias] | |
|
152 | 152 | |
|
153 | 153 | first_commit_diffs = { |
|
154 | 'git': r"""diff --git a/foobar b/foobar | |
|
154 | 'git': br"""diff --git a/foobar b/foobar | |
|
155 | 155 | new file mode 100644 |
|
156 | 156 | index 0000000..f6ea049 |
|
157 | 157 | --- /dev/null |
@@ -168,7 +168,7 b' index 0000000..e8c9d6b' | |||
|
168 | 168 | +foobar2 |
|
169 | 169 | \ No newline at end of file |
|
170 | 170 | """, |
|
171 | 'hg': r"""diff --git a/foobar b/foobar | |
|
171 | 'hg': br"""diff --git a/foobar b/foobar | |
|
172 | 172 | new file mode 100644 |
|
173 | 173 | --- /dev/null |
|
174 | 174 | +++ b/foobar |
@@ -183,7 +183,7 b' new file mode 100644' | |||
|
183 | 183 | +foobar2 |
|
184 | 184 | \ No newline at end of file |
|
185 | 185 | """, |
|
186 | 'svn': """Index: foobar | |
|
186 | 'svn': b"""Index: foobar | |
|
187 | 187 | =================================================================== |
|
188 | 188 | diff --git a/foobar b/foobar |
|
189 | 189 | new file mode 10644 |
@@ -205,7 +205,7 b' new file mode 10644' | |||
|
205 | 205 | } |
|
206 | 206 | |
|
207 | 207 | second_commit_diffs = { |
|
208 | 'git': r"""diff --git a/foobar b/foobar | |
|
208 | 'git': br"""diff --git a/foobar b/foobar | |
|
209 | 209 | index f6ea049..389865b 100644 |
|
210 | 210 | --- a/foobar |
|
211 | 211 | +++ b/foobar |
@@ -223,7 +223,7 b' index 0000000..c11c37d' | |||
|
223 | 223 | +foobar3 |
|
224 | 224 | \ No newline at end of file |
|
225 | 225 | """, |
|
226 | 'hg': r"""diff --git a/foobar b/foobar | |
|
226 | 'hg': br"""diff --git a/foobar b/foobar | |
|
227 | 227 | --- a/foobar |
|
228 | 228 | +++ b/foobar |
|
229 | 229 | @@ -1,1 +1,1 @@ |
@@ -239,7 +239,7 b' new file mode 100644' | |||
|
239 | 239 | +foobar3 |
|
240 | 240 | \ No newline at end of file |
|
241 | 241 | """, |
|
242 | 'svn': """Index: foobar | |
|
242 | 'svn': b"""Index: foobar | |
|
243 | 243 | =================================================================== |
|
244 | 244 | diff --git a/foobar b/foobar |
|
245 | 245 | --- a/foobar\t(revision 1) |
@@ -262,7 +262,7 b' new file mode 10644' | |||
|
262 | 262 | } |
|
263 | 263 | |
|
264 | 264 | third_commit_diffs = { |
|
265 | 'git': r"""diff --git a/foobar b/foobar | |
|
265 | 'git': br"""diff --git a/foobar b/foobar | |
|
266 | 266 | deleted file mode 100644 |
|
267 | 267 | index 389865b..0000000 |
|
268 | 268 | --- a/foobar |
@@ -281,7 +281,7 b' index c11c37d..f932447 100644' | |||
|
281 | 281 | +FOOBAR |
|
282 | 282 | +FOOBAR |
|
283 | 283 | """, |
|
284 | 'hg': r"""diff --git a/foobar b/foobar | |
|
284 | 'hg': br"""diff --git a/foobar b/foobar | |
|
285 | 285 | deleted file mode 100644 |
|
286 | 286 | --- a/foobar |
|
287 | 287 | +++ /dev/null |
@@ -298,7 +298,7 b' diff --git a/foobar3 b/foobar3' | |||
|
298 | 298 | +FOOBAR |
|
299 | 299 | +FOOBAR |
|
300 | 300 | """, |
|
301 | 'svn': """Index: foobar | |
|
301 | 'svn': b"""Index: foobar | |
|
302 | 302 | =================================================================== |
|
303 | 303 | diff --git a/foobar b/foobar |
|
304 | 304 | deleted file mode 10644 |
@@ -322,7 +322,7 b' diff --git a/foobar3 b/foobar3' | |||
|
322 | 322 | } |
|
323 | 323 | |
|
324 | 324 | first_commit_one_file = { |
|
325 | 'git': r"""diff --git a/foobar b/foobar | |
|
325 | 'git': br"""diff --git a/foobar b/foobar | |
|
326 | 326 | new file mode 100644 |
|
327 | 327 | index 0000000..f6ea049 |
|
328 | 328 | --- /dev/null |
@@ -331,7 +331,7 b' index 0000000..f6ea049' | |||
|
331 | 331 | +foobar |
|
332 | 332 | \ No newline at end of file |
|
333 | 333 | """, |
|
334 | 'hg': r"""diff --git a/foobar b/foobar | |
|
334 | 'hg': br"""diff --git a/foobar b/foobar | |
|
335 | 335 | new file mode 100644 |
|
336 | 336 | --- /dev/null |
|
337 | 337 | +++ b/foobar |
@@ -339,7 +339,7 b' new file mode 100644' | |||
|
339 | 339 | +foobar |
|
340 | 340 | \ No newline at end of file |
|
341 | 341 | """, |
|
342 | 'svn': """Index: foobar | |
|
342 | 'svn': b"""Index: foobar | |
|
343 | 343 | =================================================================== |
|
344 | 344 | diff --git a/foobar b/foobar |
|
345 | 345 | new file mode 10644 |
@@ -363,9 +363,9 b' class TestSvnGetDiff(object):' | |||
|
363 | 363 | commit1 = repo[-2] |
|
364 | 364 | commit2 = repo[-1] |
|
365 | 365 | diff = repo.get_diff(commit1, commit2, path=path, path1=path1) |
|
366 | assert diff.raw == self.expected_diff_v_0_2 | |
|
366 | assert diff.raw.tobytes() == self.expected_diff_v_0_2 | |
|
367 | 367 | |
|
368 | expected_diff_v_0_2 = '''Index: example.py | |
|
368 | expected_diff_v_0_2 = b'''Index: example.py | |
|
369 | 369 | =================================================================== |
|
370 | 370 | diff --git a/example.py b/example.py |
|
371 | 371 | --- a/example.py\t(revision 25) |
@@ -390,7 +390,7 b' diff --git a/example.py b/example.py' | |||
|
390 | 390 | diff = repo.get_diff(repo[0], repo[1]) |
|
391 | 391 | # TODO: johbo: Think about supporting svn directory nodes |
|
392 | 392 | # a little bit better, source is here like a file |
|
393 | expected_diff = """Index: source | |
|
393 | expected_diff = b"""Index: source | |
|
394 | 394 | =================================================================== |
|
395 | 395 | diff --git a/source b/source |
|
396 | 396 | deleted file mode 10644 |
@@ -403,7 +403,7 b' new file mode 10644' | |||
|
403 | 403 | --- /dev/null\t(revision 0) |
|
404 | 404 | +++ b/target/file\t(revision 2) |
|
405 | 405 | """ |
|
406 | assert diff.raw == expected_diff | |
|
406 | assert diff.raw.tobytes() == expected_diff | |
|
407 | 407 | |
|
408 | 408 | |
|
409 | 409 | @pytest.mark.usefixtures("vcs_repository_support") |
@@ -412,8 +412,8 b' class TestGetDiffBinary(BackendTestMixin' | |||
|
412 | 412 | recreate_repo_per_test = False |
|
413 | 413 | |
|
414 | 414 | # Note: "Fake" PNG files, has the correct magic as prefix |
|
415 | BINARY = """\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00""" | |
|
416 | BINARY2 = """\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x01\x00\x00""" | |
|
415 | BINARY = b"""\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00""" | |
|
416 | BINARY2 = b"""\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x01\x00\x00""" | |
|
417 | 417 | |
|
418 | 418 | @staticmethod |
|
419 | 419 | def _get_commits(): |
@@ -423,21 +423,21 b' class TestGetDiffBinary(BackendTestMixin' | |||
|
423 | 423 | 'author': 'Joe Doe <joe.deo@example.com>', |
|
424 | 424 | 'date': datetime.datetime(2010, 1, 1, 20), |
|
425 | 425 | 'added': [ |
|
426 | FileNode('image.png', content=TestGetDiffBinary.BINARY), | |
|
426 | FileNode(b'image.png', content=TestGetDiffBinary.BINARY), | |
|
427 | 427 | ]}, |
|
428 | 428 | { |
|
429 | 429 | 'message': 'Modify image.png', |
|
430 | 430 | 'author': 'Joe Doe <joe.deo@example.com>', |
|
431 | 431 | 'date': datetime.datetime(2010, 1, 1, 21), |
|
432 | 432 | 'changed': [ |
|
433 | FileNode('image.png', content=TestGetDiffBinary.BINARY2), | |
|
433 | FileNode(b'image.png', content=TestGetDiffBinary.BINARY2), | |
|
434 | 434 | ]}, |
|
435 | 435 | { |
|
436 | 436 | 'message': 'Remove image.png', |
|
437 | 437 | 'author': 'Joe Doe <joe.deo@example.com>', |
|
438 | 438 | 'date': datetime.datetime(2010, 1, 1, 21), |
|
439 | 439 | 'removed': [ |
|
440 | FileNode('image.png'), | |
|
440 | FileNode(b'image.png'), | |
|
441 | 441 | ]}, |
|
442 | 442 | ] |
|
443 | 443 | return commits |
@@ -446,7 +446,7 b' class TestGetDiffBinary(BackendTestMixin' | |||
|
446 | 446 | diff = self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[0]) |
|
447 | 447 | |
|
448 | 448 | expected = { |
|
449 | 'git': """diff --git a/image.png b/image.png | |
|
449 | 'git': b"""diff --git a/image.png b/image.png | |
|
450 | 450 | new file mode 100644 |
|
451 | 451 | index 0000000000000000000000000000000000000000..28380fd4a25c58be1b68b523ba2a314f4459ee9c |
|
452 | 452 | GIT binary patch |
@@ -457,15 +457,15 b' literal 0' | |||
|
457 | 457 | Hc$@<O00001 |
|
458 | 458 | |
|
459 | 459 | """, |
|
460 | 'hg': """diff --git a/image.png b/image.png | |
|
460 | 'hg': b"""diff --git a/image.png b/image.png | |
|
461 | 461 | new file mode 100644 |
|
462 | index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..28380fd4a25c58be1b68b523ba2a314f4459ee9c | |
|
462 | index 0000000000000000000000000000000000000000..28380fd4a25c58be1b68b523ba2a314f4459ee9c | |
|
463 | 463 | GIT binary patch |
|
464 | 464 | literal 19 |
|
465 | 465 | Yc%17D@N?(olHy`uVBq!ia0vp^03%2O-T(jq |
|
466 | 466 | |
|
467 | 467 | """, |
|
468 | 'svn': """=================================================================== | |
|
468 | 'svn': b"""=================================================================== | |
|
469 | 469 | Cannot display: file marked as a binary type. |
|
470 | 470 | svn:mime-type = application/octet-stream |
|
471 | 471 | Index: image.png |
@@ -476,13 +476,13 b' new file mode 10644' | |||
|
476 | 476 | +++ b/image.png\t(revision 1) |
|
477 | 477 | """, |
|
478 | 478 | } |
|
479 | assert diff.raw == expected[self.repo.alias] | |
|
479 | assert diff.raw.tobytes() == expected[self.repo.alias] | |
|
480 | 480 | |
|
481 | 481 | def test_update_a_binary_file(self): |
|
482 | 482 | diff = self.repo.get_diff(self.repo[0], self.repo[1]) |
|
483 | 483 | |
|
484 | 484 | expected = { |
|
485 | 'git': """diff --git a/image.png b/image.png | |
|
485 | 'git': b"""diff --git a/image.png b/image.png | |
|
486 | 486 | index 28380fd4a25c58be1b68b523ba2a314f4459ee9c..1008a77cd372386a1c24fbd96019333f67ad0065 100644 |
|
487 | 487 | GIT binary patch |
|
488 | 488 | literal 19 |
@@ -492,14 +492,14 b' literal 19' | |||
|
492 | 492 | Yc%17D@N?(olHy`uVBq!ia0vp^03%2O-T(jq |
|
493 | 493 | |
|
494 | 494 | """, |
|
495 | 'hg': """diff --git a/image.png b/image.png | |
|
495 | 'hg': b"""diff --git a/image.png b/image.png | |
|
496 | 496 | index 28380fd4a25c58be1b68b523ba2a314f4459ee9c..1008a77cd372386a1c24fbd96019333f67ad0065 |
|
497 | 497 | GIT binary patch |
|
498 | 498 | literal 19 |
|
499 | 499 | ac%17D@N?(olHy`uVBq!ia0y~$U;qFkO9I~j |
|
500 | 500 | |
|
501 | 501 | """, |
|
502 | 'svn': """=================================================================== | |
|
502 | 'svn': b"""=================================================================== | |
|
503 | 503 | Cannot display: file marked as a binary type. |
|
504 | 504 | svn:mime-type = application/octet-stream |
|
505 | 505 | Index: image.png |
@@ -509,13 +509,13 b' diff --git a/image.png b/image.png' | |||
|
509 | 509 | +++ b/image.png\t(revision 2) |
|
510 | 510 | """, |
|
511 | 511 | } |
|
512 | assert diff.raw == expected[self.repo.alias] | |
|
512 | assert diff.raw.tobytes() == expected[self.repo.alias] | |
|
513 | 513 | |
|
514 | 514 | def test_remove_a_binary_file(self): |
|
515 | 515 | diff = self.repo.get_diff(self.repo[1], self.repo[2]) |
|
516 | 516 | |
|
517 | 517 | expected = { |
|
518 | 'git': """diff --git a/image.png b/image.png | |
|
518 | 'git': b"""diff --git a/image.png b/image.png | |
|
519 | 519 | deleted file mode 100644 |
|
520 | 520 | index 1008a77cd372386a1c24fbd96019333f67ad0065..0000000000000000000000000000000000000000 |
|
521 | 521 | GIT binary patch |
@@ -526,15 +526,15 b' literal 19' | |||
|
526 | 526 | ac%17D@N?(olHy`uVBq!ia0y~$U;qFkO9I~j |
|
527 | 527 | |
|
528 | 528 | """, |
|
529 | 'hg': """diff --git a/image.png b/image.png | |
|
529 | 'hg': b"""diff --git a/image.png b/image.png | |
|
530 | 530 | deleted file mode 100644 |
|
531 | index 1008a77cd372386a1c24fbd96019333f67ad0065..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 | |
|
531 | index 1008a77cd372386a1c24fbd96019333f67ad0065..0000000000000000000000000000000000000000 | |
|
532 | 532 | GIT binary patch |
|
533 | 533 | literal 0 |
|
534 | 534 | Hc$@<O00001 |
|
535 | 535 | |
|
536 | 536 | """, |
|
537 | 'svn': """=================================================================== | |
|
537 | 'svn': b"""=================================================================== | |
|
538 | 538 | Cannot display: file marked as a binary type. |
|
539 | 539 | svn:mime-type = application/octet-stream |
|
540 | 540 | Index: image.png |
@@ -545,4 +545,4 b' deleted file mode 10644' | |||
|
545 | 545 | +++ /dev/null\t(revision 3) |
|
546 | 546 | """, |
|
547 | 547 | } |
|
548 | assert diff.raw == expected[self.repo.alias] | |
|
548 | assert diff.raw.tobytes() == expected[self.repo.alias] |
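
Note on the assertions above: the expected diffs become bytes literals and diff.raw is compared through .tobytes(), which suggests the raw diff is now exposed as a buffer (memoryview-like) over bytes rather than as a str. A small, generic Python illustration of that comparison, not taken from the RhodeCode code itself:

    # plain Python sketch; 'raw' stands in for diff.raw
    raw = memoryview(b'diff --git a/image.png b/image.png\n')
    expected = b'diff --git a/image.png b/image.png\n'
    assert raw.tobytes() == expected   # explicit conversion, as in the tests above
    assert bytes(raw) == expected      # equivalent spelling
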
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -21,6 +20,8 b'' | |||
|
21 | 20 | import datetime |
|
22 | 21 | |
|
23 | 22 | import pytest |
|
23 | ||
|
24 | from rhodecode.lib.str_utils import safe_bytes | |
|
24 | 25 | from rhodecode.lib.vcs.nodes import FileNode |
|
25 | 26 | from rhodecode.tests.vcs.conftest import BackendTestMixin |
|
26 | 27 | |
@@ -28,13 +29,13 b' from rhodecode.tests.vcs.conftest import' | |||
|
28 | 29 | @pytest.mark.usefixtures("vcs_repository_support") |
|
29 | 30 | class TestFileNodeUnicodePath(BackendTestMixin): |
|
30 | 31 | |
|
31 | fname = 'ąśðąęłąć.txt' |
|
32 | fname = safe_bytes('ąśðąęłąć.txt') |
|
32 | 33 | ufname = fname |
|
33 | 34 | |
|
34 | 35 | @classmethod |
|
35 | 36 | def _get_commits(cls): |
|
36 | 37 | nodes = [ |
|
37 | FileNode(cls.fname, content='Foobar'), | |
|
38 | FileNode(cls.fname, content=b'Foobar'), | |
|
38 | 39 | ] |
|
39 | 40 | |
|
40 | 41 | commits = [ |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -39,7 +38,7 b' class TestGetitem(BackendTestMixin):' | |||
|
39 | 38 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
40 | 39 | 'date': start_date + datetime.timedelta(hours=12 * x), |
|
41 | 40 | 'added': [ |
|
42 | FileNode('file_%d.txt' % x, content='Foobar %d' % x), | |
|
41 | FileNode(b'file_%d.txt' % x, content='Foobar %d' % x), | |
|
43 | 42 | ], |
|
44 | 43 | } |
|
45 | 44 |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -36,7 +35,7 b' class TestGetslice(BackendTestMixin):' | |||
|
36 | 35 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
37 | 36 | 'date': start_date + datetime.timedelta(hours=12 * x), |
|
38 | 37 | 'added': [ |
|
39 | FileNode('file_%d.txt' % x, content='Foobar %d' % x), | |
|
38 | FileNode(b'file_%d.txt' % x, content='Foobar %d' % x), | |
|
40 | 39 | ], |
|
41 | 40 | } |
|
42 | 41 |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -41,6 +40,29 b' from rhodecode.tests.vcs.conftest import' | |||
|
41 | 40 | pytestmark = pytest.mark.backends("git") |
|
42 | 41 | |
|
43 | 42 | |
|
43 | DIFF_FROM_REMOTE = br"""diff --git a/foobar b/foobar | |
|
44 | new file mode 100644 | |
|
45 | index 0000000..f6ea049 | |
|
46 | --- /dev/null | |
|
47 | +++ b/foobar | |
|
48 | @@ -0,0 +1 @@ | |
|
49 | +foobar | |
|
50 | \ No newline at end of file | |
|
51 | diff --git a/foobar2 b/foobar2 | |
|
52 | new file mode 100644 | |
|
53 | index 0000000..e8c9d6b | |
|
54 | --- /dev/null | |
|
55 | +++ b/foobar2 | |
|
56 | @@ -0,0 +1 @@ | |
|
57 | +foobar2 | |
|
58 | \ No newline at end of file | |
|
59 | """ | |
|
60 | ||
|
61 | ||
|
62 | def callable_get_diff(*args, **kwargs): | |
|
63 | return DIFF_FROM_REMOTE | |
|
64 | ||
|
65 | ||
|
44 | 66 | class TestGitRepository(object): |
|
45 | 67 | |
|
46 | 68 | @pytest.fixture(autouse=True) |
@@ -253,7 +275,7 b' TODO: To be written...' | |||
|
253 | 275 | """ |
|
254 | 276 | node = commit10.get_node('README.rst') |
|
255 | 277 | assert node.kind == NodeKind.FILE |
|
256 | assert node.content == README | |
|
278 | assert node.str_content == README | |
|
257 | 279 | |
|
258 | 280 | def test_head(self): |
|
259 | 281 | assert self.repo.head == self.repo.get_commit().raw_id |
@@ -420,7 +442,7 b' TODO: To be written...' | |||
|
420 | 442 | |
|
421 | 443 | def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git): |
|
422 | 444 | target_repo = vcsbackend_git.create_repo(number_of_commits=1) |
|
423 | vcsbackend_git.ensure_file('README', 'I will conflict with you!!!') | |
|
445 | vcsbackend_git.ensure_file(b'README', b'I will conflict with you!!!') | |
|
424 | 446 | |
|
425 | 447 | target_repo._local_fetch(self.repo.path, 'master') |
|
426 | 448 | with pytest.raises(RepositoryError): |
@@ -971,11 +993,12 b' class TestGitCommit(object):' | |||
|
971 | 993 | for commit in self.repo: |
|
972 | 994 | assert type(commit.author) == str |
|
973 | 995 | |
|
974 | def test_repo_files_content_ |
|
|
996 | def test_repo_files_content_types(self): | |
|
975 | 997 | commit = self.repo.get_commit() |
|
976 | 998 | for node in commit.get_node('/'): |
|
977 | 999 | if node.is_file(): |
|
978 | assert type(node.content) == |
|
|
1000 | assert type(node.content) == bytes | |
|
1001 | assert type(node.str_content) == str | |
|
979 | 1002 | |
|
980 | 1003 | def test_wrong_path(self): |
|
981 | 1004 | # There is 'setup.py' in the root dir but not there: |
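
As the hunk above shows, node.content now carries the raw bytes of a file while node.str_content carries its text form. A minimal sketch of the split these assertions rely on (the UTF-8 decode is an assumption, not something this changeset states):

    raw = b'# README\n'            # the kind of value node.content now returns
    text = raw.decode('utf-8')     # assumption: node.str_content is roughly this decode
    assert isinstance(raw, bytes) and isinstance(text, str)
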
@@ -1041,11 +1064,9 b' class TestGitSpecificWithRepo(BackendTes' | |||
|
1041 | 1064 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
1042 | 1065 | 'date': datetime.datetime(2010, 1, 1, 20), |
|
1043 | 1066 | 'added': [ |
|
1044 | FileNode('foobar/static/js/admin/base.js', content='base'), | |
|
1045 | FileNode( | |
|
1046 | 'foobar/static/admin', content='admin', |
|
|
|
1047 | mode=0o120000), # this is a link | |
|
1048 | FileNode('foo', content='foo'), | |
|
1067 | FileNode(b'foobar/static/js/admin/base.js', content=b'base'), | |
|
1068 | FileNode(b'foobar/static/admin', content=b'admin', mode=0o120000), # this is a link | |
|
1069 | FileNode(b'foo', content=b'foo'), | |
|
1049 | 1070 | ], |
|
1050 | 1071 | }, |
|
1051 | 1072 | { |
@@ -1053,7 +1074,7 b' class TestGitSpecificWithRepo(BackendTes' | |||
|
1053 | 1074 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
1054 | 1075 | 'date': datetime.datetime(2010, 1, 1, 22), |
|
1055 | 1076 | 'added': [ |
|
1056 | FileNode('foo2', content='foo2'), | |
|
1077 | FileNode(b'foo2', content=b'foo2'), | |
|
1057 | 1078 | ], |
|
1058 | 1079 | }, |
|
1059 | 1080 | ] |
@@ -1061,17 +1082,18 b' class TestGitSpecificWithRepo(BackendTes' | |||
|
1061 | 1082 | def test_paths_slow_traversing(self): |
|
1062 | 1083 | commit = self.repo.get_commit() |
|
1063 | 1084 | assert commit.get_node('foobar').get_node('static').get_node('js')\ |
|
1064 | .get_node('admin').get_node('base.js').content == 'base' | |
|
1085 | .get_node('admin').get_node('base.js').content == b'base' | |
|
1065 | 1086 | |
|
1066 | 1087 | def test_paths_fast_traversing(self): |
|
1067 | 1088 | commit = self.repo.get_commit() |
|
1068 | assert commit.get_node('foobar/static/js/admin/base.js').content == 'base' | |
|
1089 | assert commit.get_node('foobar/static/js/admin/base.js').content == b'base' | |
|
1069 | 1090 | |
|
1070 | 1091 | def test_get_diff_runs_git_command_with_hashes(self): |
|
1071 | 1092 | comm1 = self.repo[0] |
|
1072 | 1093 | comm2 = self.repo[1] |
|
1073 | 1094 | |
|
1074 | with mock.patch.object(self.repo, '_remote') as remote_mock: | |
|
1095 | with mock.patch.object(self.repo, '_remote', return_value=mock.Mock()) as remote_mock: | |
|
1096 | remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff) | |
|
1075 | 1097 | self.repo.get_diff(comm1, comm2) |
|
1076 | 1098 | |
|
1077 | 1099 | remote_mock.diff.assert_called_once_with( |
@@ -1080,8 +1102,11 b' class TestGitSpecificWithRepo(BackendTes' | |||
|
1080 | 1102 | |
|
1081 | 1103 | def test_get_diff_runs_git_command_with_str_hashes(self): |
|
1082 | 1104 | comm2 = self.repo[1] |
|
1083 | with mock.patch.object(self.repo, '_remote') as remote_mock: | |
|
1105 | ||
|
1106 | with mock.patch.object(self.repo, '_remote', return_value=mock.Mock()) as remote_mock: | |
|
1107 | remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff) | |
|
1084 | 1108 | self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2) |
|
1109 | ||
|
1085 | 1110 | remote_mock.diff.assert_called_once_with( |
|
1086 | 1111 | self.repo.EMPTY_COMMIT.raw_id, comm2.raw_id, |
|
1087 | 1112 | file_filter=None, opt_ignorews=False, context=3) |
@@ -1089,8 +1114,11 b' class TestGitSpecificWithRepo(BackendTes' | |||
|
1089 | 1114 | def test_get_diff_runs_git_command_with_path_if_its_given(self): |
|
1090 | 1115 | comm1 = self.repo[0] |
|
1091 | 1116 | comm2 = self.repo[1] |
|
1092 | with mock.patch.object(self.repo, '_remote') as remote_mock: | |
|
1117 | ||
|
1118 | with mock.patch.object(self.repo, '_remote', return_value=mock.Mock()) as remote_mock: | |
|
1119 | remote_mock.diff = mock.MagicMock(side_effect=callable_get_diff) | |
|
1093 | 1120 | self.repo.get_diff(comm1, comm2, 'foo') |
|
1121 | ||
|
1094 | 1122 | remote_mock.diff.assert_called_once_with( |
|
1095 | 1123 | self.repo._lookup_commit(0), comm2.raw_id, |
|
1096 | 1124 | file_filter='foo', opt_ignorews=False, context=3) |
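
The three tests above now patch _remote with an explicit stub whose diff call returns the bytes defined in DIFF_FROM_REMOTE, instead of relying on an implicit MagicMock return value. A self-contained sketch of the same mocking pattern (names here are illustrative, not RhodeCode APIs; from unittest import mock works the same as the standalone mock package used above):

    import mock

    FAKE_DIFF = b'diff --git a/foobar b/foobar\n'

    def fake_diff(*args, **kwargs):
        return FAKE_DIFF

    class FakeRepo:
        _remote = None  # attribute to be patched, as in the tests above

    repo = FakeRepo()
    with mock.patch.object(repo, '_remote', return_value=mock.Mock()) as remote_mock:
        remote_mock.diff = mock.MagicMock(side_effect=fake_diff)
        raw = repo._remote.diff('a' * 40, 'b' * 40, file_filter=None)

    assert raw == FAKE_DIFF
    remote_mock.diff.assert_called_once()
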
@@ -1107,9 +1135,9 b' class TestGitRegression(BackendTestMixin' | |||
|
1107 | 1135 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
1108 | 1136 | 'date': datetime.datetime(2010, 1, 1, 20), |
|
1109 | 1137 | 'added': [ |
|
1110 | FileNode('bot/__init__.py', content='base'), | |
|
1111 | FileNode('bot/templates/404.html', content='base'), | |
|
1112 | FileNode('bot/templates/500.html', content='base'), | |
|
1138 | FileNode(b'bot/__init__.py', content=b'base'), | |
|
1139 | FileNode(b'bot/templates/404.html', content=b'base'), | |
|
1140 | FileNode(b'bot/templates/500.html', content=b'base'), | |
|
1113 | 1141 | ], |
|
1114 | 1142 | }, |
|
1115 | 1143 | { |
@@ -1117,14 +1145,12 b' class TestGitRegression(BackendTestMixin' | |||
|
1117 | 1145 | 'author': 'Joe Doe <joe.doe@example.com>', |
|
1118 | 1146 | 'date': datetime.datetime(2010, 1, 1, 22), |
|
1119 | 1147 | 'added': [ |
|
1120 | FileNode('bot/build/migrations/1.py', content='foo2'), | |
|
1121 | FileNode('bot/build/migrations/2.py', content='foo2'), | |
|
1122 | FileNode( | |
|
1123 | 'bot/build/static/templates/f.html', content='foo2'), |
|
|
|
1124 | FileNode( | |
|
1125 | 'bot/build/static/templates/f1.html', content='foo2'), |
|
|
|
1126 | FileNode('bot/build/templates/err.html', content='foo2'), | |
|
1127 | FileNode('bot/build/templates/err2.html', content='foo2'), | |
|
1148 | FileNode(b'bot/build/migrations/1.py', content=b'foo2'), | |
|
1149 | FileNode(b'bot/build/migrations/2.py', content=b'foo2'), | |
|
1150 | FileNode(b'bot/build/static/templates/f.html', content=b'foo2'), | |
|
1151 | FileNode(b'bot/build/static/templates/f1.html', content=b'foo2'), | |
|
1152 | FileNode(b'bot/build/templates/err.html', content=b'foo2'), | |
|
1153 | FileNode(b'bot/build/templates/err2.html', content=b'foo2'), | |
|
1128 | 1154 | ], |
|
1129 | 1155 | }, |
|
1130 | 1156 | ] |
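
Across the hunks above, every FileNode is now constructed with a bytes path and bytes content. A minimal sketch of the new calling convention as these tests use it (behaviour beyond what the tests themselves assert is an assumption):

    from rhodecode.lib.vcs.nodes import FileNode

    node = FileNode(b'bot/templates/404.html', content=b'base')
    assert node.content == b'base'   # asserted the same way in the tests above
    # assumption: the str form of the path is still what commit.get_node() takes
    path_for_lookup = 'bot/templates/404.html'
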
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -23,6 +22,7 b' import os' | |||
|
23 | 22 | import mock |
|
24 | 23 | import pytest |
|
25 | 24 | |
|
25 | from rhodecode.lib.str_utils import safe_bytes | |
|
26 | 26 | from rhodecode.lib.utils import make_db_config |
|
27 | 27 | from rhodecode.lib.vcs import backends |
|
28 | 28 | from rhodecode.lib.vcs.backends.base import ( |
@@ -84,23 +84,23 b' class TestMercurialRepository(object):' | |||
|
84 | 84 | |
|
85 | 85 | def test_unicode_path_repo(self): |
|
86 | 86 | with pytest.raises(VCSError): |
|
87 | MercurialRepository(u'iShouldFail') |
|
|
87 | MercurialRepository('iShouldFail') | |
|
88 | 88 | |
|
89 | 89 | def test_unicode_commit_id(self): |
|
90 | 90 | with pytest.raises(CommitDoesNotExistError): |
|
91 | self.repo.get_commit(u'unicode-commit-id') |
|
|
91 | self.repo.get_commit('unicode-commit-id') | |
|
92 | 92 | with pytest.raises(CommitDoesNotExistError): |
|
93 | self.repo.get_commit(u'unícøde-spéçial-chäråcter-commit-id') |
|
|
93 | self.repo.get_commit('unícøde-spéçial-chäråcter-commit-id') |
|
94 | 94 | |
|
95 | 95 | def test_unicode_bookmark(self): |
|
96 | self.repo.bookmark(u'unicode-bookmark') |
|
|
97 | self.repo.bookmark(u'unícøde-spéçial-chäråcter-bookmark') |
|
|
96 | self.repo.bookmark('unicode-bookmark') | |
|
97 | self.repo.bookmark('unícøde-spéçial-chäråcter-bookmark') |
|
98 | 98 | |
|
99 | 99 | def test_unicode_branch(self): |
|
100 | 100 | with pytest.raises(KeyError): |
|
101 | self.repo.branches[u'unicode-branch'] |
|
|
101 | assert self.repo.branches['unicode-branch'] | |
|
102 | 102 | with pytest.raises(KeyError): |
|
103 | self.repo.branches[u'unícøde-spéçial-chäråcter-branch'] |
|
|
103 | assert self.repo.branches['unícøde-spéçial-chäråcter-branch'] |
|
104 | 104 | |
|
105 | 105 | def test_repo_clone(self): |
|
106 | 106 | if os.path.exists(TEST_HG_REPO_CLONE): |
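
Later hunks in this file route unicode values through safe_bytes before handing them to FileNode. A rough sketch of the behaviour this code appears to rely on (the real implementation lives in rhodecode.lib.str_utils and may differ in details):

    def safe_bytes(value, encoding='utf-8'):
        # assumed behaviour: bytes pass through unchanged, str gets encoded
        if isinstance(value, bytes):
            return value
        return str(value).encode(encoding)

    assert safe_bytes('file_x') == b'file_x'
    assert safe_bytes(b'file_x') == b'file_x'
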
@@ -141,27 +141,17 b' class TestMercurialRepository(object):' | |||
|
141 | 141 | def test_commit_ids(self): |
|
142 | 142 | # there are 21 commits at bitbucket now |
|
143 | 143 | # so we can assume they would be available from now on |
|
144 | subset = set([ | |
|
145 | 'b986218ba1c9b0d6a259fac9b050b1724ed8e545', | |
|
146 | '3d8f361e72ab303da48d799ff1ac40d5ac37c67e', | |
|
147 | '6cba7170863a2411822803fa77a0a264f1310b35', | |
|
148 | '56349e29c2af3ac913b28bde9a2c6154436e615b', | |
|
149 | '2dda4e345facb0ccff1a191052dd1606dba6781d', | |
|
150 | '6fff84722075f1607a30f436523403845f84cd9e', | |
|
151 | '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', | |
|
152 | '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb', | |
|
153 | 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', | |
|
154 | 'be90031137367893f1c406e0a8683010fd115b79', | |
|
155 | 'db8e58be770518cbb2b1cdfa69146e47cd481481', | |
|
156 | '84478366594b424af694a6c784cb991a16b87c21', | |
|
157 | '17f8e105dddb9f339600389c6dc7175d395a535c', | |
|
158 | '20a662e756499bde3095ffc9bc0643d1def2d0eb', | |
|
159 | '2e319b85e70a707bba0beff866d9f9de032aa4f9', | |
|
160 | '786facd2c61deb9cf91e9534735124fb8fc11842', | |
|
161 | '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', | |
|
162 | 'aa6a0de05b7612707db567078e130a6cd114a9a7', | |
|
163 | 'eada5a770da98ab0dd7325e29d00e0714f228d09' | |
|
164 | ]) | |
|
144 | subset = {'b986218ba1c9b0d6a259fac9b050b1724ed8e545', '3d8f361e72ab303da48d799ff1ac40d5ac37c67e', | |
|
145 | '6cba7170863a2411822803fa77a0a264f1310b35', '56349e29c2af3ac913b28bde9a2c6154436e615b', | |
|
146 | '2dda4e345facb0ccff1a191052dd1606dba6781d', '6fff84722075f1607a30f436523403845f84cd9e', | |
|
147 | '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb', | |
|
148 | 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', 'be90031137367893f1c406e0a8683010fd115b79', | |
|
149 | 'db8e58be770518cbb2b1cdfa69146e47cd481481', '84478366594b424af694a6c784cb991a16b87c21', | |
|
150 | '17f8e105dddb9f339600389c6dc7175d395a535c', '20a662e756499bde3095ffc9bc0643d1def2d0eb', | |
|
151 | '2e319b85e70a707bba0beff866d9f9de032aa4f9', '786facd2c61deb9cf91e9534735124fb8fc11842', | |
|
152 | '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', 'aa6a0de05b7612707db567078e130a6cd114a9a7', | |
|
153 | 'eada5a770da98ab0dd7325e29d00e0714f228d09' | |
|
154 | } | |
|
165 | 155 | assert subset.issubset(set(self.repo.commit_ids)) |
|
166 | 156 | |
|
167 | 157 | # check if we have the proper order of commits |
@@ -301,7 +291,7 b' TODO: To be written...' | |||
|
301 | 291 | """ |
|
302 | 292 | node = commit10.get_node('README.rst') |
|
303 | 293 | assert node.kind == NodeKind.FILE |
|
304 | assert node.content == README | |
|
294 | assert node.str_content == README | |
|
305 | 295 | |
|
306 | 296 | def test_local_clone(self): |
|
307 | 297 | clone_path = next(REPO_PATH_GENERATOR) |
@@ -583,10 +573,10 b' TODO: To be written...' | |||
|
583 | 573 | vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1') |
|
584 | 574 | vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2') |
|
585 | 575 | imc = source_repo.in_memory_commit |
|
586 | imc.add(FileNode('file_x', content=source_repo.name)) | |
|
576 | imc.add(FileNode(b'file_x', content=source_repo.name)) | |
|
587 | 577 | imc.commit( |
|
588 | message=u'Automatic commit from repo merge test', |
|
|
589 | author=u'Automatic <automatic@rhodecode.com>') |
|
|
578 | message='Automatic commit from repo merge test', | |
|
579 | author='Automatic <automatic@rhodecode.com>') | |
|
590 | 580 | target_commit = target_repo.get_commit() |
|
591 | 581 | source_commit = source_repo.get_commit() |
|
592 | 582 | default_branch = target_repo.DEFAULT_BRANCH_NAME |
@@ -627,10 +617,10 b' TODO: To be written...' | |||
|
627 | 617 | target_repo = vcsbackend_hg.create_repo(number_of_commits=1) |
|
628 | 618 | source_repo = vcsbackend_hg.clone_repo(target_repo) |
|
629 | 619 | imc = source_repo.in_memory_commit |
|
630 | imc.add(FileNode('file_x', content=source_repo.name)) | |
|
620 | imc.add(FileNode(b'file_x', content=source_repo.name)) | |
|
631 | 621 | imc.commit( |
|
632 | message=u'Automatic commit from repo merge test', |
|
|
633 | author=u'Automatic <automatic@rhodecode.com>') |
|
|
622 | message='Automatic commit from repo merge test', | |
|
623 | author='Automatic <automatic@rhodecode.com>') | |
|
634 | 624 | target_commit = target_repo.get_commit() |
|
635 | 625 | source_commit = source_repo.get_commit() |
|
636 | 626 | default_branch = target_repo.DEFAULT_BRANCH_NAME |
@@ -665,11 +655,11 b' TODO: To be written...' | |||
|
665 | 655 | |
|
666 | 656 | # add an extra head to the target repo |
|
667 | 657 | imc = target_repo.in_memory_commit |
|
668 | imc.add(FileNode('file_x', content='foo')) | |
|
658 | imc.add(FileNode(b'file_x', content='foo')) | |
|
669 | 659 | commits = list(target_repo.get_commits()) |
|
670 | 660 | imc.commit( |
|
671 | message=u'Automatic commit from repo merge test', |
|
|
672 | author=u'Automatic <automatic@rhodecode.com>', parents=commits[0:1]) |
|
|
661 | message='Automatic commit from repo merge test', | |
|
662 | author='Automatic <automatic@rhodecode.com>', parents=commits[0:1]) | |
|
673 | 663 | |
|
674 | 664 | target_commit = target_repo.get_commit() |
|
675 | 665 | source_commit = source_repo.get_commit() |
@@ -698,11 +688,13 b' TODO: To be written...' | |||
|
698 | 688 | source_repo = vcsbackend_hg.clone_repo(target_repo) |
|
699 | 689 | vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1') |
|
700 | 690 | vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2') |
|
691 | ||
|
701 | 692 | imc = source_repo.in_memory_commit |
|
702 | imc.add(FileNode('file_x', content=source_repo.name)) | |
|
693 | imc.add(FileNode(b'file_x', content=safe_bytes(source_repo.name))) | |
|
703 | 694 | imc.commit( |
|
704 | message=u'Automatic commit from repo merge test', |
|
|
705 | author=u'Automatic <automatic@rhodecode.com>') |
|
|
695 | message='Automatic commit from repo merge test', | |
|
696 | author='Automatic <automatic@rhodecode.com>') | |
|
697 | ||
|
706 | 698 | target_commit = target_repo.get_commit() |
|
707 | 699 | source_commit = source_repo.get_commit() |
|
708 | 700 | |
@@ -1090,11 +1082,12 b' class TestMercurialCommit(object):' | |||
|
1090 | 1082 | for cm in self.repo: |
|
1091 | 1083 | assert type(cm.author) == str |
|
1092 | 1084 | |
|
1093 | def test_repo_files_content_ |
|
|
1085 | def test_repo_files_content_type(self): | |
|
1094 | 1086 | test_commit = self.repo.get_commit(commit_idx=100) |
|
1095 | 1087 | for node in test_commit.get_node('/'): |
|
1096 | 1088 | if node.is_file(): |
|
1097 | assert type(node.content) == |
|
|
1089 | assert type(node.content) == bytes | |
|
1090 | assert type(node.str_content) == str | |
|
1098 | 1091 | |
|
1099 | 1092 | def test_wrong_path(self): |
|
1100 | 1093 | # There is 'setup.py' in the root dir but not there: |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -25,7 +24,7 b' import datetime' | |||
|
25 | 24 | |
|
26 | 25 | import pytest |
|
27 | 26 | |
|
28 | from rhodecode.lib.utils |
|
|
27 | from rhodecode.lib.str_utils import safe_bytes, safe_str | |
|
29 | 28 | from rhodecode.lib.vcs.exceptions import ( |
|
30 | 29 | EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyExistsError, |
|
31 | 30 | NodeAlreadyRemovedError, NodeAlreadyChangedError, NodeDoesNotExistError, |
@@ -37,20 +36,18 b' from rhodecode.tests.vcs.conftest import' | |||
|
37 | 36 | @pytest.fixture() |
|
38 | 37 | def nodes(): |
|
39 | 38 | nodes = [ |
|
40 | FileNode('foobar', content='Foo & bar'), | |
|
41 | FileNode('foobar2', content='Foo & bar, doubled!'), | |
|
42 | FileNode('foo bar with spaces', content=''), | |
|
43 | FileNode('foo/bar/baz', content='Inside'), | |
|
44 | FileNode( | |
|
45 | 'foo/bar/file.bin', | |
|
46 | content=( | |
|
47 |
'\x |
|
|
48 |
'\x00\x00\x00\x00\x00\x00\x00\x00\x00 |
|
|
49 |
' |
|
|
50 |
' |
|
|
51 | '\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00' | |
|
52 | '\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff' | |
|
53 | '\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' | |
|
39 | FileNode(b'foobar', content=b'Foo & bar'), | |
|
40 | FileNode(b'foobar2', content=b'Foo & bar, doubled!'), | |
|
41 | FileNode(b'foo bar with spaces', content=b''), | |
|
42 | FileNode(b'foo/bar/baz', content=b'Inside'), | |
|
43 | FileNode(b'foo/bar/file.bin', content=( | |
|
44 | b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00' | |
|
45 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe' | |
|
46 | b'\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
|
47 | b'\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00' | |
|
48 | b'\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00' | |
|
49 | b'\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff' | |
|
50 | b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' | |
|
54 | 51 | ) |
|
55 | 52 | ), |
|
56 | 53 | ] |
@@ -77,94 +74,98 b' class TestInMemoryCommit(BackendTestMixi' | |||
|
77 | 74 | self.imc.add(node) |
|
78 | 75 | |
|
79 | 76 | self.commit() |
|
80 | self.assert_succesful_commit(nodes) | |
|
77 | self.assert_successful_commit(nodes) | |
|
81 | 78 | |
|
82 | 79 | @pytest.mark.backends("hg") |
|
83 | 80 | def test_add_on_branch_hg(self, nodes): |
|
84 | 81 | for node in nodes: |
|
85 | 82 | self.imc.add(node) |
|
86 |
self.commit(branch= |
|
|
87 | self.assert_succesful_commit(nodes) | |
|
83 | self.commit(branch='stable') | |
|
84 | self.assert_successful_commit(nodes) | |
|
88 | 85 | |
|
89 | 86 | @pytest.mark.backends("git") |
|
90 | 87 | def test_add_on_branch_git(self, nodes): |
|
91 | 88 | for node in nodes: |
|
92 | 89 | self.imc.add(node) |
|
93 |
self.commit(branch= |
|
|
94 | self.assert_succesful_commit(nodes) | |
|
90 | self.commit(branch='stable') | |
|
91 | self.assert_successful_commit(nodes) | |
|
95 | 92 | |
|
96 | 93 | def test_add_in_bulk(self, nodes): |
|
97 | 94 | self.imc.add(*nodes) |
|
98 | 95 | |
|
99 | 96 | self.commit() |
|
100 | self.assert_succesful_commit(nodes) | |
|
97 | self.assert_successful_commit(nodes) | |
|
101 | 98 | |
|
102 | 99 | def test_add_non_ascii_files(self): |
|
103 | 100 | nodes = [ |
|
104 |
FileNode('ΕΌΓ³Εwik/zwierzΔ
tko_utf8_str |
|
|
105 | FileNode(u'ΕΌΓ³Εwik/zwierzΔ tko_unicode', content=u'ΔΔΔΔ'), | |
|
101 | FileNode(safe_bytes('ΕΌΓ³Εwik/zwierzΔ tko_utf8_str'), | |
|
102 | content=safe_bytes('ΔΔΔΔ')), | |
|
103 | FileNode(safe_bytes('ΕΌΓ³Εwik/zwierzΔ tko_unicode'), | |
|
104 | content=safe_bytes('ΔΔΔΔ')), | |
|
106 | 105 | ] |
|
107 | 106 | |
|
108 | 107 | for node in nodes: |
|
109 | 108 | self.imc.add(node) |
|
110 | 109 | |
|
111 | 110 | self.commit() |
|
112 | self.assert_succesful_commit(nodes) | |
|
111 | self.assert_successful_commit(nodes) | |
|
113 | 112 | |
|
114 | 113 | def commit(self, branch=None): |
|
115 | 114 | self.old_commit_count = len(self.repo.commit_ids) |
|
116 |
self.commit_message = |
|
|
117 |
self.commit_author = |
|
|
115 | self.commit_message = 'Test commit with unicode: ΕΌΓ³Εwik' | |
|
116 | self.commit_author = f'{self.__class__.__name__} <foo@email.com>' | |
|
118 | 117 | self.commit = self.imc.commit( |
|
119 | 118 | message=self.commit_message, author=self.commit_author, |
|
120 | 119 | branch=branch) |
|
121 | 120 | |
|
122 | 121 | def test_add_actually_adds_all_nodes_at_second_commit_too(self): |
|
123 | 122 | to_add = [ |
|
124 | FileNode('foo/bar/image.png', content='\0'), | |
|
125 | FileNode('foo/README.txt', content='readme!'), | |
|
123 | FileNode(b'foo/bar/image.png', content=b'\0'), | |
|
124 | FileNode(b'foo/README.txt', content=b'readme!'), | |
|
126 | 125 | ] |
|
127 | 126 | self.imc.add(*to_add) |
|
128 |
commit = self.imc.commit( |
|
|
127 | commit = self.imc.commit('Initial', 'joe doe <joe.doe@example.com>') | |
|
129 | 128 | assert isinstance(commit.get_node('foo'), DirNode) |
|
130 | 129 | assert isinstance(commit.get_node('foo/bar'), DirNode) |
|
131 | 130 | self.assert_nodes_in_commit(commit, to_add) |
|
132 | 131 | |
|
133 | 132 | # commit some more files again |
|
134 | 133 | to_add = [ |
|
135 | FileNode('foo/bar/foobaz/bar', content='foo'), | |
|
136 | FileNode('foo/bar/another/bar', content='foo'), | |
|
137 | FileNode('foo/baz.txt', content='foo'), | |
|
138 | FileNode('foobar/foobaz/file', content='foo'), | |
|
139 | FileNode('foobar/barbaz', content='foo'), | |
|
134 | FileNode(b'foo/bar/foobaz/bar', content=b'foo'), | |
|
135 | FileNode(b'foo/bar/another/bar', content=b'foo'), | |
|
136 | FileNode(b'foo/baz.txt', content=b'foo'), | |
|
137 | FileNode(b'foobar/foobaz/file', content=b'foo'), | |
|
138 | FileNode(b'foobar/barbaz', content=b'foo'), | |
|
140 | 139 | ] |
|
141 | 140 | self.imc.add(*to_add) |
|
142 |
commit = self.imc.commit( |
|
|
141 | commit = self.imc.commit('Another', 'joe doe <joe.doe@example.com>') | |
|
143 | 142 | self.assert_nodes_in_commit(commit, to_add) |
|
144 | 143 | |
|
145 | 144 | def test_add_raise_already_added(self): |
|
146 | node = FileNode('foobar', content='baz') | |
|
145 | node = FileNode(b'foobar', content=b'baz') | |
|
147 | 146 | self.imc.add(node) |
|
148 | 147 | with pytest.raises(NodeAlreadyAddedError): |
|
149 | 148 | self.imc.add(node) |
|
150 | 149 | |
|
151 | 150 | def test_check_integrity_raise_already_exist(self): |
|
152 | node = FileNode('foobar', content='baz') | |
|
151 | node = FileNode(b'foobar', content=b'baz') | |
|
153 | 152 | self.imc.add(node) |
|
154 |
self.imc.commit(message= |
|
|
153 | self.imc.commit(message='Added foobar', | |
|
154 | author='Some Name <foo@bar.com>') | |
|
155 | 155 | self.imc.add(node) |
|
156 | 156 | with pytest.raises(NodeAlreadyExistsError): |
|
157 |
self.imc.commit(message='new message', |
|
|
157 | self.imc.commit(message='new message', | |
|
158 | author='Some Name <foo@bar.com>') | |
|
158 | 159 | |
|
159 | 160 | def test_change(self): |
|
160 | self.imc.add(FileNode('foo/bar/baz', content='foo')) | |
|
161 | self.imc.add(FileNode('foo/fbar', content='foobar')) | |
|
162 |
tip = self.imc.commit( |
|
|
161 | self.imc.add(FileNode(b'foo/bar/baz', content=b'foo')) | |
|
162 | self.imc.add(FileNode(b'foo/fbar', content=b'foobar')) | |
|
163 | tip = self.imc.commit('Initial', 'joe doe <joe.doe@example.com>') | |
|
163 | 164 | |
|
164 | 165 | # Change node's content |
|
165 | node = FileNode('foo/bar/baz', content='My **changed** content') | |
|
166 | node = FileNode(b'foo/bar/baz', content=b'My **changed** content') | |
|
166 | 167 | self.imc.change(node) |
|
167 |
self.imc.commit( |
|
|
168 | self.imc.commit('Changed %s' % node.path, 'joe doe <joe.doe@example.com>') | |
|
168 | 169 | |
|
169 | 170 | newtip = self.repo.get_commit() |
|
170 | 171 | assert tip != newtip |
@@ -173,25 +174,28 b' class TestInMemoryCommit(BackendTestMixi' | |||
|
173 | 174 | |
|
174 | 175 | def test_change_non_ascii(self): |
|
175 | 176 | to_add = [ |
|
176 |
FileNode('ΕΌΓ³Εwik/zwierzΔ
tko |
|
|
177 | FileNode(u'ΕΌΓ³Εwik/zwierzΔ tko_uni', content=u'ΔΔΔΔ'), | |
|
177 | FileNode(safe_bytes('ΕΌΓ³Εwik/zwierzΔ tko'), | |
|
178 | content=safe_bytes('ΔΔΔΔ')), | |
|
179 | FileNode(safe_bytes('ΕΌΓ³Εwik/zwierzΔ tko_uni'), | |
|
180 | content=safe_bytes('ΔΔΔΔ')), | |
|
178 | 181 | ] |
|
179 | 182 | for node in to_add: |
|
180 | 183 | self.imc.add(node) |
|
181 | 184 | |
|
182 |
tip = self.imc.commit( |
|
|
185 | tip = self.imc.commit('Initial', 'joe doe <joe.doe@example.com>') | |
|
183 | 186 | |
|
184 | 187 | # Change node's content |
|
185 |
node = FileNode('ΕΌΓ³Εwik/zwierzΔ
tko', |
|
|
188 | node = FileNode(safe_bytes('ΕΌΓ³Εwik/zwierzΔ tko'), | |
|
189 | content=b'My **changed** content') | |
|
186 | 190 | self.imc.change(node) |
|
187 |
self.imc.commit( |
|
|
188 |
author= |
|
|
191 | self.imc.commit('Changed %s' % safe_str(node.path), | |
|
192 | author='joe doe <joe.doe@example.com>') | |
|
189 | 193 | |
|
190 | node_uni = FileNode( | |
|
191 |
|
|
|
194 | node_uni = FileNode(safe_bytes('ΕΌΓ³Εwik/zwierzΔ tko_uni'), | |
|
195 | content=b'My **changed** content') | |
|
192 | 196 | self.imc.change(node_uni) |
|
193 |
self.imc.commit( |
|
|
194 |
author= |
|
|
197 | self.imc.commit('Changed %s' % safe_str(node_uni.path), | |
|
198 | author='joe doe <joe.doe@example.com>') | |
|
195 | 199 | |
|
196 | 200 | newtip = self.repo.get_commit() |
|
197 | 201 | assert tip != newtip |
@@ -200,24 +204,24 b' class TestInMemoryCommit(BackendTestMixi' | |||
|
200 | 204 | self.assert_nodes_in_commit(newtip, (node, node_uni)) |
|
201 | 205 | |
|
202 | 206 | def test_change_raise_empty_repository(self): |
|
203 | node = FileNode('foobar') | |
|
207 | node = FileNode(b'foobar') | |
|
204 | 208 | with pytest.raises(EmptyRepositoryError): |
|
205 | 209 | self.imc.change(node) |
|
206 | 210 | |
|
207 | 211 | def test_check_integrity_change_raise_node_does_not_exist(self): |
|
208 | node = FileNode('foobar', content='baz') | |
|
212 | node = FileNode(b'foobar', content=b'baz') | |
|
209 | 213 | self.imc.add(node) |
|
210 |
self.imc.commit(message= |
|
|
211 | node = FileNode('not-foobar', content='') | |
|
214 | self.imc.commit(message='Added foobar', author='Some Name <foo@bar.com>') | |
|
215 | node = FileNode(b'not-foobar', content=b'') | |
|
212 | 216 | self.imc.change(node) |
|
213 | 217 | with pytest.raises(NodeDoesNotExistError): |
|
214 |
self.imc.commit(message='Changed not existing node', author= |
|
|
218 | self.imc.commit(message='Changed not existing node', author='Some Name <foo@bar.com>') | |
|
215 | 219 | |
|
216 | 220 | def test_change_raise_node_already_changed(self): |
|
217 | node = FileNode('foobar', content='baz') | |
|
221 | node = FileNode(b'foobar', content=b'baz') | |
|
218 | 222 | self.imc.add(node) |
|
219 |
self.imc.commit(message= |
|
|
220 | node = FileNode('foobar', content='more baz') | |
|
223 | self.imc.commit(message='Added foobar', author='Some Nam <foo@bar.com>') | |
|
224 | node = FileNode(b'foobar', content=b'more baz') | |
|
221 | 225 | self.imc.change(node) |
|
222 | 226 | with pytest.raises(NodeAlreadyChangedError): |
|
223 | 227 | self.imc.change(node) |
@@ -225,18 +229,18 b' class TestInMemoryCommit(BackendTestMixi' | |||
|
225 | 229 | def test_check_integrity_change_raise_node_not_changed(self, nodes): |
|
226 | 230 | self.test_add(nodes) # Performs first commit |
|
227 | 231 | |
|
228 | node = FileNode(nodes[0].path, content=nodes[0].content) | |
|
232 | node = FileNode(nodes[0].bytes_path, content=nodes[0].content) | |
|
229 | 233 | self.imc.change(node) |
|
230 | 234 | with pytest.raises(NodeNotChangedError): |
|
231 | 235 | self.imc.commit( |
|
232 |
message= |
|
|
233 |
author= |
|
|
236 | message='Trying to mark node as changed without touching it', | |
|
237 | author='Some Name <foo@bar.com>') | |
|
234 | 238 | |
|
235 | 239 | def test_change_raise_node_already_removed(self): |
|
236 | node = FileNode('foobar', content='baz') | |
|
240 | node = FileNode(b'foobar', content=b'baz') | |
|
237 | 241 | self.imc.add(node) |
|
238 |
self.imc.commit(message= |
|
|
239 | self.imc.remove(FileNode('foobar')) | |
|
242 | self.imc.commit(message='Added foobar', author='Some Name <foo@bar.com>') | |
|
243 | self.imc.remove(FileNode(b'foobar')) | |
|
240 | 244 | with pytest.raises(NodeAlreadyRemovedError): |
|
241 | 245 | self.imc.change(node) |
|
242 | 246 | |
@@ -247,8 +251,7 b' class TestInMemoryCommit(BackendTestMixi' | |||
|
247 | 251 | node = nodes[0] |
|
248 | 252 | assert node.content == tip.get_node(node.path).content |
|
249 | 253 | self.imc.remove(node) |
|
250 | self.imc.commit( | |
|
251 | message=u'Removed %s' % node.path, author=u'{} <foo@bar.com>'.format(self)) | |
|
254 | self.imc.commit(message=f'Removed {node.path}', author='Some Name <foo@bar.com>') | |
|
252 | 255 | |
|
253 | 256 | newtip = self.repo.get_commit() |
|
254 | 257 | assert tip != newtip |
@@ -257,12 +260,12 b' class TestInMemoryCommit(BackendTestMixi' | |||
|
257 | 260 | newtip.get_node(node.path) |
|
258 | 261 | |
|
259 | 262 | def test_remove_last_file_from_directory(self): |
|
260 | node = FileNode('omg/qwe/foo/bar', content='foobar') | |
|
263 | node = FileNode(b'omg/qwe/foo/bar', content=b'foobar') | |
|
261 | 264 | self.imc.add(node) |
|
262 |
self.imc.commit( |
|
|
265 | self.imc.commit('added', author='joe doe <joe@doe.com>') | |
|
263 | 266 | |
|
264 | 267 | self.imc.remove(node) |
|
265 |
tip = self.imc.commit( |
|
|
268 | tip = self.imc.commit('removed', 'joe doe <joe@doe.com>') | |
|
266 | 269 | with pytest.raises(NodeDoesNotExistError): |
|
267 | 270 | tip.get_node('omg/qwe/foo/bar') |
|
268 | 271 | |
@@ -271,22 +274,22 b' class TestInMemoryCommit(BackendTestMixi' | |||
|
271 | 274 | with pytest.raises(NodeDoesNotExistError): |
|
272 | 275 | self.imc.commit( |
|
273 | 276 | message='Trying to remove node at empty repository', |
|
274 |
author= |
|
|
277 | author='Some Name <foo@bar.com>') | |
|
275 | 278 | |
|
276 | 279 | def test_check_integrity_remove_raise_node_does_not_exist(self, nodes): |
|
277 | 280 | self.test_add(nodes) # Performs first commit |
|
278 | 281 | |
|
279 | node = FileNode('no-such-file') | |
|
282 | node = FileNode(b'no-such-file') | |
|
280 | 283 | self.imc.remove(node) |
|
281 | 284 | with pytest.raises(NodeDoesNotExistError): |
|
282 | 285 | self.imc.commit( |
|
283 |
message= |
|
|
284 |
author= |
|
|
286 | message='Trying to remove not existing node', | |
|
287 | author='Some Name <foo@bar.com>') | |
|
285 | 288 | |
|
286 | 289 | def test_remove_raise_node_already_removed(self, nodes): |
|
287 | 290 | self.test_add(nodes) # Performs first commit |
|
288 | 291 | |
|
289 | node = FileNode(nodes[0].path) | |
|
292 | node = FileNode(nodes[0].bytes_path) | |
|
290 | 293 | self.imc.remove(node) |
|
291 | 294 | with pytest.raises(NodeAlreadyRemovedError): |
|
292 | 295 | self.imc.remove(node) |
@@ -294,15 +297,15 b' class TestInMemoryCommit(BackendTestMixi' | |||
|
294 | 297 | def test_remove_raise_node_already_changed(self, nodes): |
|
295 | 298 | self.test_add(nodes) # Performs first commit |
|
296 | 299 | |
|
297 | node = FileNode(nodes[0].path, content='Bending time') | |
|
300 | node = FileNode(nodes[0].bytes_path, content=b'Bending time') | |
|
298 | 301 | self.imc.change(node) |
|
299 | 302 | with pytest.raises(NodeAlreadyChangedError): |
|
300 | 303 | self.imc.remove(node) |
|
301 | 304 | |
|
302 | 305 | def test_reset(self): |
|
303 | self.imc.add(FileNode('foo', content='bar')) | |
|
304 | # self.imc.change(FileNode('baz', content='new')) | |
|
305 | # self.imc.remove(FileNode('qwe')) | |
|
306 | self.imc.add(FileNode(b'foo', content=b'bar')) | |
|
307 | # self.imc.change(FileNode(b'baz', content='new')) | |
|
308 | # self.imc.remove(FileNode(b'qwe')) | |
|
306 | 309 | self.imc.reset() |
|
307 | 310 | assert not any((self.imc.added, self.imc.changed, self.imc.removed)) |
|
308 | 311 | |
@@ -310,11 +313,11 b' class TestInMemoryCommit(BackendTestMixi' | |||
|
310 | 313 | N = 3 # number of commits to perform |
|
311 | 314 | last = None |
|
312 | 315 | for x in range(N): |
|
313 | fname = 'file%s' % str(x).rjust(5, '0') | |
|
314 | content = 'foobar\n' * x | |
|
316 | fname = safe_bytes('file%s' % str(x).rjust(5, '0')) | |
|
317 | content = safe_bytes('foobar\n' * x) | |
|
315 | 318 | node = FileNode(fname, content=content) |
|
316 | 319 | self.imc.add(node) |
|
317 |
commit = self.imc.commit( |
|
|
320 | commit = self.imc.commit("Commit no. %s" % (x + 1), author='Vcs User <foo@bar.com>') | |
|
318 | 321 | assert last != commit |
|
319 | 322 | last = commit |
|
320 | 323 | |
@@ -326,16 +329,16 b' class TestInMemoryCommit(BackendTestMixi' | |||
|
326 | 329 | assert len(repo.commit_ids) == N |
|
327 | 330 | |
|
328 | 331 | def test_date_attr(self, local_dt_to_utc): |
|
329 | node = FileNode('foobar.txt', content='Foobared!') | |
|
332 | node = FileNode(b'foobar.txt', content=b'Foobared!') | |
|
330 | 333 | self.imc.add(node) |
|
331 | 334 | date = datetime.datetime(1985, 1, 30, 1, 45) |
|
332 | 335 | commit = self.imc.commit( |
|
333 |
|
|
|
334 |
author= |
|
|
336 | "Committed at time when I was born ;-)", | |
|
337 | author='Test User <foo@bar.com>', date=date) | |
|
335 | 338 | |
|
336 | 339 | assert commit.date == local_dt_to_utc(date) |
|
337 | 340 | |
|
338 | def assert_succesful_commit(self, added_nodes): | |
|
341 | def assert_successful_commit(self, added_nodes): | |
|
339 | 342 | newtip = self.repo.get_commit() |
|
340 | 343 | assert self.commit == newtip |
|
341 | 344 | assert self.old_commit_count + 1 == len(self.repo.commit_ids) |
@@ -346,4 +349,5 b' class TestInMemoryCommit(BackendTestMixi' | |||
|
346 | 349 | |
|
347 | 350 | def assert_nodes_in_commit(self, commit, nodes): |
|
348 | 351 | for node in nodes: |
|
352 | assert commit.get_node(node.path).path == node.path | |
|
349 | 353 | assert commit.get_node(node.path).content == node.content |
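
Several hunks above rebuild a node as FileNode(nodes[0].bytes_path, ...) where the old code passed nodes[0].path. A minimal sketch of the distinction these tests rely on (the exact types are inferred from usage here, not stated by this changeset):

    from rhodecode.lib.vcs.nodes import FileNode

    node = FileNode(b'foo/bar/baz', content=b'foo')
    # bytes_path: the bytes form that FileNode() itself accepts
    # path: the str form used with commit.get_node(...) in the assertions above
    assert isinstance(node.bytes_path, bytes)   # assumption
    assert isinstance(node.path, str)           # assumption
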
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -22,6 +21,7 b' import stat' | |||
|
22 | 21 | |
|
23 | 22 | import pytest |
|
24 | 23 | |
|
24 | from rhodecode.lib.str_utils import safe_bytes | |
|
25 | 25 | from rhodecode.lib.vcs.nodes import DirNode |
|
26 | 26 | from rhodecode.lib.vcs.nodes import FileNode |
|
27 | 27 | from rhodecode.lib.vcs.nodes import Node |
@@ -34,29 +34,29 b' from rhodecode.tests.vcs.conftest import' | |||
|
34 | 34 | def binary_filenode(): |
|
35 | 35 | def node_maker(filename): |
|
36 | 36 | data = ( |
|
37 | "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00" | |
|
38 | "\x10\x08\x06\x00\x00\x00\x1f??a\x00\x00\x00\x04gAMA\x00\x00\xaf?7" | |
|
39 | "\x05\x8a?\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq?e<\x00" | |
|
40 | "\x00\x025IDAT8?\xa5\x93?K\x94Q\x14\x87\x9f\xf7?Q\x1bs4?\x03\x9a" | |
|
41 | "\xa8?B\x02\x8b$\x10[U;i\x13?6h?&h[?\"\x14j?\xa2M\x7fB\x14F\x9aQ?&" | |
|
42 | "\x842?\x0b\x89\"\x82??!?\x9c!\x9c2l??{N\x8bW\x9dY\xb4\t/\x1c?=" | |
|
43 | "\x9b?}????\xa9*;9!?\x83\x91?[?\\v*?D\x04\'`EpNp\xa2X\'U?pVq\"Sw." | |
|
44 | "\x1e?\x08\x01D?jw????\xbc??7{|\x9b?\x89$\x01??W@\x15\x9c\x05q`Lt/" | |
|
45 | "\x97?\x94\xa1d?\x18~?\x18?\x18W[%\xb0?\x83??\x14\x88\x8dB?\xa6H" | |
|
46 | "\tL\tl\x19>/\x01`\xac\xabx?\x9cl\nx\xb0\x98\x07\x95\x88D$\"q[" | |
|
47 | "\x19?d\x00(o\n\xa0??\x7f\xb9\xa4?\x1bF\x1f\x8e\xac\xa8?j??eUU}?.?" | |
|
48 | "\x9f\x8cE??x\x94??\r\xbdtoJU5\"0N\x10U?\x00??V\t\x02\x9f\x81?U?" | |
|
49 | "\x00\x9eM\xae2?r\x9b7\x83\x82\x8aP3????.?&\"?\xb7ZP \x0c<?O" | |
|
50 | "\xa5\t}\xb8?\x99\xa6?\x87?\x1di|/\xa0??0\xbe\x1fp?d&\x1a\xad" | |
|
51 | "\x95\x8a\x07?\t*\x10??b:?d?.\x13C\x8a?\x12\xbe\xbf\x8e?{???" | |
|
52 | "\x08?\x80\xa7\x13+d\x13>J?\x80\x15T\x95\x9a\x00??S\x8c\r?\xa1" | |
|
53 | "\x03\x07?\x96\x9b\xa7\xab=E??\xa4\xb3?\x19q??B\x91=\x8d??k?J" | |
|
54 | "\x0bV\"??\xf7x?\xa1\x00?\\.\x87\x87???\x02F@D\x99],??\x10#?X" | |
|
55 | "\xb7=\xb9\x10?Z\x1by???cI??\x1ag?\x92\xbc?T?t[\x92\x81?<_\x17~" | |
|
56 | "\x92\x88?H%?\x10Q\x02\x9f\n\x81qQ\x0bm?\x1bX?\xb1AK\xa6\x9e\xb9?u" | |
|
57 | "\xb2?1\xbe|/\x92M@\xa2!F?\xa9>\"\r<DT?>\x92\x8e?>\x9a9Qv\x127?a" | |
|
58 | "\xac?Y?8?:??]X???9\x80\xb7?u?\x0b#BZ\x8d=\x1d?p\x00\x00\x00\x00" | |
|
59 | "IEND\xaeB`\x82") | |
|
37 | b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00" | |
|
38 | b"\x10\x08\x06\x00\x00\x00\x1f??a\x00\x00\x00\x04gAMA\x00\x00\xaf?7" | |
|
39 | b"\x05\x8a?\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq?e<\x00" | |
|
40 | b"\x00\x025IDAT8?\xa5\x93?K\x94Q\x14\x87\x9f\xf7?Q\x1bs4?\x03\x9a" | |
|
41 | b"\xa8?B\x02\x8b$\x10[U;i\x13?6h?&h[?\"\x14j?\xa2M\x7fB\x14F\x9aQ?&" | |
|
42 | b"\x842?\x0b\x89\"\x82??!?\x9c!\x9c2l??{N\x8bW\x9dY\xb4\t/\x1c?=" | |
|
43 | b"\x9b?}????\xa9*;9!?\x83\x91?[?\\v*?D\x04\'`EpNp\xa2X\'U?pVq\"Sw." | |
|
44 | b"\x1e?\x08\x01D?jw????\xbc??7{|\x9b?\x89$\x01??W@\x15\x9c\x05q`Lt/" | |
|
45 | b"\x97?\x94\xa1d?\x18~?\x18?\x18W[%\xb0?\x83??\x14\x88\x8dB?\xa6H" | |
|
46 | b"\tL\tl\x19>/\x01`\xac\xabx?\x9cl\nx\xb0\x98\x07\x95\x88D$\"q[" | |
|
47 | b"\x19?d\x00(o\n\xa0??\x7f\xb9\xa4?\x1bF\x1f\x8e\xac\xa8?j??eUU}?.?" | |
|
48 | b"\x9f\x8cE??x\x94??\r\xbdtoJU5\"0N\x10U?\x00??V\t\x02\x9f\x81?U?" | |
|
49 | b"\x00\x9eM\xae2?r\x9b7\x83\x82\x8aP3????.?&\"?\xb7ZP \x0c<?O" | |
|
50 | b"\xa5\t}\xb8?\x99\xa6?\x87?\x1di|/\xa0??0\xbe\x1fp?d&\x1a\xad" | |
|
51 | b"\x95\x8a\x07?\t*\x10??b:?d?.\x13C\x8a?\x12\xbe\xbf\x8e?{???" | |
|
52 | b"\x08?\x80\xa7\x13+d\x13>J?\x80\x15T\x95\x9a\x00??S\x8c\r?\xa1" | |
|
53 | b"\x03\x07?\x96\x9b\xa7\xab=E??\xa4\xb3?\x19q??B\x91=\x8d??k?J" | |
|
54 | b"\x0bV\"??\xf7x?\xa1\x00?\\.\x87\x87???\x02F@D\x99],??\x10#?X" | |
|
55 | b"\xb7=\xb9\x10?Z\x1by???cI??\x1ag?\x92\xbc?T?t[\x92\x81?<_\x17~" | |
|
56 | b"\x92\x88?H%?\x10Q\x02\x9f\n\x81qQ\x0bm?\x1bX?\xb1AK\xa6\x9e\xb9?u" | |
|
57 | b"\xb2?1\xbe|/\x92M@\xa2!F?\xa9>\"\r<DT?>\x92\x8e?>\x9a9Qv\x127?a" | |
|
58 | b"\xac?Y?8?:??]X???9\x80\xb7?u?\x0b#BZ\x8d=\x1d?p\x00\x00\x00\x00" | |
|
59 | b"IEND\xaeB`\x82") | |
|
60 | 60 | return FileNode(filename, content=data) |
|
61 | 61 | return node_maker |
|
62 | 62 | |
@@ -68,8 +68,9 b' class TestNodeBasics:' | |||
|
68 | 68 | "kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"]) |
|
69 | 69 | def test_init_wrong_paths(self, path, kind): |
|
70 | 70 | """ |
|
71 | Cannot innitialize Node objects with path with slash at the beginning. |
|
|
71 | Cannot initialize Node objects with path with slash at the beginning. | |
|
72 | 72 | """ |
|
73 | path = safe_bytes(path) | |
|
73 | 74 | with pytest.raises(NodeError): |
|
74 | 75 | Node(path, kind) |
|
75 | 76 | |
@@ -77,47 +78,49 b' class TestNodeBasics:' | |||
|
77 | 78 | @pytest.mark.parametrize( |
|
78 | 79 | "kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"]) |
|
79 | 80 | def test_name(self, path, kind): |
|
81 | path = safe_bytes(path) | |
|
80 | 82 | node = Node(path, kind) |
|
81 | 83 | assert node.name == 'path' |
|
82 | 84 | |
|
83 | 85 | def test_name_root(self): |
|
84 | node = Node('', NodeKind.DIR) | |
|
86 | node = Node(b'', NodeKind.DIR) | |
|
85 | 87 | assert node.name == '' |
|
86 | 88 | |
|
87 | 89 | def test_root_node_cannot_be_file(self): |
|
88 | 90 | with pytest.raises(NodeError): |
|
89 | Node('', NodeKind.FILE) | |
|
91 | Node(b'', NodeKind.FILE) | |
|
90 | 92 | |
|
91 | 93 | def test_kind_setter(self): |
|
92 | node = Node('', NodeKind.DIR) | |
|
94 | node = Node(b'', NodeKind.DIR) | |
|
93 | 95 | with pytest.raises(NodeError): |
|
94 | 96 | node.kind = NodeKind.FILE |
|
95 | 97 | |
|
96 | 98 | def test_compare_equal(self): |
|
97 | node1 = FileNode('test', content='') | |
|
98 | node2 = FileNode('test', content='') | |
|
99 | node1 = FileNode(b'test', content=b'') | |
|
100 | node2 = FileNode(b'test', content=b'') | |
|
99 | 101 | assert node1 == node2 |
|
100 | 102 | assert not node1 != node2 |
|
101 | 103 | |
|
102 | 104 | def test_compare_unequal(self): |
|
103 | node1 = FileNode('test', content='a') | |
|
104 | node2 = FileNode('test', content='b') | |
|
105 | node1 = FileNode(b'test', content=b'a') | |
|
106 | node2 = FileNode(b'test', content=b'b') | |
|
105 | 107 | assert node1 != node2 |
|
106 | 108 | assert not node1 == node2 |
|
107 | 109 | |
|
108 | 110 | @pytest.mark.parametrize("node_path, expected_parent_path", [ |
|
109 | ('', ''), | |
|
110 | ('some/path/', 'some/'), | |
|
111 | ('some/longer/path/', 'some/longer/'), | |
|
111 | ('', b''), | |
|
112 | ('some/path/', b'some/'), | |
|
113 | ('some/longer/path/', b'some/longer/'), | |
|
112 | 114 | ]) |
|
113 | 115 | def test_parent_path_new(self, node_path, expected_parent_path): |
|
114 | 116 | """ |
|
115 | 117 | Tests if node's parent path are properly computed. |
|
116 | 118 | """ |
|
119 | node_path = safe_bytes(node_path) | |
|
117 | 120 | node = Node(node_path, NodeKind.DIR) |
|
118 | 121 | parent_path = node.get_parent_path() |
|
119 | assert (parent_path.endswith('/') or | |
|
120 | node.is_root() and parent_path == '') | |
|
122 | assert (parent_path.endswith(b'/') or | |
|
123 | node.is_root() and parent_path == b'') | |
|
121 | 124 | assert parent_path == expected_parent_path |
|
122 | 125 | |
|
123 | 126 | ''' |
@@ -134,34 +137,34 b' class TestNodeBasics:' | |||
|
134 | 137 | ''' |
|
135 | 138 | |
|
136 | 139 | def test_is_file(self): |
|
137 | node = Node('any', NodeKind.FILE) | |
|
140 | node = Node(b'any', NodeKind.FILE) | |
|
138 | 141 | assert node.is_file() |
|
139 | 142 | |
|
140 | node = FileNode('any') | |
|
143 | node = FileNode(b'any') | |
|
141 | 144 | assert node.is_file() |
|
142 | 145 | with pytest.raises(AttributeError): |
|
143 | node.nodes | |
|
146 | node.nodes # noqa | |
|
144 | 147 | |
|
145 | 148 | def test_is_dir(self): |
|
146 | node = Node('any_dir', NodeKind.DIR) | |
|
149 | node = Node(b'any_dir', NodeKind.DIR) | |
|
147 | 150 | assert node.is_dir() |
|
148 | 151 | |
|
149 | node = DirNode('any_dir') | |
|
152 | node = DirNode(b'any_dir') | |
|
150 | 153 | |
|
151 | 154 | assert node.is_dir() |
|
152 | 155 | with pytest.raises(NodeError): |
|
153 | node.content | |
|
156 | node.content # noqa | |
|
154 | 157 | |
|
155 | 158 | def test_dir_node_iter(self): |
|
156 | 159 | nodes = [ |
|
157 | DirNode('docs'), | |
|
158 | DirNode('tests'), | |
|
159 | FileNode('bar'), | |
|
160 | FileNode('foo'), | |
|
161 | FileNode('readme.txt'), | |
|
162 | FileNode('setup.py'), | |
|
160 | DirNode(b'docs'), | |
|
161 | DirNode(b'tests'), | |
|
162 | FileNode(b'bar'), | |
|
163 | FileNode(b'foo'), | |
|
164 | FileNode(b'readme.txt'), | |
|
165 | FileNode(b'setup.py'), | |
|
163 | 166 | ] |
|
164 | dirnode = DirNode('', nodes=nodes) | |
|
167 | dirnode = DirNode(b'', nodes=nodes) | |
|
165 | 168 | for node in dirnode: |
|
166 | 169 | assert node == dirnode.get_node(node.path) |
|
167 | 170 | |
@@ -169,15 +172,15 b' class TestNodeBasics:' | |||
|
169 | 172 | """ |
|
170 | 173 | Without link to commit nodes should raise NodeError. |
|
171 | 174 | """ |
|
172 | node = FileNode('anything') | |
|
175 | node = FileNode(b'anything') | |
|
173 | 176 | with pytest.raises(NodeError): |
|
174 | node.state | |
|
175 | node = DirNode('anything') | |
|
177 | node.state # noqa | |
|
178 | node = DirNode(b'anything') | |
|
176 | 179 | with pytest.raises(NodeError): |
|
177 | node.state | |
|
180 | node.state # noqa | |
|
178 | 181 | |
|
179 | 182 | def test_file_node_stat(self): |
|
180 | node = FileNode('foobar', 'empty... almost') | |
|
183 | node = FileNode(b'foobar', b'empty... almost') | |
|
181 | 184 | mode = node.mode # default should be 0100644 |
|
182 | 185 | assert mode & stat.S_IRUSR |
|
183 | 186 | assert mode & stat.S_IWUSR |
@@ -190,29 +193,29 b' class TestNodeBasics:' | |||
|
190 | 193 | assert not mode & stat.S_IXOTH |
|
191 | 194 | |
|
192 | 195 | def test_file_node_is_executable(self): |
|
193 | node = FileNode('foobar', 'empty... almost', mode=0o100755) | |
|
196 | node = FileNode(b'foobar', b'empty... almost', mode=0o100755) | |
|
194 | 197 | assert node.is_executable |
|
195 | 198 | |
|
196 | node = FileNode('foobar', 'empty... almost', mode=0o100500) | |
|
199 | node = FileNode(b'foobar', b'empty... almost', mode=0o100500) | |
|
197 | 200 | assert node.is_executable |
|
198 | 201 | |
|
199 | node = FileNode('foobar', 'empty... almost', mode=0o100644) | |
|
202 | node = FileNode(b'foobar', b'empty... almost', mode=0o100644) | |
|
200 | 203 | assert not node.is_executable |
|
201 | 204 | |
|
202 | 205 | def test_file_node_is_not_symlink(self): |
|
203 | node = FileNode('foobar', 'empty...') | |
|
206 | node = FileNode(b'foobar', b'empty...') | |
|
204 | 207 | assert not node.is_link() |
|
205 | 208 | |
|
206 | 209 | def test_mimetype(self): |
|
207 | py_node = FileNode('test.py') | |
|
208 | tar_node = FileNode('test.tar.gz') | |
|
210 | py_node = FileNode(b'test.py') | |
|
211 | tar_node = FileNode(b'test.tar.gz') | |
|
209 | 212 | |
|
210 | 213 | ext = 'CustomExtension' |
|
211 | 214 | |
|
212 | my_node2 = FileNode('myfile2') | |
|
215 | my_node2 = FileNode(b'myfile2') | |
|
213 | 216 | my_node2._mimetype = [ext] |
|
214 | 217 | |
|
215 | my_node3 = FileNode('myfile3') | |
|
218 | my_node3 = FileNode(b'myfile3') | |
|
216 | 219 | my_node3._mimetype = [ext, ext] |
|
217 | 220 | |
|
218 | 221 | assert py_node.mimetype == 'text/x-python' |
@@ -229,20 +232,20 b' class TestNodeBasics:' | |||
|
229 | 232 | |
|
230 | 233 | def test_lines_counts(self): |
|
231 | 234 | lines = [ |
|
232 | 'line1\n', | |
|
233 | 'line2\n', | |
|
234 | 'line3\n', | |
|
235 | '\n', | |
|
236 | '\n', | |
|
237 | 'line4\n', | |
|
235 | b'line1\n', | |
|
236 | b'line2\n', | |
|
237 | b'line3\n', | |
|
238 | b'\n', | |
|
239 | b'\n', | |
|
240 | b'line4\n', | |
|
238 | 241 | ] |
|
239 | py_node = FileNode('test.py', ''.join(lines)) | |
|
242 | py_node = FileNode(b'test.py', b''.join(lines)) | |
|
240 | 243 | |
|
241 | 244 | assert (len(lines), len(lines)) == py_node.lines() |
|
242 | 245 | assert (len(lines), len(lines) - 2) == py_node.lines(count_empty=True) |
|
243 | 246 | |
|
244 | 247 | def test_lines_no_newline(self): |
|
245 | py_node = FileNode('test.py', 'oneline') | |
|
248 | py_node = FileNode(b'test.py', b'oneline') | |
|
246 | 249 | |
|
247 | 250 | assert (1, 1) == py_node.lines() |
|
248 | 251 | assert (1, 1) == py_node.lines(count_empty=True) |
@@ -251,15 +254,15 b' class TestNodeBasics:' | |||
|
251 | 254 | class TestNodeContent(object): |
|
252 | 255 | |
|
253 | 256 | def test_if_binary(self, binary_filenode): |
|
254 | filenode = binary_filenode('calendar.jpg') | |
|
257 | filenode = binary_filenode(b'calendar.jpg') | |
|
255 | 258 | assert filenode.is_binary |
|
256 | 259 | |
|
257 | 260 | def test_binary_line_counts(self, binary_filenode): |
|
258 | tar_node = binary_filenode('archive.tar.gz') | |
|
261 | tar_node = binary_filenode(b'archive.tar.gz') | |
|
259 | 262 | assert (0, 0) == tar_node.lines(count_empty=True) |
|
260 | 263 | |
|
261 | 264 | def test_binary_mimetype(self, binary_filenode): |
|
262 | tar_node = binary_filenode('archive.tar.gz') | |
|
265 | tar_node = binary_filenode(b'archive.tar.gz') | |
|
263 | 266 | assert tar_node.mimetype == 'application/x-tar' |
|
264 | 267 | |
|
265 | 268 | |
@@ -271,5 +274,5 b' class TestNodesCommits(BackendTestMixin)' | |||
|
271 | 274 | last_commit = repo.get_commit() |
|
272 | 275 | |
|
273 | 276 | for x in range(3): |
|
274 |
node = last_commit.get_node('file_ |
|
|
277 | node = last_commit.get_node(f'file_{x}.txt') | |
|
275 | 278 | assert node.last_commit == repo[x] |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -56,7 +55,7 b' class TestRepositoryBase(BackendTestMixi' | |||
|
56 | 55 | self.Backend(path) |
|
57 | 56 | |
|
58 | 57 | def test_has_commits_attribute(self): |
|
59 | self.repo.commit_ids | |
|
58 | assert self.repo.commit_ids | |
|
60 | 59 | |
|
61 | 60 | def test_name(self): |
|
62 | 61 | assert self.repo.name.startswith('vcs-test') |
@@ -81,11 +80,20 b' class TestRepositoryBase(BackendTestMixi' | |||
|
81 | 80 | def test_bookmarks(self): |
|
82 | 81 | assert len(self.repo.bookmarks) == 0 |
|
83 | 82 | |
|
84 | # TODO: Cover two cases: Local repo path, remote URL | |
|
85 | def test_check_url(self): | |
|
83 | def test_check_url_on_path(self): | |
|
86 | 84 | config = Config() |
|
87 | 85 | assert self.Backend.check_url(self.repo.path, config) |
|
88 | 86 | |
|
87 | def test_check_url_on_remote_url(self): | |
|
88 | config = Config() | |
|
89 | url = { | |
|
90 | 'hg': 'https://code.rhodecode.com/rhodecode-vcsserver', | |
|
91 | 'svn': 'https://code.rhodecode.com/svn-doc', | |
|
92 | 'git': 'https://code.rhodecode.com/appenlight', | |
|
93 | }[self.repo.alias] | |
|
94 | ||
|
95 | assert self.Backend.check_url(url, config) | |
|
96 | ||
|
89 | 97 | def test_check_url_invalid(self): |
|
90 | 98 | config = Config() |
|
91 | 99 | with pytest.raises(URLError): |
@@ -189,7 +197,7 b' class TestRepositoryCompare:' | |||
|
189 | 197 | source_repo = vcsbackend.clone_repo(target_repo) |
|
190 | 198 | assert target_repo != source_repo |
|
191 | 199 | |
|
192 | vcsbackend.add_file(source_repo, 'newfile', 'somecontent') | |
|
200 | vcsbackend.add_file(source_repo, b'newfile', b'somecontent') | |
|
193 | 201 | source_commit = source_repo.get_commit() |
|
194 | 202 | |
|
195 | 203 | target_repo.compare( |
@@ -232,7 +240,7 b' class TestRepositoryGetCommonAncestor:' | |||
|
232 | 240 | source_repo = vcsbackend.clone_repo(target_repo) |
|
233 | 241 | assert target_repo != source_repo |
|
234 | 242 | |
|
235 | vcsbackend.add_file(source_repo, 'newfile', 'somecontent') | |
|
243 | vcsbackend.add_file(source_repo, b'newfile', b'somecontent') | |
|
236 | 244 | source_commit = source_repo.get_commit() |
|
237 | 245 | |
|
238 | 246 | expected_ancestor = target_repo[4].raw_id |
@@ -273,10 +281,10 b' class TestRepositoryMerge(object):' | |||
|
273 | 281 | def prepare_for_success(self, vcsbackend): |
|
274 | 282 | self.target_repo = vcsbackend.create_repo(number_of_commits=1) |
|
275 | 283 | self.source_repo = vcsbackend.clone_repo(self.target_repo) |
|
276 | vcsbackend.add_file(self.target_repo, 'README_MERGE1', 'Version 1') | |
|
277 | vcsbackend.add_file(self.source_repo, 'README_MERGE2', 'Version 2') | |
|
284 | vcsbackend.add_file(self.target_repo, b'README_MERGE1', b'Version 1') | |
|
285 | vcsbackend.add_file(self.source_repo, b'README_MERGE2', b'Version 2') | |
|
278 | 286 | imc = self.source_repo.in_memory_commit |
|
279 | imc.add(FileNode('file_x', content=self.source_repo.name)) | |
|
287 | imc.add(FileNode(b'file_x', content=self.source_repo.name)) | |
|
280 | 288 | imc.commit( |
|
281 | 289 | message=u'Automatic commit from repo merge test', |
|
282 | 290 | author=u'Automatic <automatic@rhodecode.com>') |
@@ -292,8 +300,8 b' class TestRepositoryMerge(object):' | |||
|
292 | 300 | def prepare_for_conflict(self, vcsbackend): |
|
293 | 301 | self.target_repo = vcsbackend.create_repo(number_of_commits=1) |
|
294 | 302 | self.source_repo = vcsbackend.clone_repo(self.target_repo) |
|
295 | vcsbackend.add_file(self.target_repo, 'README_MERGE', 'Version 1') | |
|
296 | vcsbackend.add_file(self.source_repo, 'README_MERGE', 'Version 2') | |
|
303 | vcsbackend.add_file(self.target_repo, b'README_MERGE', b'Version 1') | |
|
304 | vcsbackend.add_file(self.source_repo, b'README_MERGE', b'Version 2') | |
|
297 | 305 | self.target_commit = self.target_repo.get_commit() |
|
298 | 306 | self.source_commit = self.source_repo.get_commit() |
|
299 | 307 | # This only works for Git and Mercurial |
@@ -363,10 +371,10 b' class TestRepositoryMerge(object):' | |||
|
363 | 371 | |
|
364 | 372 | # Multiple merges may differ in their commit id. Therefore we set the |
|
365 | 373 | # commit id to `None` before comparing the merge responses. |
|
366 |
new_merge_ref = merge_response.merge_ref. |
|
|
374 | new_merge_ref = merge_response.merge_ref.commit_id = None | |
|
367 | 375 | merge_response.merge_ref = new_merge_ref |
|
368 | 376 | |
|
369 |
new_update_merge_ref = merge_response_update.merge_ref. |
|
|
377 | new_update_merge_ref = merge_response_update.merge_ref.commit_id = None | |
|
370 | 378 | merge_response_update.merge_ref = new_update_merge_ref |
|
371 | 379 | |
|
372 | 380 | assert merge_response == merge_response_update |
@@ -480,8 +488,8 b' class TestRepositoryStrip(BackendTestMix' | |||
|
480 | 488 | 'date': datetime.datetime(2010, 1, 1, 20), |
|
481 | 489 | 'branch': 'master', |
|
482 | 490 | 'added': [ |
|
483 | FileNode('foobar', content='foobar'), | |
|
484 | FileNode('foobar2', content='foobar2'), | |
|
491 | FileNode(b'foobar', content='foobar'), | |
|
492 | FileNode(b'foobar2', content='foobar2'), | |
|
485 | 493 | ], |
|
486 | 494 | }, |
|
487 | 495 | ] |
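A recurring pattern in the hunks above and below is that node paths handed to `FileNode` and `vcsbackend.add_file` become `bytes`, while file content is moved to `bytes` only where the tests need it (some hunks deliberately keep `str` content such as 'FOOBAR - %s' % x). A minimal sketch of the new calling convention, using only names that appear in this diff; the surrounding variable names are illustrative, not copied from any one test:

    from rhodecode.lib.vcs.nodes import FileNode

    # path is bytes now; content may be bytes or, as in the hunk above, still str
    node = FileNode(b'foobar', content=b'foobar')
    changed = FileNode(b'foobar2', content='foobar2')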
@@ -492,7 +500,7 b' class TestRepositoryStrip(BackendTestMix' | |||
|
492 | 500 | 'date': datetime.datetime(2010, 1, 1, 21, x), |
|
493 | 501 | 'branch': 'master', |
|
494 | 502 | 'changed': [ |
|
495 | FileNode('foobar', 'FOOBAR - %s' % x), | |
|
503 | FileNode(b'foobar', 'FOOBAR - %s' % x), | |
|
496 | 504 | ], |
|
497 | 505 | } |
|
498 | 506 | commits.append(commit_data) |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -23,6 +22,7 b' import os' | |||
|
23 | 22 | import mock |
|
24 | 23 | import pytest |
|
25 | 24 | |
|
25 | from rhodecode.lib.str_utils import safe_bytes | |
|
26 | 26 | from rhodecode.tests import SVN_REPO, TEST_DIR, TESTS_TMP_PATH |
|
27 | 27 | from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository |
|
28 | 28 | from rhodecode.lib.vcs.conf import settings |
@@ -83,8 +83,8 b' def test_commit_author(head):' | |||
|
83 | 83 | |
|
84 | 84 | |
|
85 | 85 | @pytest.mark.parametrize("filename, content, mime_type", [ |
|
86 | ('test.txt', 'Text content\n', None), | |
|
87 | ('test.bin', '\0 binary \0', 'application/octet-stream'), | |
|
86 | (b'test.txt', b'Text content\n', None), | |
|
87 | (b'test.bin', b'\0 binary \0', 'application/octet-stream'), | |
|
88 | 88 | ], ids=['text', 'binary']) |
|
89 | 89 | def test_sets_mime_type_correctly(vcsbackend, filename, content, mime_type): |
|
90 | 90 | repo = vcsbackend.create_repo() |
@@ -132,12 +132,14 b' def test_topnode_files_attribute(head):' | |||
|
132 | 132 | topnode.files |
|
133 | 133 | |
|
134 | 134 | |
|
135 | ||
|
136 | ||
|
135 | 137 | @pytest.mark.parametrize("filename, content, branch, mime_type", [ |
|
136 | ('branches/plain/test.txt', 'Text content\n', 'plain', None), | |
|
137 | ('branches/uniçâβe/test.bin', '\0 binary \0', 'uniçâβe', | |
|
138 | 'application/octet-stream'), | |
|
138 | ('branches/plain/test.txt', b'Text content\n', 'plain', None), | |
|
139 | ('branches/uniçâβe/test.bin', b'\0 binary \0', 'uniçâβe', 'application/octet-stream'), | |
|
139 | 140 | ], ids=['text', 'binary']) |
|
140 | 141 | def test_unicode_refs(vcsbackend, filename, content, branch, mime_type): |
|
142 | filename = safe_bytes(filename) | |
|
141 | 143 | repo = vcsbackend.create_repo() |
|
142 | 144 | vcsbackend.ensure_file(filename, content) |
|
143 | 145 | with mock.patch(("rhodecode.lib.vcs.backends.svn.repository" |
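The `filename = safe_bytes(filename)` line added above converts the still-`str` parametrized path into `bytes` before it reaches `ensure_file`. A rough sketch of the assumed behaviour, based only on how the helper is used in this diff (UTF-8 encoding of `str`; `bytes` presumably passed through unchanged):

    from rhodecode.lib.str_utils import safe_bytes

    # assumption: str input is encoded to UTF-8 bytes
    assert safe_bytes('branches/plain/test.txt') == b'branches/plain/test.txt'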
@@ -184,3 +186,10 b' class TestSVNCommit(object):' | |||
|
184 | 186 | node_ids = [commit.raw_id for commit in node.history] |
|
185 | 187 | assert ['18', |
|
186 | 188 | '8'] == node_ids |
|
189 | ||
|
190 | def test_repo_files_content_type(self): | |
|
191 | test_commit = self.repo.get_commit(commit_idx=100) | |
|
192 | for node in test_commit.get_node('/'): | |
|
193 | if node.is_file(): | |
|
194 | assert type(node.content) == bytes | |
|
195 | assert type(node.str_content) == str |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -24,6 +23,7 b' import subprocess' | |||
|
24 | 23 | |
|
25 | 24 | import pytest |
|
26 | 25 | |
|
26 | from rhodecode.lib.vcs.backends.git import GitRepository | |
|
27 | 27 | from rhodecode.lib.vcs.exceptions import VCSError |
|
28 | 28 | from rhodecode.lib.vcs.utils import author_email, author_name |
|
29 | 29 | from rhodecode.lib.vcs.utils.helpers import get_scm |
@@ -85,9 +85,10 b' class TestGetScm(object):' | |||
|
85 | 85 | |
|
86 | 86 | def test_get_two_scms_for_path(self, tmpdir): |
|
87 | 87 | multialias_repo_path = str(tmpdir) |
|
88 | git_default_branch = GitRepository.DEFAULT_BRANCH_NAME | |
|
88 | 89 | |
|
89 | 90 | subprocess.check_call(['hg', 'init', multialias_repo_path]) |
|
90 | subprocess.check_call(['git', 'init', multialias_repo_path]) | |
|
91 | subprocess.check_call(['git', '-c', f'init.defaultBranch={git_default_branch}', 'init', multialias_repo_path]) | |
|
91 | 92 | |
|
92 | 93 | with pytest.raises(VCSError): |
|
93 | 94 | get_scm(multialias_repo_path) |
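The `-c init.defaultBranch=...` flag added above pins the initial branch name that newer Git versions would otherwise take from machine-wide configuration (and warn about), so the test stays deterministic. Roughly, the calls now amount to the following sketch; the path literal is a placeholder, in the test it comes from `tmpdir`:

    import subprocess
    from rhodecode.lib.vcs.backends.git import GitRepository

    multialias_repo_path = '/tmp/multialias-repo'  # placeholder path for illustration
    git_default_branch = GitRepository.DEFAULT_BRANCH_NAME
    subprocess.check_call(['hg', 'init', multialias_repo_path])
    subprocess.check_call(
        ['git', '-c', f'init.defaultBranch={git_default_branch}', 'init', multialias_repo_path])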
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -67,7 +66,7 b' class SCMFetcher(object):' | |||
|
67 | 66 | self.alias = alias |
|
68 | 67 | self.test_repo_path = test_repo_path |
|
69 | 68 | |
|
70 | def setup(self): | |
|
69 | def setup_method(self): | |
|
71 | 70 | if not os.path.isdir(self.test_repo_path): |
|
72 | 71 | self.fetch_repo() |
|
73 | 72 | |
@@ -100,7 +99,7 b' def get_normalized_path(path):' | |||
|
100 | 99 | m = matcher.match(name) |
|
101 | 100 | if not m: |
|
102 | 101 | # Haven't append number yet so return first |
|
103 | newname = '%s-00000' % name | |
|
102 | newname = f'{name}-00000' | |
|
104 | 103 | newpath = os.path.join(dir, newname) |
|
105 | 104 | if ext: |
|
106 | 105 | newpath = '.'.join((newpath, ext)) |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -95,33 +94,31 b' def _add_files(vcs, dest, clone_url=None' | |||
|
95 | 94 | author_str = 'Marcin Kuźminski <me@email.com>' |
|
96 | 95 | |
|
97 | 96 | for i in range(kwargs.get('files_no', 3)): |
|
98 | cmd = """echo 'added_line%s' >> %s""" % (i, added_file) | |
|
97 | cmd = f"""echo 'added_line{i}' >> {added_file}""" | |
|
99 | 98 | Command(cwd).execute(cmd) |
|
100 | 99 | |
|
101 | 100 | if vcs == 'hg': |
|
102 | cmd = """hg commit -m 'committed new %s' -u '%s' %s """ % ( | |
|
103 | i, author_str, added_file | |
|
104 | ) | |
|
101 | cmd = f"""hg commit -m 'committed new {i}' -u '{author_str}' {added_file} """ | |
|
105 | 102 | elif vcs == 'git': |
|
106 | cmd = """%s && git commit -m 'committed new %s' %s""" % ( | |
|
107 | git_ident, i, added_file) | |
|
103 | cmd = f"""{git_ident} && git commit -m 'committed new {i}' {added_file}""" | |
|
108 | 104 | Command(cwd).execute(cmd) |
|
109 | 105 | |
|
110 | 106 | for tag in tags: |
|
111 | 107 | if vcs == 'hg': |
|
112 | 108 | Command(cwd).execute( |
|
113 | """hg tag -m "%s" -u "%s" """ % (tag['commit'], author_str), tag['name']) | |
|
109 | f"""hg tag -m "{tag['commit']}" -u "{author_str}" """, | |
|
110 | tag['name']) | |
|
114 | 111 | elif vcs == 'git': |
|
115 | 112 | if tag['commit']: |
|
116 | 113 | # annotated tag |
|
117 | 114 | _stdout, _stderr = Command(cwd).execute( |
|
118 | """%s && git tag -a %s -m "%s" """ % ( | |
|
119 | git_ident, tag['name'], tag['commit'])) | |
|
115 | f"""{git_ident} && git tag -a {tag['name']} -m "{tag['commit']}" """ | |
|
116 | ) | |
|
120 | 117 | else: |
|
121 | 118 | # lightweight tag |
|
122 | 119 | _stdout, _stderr = Command(cwd).execute( |
|
123 | """%s && git tag %s""" % ( | |
|
124 | git_ident, tag['name'])) | |
|
120 | f"""{git_ident} && git tag {tag['name']}""" | |
|
121 | ) | |
|
125 | 122 | |
|
126 | 123 | |
|
127 | 124 | def _add_files_and_push(vcs, dest, clone_url=None, tags=None, target_branch=None, |
@@ -130,9 +127,8 b' def _add_files_and_push(vcs, dest, clone' | |||
|
130 | 127 | Generate some files, add it to DEST repo and push back |
|
131 | 128 | vcs is git or hg and defines what VCS we want to make those files for |
|
132 | 129 | """ |
|
133 | git_ident = "git config user.name {} && git config user.email {}".format( | |
|
134 | 'Marcin Kuźminski', 'me@email.com') | |
|
135 | cwd = path = jn(dest) | |
|
130 | git_ident = "git config user.name Marcin Kuźminski && git config user.email me@email.com" | |
|
131 | cwd = jn(dest) | |
|
136 | 132 | |
|
137 | 133 | # commit some stuff into this repo |
|
138 | 134 | _add_files(vcs, dest, clone_url, tags, target_branch, new_branch, **kwargs) |
@@ -151,7 +147,7 b' def _add_files_and_push(vcs, dest, clone' | |||
|
151 | 147 | if new_branch: |
|
152 | 148 | maybe_new_branch = '--new-branch' |
|
153 | 149 | stdout, stderr = Command(cwd).execute( |
|
154 | 'hg push --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url) | |
|
150 | 'hg push --traceback --verbose {} -r {} {}'.format(maybe_new_branch, target_branch, clone_url) | |
|
155 | 151 | ) |
|
156 | 152 | elif vcs == 'git': |
|
157 | 153 | stdout, stderr = Command(cwd).execute( |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -32,6 +31,7 b' import tempfile' | |||
|
32 | 31 | import textwrap |
|
33 | 32 | import pytest |
|
34 | 33 | import logging |
|
34 | import requests | |
|
35 | 35 | |
|
36 | 36 | from rhodecode import events |
|
37 | 37 | from rhodecode.lib.str_utils import safe_bytes |
@@ -40,19 +40,33 b' from rhodecode.model.db import Integrati' | |||
|
40 | 40 | from rhodecode.model.integration import IntegrationModel |
|
41 | 41 | from rhodecode.model.db import Repository |
|
42 | 42 | from rhodecode.model.meta import Session |
|
43 | from rhodecode.model.settings import SettingsModel | |
|
44 | 43 | from rhodecode.integrations.types.webhook import WebhookIntegrationType |
|
45 | 44 | |
|
46 | 45 | from rhodecode.tests import GIT_REPO, HG_REPO |
|
47 | 46 | from rhodecode.tests.fixture import Fixture |
|
48 | 47 | from rhodecode.tests.server_utils import RcWebServer |
|
49 | 48 | |
|
49 | ||
|
50 | 50 | REPO_GROUP = 'a_repo_group' |
|
51 | HG_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, HG_REPO) | |
|
52 | GIT_REPO_WITH_GROUP = '%s/%s' % (REPO_GROUP, GIT_REPO) | |
|
51 | HG_REPO_WITH_GROUP = f'{REPO_GROUP}/{HG_REPO}' | |
|
52 | GIT_REPO_WITH_GROUP = f'{REPO_GROUP}/{GIT_REPO}' | |
|
53 | 53 | |
|
54 | 54 | log = logging.getLogger(__name__) |
|
55 | 55 | |
|
56 | # Docker image running httpbin... | |
|
57 | HTTPBIN_DOMAIN = 'http://httpbin' | |
|
58 | HTTPBIN_POST = HTTPBIN_DOMAIN + '/post' | |
|
59 | ||
|
60 | ||
|
61 | def check_httpbin_connection(): | |
|
62 | try: | |
|
63 | response = requests.get(HTTPBIN_DOMAIN) | |
|
64 | return response.status_code == 200 | |
|
65 | except Exception as e: | |
|
66 | print(e) | |
|
67 | ||
|
68 | return False | |
|
69 | ||
|
56 | 70 | |
|
57 | 71 | @pytest.fixture(scope="module") |
|
58 | 72 | def rcextensions(request, db_connection, tmpdir_factory): |
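For context, the webhook test modules later in this changeset drop their local `check_connection()` helpers and import this shared one instead; the skip marker they build from it looks roughly like the sketch below, assembled from the hunks further down. The test function shown is hypothetical; in the real modules the marker is applied to the existing webhook push tests.

    import pytest

    from rhodecode.tests.vcs_operations.conftest import check_httpbin_connection

    connection_available = pytest.mark.skipif(
        not check_httpbin_connection(),
        reason="No outside internet connection available")


    @connection_available
    def test_something_that_posts_to_httpbin():
        ...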
@@ -74,10 +88,9 b' def rcextensions(request, db_connection,' | |||
|
74 | 88 | pytest.fail( |
|
75 | 89 | "Path for rcextensions already exists, please clean up before " |
|
76 | 90 | "test run this path: %s" % (rcextensions_path, )) |
|
77 | return | |
|
78 | ||
|
79 | request.addfinalizer(rcextensions_path.remove) | |
|
80 | init_path.write_binary(safe_bytes(init_content), ensure=True) | |
|
91 | else: | |
|
92 | request.addfinalizer(rcextensions_path.remove) | |
|
93 | init_path.write_binary(safe_bytes(init_content), ensure=True) | |
|
81 | 94 | |
|
82 | 95 | |
|
83 | 96 | @pytest.fixture(scope="module") |
@@ -127,7 +140,7 b' def rc_web_server(' | |||
|
127 | 140 | request, vcsserver_factory, available_port_factory, |
|
128 | 141 | rc_web_server_config_factory, repos, rcextensions): |
|
129 | 142 | """ |
|
130 | Run the web server as a subprocess. with it's own instance of vcsserver | |
|
143 | Run the web server as a subprocess. with its own instance of vcsserver | |
|
131 | 144 | """ |
|
132 | 145 | rcweb_port = available_port_factory() |
|
133 | 146 | log.info('Using rcweb ops test port {}'.format(rcweb_port)) |
@@ -175,55 +188,6 b' def disable_locking(baseapp):' | |||
|
175 | 188 | |
|
176 | 189 | |
|
177 | 190 | @pytest.fixture() |
|
178 | def enable_auth_plugins(request, baseapp, csrf_token): | |
|
179 | """ | |
|
180 | Return a factory object that when called, allows to control which | |
|
181 | authentication plugins are enabled. | |
|
182 | """ | |
|
183 | def _enable_plugins(plugins_list, override=None): | |
|
184 | override = override or {} | |
|
185 | params = { | |
|
186 | 'auth_plugins': ','.join(plugins_list), | |
|
187 | } | |
|
188 | ||
|
189 | # helper translate some names to others | |
|
190 | name_map = { | |
|
191 | 'token': 'authtoken' | |
|
192 | } | |
|
193 | ||
|
194 | for module in plugins_list: | |
|
195 | plugin_name = module.partition('#')[-1] | |
|
196 | if plugin_name in name_map: | |
|
197 | plugin_name = name_map[plugin_name] | |
|
198 | enabled_plugin = 'auth_%s_enabled' % plugin_name | |
|
199 | cache_ttl = 'auth_%s_cache_ttl' % plugin_name | |
|
200 | ||
|
201 | # default params that are needed for each plugin, | |
|
202 | # `enabled` and `cache_ttl` | |
|
203 | params.update({ | |
|
204 | enabled_plugin: True, | |
|
205 | cache_ttl: 0 | |
|
206 | }) | |
|
207 | if override.get: | |
|
208 | params.update(override.get(module, {})) | |
|
209 | ||
|
210 | validated_params = params | |
|
211 | for k, v in validated_params.items(): | |
|
212 | setting = SettingsModel().create_or_update_setting(k, v) | |
|
213 | Session().add(setting) | |
|
214 | Session().commit() | |
|
215 | ||
|
216 | SettingsModel().invalidate_settings_cache() | |
|
217 | ||
|
218 | def cleanup(): | |
|
219 | _enable_plugins(['egg:rhodecode-enterprise-ce#rhodecode']) | |
|
220 | ||
|
221 | request.addfinalizer(cleanup) | |
|
222 | ||
|
223 | return _enable_plugins | |
|
224 | ||
|
225 | ||
|
226 | @pytest.fixture() | |
|
227 | 191 | def fs_repo_only(request, rhodecode_fixtures): |
|
228 | 192 | def fs_repo_fabric(repo_name, repo_type): |
|
229 | 193 | rhodecode_fixtures.create_repo(repo_name, repo_type=repo_type) |
@@ -245,7 +209,7 b' def enable_webhook_push_integration(requ' | |||
|
245 | 209 | Session().add(integration) |
|
246 | 210 | |
|
247 | 211 | settings = dict( |
|
248 | url='http://httpbin.org/post', | |
|
212 | url=HTTPBIN_POST, | |
|
249 | 213 | secret_token='secret', |
|
250 | 214 | username=None, |
|
251 | 215 | password=None, |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -37,7 +36,7 b' from rhodecode.tests.vcs_operations impo' | |||
|
37 | 36 | def rc_web_server_config_modification(): |
|
38 | 37 | return [ |
|
39 | 38 | {'app:main': {'auth_ret_code': '403'}}, |
|
40 | {'app:main': {'auth_ret_code_detection': 'true'}}, | |
|
39 | #{'app:main': {'auth_ret_code_detection': 'true'}}, | |
|
41 | 40 | ] |
|
42 | 41 | |
|
43 | 42 |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -37,7 +36,7 b' from rhodecode.tests.vcs_operations impo' | |||
|
37 | 36 | def rc_web_server_config_modification(): |
|
38 | 37 | return [ |
|
39 | 38 | {'app:main': {'auth_ret_code': '404'}}, |
|
40 | {'app:main': {'auth_ret_code_detection': 'false'}}, | |
|
39 | #{'app:main': {'auth_ret_code_detection': 'false'}}, | |
|
41 | 40 | ] |
|
42 | 41 | |
|
43 | 42 |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -37,7 +36,7 b' from rhodecode.tests.vcs_operations impo' | |||
|
37 | 36 | def rc_web_server_config_modification(): |
|
38 | 37 | return [ |
|
39 | 38 | {'app:main': {'auth_ret_code': '600'}}, |
|
40 | {'app:main': {'auth_ret_code_detection': 'false'}}, | |
|
39 | #{'app:main': {'auth_ret_code_detection': 'false'}}, | |
|
41 | 40 | ] |
|
42 | 41 | |
|
43 | 42 | |
@@ -48,10 +47,10 b' class TestVCSOperationsOnCustomIniConfig' | |||
|
48 | 47 | clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!') |
|
49 | 48 | stdout, stderr = Command('/tmp').execute( |
|
50 | 49 | 'hg clone', clone_url, tmpdir.strpath) |
|
51 | assert 'abort: | |
|
50 | assert 'abort: authorization failed' in stderr | |
|
52 | 51 | |
|
53 | 52 | def test_clone_wrong_credentials_git_ret_code(self, rc_web_server, tmpdir): |
|
54 | 53 | clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!') |
|
55 | 54 | stdout, stderr = Command('/tmp').execute( |
|
56 | 55 | 'git clone', clone_url, tmpdir.strpath) |
|
57 | assert 'The requested URL returned error: 403' in stderr | |
|
56 | assert 'fatal: Authentication failed' in stderr |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -28,7 +27,6 b' Test suite for making push/pull operatio' | |||
|
28 | 27 | """ |
|
29 | 28 | |
|
30 | 29 | import os |
|
31 | import pytest | |
|
32 | 30 | |
|
33 | 31 | from rhodecode.lib.vcs.backends.git.repository import GitRepository |
|
34 | 32 | from rhodecode.lib.vcs.nodes import FileNode |
@@ -55,7 +53,7 b' def test_git_push_with_small_push_buffer' | |||
|
55 | 53 | cmd.execute('git clone', clone_url) |
|
56 | 54 | |
|
57 | 55 | repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name)) |
|
58 | repo.in_memory_commit.add(FileNode('readme.md', content='## Hello')) | |
|
56 | repo.in_memory_commit.add(FileNode(b'readme.md', content=b'## Hello')) | |
|
59 | 57 | repo.in_memory_commit.commit( |
|
60 | 58 | message='Commit on branch Master', |
|
61 | 59 | author='Automatic test <automatic@rhodecode.com>', |
@@ -63,5 +61,5 b' def test_git_push_with_small_push_buffer' | |||
|
63 | 61 | |
|
64 | 62 | repo_cmd = Command(repo.path) |
|
65 | 63 | stdout, stderr = repo_cmd.execute( |
|
66 | 'git -c http.postBuffer=1024 push --verbose {} master'.format(clone_url)) | |
|
64 | f'git -c http.postBuffer=1024 push --verbose {clone_url} master') | |
|
67 | 65 | _check_proper_git_push(stdout, stderr, branch='master') |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -29,6 +28,7 b' Test suite for making push/pull operatio' | |||
|
29 | 28 | |
|
30 | 29 | |
|
31 | 30 | import time |
|
31 | import logging | |
|
32 | 32 | |
|
33 | 33 | import pytest |
|
34 | 34 | |
@@ -39,6 +39,7 b' from rhodecode.model.meta import Session' | |||
|
39 | 39 | from rhodecode.model.repo import RepoModel |
|
40 | 40 | from rhodecode.model.user import UserModel |
|
41 | 41 | from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN) |
|
42 | from rhodecode.tests.utils import assert_message_in_log | |
|
42 | 43 | |
|
43 | 44 | from rhodecode.tests.vcs_operations import ( |
|
44 | 45 | Command, _check_proper_clone, _check_proper_git_push, |
@@ -372,112 +373,3 b' class TestVCSOperations(object):' | |||
|
372 | 373 | stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath) |
|
373 | 374 | cmd.assert_returncode_success() |
|
374 | 375 | _check_proper_clone(stdout, stderr, 'git') |
|
375 | ||
|
376 | def test_clone_by_auth_token( | |
|
377 | self, rc_web_server, tmpdir, user_util, enable_auth_plugins): | |
|
378 | enable_auth_plugins(['egg:rhodecode-enterprise-ce#token', | |
|
379 | 'egg:rhodecode-enterprise-ce#rhodecode']) | |
|
380 | ||
|
381 | user = user_util.create_user() | |
|
382 | token = user.auth_tokens[1] | |
|
383 | ||
|
384 | clone_url = rc_web_server.repo_clone_url( | |
|
385 | HG_REPO, user=user.username, passwd=token) | |
|
386 | ||
|
387 | stdout, stderr = Command('/tmp').execute( | |
|
388 | 'hg clone', clone_url, tmpdir.strpath) | |
|
389 | _check_proper_clone(stdout, stderr, 'hg') | |
|
390 | ||
|
391 | def test_clone_by_auth_token_expired( | |
|
392 | self, rc_web_server, tmpdir, user_util, enable_auth_plugins): | |
|
393 | enable_auth_plugins(['egg:rhodecode-enterprise-ce#token', | |
|
394 | 'egg:rhodecode-enterprise-ce#rhodecode']) | |
|
395 | ||
|
396 | user = user_util.create_user() | |
|
397 | auth_token = AuthTokenModel().create( | |
|
398 | user.user_id, u'test-token', -10, AuthTokenModel.cls.ROLE_VCS) | |
|
399 | token = auth_token.api_key | |
|
400 | ||
|
401 | clone_url = rc_web_server.repo_clone_url( | |
|
402 | HG_REPO, user=user.username, passwd=token) | |
|
403 | ||
|
404 | stdout, stderr = Command('/tmp').execute( | |
|
405 | 'hg clone', clone_url, tmpdir.strpath) | |
|
406 | assert 'abort: authorization failed' in stderr | |
|
407 | ||
|
408 | def test_clone_by_auth_token_bad_role( | |
|
409 | self, rc_web_server, tmpdir, user_util, enable_auth_plugins): | |
|
410 | enable_auth_plugins(['egg:rhodecode-enterprise-ce#token', | |
|
411 | 'egg:rhodecode-enterprise-ce#rhodecode']) | |
|
412 | ||
|
413 | user = user_util.create_user() | |
|
414 | auth_token = AuthTokenModel().create( | |
|
415 | user.user_id, u'test-token', -1, AuthTokenModel.cls.ROLE_API) | |
|
416 | token = auth_token.api_key | |
|
417 | ||
|
418 | clone_url = rc_web_server.repo_clone_url( | |
|
419 | HG_REPO, user=user.username, passwd=token) | |
|
420 | ||
|
421 | stdout, stderr = Command('/tmp').execute( | |
|
422 | 'hg clone', clone_url, tmpdir.strpath) | |
|
423 | assert 'abort: authorization failed' in stderr | |
|
424 | ||
|
425 | def test_clone_by_auth_token_user_disabled( | |
|
426 | self, rc_web_server, tmpdir, user_util, enable_auth_plugins): | |
|
427 | enable_auth_plugins(['egg:rhodecode-enterprise-ce#token', | |
|
428 | 'egg:rhodecode-enterprise-ce#rhodecode']) | |
|
429 | user = user_util.create_user() | |
|
430 | user.active = False | |
|
431 | Session().add(user) | |
|
432 | Session().commit() | |
|
433 | token = user.auth_tokens[1] | |
|
434 | ||
|
435 | clone_url = rc_web_server.repo_clone_url( | |
|
436 | HG_REPO, user=user.username, passwd=token) | |
|
437 | ||
|
438 | stdout, stderr = Command('/tmp').execute( | |
|
439 | 'hg clone', clone_url, tmpdir.strpath) | |
|
440 | assert 'abort: authorization failed' in stderr | |
|
441 | ||
|
442 | def test_clone_by_auth_token_with_scope( | |
|
443 | self, rc_web_server, tmpdir, user_util, enable_auth_plugins): | |
|
444 | enable_auth_plugins(['egg:rhodecode-enterprise-ce#token', | |
|
445 | 'egg:rhodecode-enterprise-ce#rhodecode']) | |
|
446 | user = user_util.create_user() | |
|
447 | auth_token = AuthTokenModel().create( | |
|
448 | user.user_id, u'test-token', -1, AuthTokenModel.cls.ROLE_VCS) | |
|
449 | token = auth_token.api_key | |
|
450 | ||
|
451 | # manually set scope | |
|
452 | auth_token.repo = Repository.get_by_repo_name(HG_REPO) | |
|
453 | Session().add(auth_token) | |
|
454 | Session().commit() | |
|
455 | ||
|
456 | clone_url = rc_web_server.repo_clone_url( | |
|
457 | HG_REPO, user=user.username, passwd=token) | |
|
458 | ||
|
459 | stdout, stderr = Command('/tmp').execute( | |
|
460 | 'hg clone', clone_url, tmpdir.strpath) | |
|
461 | _check_proper_clone(stdout, stderr, 'hg') | |
|
462 | ||
|
463 | def test_clone_by_auth_token_with_wrong_scope( | |
|
464 | self, rc_web_server, tmpdir, user_util, enable_auth_plugins): | |
|
465 | enable_auth_plugins(['egg:rhodecode-enterprise-ce#token', | |
|
466 | 'egg:rhodecode-enterprise-ce#rhodecode']) | |
|
467 | user = user_util.create_user() | |
|
468 | auth_token = AuthTokenModel().create( | |
|
469 | user.user_id, u'test-token', -1, AuthTokenModel.cls.ROLE_VCS) | |
|
470 | token = auth_token.api_key | |
|
471 | ||
|
472 | # manually set scope | |
|
473 | auth_token.repo = Repository.get_by_repo_name(GIT_REPO) | |
|
474 | Session().add(auth_token) | |
|
475 | Session().commit() | |
|
476 | ||
|
477 | clone_url = rc_web_server.repo_clone_url( | |
|
478 | HG_REPO, user=user.username, passwd=token) | |
|
479 | ||
|
480 | stdout, stderr = Command('/tmp').execute( | |
|
481 | 'hg clone', clone_url, tmpdir.strpath) | |
|
482 | assert 'abort: authorization failed' in stderr | |
|
483 |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -58,7 +57,7 b' class TestVCSOperations(object):' | |||
|
58 | 57 | if branch_perm in ['branch.push', 'branch.push_force']: |
|
59 | 58 | _check_proper_hg_push(stdout, stderr) |
|
60 | 59 | else: |
|
61 | msg = "Branch `default` changes rejected by rule `*`=>{}".format(branch_perm) | |
|
60 | msg = f"Branch `default` changes rejected by rule `*`=>{branch_perm}" | |
|
62 | 61 | assert msg in stdout |
|
63 | 62 | assert "transaction abort" in stdout |
|
64 | 63 | |
@@ -89,6 +88,6 b' class TestVCSOperations(object):' | |||
|
89 | 88 | if branch_perm in ['branch.push', 'branch.push_force']: |
|
90 | 89 | _check_proper_git_push(stdout, stderr) |
|
91 | 90 | else: |
|
92 | msg = "Branch `master` changes rejected by rule `*`=>{}".format(branch_perm) | |
|
91 | msg = f"Branch `master` changes rejected by rule `*`=>{branch_perm}" | |
|
93 | 92 | assert msg in stderr |
|
94 | 93 | assert "(pre-receive hook declined)" in stderr |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -47,7 +46,7 b' class TestVCSOperations(object):' | |||
|
47 | 46 | _add_files('hg', tmpdir.strpath, clone_url=clone_url) |
|
48 | 47 | |
|
49 | 48 | stdout, stderr = Command(tmpdir.strpath).execute( |
|
50 | 'hg push --verbose -f {}'.format(clone_url)) | |
|
49 | f'hg push --verbose -f {clone_url}') | |
|
51 | 50 | |
|
52 | 51 | _check_proper_hg_push(stdout, stderr) |
|
53 | 52 | |
@@ -65,7 +64,7 b' class TestVCSOperations(object):' | |||
|
65 | 64 | Command(tmpdir.strpath).execute( |
|
66 | 65 | 'git reset --hard HEAD~2') |
|
67 | 66 | stdout, stderr = Command(tmpdir.strpath).execute( |
|
68 | 'git push -f {} master'.format(clone_url)) | |
|
67 | f'git push -f {clone_url} master') | |
|
69 | 68 | |
|
70 | 69 | assert '(forced update)' in stderr |
|
71 | 70 | |
@@ -90,7 +89,7 b' class TestVCSOperations(object):' | |||
|
90 | 89 | _add_files('hg', tmpdir.strpath, clone_url=clone_url) |
|
91 | 90 | |
|
92 | 91 | stdout, stderr = Command(tmpdir.strpath).execute( |
|
93 | 'hg push --verbose -f {}'.format(clone_url)) | |
|
92 | f'hg push --verbose -f {clone_url}') | |
|
94 | 93 | |
|
95 | 94 | assert "Branch `default` changes rejected by rule `*`=>branch.push" in stdout |
|
96 | 95 | assert "FORCE PUSH FORBIDDEN" in stdout |
@@ -120,3 +119,4 b' class TestVCSOperations(object):' | |||
|
120 | 119 | assert "Branch `master` changes rejected by rule `*`=>branch.push" in stderr |
|
121 | 120 | assert "FORCE PUSH FORBIDDEN" in stderr |
|
122 | 121 | assert "(pre-receive hook declined)" in stderr |
|
122 |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -28,24 +27,15 b' Test suite for making push/pull operatio' | |||
|
28 | 27 | """ |
|
29 | 28 | |
|
30 | 29 | import pytest |
|
31 | import requests | |
|
30 | ||
|
32 | 31 | |
|
33 | 32 | from rhodecode.tests import GIT_REPO, HG_REPO |
|
34 | 33 | from rhodecode.tests.vcs_operations import Command, _add_files_and_push |
|
35 | ||
|
36 | ||
|
37 | def check_connection(): | |
|
38 | try: | |
|
39 | response = requests.get('http://httpbin.org') | |
|
40 | return response.status_code == 200 | |
|
41 | except Exception as e: | |
|
42 | print(e) | |
|
43 | ||
|
44 | return False | |
|
34 | from rhodecode.tests.vcs_operations.conftest import check_httpbin_connection | |
|
45 | 35 | |
|
46 | 36 | |
|
47 | 37 | connection_available = pytest.mark.skipif( |
|
48 | not check_connection(), reason="No outside internet connection available") | |
|
38 | not check_httpbin_connection(), reason="No outside internet connection available") | |
|
49 | 39 | |
|
50 | 40 | |
|
51 | 41 | @pytest.mark.usefixtures("baseapp", "enable_webhook_push_integration") |
@@ -54,8 +44,7 b' class TestVCSOperationsOnCustomIniConfig' | |||
|
54 | 44 | def test_push_with_webhook_hg(self, rc_web_server, tmpdir): |
|
55 | 45 | clone_url = rc_web_server.repo_clone_url(HG_REPO) |
|
56 | 46 | |
|
57 | stdout, stderr = Command('/tmp').execute( | |
|
58 | 'hg clone', clone_url, tmpdir.strpath) | |
|
47 | Command('/tmp').execute('hg clone', clone_url, tmpdir.strpath) | |
|
59 | 48 | |
|
60 | 49 | push_url = rc_web_server.repo_clone_url(HG_REPO) |
|
61 | 50 | _add_files_and_push('hg', tmpdir.strpath, clone_url=push_url) |
@@ -65,11 +54,10 b' class TestVCSOperationsOnCustomIniConfig' | |||
|
65 | 54 | assert "executing task TASK:<@task: rhodecode.integrations.types.webhook.post_to_webhook" in rc_log |
|
66 | 55 | assert "handling event repo-push with integration <rhodecode.integrations.types.webhook.WebhookIntegrationType" in rc_log |
|
67 | 56 | |
|
68 | def test_push_with_webhook_g | |
|
57 | def test_push_with_webhook_git(self, rc_web_server, tmpdir): | |
|
69 | 58 | clone_url = rc_web_server.repo_clone_url(GIT_REPO) |
|
70 | 59 | |
|
71 | stdout, stderr = Command('/tmp').execute( | |
|
72 | 'git clone', clone_url, tmpdir.strpath) | |
|
60 | Command('/tmp').execute('git clone', clone_url, tmpdir.strpath) | |
|
73 | 61 | |
|
74 | 62 | push_url = rc_web_server.repo_clone_url(GIT_REPO) |
|
75 | 63 | _add_files_and_push('git', tmpdir.strpath, clone_url=push_url) |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -27,7 +26,6 b' from rhodecode.lib.vcs.backends.hg.repos' | |||
|
27 | 26 | from rhodecode.lib.vcs.nodes import FileNode |
|
28 | 27 | from rhodecode.model.db import Repository |
|
29 | 28 | from rhodecode.model.meta import Session |
|
30 | from rhodecode.tests import GIT_REPO, HG_REPO | |
|
31 | 29 | |
|
32 | 30 | from rhodecode.tests.vcs_operations import ( |
|
33 | 31 | Command, _check_proper_clone, _check_proper_git_push, _check_proper_hg_push, |
@@ -47,7 +45,7 b' class TestVCSOperationsSpecial(object):' | |||
|
47 | 45 | |
|
48 | 46 | repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name)) |
|
49 | 47 | repo._checkout('test', create=True) |
|
50 | repo.in_memory_commit.add(FileNode('file', content='')) | |
|
48 | repo.in_memory_commit.add(FileNode(b'file', content=b'some-content')) | |
|
51 | 49 | repo.in_memory_commit.commit( |
|
52 | 50 | message='Commit on branch test', |
|
53 | 51 | author='Automatic test <automatic@rhodecode.com>', |
@@ -89,7 +87,7 b' class TestVCSOperationsSpecial(object):' | |||
|
89 | 87 | cmd.execute('git clone', clone_url) |
|
90 | 88 | |
|
91 | 89 | repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name)) |
|
92 | repo.in_memory_commit.add(FileNode('readme.md', content='## Hello')) | |
|
90 | repo.in_memory_commit.add(FileNode(b'readme.md', content=b'## Hello')) | |
|
93 | 91 | repo.in_memory_commit.commit( |
|
94 | 92 | message='Commit on branch Master', |
|
95 | 93 | author='Automatic test <automatic@rhodecode.com>', |
@@ -99,14 +97,13 b' class TestVCSOperationsSpecial(object):' | |||
|
99 | 97 | stdout, stderr = repo_cmd.execute('git push --verbose origin master') |
|
100 | 98 | _check_proper_git_push(stdout, stderr, branch='master') |
|
101 | 99 | |
|
102 | ref = '{}/{}/pull-request/new?branch=master'.format( | |
|
103 | rc_web_server.host_url(), empty_repo.repo_name) | |
|
104 | assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr | |
|
100 | ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=master' | |
|
101 | assert f'remote: RhodeCode: open pull request link: {ref}' in stderr | |
|
105 | 102 | assert 'remote: RhodeCode: push completed' in stderr |
|
106 | 103 | |
|
107 | 104 | # push on the same branch |
|
108 | 105 | repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name)) |
|
109 | repo.in_memory_commit.add(FileNode('setup.py', content='print\n')) | |
|
106 | repo.in_memory_commit.add(FileNode(b'setup.py', content=b'print\n')) | |
|
110 | 107 | repo.in_memory_commit.commit( |
|
111 | 108 | message='Commit2 on branch Master', |
|
112 | 109 | author='Automatic test2 <automatic@rhodecode.com>', |
@@ -116,12 +113,13 b' class TestVCSOperationsSpecial(object):' | |||
|
116 | 113 | stdout, stderr = repo_cmd.execute('git push --verbose origin master') |
|
117 | 114 | _check_proper_git_push(stdout, stderr, branch='master') |
|
118 | 115 | |
|
119 | assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr | |
|
116 | assert f'remote: RhodeCode: open pull request link: {ref}' in stderr | |
|
120 | 117 | assert 'remote: RhodeCode: push completed' in stderr |
|
121 | 118 | |
|
122 | 119 | # new Branch |
|
123 | 120 | repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name)) |
|
124 | repo.in_memory_commit.add(FileNode('feature1.py', content='## Hello world')) | |
|
121 | repo._create_branch('feature', repo.commit_ids[1]) | |
|
122 | repo.in_memory_commit.add(FileNode(b'feature1.py', content=b'## Hello world')) | |
|
125 | 123 | repo.in_memory_commit.commit( |
|
126 | 124 | message='Commit on branch feature', |
|
127 | 125 | author='Automatic test <automatic@rhodecode.com>', |
@@ -131,9 +129,8 b' class TestVCSOperationsSpecial(object):' | |||
|
131 | 129 | stdout, stderr = repo_cmd.execute('git push --verbose origin feature') |
|
132 | 130 | _check_proper_git_push(stdout, stderr, branch='feature') |
|
133 | 131 | |
|
134 | ref = '{}/{}/pull-request/new?branch=feature'.format( | |
|
135 | rc_web_server.host_url(), empty_repo.repo_name) | |
|
136 | assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr | |
|
132 | ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=feature' | |
|
133 | assert f'remote: RhodeCode: open pull request link: {ref}' in stderr | |
|
137 | 134 | assert 'remote: RhodeCode: push completed' in stderr |
|
138 | 135 | |
|
139 | 136 | def test_hg_push_shows_pull_request_refs(self, backend_hg, rc_web_server, tmpdir): |
@@ -145,7 +142,7 b' class TestVCSOperationsSpecial(object):' | |||
|
145 | 142 | cmd.execute('hg clone', clone_url) |
|
146 | 143 | |
|
147 | 144 | repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name)) |
|
148 | repo.in_memory_commit.add(FileNode('readme.md', content='## Hello')) | |
|
145 | repo.in_memory_commit.add(FileNode(b'readme.md', content=b'## Hello')) | |
|
149 | 146 | repo.in_memory_commit.commit( |
|
150 | 147 | message=u'Commit on branch default', |
|
151 | 148 | author=u'Automatic test', |
@@ -157,14 +154,13 b' class TestVCSOperationsSpecial(object):' | |||
|
157 | 154 | stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url) |
|
158 | 155 | _check_proper_hg_push(stdout, stderr, branch='default') |
|
159 | 156 | |
|
160 | ref = '{}/{}/pull-request/new?branch=default'.format( | |
|
161 | rc_web_server.host_url(), empty_repo.repo_name) | |
|
162 | assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout | |
|
157 | ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default' | |
|
158 | assert f'remote: RhodeCode: open pull request link: {ref}' in stdout | |
|
163 | 159 | assert 'remote: RhodeCode: push completed' in stdout |
|
164 | 160 | |
|
165 | 161 | # push on the same branch |
|
166 | 162 | repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name)) |
|
167 | repo.in_memory_commit.add(FileNode('setup.py', content='print\n')) | |
|
163 | repo.in_memory_commit.add(FileNode(b'setup.py', content=b'print\n')) | |
|
168 | 164 | repo.in_memory_commit.commit( |
|
169 | 165 | message=u'Commit2 on branch default', |
|
170 | 166 | author=u'Automatic test2', |
@@ -176,12 +172,12 b' class TestVCSOperationsSpecial(object):' | |||
|
176 | 172 | stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url) |
|
177 | 173 | _check_proper_hg_push(stdout, stderr, branch='default') |
|
178 | 174 | |
|
179 | assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout | |
|
175 | assert f'remote: RhodeCode: open pull request link: {ref}' in stdout | |
|
180 | 176 | assert 'remote: RhodeCode: push completed' in stdout |
|
181 | 177 | |
|
182 | 178 | # new Branch |
|
183 | 179 | repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name)) |
|
184 | repo.in_memory_commit.add(FileNode('feature1.py', content='## Hello world')) | |
|
180 | repo.in_memory_commit.add(FileNode(b'feature1.py', content=b'## Hello world')) | |
|
185 | 181 | repo.in_memory_commit.commit( |
|
186 | 182 | message=u'Commit on branch feature', |
|
187 | 183 | author=u'Automatic test', |
@@ -193,9 +189,8 b' class TestVCSOperationsSpecial(object):' | |||
|
193 | 189 | stdout, stderr = repo_cmd.execute('hg push --new-branch --verbose', clone_url) |
|
194 | 190 | _check_proper_hg_push(stdout, stderr, branch='feature') |
|
195 | 191 | |
|
196 | ref = '{}/{}/pull-request/new?branch=feature'.format( | |
|
197 | rc_web_server.host_url(), empty_repo.repo_name) | |
|
198 | assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout | |
|
192 | ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=feature' | |
|
193 | assert f'remote: RhodeCode: open pull request link: {ref}' in stdout | |
|
199 | 194 | assert 'remote: RhodeCode: push completed' in stdout |
|
200 | 195 | |
|
201 | 196 | def test_hg_push_shows_pull_request_refs_book(self, backend_hg, rc_web_server, tmpdir): |
@@ -207,7 +202,7 b' class TestVCSOperationsSpecial(object):' | |||
|
207 | 202 | cmd.execute('hg clone', clone_url) |
|
208 | 203 | |
|
209 | 204 | repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name)) |
|
210 | repo.in_memory_commit.add(FileNode('readme.md', content='## Hello')) | |
|
205 | repo.in_memory_commit.add(FileNode(b'readme.md', content=b'## Hello')) | |
|
211 | 206 | repo.in_memory_commit.commit( |
|
212 | 207 | message=u'Commit on branch default', |
|
213 | 208 | author=u'Automatic test', |
@@ -219,14 +214,13 b' class TestVCSOperationsSpecial(object):' | |||
|
219 | 214 | stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url) |
|
220 | 215 | _check_proper_hg_push(stdout, stderr, branch='default') |
|
221 | 216 | |
|
222 | ref = '{}/{}/pull-request/new?branch=default'.format( | |
|
223 | rc_web_server.host_url(), empty_repo.repo_name) | |
|
224 | assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout | |
|
217 | ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default' | |
|
218 | assert f'remote: RhodeCode: open pull request link: {ref}' in stdout | |
|
225 | 219 | assert 'remote: RhodeCode: push completed' in stdout |
|
226 | 220 | |
|
227 | 221 | # add bookmark |
|
228 | 222 | repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name)) |
|
229 | repo.in_memory_commit.add(FileNode('setup.py', content='print\n')) | |
|
223 | repo.in_memory_commit.add(FileNode(b'setup.py', content=b'print\n')) | |
|
230 | 224 | repo.in_memory_commit.commit( |
|
231 | 225 | message=u'Commit2 on branch default', |
|
232 | 226 | author=u'Automatic test2', |
@@ -238,12 +232,10 b' class TestVCSOperationsSpecial(object):' | |||
|
238 | 232 | stdout, stderr = repo_cmd.execute('hg push -B feature2 --verbose', clone_url) |
|
239 | 233 | _check_proper_hg_push(stdout, stderr, branch='default') |
|
240 | 234 | |
|
241 | ref = '{}/{}/pull-request/new?branch=default'.format( | |
|
242 | rc_web_server.host_url(), empty_repo.repo_name) | |
|
243 | assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout | |
|
244 | ref = '{}/{}/pull-request/new?bookmark=feature2'.format( | |
|
245 | rc_web_server.host_url(), empty_repo.repo_name) | |
|
246 | assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout | |
|
235 | ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?branch=default' | |
|
236 | assert f'remote: RhodeCode: open pull request link: {ref}' in stdout | |
|
237 | ref = f'{rc_web_server.host_url()}/{empty_repo.repo_name}/pull-request/new?bookmark=feature2' | |
|
238 | assert f'remote: RhodeCode: open pull request link: {ref}' in stdout | |
|
247 | 239 | assert 'remote: RhodeCode: push completed' in stdout |
|
248 | 240 | assert 'exporting bookmark feature2' in stdout |
|
249 | 241 |
@@ -1,4 +1,3 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
@@ -28,24 +27,14 b' Test suite for making push/pull operatio' | |||
|
28 | 27 | """ |
|
29 | 28 | |
|
30 | 29 | import pytest |
|
31 | import requests | |
|
32 | 30 | |
|
33 | 31 | from rhodecode.tests import GIT_REPO, HG_REPO |
|
34 | 32 | from rhodecode.tests.vcs_operations import Command, _add_files_and_push |
|
35 | ||
|
36 | ||
|
37 | def check_connection(): | |
|
38 | try: | |
|
39 | response = requests.get('http://httpbin.org') | |
|
40 | return response.status_code == 200 | |
|
41 | except Exception as e: | |
|
42 | print(e) | |
|
43 | ||
|
44 | return False | |
|
33 | from rhodecode.tests.vcs_operations.conftest import check_httpbin_connection | |
|
45 | 34 | |
|
46 | 35 | |
|
47 | 36 | connection_available = pytest.mark.skipif( |
|
48 | not check_connection(), reason="No outside internet connection available") | |
|
37 | not check_httpbin_connection(), reason="No outside internet connection available") | |
|
49 | 38 | |
|
50 | 39 | |
|
51 | 40 | @pytest.mark.usefixtures( |
@@ -65,7 +54,7 b' class TestVCSOperationsOnCustomIniConfig' | |||
|
65 | 54 | |
|
66 | 55 | rc_log = rc_web_server.get_rc_log() |
|
67 | 56 | assert 'ERROR' not in rc_log |
|
68 | assert "'name': u'v1.0.0'," in rc_log | |
|
57 | assert "{'name': 'v1.0.0'," in rc_log | |
|
69 | 58 | |
|
70 | 59 | def test_push_tag_with_commit_git( |
|
71 | 60 | self, rc_web_server, tmpdir): |
@@ -80,7 +69,7 b' class TestVCSOperationsOnCustomIniConfig' | |||
|
80 | 69 | |
|
81 | 70 | rc_log = rc_web_server.get_rc_log() |
|
82 | 71 | assert 'ERROR' not in rc_log |
|
83 | assert "'name': u'v1.0.0'," in rc_log | |
|
72 | assert "{'name': 'v1.0.0'," in rc_log | |
|
84 | 73 | |
|
85 | 74 | def test_push_tag_with_no_commit_git( |
|
86 | 75 | self, rc_web_server, tmpdir): |
@@ -95,4 +84,4 b' class TestVCSOperationsOnCustomIniConfig' | |||
|
95 | 84 | |
|
96 | 85 | rc_log = rc_web_server.get_rc_log() |
|
97 | 86 | assert 'ERROR' not in rc_log |
|
98 | assert "'name': u'v1.0.0'," in rc_log | |
|
87 | assert "{'name': 'v1.0.0'," in rc_log |