pytest: restructure fixtures/plugins to fix problems with pkg_resources and "Can't perform this operation for unregistered loader type" errors...
super-admin
r4986:8782a2c5 default
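As context for the diff below: the commit replaces the old monolithic rhodecode/tests/plugin.py with a slim root conftest.py plus dedicated fixture modules. A minimal sketch of the pattern (illustrative, not a verbatim excerpt; the mechanism described is an assumption based on the commit message) — the root conftest only registers the fixture modules as pytest plugins, so pytest imports them directly instead of through loaders that pkg_resources cannot handle:

# conftest.py -- illustrative sketch of the layout introduced by this commit
pytest_plugins = [
    "rhodecode.tests.fixture_mods.fixture_pyramid",
    "rhodecode.tests.fixture_mods.fixture_utils",
]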
@@ -0,0 +1,199 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import pytest
22 from rhodecode.lib import ext_json
23
24
25 pytest_plugins = [
26 "rhodecode.tests.fixture_mods.fixture_pyramid",
27 "rhodecode.tests.fixture_mods.fixture_utils",
28 ]
29
30
31 def pytest_configure(config):
32 from rhodecode.config import patches
33
34
35 def pytest_addoption(parser):
36
37 def _parse_json(value):
38 return ext_json.str_json(value) if value else None
39
40 def _split_comma(value):
41 return value.split(',')
42
43 parser.addoption(
44 '--keep-tmp-path', action='store_true',
45 help="Keep the test temporary directories")
46 parser.addoption(
47 '--backends', action='store', type=_split_comma,
48 default=['git', 'hg', 'svn'],
49 help="Select which backends to test for backend specific tests.")
50 parser.addoption(
51 '--dbs', action='store', type=_split_comma,
52 default=['sqlite'],
53 help="Select which database to test for database specific tests. "
54 "Possible options are sqlite,postgres,mysql")
55 parser.addoption(
56 '--appenlight', '--ae', action='store_true',
57 help="Track statistics in appenlight.")
58 parser.addoption(
59 '--appenlight-api-key', '--ae-key',
60 help="API key for Appenlight.")
61 parser.addoption(
62 '--appenlight-url', '--ae-url',
63 default="https://ae.rhodecode.com",
64 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
65 parser.addoption(
66 '--sqlite-connection-string', action='store',
67 default='', help="Connection string for the dbs tests with SQLite")
68 parser.addoption(
69 '--postgres-connection-string', action='store',
70 default='', help="Connection string for the dbs tests with Postgres")
71 parser.addoption(
72 '--mysql-connection-string', action='store',
73 default='', help="Connection string for the dbs tests with MySQL")
74 parser.addoption(
75 '--repeat', type=int, default=100,
76 help="Number of repetitions in performance tests.")
77
78 parser.addoption(
79 '--test-loglevel', dest='test_loglevel',
80 help="Set default Logging level for tests, critical(default), error, warn , info, debug")
81 group = parser.getgroup('pylons')
82 group.addoption(
83 '--with-pylons', dest='pyramid_config',
84 help="Set up a Pylons environment with the specified config file.")
85 group.addoption(
86 '--ini-config-override', action='store', type=_parse_json,
87 default=None, dest='pyramid_config_override', help=(
88 "Overrides the .ini file settings. Should be specified in JSON"
89 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
90 )
91 )
92 parser.addini(
93 'pyramid_config',
94 "Set up a Pyramid environment with the specified config file.")
95
96 vcsgroup = parser.getgroup('vcs')
97 vcsgroup.addoption(
98 '--without-vcsserver', dest='with_vcsserver', action='store_false',
99 help="Do not start the VCSServer in a background process.")
100 vcsgroup.addoption(
101 '--with-vcsserver-http', dest='vcsserver_config_http',
102 help="Start the HTTP VCSServer with the specified config file.")
103 vcsgroup.addoption(
104 '--vcsserver-protocol', dest='vcsserver_protocol',
105 help="Start the VCSServer with HTTP protocol support.")
106 vcsgroup.addoption(
107 '--vcsserver-config-override', action='store', type=_parse_json,
108 default=None, dest='vcsserver_config_override', help=(
109 "Overrides the .ini file settings for the VCSServer. "
110 "Should be specified in JSON "
111 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
112 )
113 )
114 vcsgroup.addoption(
115 '--vcsserver-port', action='store', type=int,
116 default=None, help=(
117 "Allows to set the port of the vcsserver. Useful when testing "
118 "against an already running server and random ports cause "
119 "trouble."))
120 parser.addini(
121 'vcsserver_config_http',
122 "Start the HTTP VCSServer with the specified config file.")
123 parser.addini(
124 'vcsserver_protocol',
125 "Start the VCSServer with HTTP protocol support.")
126
127
128 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
129 def pytest_runtest_makereport(item, call):
130 """
131 Adding the remote traceback if the exception has this information.
132
133 VCSServer attaches this information as the attribute `_vcs_server_traceback`
134 to the exception instance.
135 """
136 outcome = yield
137 report = outcome.get_result()
138 if call.excinfo:
139 exc = call.excinfo.value
140 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
141
142 if vcsserver_traceback:
143 section = 'VCSServer remote traceback ' + report.when
144 report.sections.append((section, vcsserver_traceback))
145
146
147 def pytest_collection_modifyitems(session, config, items):
148 # skip nose-style non-test items (__test__ = False), kept for the nose to pytest transition
149 remaining = [
150 i for i in items if getattr(i.obj, '__test__', True)]
151 items[:] = remaining
152
153 # NOTE(marcink): custom test ordering, db tests and vcs tests are the slowest and should
154 # be executed at the end for faster test feedback
155 def sorter(item):
156 pos = 0
157 key = item._nodeid
158 if key.startswith('rhodecode/tests/database'):
159 pos = 1
160 elif key.startswith('rhodecode/tests/vcs_operations'):
161 pos = 2
162
163 return pos
164
165 items.sort(key=sorter)
166
167
168 def get_backends_from_metafunc(metafunc):
169 requested_backends = set(metafunc.config.getoption('--backends'))
170 backend_mark = metafunc.definition.get_closest_marker('backends')
171 if backend_mark:
172 # Supported backends by this test function, created from
173 # pytest.mark.backends
174 backends = backend_mark.args
175 elif hasattr(metafunc.cls, 'backend_alias'):
176 # Support class attribute "backend_alias", this is mainly
177 # for legacy reasons for tests not yet using pytest.mark.backends
178 backends = [metafunc.cls.backend_alias]
179 else:
180 backends = metafunc.config.getoption('--backends')
181 return requested_backends.intersection(backends)
182
183
184 def pytest_generate_tests(metafunc):
185
186 # Support test generation based on --backend parameter
187 if 'backend_alias' in metafunc.fixturenames:
188 backends = get_backends_from_metafunc(metafunc)
189 scope = None
190 if not backends:
191 pytest.skip("Not enabled for any of selected backends")
192
193 metafunc.parametrize('backend_alias', backends, scope=scope)
194
195 backend_mark = metafunc.definition.get_closest_marker('backends')
196 if backend_mark:
197 backends = get_backends_from_metafunc(metafunc)
198 if not backends:
199 pytest.skip("Not enabled for any of selected backends")
1 NO CONTENT: new file 100644
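The next file shows the knock-on change in the test modules: shared helpers are now imported from rhodecode.tests.fixture_mods.fixture_utils instead of the removed rhodecode.tests.plugin. A hedged usage sketch (expected_repo_url is a hypothetical helper, not part of the commit):

from rhodecode.tests.fixture_mods.fixture_utils import plain_http_host_only_stub

def expected_repo_url(repo_name):
    # hypothetical helper: build the URL the API should return for a repo,
    # based on the stubbed HTTP host used throughout the test run
    return 'http://{}/{}'.format(plain_http_host_only_stub(), repo_name)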
@@ -1,210 +1,210 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.model.repo import RepoModel
25 25 from rhodecode.model.scm import ScmModel
26 26 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
27 27 from rhodecode.api.tests.utils import (
28 28 build_data, api_call, assert_error, assert_ok, crash, jsonify)
29 29 from rhodecode.tests.fixture import Fixture
30 from rhodecode.tests.plugin import plain_http_host_only_stub
30 from rhodecode.tests.fixture_mods.fixture_utils import plain_http_host_only_stub
31 31
32 32 fixture = Fixture()
33 33
34 34 UPDATE_REPO_NAME = 'api_update_me'
35 35
36 36
37 37 class SAME_AS_UPDATES(object):
38 38 """ Constant used for tests below """
39 39
40 40
41 41 @pytest.mark.usefixtures("testuser_api", "app")
42 42 class TestApiUpdateRepo(object):
43 43
44 44 @pytest.mark.parametrize("updates, expected", [
45 45 ({'owner': TEST_USER_REGULAR_LOGIN},
46 46 SAME_AS_UPDATES),
47 47
48 48 ({'description': 'new description'},
49 49 SAME_AS_UPDATES),
50 50
51 51 ({'clone_uri': 'http://foo.com/repo'},
52 52 SAME_AS_UPDATES),
53 53
54 54 ({'clone_uri': None},
55 55 {'clone_uri': ''}),
56 56
57 57 ({'clone_uri': ''},
58 58 {'clone_uri': ''}),
59 59
60 60 ({'clone_uri': 'http://example.com/repo_pull'},
61 61 {'clone_uri': 'http://example.com/repo_pull'}),
62 62
63 63 ({'push_uri': ''},
64 64 {'push_uri': ''}),
65 65
66 66 ({'push_uri': 'http://example.com/repo_push'},
67 67 {'push_uri': 'http://example.com/repo_push'}),
68 68
69 69 ({'landing_rev': None}, # auto-updated based on type of repo
70 70 {'landing_rev': [None, None]}),
71 71
72 72 ({'enable_statistics': True},
73 73 SAME_AS_UPDATES),
74 74
75 75 ({'enable_locking': True},
76 76 SAME_AS_UPDATES),
77 77
78 78 ({'enable_downloads': True},
79 79 SAME_AS_UPDATES),
80 80
81 81 ({'repo_name': 'new_repo_name'},
82 82 {
83 83 'repo_name': 'new_repo_name',
84 84 'url': 'http://{}/new_repo_name'.format(plain_http_host_only_stub())
85 85 }),
86 86
87 87 ({'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
88 88 '_group': 'test_group_for_update'},
89 89 {
90 90 'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
91 91 'url': 'http://{}/test_group_for_update/{}'.format(
92 92 plain_http_host_only_stub(), UPDATE_REPO_NAME)
93 93 }),
94 94 ])
95 95 def test_api_update_repo(self, updates, expected, backend):
96 96 repo_name = UPDATE_REPO_NAME
97 97 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
98 98 if updates.get('_group'):
99 99 fixture.create_repo_group(updates['_group'])
100 100
101 101 if 'landing_rev' in updates:
102 102 default_landing_ref, _lbl = ScmModel.backend_landing_ref(backend.alias)
103 103 _type, _name = default_landing_ref.split(':')
104 104 updates['landing_rev'] = default_landing_ref
105 105 expected['landing_rev'] = [_type, _name]
106 106
107 107 expected_api_data = repo.get_api_data(include_secrets=True)
108 108 if expected is SAME_AS_UPDATES:
109 109 expected_api_data.update(updates)
110 110 else:
111 111 expected_api_data.update(expected)
112 112
113 113 id_, params = build_data(
114 114 self.apikey, 'update_repo', repoid=repo_name, **updates)
115 115
116 116 with mock.patch('rhodecode.model.validation_schema.validators.url_validator'):
117 117 response = api_call(self.app, params)
118 118
119 119 if updates.get('repo_name'):
120 120 repo_name = updates['repo_name']
121 121
122 122 try:
123 123 expected = {
124 124 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
125 125 'repository': jsonify(expected_api_data)
126 126 }
127 127 assert_ok(id_, expected, given=response.body)
128 128 finally:
129 129 fixture.destroy_repo(repo_name)
130 130 if updates.get('_group'):
131 131 fixture.destroy_repo_group(updates['_group'])
132 132
133 133 def test_api_update_repo_fork_of_field(self, backend):
134 134 master_repo = backend.create_repo()
135 135 repo = backend.create_repo()
136 136 updates = {
137 137 'fork_of': master_repo.repo_name,
138 138 'fork_of_id': master_repo.repo_id
139 139 }
140 140 expected_api_data = repo.get_api_data(include_secrets=True)
141 141 expected_api_data.update(updates)
142 142
143 143 id_, params = build_data(
144 144 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
145 145 response = api_call(self.app, params)
146 146 expected = {
147 147 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
148 148 'repository': jsonify(expected_api_data)
149 149 }
150 150 assert_ok(id_, expected, given=response.body)
151 151 result = response.json['result']['repository']
152 152 assert result['fork_of'] == master_repo.repo_name
153 153 assert result['fork_of_id'] == master_repo.repo_id
154 154
155 155 def test_api_update_repo_fork_of_not_found(self, backend):
156 156 master_repo_name = 'fake-parent-repo'
157 157 repo = backend.create_repo()
158 158 updates = {
159 159 'fork_of': master_repo_name
160 160 }
161 161 id_, params = build_data(
162 162 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
163 163 response = api_call(self.app, params)
164 164 expected = {
165 165 'repo_fork_of': 'Fork with id `{}` does not exists'.format(
166 166 master_repo_name)}
167 167 assert_error(id_, expected, given=response.body)
168 168
169 169 def test_api_update_repo_with_repo_group_not_existing(self):
170 170 repo_name = 'admin_owned'
171 171 fake_repo_group = 'test_group_for_update'
172 172 fixture.create_repo(repo_name)
173 173 updates = {'repo_name': '{}/{}'.format(fake_repo_group, repo_name)}
174 174 id_, params = build_data(
175 175 self.apikey, 'update_repo', repoid=repo_name, **updates)
176 176 response = api_call(self.app, params)
177 177 try:
178 178 expected = {
179 179 'repo_group': 'Repository group `{}` does not exist'.format(fake_repo_group)
180 180 }
181 181 assert_error(id_, expected, given=response.body)
182 182 finally:
183 183 fixture.destroy_repo(repo_name)
184 184
185 185 def test_api_update_repo_regular_user_not_allowed(self):
186 186 repo_name = 'admin_owned'
187 187 fixture.create_repo(repo_name)
188 188 updates = {'active': False}
189 189 id_, params = build_data(
190 190 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
191 191 response = api_call(self.app, params)
192 192 try:
193 193 expected = 'repository `%s` does not exist' % (repo_name,)
194 194 assert_error(id_, expected, given=response.body)
195 195 finally:
196 196 fixture.destroy_repo(repo_name)
197 197
198 198 @mock.patch.object(RepoModel, 'update', crash)
199 199 def test_api_update_repo_exception_occurred(self, backend):
200 200 repo_name = UPDATE_REPO_NAME
201 201 fixture.create_repo(repo_name, repo_type=backend.alias)
202 202 id_, params = build_data(
203 203 self.apikey, 'update_repo', repoid=repo_name,
204 204 owner=TEST_USER_ADMIN_LOGIN,)
205 205 response = api_call(self.app, params)
206 206 try:
207 207 expected = 'failed to update repo `%s`' % (repo_name,)
208 208 assert_error(id_, expected, given=response.body)
209 209 finally:
210 210 fixture.destroy_repo(repo_name)
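The API tests above are driven by the parametrized `backend` fixture together with the `--backends` option and `pytest.mark.backends` handling defined in the new conftest. A minimal sketch of a backend-specific test written against these fixtures (the test body is a hypothetical example, not taken from the commit):

import pytest

@pytest.mark.backends('git', 'hg')
def test_repo_gets_one_commit(backend):
    # `backend` is generated once per selected backend alias
    backend.create_repo(number_of_commits=1)
    assert len(backend.commit_ids) == 1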
@@ -1,285 +1,222 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 import json
22 import platform
23 import socket
24 import random
25 21 import pytest
26 22
27 23 from rhodecode.lib.pyramid_utils import get_app_config
28 24 from rhodecode.tests.fixture import TestINI
29 25 from rhodecode.tests.server_utils import RcVCSServer
30 26
31 27
32 def _parse_json(value):
33 return json.loads(value) if value else None
34
35
36 def pytest_addoption(parser):
37 parser.addoption(
38 '--test-loglevel', dest='test_loglevel',
39 help="Set default Logging level for tests, critical(default), error, warn , info, debug")
40 group = parser.getgroup('pylons')
41 group.addoption(
42 '--with-pylons', dest='pyramid_config',
43 help="Set up a Pylons environment with the specified config file.")
44 group.addoption(
45 '--ini-config-override', action='store', type=_parse_json,
46 default=None, dest='pyramid_config_override', help=(
47 "Overrides the .ini file settings. Should be specified in JSON"
48 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
49 )
50 )
51 parser.addini(
52 'pyramid_config',
53 "Set up a Pyramid environment with the specified config file.")
54
55 vcsgroup = parser.getgroup('vcs')
56 vcsgroup.addoption(
57 '--without-vcsserver', dest='with_vcsserver', action='store_false',
58 help="Do not start the VCSServer in a background process.")
59 vcsgroup.addoption(
60 '--with-vcsserver-http', dest='vcsserver_config_http',
61 help="Start the HTTP VCSServer with the specified config file.")
62 vcsgroup.addoption(
63 '--vcsserver-protocol', dest='vcsserver_protocol',
64 help="Start the VCSServer with HTTP protocol support.")
65 vcsgroup.addoption(
66 '--vcsserver-config-override', action='store', type=_parse_json,
67 default=None, dest='vcsserver_config_override', help=(
68 "Overrides the .ini file settings for the VCSServer. "
69 "Should be specified in JSON "
70 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
71 )
72 )
73 vcsgroup.addoption(
74 '--vcsserver-port', action='store', type=int,
75 default=None, help=(
76 "Allows to set the port of the vcsserver. Useful when testing "
77 "against an already running server and random ports cause "
78 "trouble."))
79 parser.addini(
80 'vcsserver_config_http',
81 "Start the HTTP VCSServer with the specified config file.")
82 parser.addini(
83 'vcsserver_protocol',
84 "Start the VCSServer with HTTP protocol support.")
85
86
87 28 @pytest.fixture(scope='session')
88 29 def vcsserver(request, vcsserver_port, vcsserver_factory):
89 30 """
90 31 Session scope VCSServer.
91 32
92 33 Tests which need the VCSServer have to rely on this fixture in order
93 34 to ensure it will be running.
94 35
95 36 For specific needs, the fixture vcsserver_factory can be used. It allows
96 37 adjusting the configuration file for the test run.
97 38
98 39 Command line args:
99 40
100 41 --without-vcsserver: Allows switching this fixture off. You have to
101 42 manually start the server.
102 43
103 44 --vcsserver-port: Will expect the VCSServer to listen on this port.
104 45 """
105 46
106 47 if not request.config.getoption('with_vcsserver'):
107 48 return None
108 49
109 50 return vcsserver_factory(
110 51 request, vcsserver_port=vcsserver_port)
111 52
112 53
113 54 @pytest.fixture(scope='session')
114 55 def vcsserver_factory(tmpdir_factory):
115 56 """
116 57 Use this if you need a running vcsserver with a special configuration.
117 58 """
118 59
119 60 def factory(request, overrides=(), vcsserver_port=None,
120 61 log_file=None):
121 62
122 63 if vcsserver_port is None:
123 64 vcsserver_port = get_available_port()
124 65
125 66 overrides = list(overrides)
126 67 overrides.append({'server:main': {'port': vcsserver_port}})
127 68
128 69 option_name = 'vcsserver_config_http'
129 70 override_option_name = 'vcsserver_config_override'
130 71 config_file = get_config(
131 72 request.config, option_name=option_name,
132 73 override_option_name=override_option_name, overrides=overrides,
133 74 basetemp=tmpdir_factory.getbasetemp().strpath,
134 75 prefix='test_vcs_')
135 76
136 77 server = RcVCSServer(config_file, log_file)
137 78 server.start()
138 79
139 80 @request.addfinalizer
140 81 def cleanup():
141 82 server.shutdown()
142 83
143 84 server.wait_until_ready()
144 85 return server
145 86
146 87 return factory
147 88
148 89
149 def is_cygwin():
150 return 'cygwin' in platform.system().lower()
151
152
153 90 def _use_log_level(config):
154 91 level = config.getoption('test_loglevel') or 'critical'
155 92 return level.upper()
156 93
157 94
158 95 @pytest.fixture(scope='session')
159 96 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
160 97 option_name = 'pyramid_config'
161 98 log_level = _use_log_level(request.config)
162 99
163 100 overrides = [
164 101 {'server:main': {'port': rcserver_port}},
165 102 {'app:main': {
166 103 'vcs.server': 'localhost:%s' % vcsserver_port,
167 104 # johbo: We will always start the VCSServer on our own based on the
168 105 # fixtures of the test cases. For the test run it must always be
169 106 # off in the INI file.
170 107 'vcs.start_server': 'false',
171 108
172 109 'vcs.server.protocol': 'http',
173 110 'vcs.scm_app_implementation': 'http',
174 111 'vcs.hooks.protocol': 'http',
175 112 'vcs.hooks.host': '127.0.0.1',
176 113 }},
177 114
178 115 {'handler_console': {
179 116 'class': 'StreamHandler',
180 117 'args': '(sys.stderr,)',
181 118 'level': log_level,
182 119 }},
183 120
184 121 ]
185 122
186 123 filename = get_config(
187 124 request.config, option_name=option_name,
188 125 override_option_name='{}_override'.format(option_name),
189 126 overrides=overrides,
190 127 basetemp=tmpdir_factory.getbasetemp().strpath,
191 128 prefix='test_rce_')
192 129 return filename
193 130
194 131
195 132 @pytest.fixture(scope='session')
196 133 def ini_settings(ini_config):
197 134 ini_path = ini_config
198 135 return get_app_config(ini_path)
199 136
200 137
201 138 def get_available_port(min_port=40000, max_port=55555):
202 139 from rhodecode.lib.utils2 import get_available_port as _get_port
203 140 return _get_port(min_port, max_port)
204 141
205 142
206 143 @pytest.fixture(scope='session')
207 144 def rcserver_port(request):
208 145 port = get_available_port()
209 146 print('Using rhodecode port {}'.format(port))
210 147 return port
211 148
212 149
213 150 @pytest.fixture(scope='session')
214 151 def vcsserver_port(request):
215 152 port = request.config.getoption('--vcsserver-port')
216 153 if port is None:
217 154 port = get_available_port()
218 155 print('Using vcsserver port {}'.format(port))
219 156 return port
220 157
221 158
222 159 @pytest.fixture(scope='session')
223 160 def available_port_factory():
224 161 """
225 162 Returns a callable which returns free port numbers.
226 163 """
227 164 return get_available_port
228 165
229 166
230 167 @pytest.fixture()
231 168 def available_port(available_port_factory):
232 169 """
233 170 Gives you one free port for the current test.
234 171
235 172 Uses "available_port_factory" to retrieve the port.
236 173 """
237 174 return available_port_factory()
238 175
239 176
240 177 @pytest.fixture(scope='session')
241 178 def testini_factory(tmpdir_factory, ini_config):
242 179 """
243 180 Factory to create an INI file based on TestINI.
244 181
245 182 It will make sure to place the INI file in the correct directory.
246 183 """
247 184 basetemp = tmpdir_factory.getbasetemp().strpath
248 185 return TestIniFactory(basetemp, ini_config)
249 186
250 187
251 188 class TestIniFactory(object):
252 189
253 190 def __init__(self, basetemp, template_ini):
254 191 self._basetemp = basetemp
255 192 self._template_ini = template_ini
256 193
257 194 def __call__(self, ini_params, new_file_prefix='test'):
258 195 ini_file = TestINI(
259 196 self._template_ini, ini_params=ini_params,
260 197 new_file_prefix=new_file_prefix, dir=self._basetemp)
261 198 result = ini_file.create()
262 199 return result
263 200
264 201
265 202 def get_config(
266 203 config, option_name, override_option_name, overrides=None,
267 204 basetemp=None, prefix='test'):
268 205 """
269 206 Find a configuration file and apply overrides for the given `prefix`.
270 207 """
271 208 config_file = (
272 209 config.getoption(option_name) or config.getini(option_name))
273 210 if not config_file:
274 211 pytest.exit(
275 212 "Configuration error, could not extract {}.".format(option_name))
276 213
277 214 overrides = overrides or []
278 215 config_override = config.getoption(override_option_name)
279 216 if config_override:
280 217 overrides.append(config_override)
281 218 temp_ini_file = TestINI(
282 219 config_file, ini_params=overrides, new_file_prefix=prefix,
283 220 dir=basetemp)
284 221
285 222 return temp_ini_file.create()
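fixture_pyramid.py above exposes the session-scoped vcsserver_factory for tests that need a VCSServer with adjusted settings. A hedged consumer sketch (the 'threadpool' option name is hypothetical and only illustrates the overrides format; the factory picks a free port when vcsserver_port is omitted):

import pytest

@pytest.fixture(scope='session')
def custom_vcsserver(request, vcsserver_factory):
    # overrides mirror the INI structure: a list of {section: {option: value}} dicts
    return vcsserver_factory(
        request,
        overrides=[{'server:main': {'threadpool': '16'}}],
    )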
@@ -1,1848 +1,1724 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33 import logging
34 34
35 35 import mock
36 36 import pyramid.testing
37 37 import pytest
38 38 import colander
39 39 import requests
40 40 import pyramid.paster
41 41
42 42 import rhodecode
43 43 from rhodecode.lib.utils2 import AttributeDict
44 44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 45 from rhodecode.model.comment import CommentsModel
46 46 from rhodecode.model.db import (
47 47 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
48 48 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.pull_request import PullRequestModel
51 51 from rhodecode.model.repo import RepoModel
52 52 from rhodecode.model.repo_group import RepoGroupModel
53 53 from rhodecode.model.user import UserModel
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55 from rhodecode.model.user_group import UserGroupModel
56 56 from rhodecode.model.integration import IntegrationModel
57 57 from rhodecode.integrations import integration_type_registry
58 58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 59 from rhodecode.lib.utils import repo2db_mapper
60 60 from rhodecode.lib.vcs.backends import get_backend
61 61 from rhodecode.lib.vcs.nodes import FileNode
62 62 from rhodecode.tests import (
63 63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 65 TEST_USER_REGULAR_PASS)
66 66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 67 from rhodecode.tests.fixture import Fixture
68 68 from rhodecode.config import utils as config_utils
69 69
70 70 log = logging.getLogger(__name__)
71 71
72 72
73 73 def cmp(a, b):
74 74 # backport cmp from python2 so we can still use it in the custom code in this module
75 75 return (a > b) - (a < b)
76 76
77
78 def _split_comma(value):
79 return value.split(',')
80
81
82 def pytest_addoption(parser):
83 parser.addoption(
84 '--keep-tmp-path', action='store_true',
85 help="Keep the test temporary directories")
86 parser.addoption(
87 '--backends', action='store', type=_split_comma,
88 default=['git', 'hg', 'svn'],
89 help="Select which backends to test for backend specific tests.")
90 parser.addoption(
91 '--dbs', action='store', type=_split_comma,
92 default=['sqlite'],
93 help="Select which database to test for database specific tests. "
94 "Possible options are sqlite,postgres,mysql")
95 parser.addoption(
96 '--appenlight', '--ae', action='store_true',
97 help="Track statistics in appenlight.")
98 parser.addoption(
99 '--appenlight-api-key', '--ae-key',
100 help="API key for Appenlight.")
101 parser.addoption(
102 '--appenlight-url', '--ae-url',
103 default="https://ae.rhodecode.com",
104 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
105 parser.addoption(
106 '--sqlite-connection-string', action='store',
107 default='', help="Connection string for the dbs tests with SQLite")
108 parser.addoption(
109 '--postgres-connection-string', action='store',
110 default='', help="Connection string for the dbs tests with Postgres")
111 parser.addoption(
112 '--mysql-connection-string', action='store',
113 default='', help="Connection string for the dbs tests with MySQL")
114 parser.addoption(
115 '--repeat', type=int, default=100,
116 help="Number of repetitions in performance tests.")
117
118
119 def pytest_configure(config):
120 from rhodecode.config import patches
121
122
123 def pytest_collection_modifyitems(session, config, items):
124 # nottest marked, compare nose, used for transition from nose to pytest
125 remaining = [
126 i for i in items if getattr(i.obj, '__test__', True)]
127 items[:] = remaining
128
129 # NOTE(marcink): custom test ordering, db tests and vcstests are slowes and should
130 # be executed at the end for faster test feedback
131 def sorter(item):
132 pos = 0
133 key = item._nodeid
134 if key.startswith('rhodecode/tests/database'):
135 pos = 1
136 elif key.startswith('rhodecode/tests/vcs_operations'):
137 pos = 2
138
139 return pos
140
141 items.sort(key=sorter)
142
143
144 def pytest_generate_tests(metafunc):
145
146 # Support test generation based on --backend parameter
147 if 'backend_alias' in metafunc.fixturenames:
148 backends = get_backends_from_metafunc(metafunc)
149 scope = None
150 if not backends:
151 pytest.skip("Not enabled for any of selected backends")
152
153 metafunc.parametrize('backend_alias', backends, scope=scope)
154
155 backend_mark = metafunc.definition.get_closest_marker('backends')
156 if backend_mark:
157 backends = get_backends_from_metafunc(metafunc)
158 if not backends:
159 pytest.skip("Not enabled for any of selected backends")
160
161
162 def get_backends_from_metafunc(metafunc):
163 requested_backends = set(metafunc.config.getoption('--backends'))
164 backend_mark = metafunc.definition.get_closest_marker('backends')
165 if backend_mark:
166 # Supported backends by this test function, created from
167 # pytest.mark.backends
168 backends = backend_mark.args
169 elif hasattr(metafunc.cls, 'backend_alias'):
170 # Support class attribute "backend_alias", this is mainly
171 # for legacy reasons for tests not yet using pytest.mark.backends
172 backends = [metafunc.cls.backend_alias]
173 else:
174 backends = metafunc.config.getoption('--backends')
175 return requested_backends.intersection(backends)
176
177
178 77 @pytest.fixture(scope='session', autouse=True)
179 78 def activate_example_rcextensions(request):
180 79 """
181 80 Patch in an example rcextensions module which verifies passed in kwargs.
182 81 """
183 82 from rhodecode.config import rcextensions
184 83
185 84 old_extensions = rhodecode.EXTENSIONS
186 85 rhodecode.EXTENSIONS = rcextensions
187 86 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
188 87
189 88 @request.addfinalizer
190 89 def cleanup():
191 90 rhodecode.EXTENSIONS = old_extensions
192 91
193 92
194 93 @pytest.fixture()
195 94 def capture_rcextensions():
196 95 """
197 96 Returns the recorded calls to entry points in rcextensions.
198 97 """
199 98 calls = rhodecode.EXTENSIONS.calls
200 99 calls.clear()
201 100 # Note: At this moment, it is still the empty dict, but that will
202 101 # be filled during the test run and since it is a reference this
203 102 # is enough to make it work.
204 103 return calls
205 104
206 105
207 106 @pytest.fixture(scope='session')
208 107 def http_environ_session():
209 108 """
210 109 Allow to use "http_environ" in session scope.
211 110 """
212 111 return plain_http_environ()
213 112
214 113
215 114 def plain_http_host_stub():
216 115 """
217 116 Value of HTTP_HOST in the test run.
218 117 """
219 118 return 'example.com:80'
220 119
221 120
222 121 @pytest.fixture()
223 122 def http_host_stub():
224 123 """
225 124 Value of HTTP_HOST in the test run.
226 125 """
227 126 return plain_http_host_stub()
228 127
229 128
230 129 def plain_http_host_only_stub():
231 130 """
232 131 Value of HTTP_HOST in the test run.
233 132 """
234 133 return plain_http_host_stub().split(':')[0]
235 134
236 135
237 136 @pytest.fixture()
238 137 def http_host_only_stub():
239 138 """
240 139 Value of HTTP_HOST in the test run.
241 140 """
242 141 return plain_http_host_only_stub()
243 142
244 143
245 144 def plain_http_environ():
246 145 """
247 146 HTTP extra environ keys.
248 147
249 148 Used by the test application, as well as for setting up the pylons
250 149 environment. In the case of the fixture "app" it should be possible
251 150 to override this for a specific test case.
252 151 """
253 152 return {
254 153 'SERVER_NAME': plain_http_host_only_stub(),
255 154 'SERVER_PORT': plain_http_host_stub().split(':')[1],
256 155 'HTTP_HOST': plain_http_host_stub(),
257 156 'HTTP_USER_AGENT': 'rc-test-agent',
258 157 'REQUEST_METHOD': 'GET'
259 158 }
260 159
261 160
262 161 @pytest.fixture()
263 162 def http_environ():
264 163 """
265 164 HTTP extra environ keys.
266 165
267 166 Used by the test application, as well as for setting up the pylons
268 167 environment. In the case of the fixture "app" it should be possible
269 168 to override this for a specific test case.
270 169 """
271 170 return plain_http_environ()
272 171
273 172
274 173 @pytest.fixture(scope='session')
275 174 def baseapp(ini_config, vcsserver, http_environ_session):
276 175 from rhodecode.lib.pyramid_utils import get_app_config
277 176 from rhodecode.config.middleware import make_pyramid_app
278 177
279 178 log.info("Using the RhodeCode configuration:{}".format(ini_config))
280 179 pyramid.paster.setup_logging(ini_config)
281 180
282 181 settings = get_app_config(ini_config)
283 182 app = make_pyramid_app({'__file__': ini_config}, **settings)
284 183
285 184 return app
286 185
287 186
288 187 @pytest.fixture(scope='function')
289 188 def app(request, config_stub, baseapp, http_environ):
290 189 app = CustomTestApp(
291 190 baseapp,
292 191 extra_environ=http_environ)
293 192 if request.cls:
294 193 request.cls.app = app
295 194 return app
296 195
297 196
298 197 @pytest.fixture(scope='session')
299 198 def app_settings(baseapp, ini_config):
300 199 """
301 200 Settings dictionary used to create the app.
302 201
303 202 Parses the ini file and passes the result through the sanitize and apply
304 203 defaults mechanism in `rhodecode.config.middleware`.
305 204 """
306 205 return baseapp.config.get_settings()
307 206
308 207
309 208 @pytest.fixture(scope='session')
310 209 def db_connection(ini_settings):
311 210 # Initialize the database connection.
312 211 config_utils.initialize_database(ini_settings)
313 212
314 213
315 214 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
316 215
317 216
318 217 def _autologin_user(app, *args):
319 218 session = login_user_session(app, *args)
320 219 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
321 220 return LoginData(csrf_token, session['rhodecode_user'])
322 221
323 222
324 223 @pytest.fixture()
325 224 def autologin_user(app):
326 225 """
327 226 Utility fixture which makes sure that the admin user is logged in
328 227 """
329 228 return _autologin_user(app)
330 229
331 230
332 231 @pytest.fixture()
333 232 def autologin_regular_user(app):
334 233 """
335 234 Utility fixture which makes sure that the regular user is logged in
336 235 """
337 236 return _autologin_user(
338 237 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
339 238
340 239
341 240 @pytest.fixture(scope='function')
342 241 def csrf_token(request, autologin_user):
343 242 return autologin_user.csrf_token
344 243
345 244
346 245 @pytest.fixture(scope='function')
347 246 def xhr_header(request):
348 247 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
349 248
350 249
351 250 @pytest.fixture()
352 251 def real_crypto_backend(monkeypatch):
353 252 """
354 253 Switch the production crypto backend on for this test.
355 254
356 255 During the test run the crypto backend is replaced with a faster
357 256 implementation based on the MD5 algorithm.
358 257 """
359 258 monkeypatch.setattr(rhodecode, 'is_test', False)
360 259
361 260
362 261 @pytest.fixture(scope='class')
363 262 def index_location(request, baseapp):
364 263 index_location = baseapp.config.get_settings()['search.location']
365 264 if request.cls:
366 265 request.cls.index_location = index_location
367 266 return index_location
368 267
369 268
370 269 @pytest.fixture(scope='session', autouse=True)
371 270 def tests_tmp_path(request):
372 271 """
373 272 Create temporary directory to be used during the test session.
374 273 """
375 274 if not os.path.exists(TESTS_TMP_PATH):
376 275 os.makedirs(TESTS_TMP_PATH)
377 276
378 277 if not request.config.getoption('--keep-tmp-path'):
379 278 @request.addfinalizer
380 279 def remove_tmp_path():
381 280 shutil.rmtree(TESTS_TMP_PATH)
382 281
383 282 return TESTS_TMP_PATH
384 283
385 284
386 285 @pytest.fixture()
387 286 def test_repo_group(request):
388 287 """
389 288 Create a temporary repository group, and destroy it after
390 289 usage automatically
391 290 """
392 291 fixture = Fixture()
393 292 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
394 293 repo_group = fixture.create_repo_group(repogroupid)
395 294
396 295 def _cleanup():
397 296 fixture.destroy_repo_group(repogroupid)
398 297
399 298 request.addfinalizer(_cleanup)
400 299 return repo_group
401 300
402 301
403 302 @pytest.fixture()
404 303 def test_user_group(request):
405 304 """
406 305 Create a temporary user group, and destroy it after
407 306 usage automatically
408 307 """
409 308 fixture = Fixture()
410 309 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
411 310 user_group = fixture.create_user_group(usergroupid)
412 311
413 312 def _cleanup():
414 313 fixture.destroy_user_group(user_group)
415 314
416 315 request.addfinalizer(_cleanup)
417 316 return user_group
418 317
419 318
420 319 @pytest.fixture(scope='session')
421 320 def test_repo(request):
422 321 container = TestRepoContainer()
423 322 request.addfinalizer(container._cleanup)
424 323 return container
425 324
426 325
427 326 class TestRepoContainer(object):
428 327 """
429 328 Container for test repositories which are used read only.
430 329
431 330 Repositories will be created on demand and re-used during the lifetime
432 331 of this object.
433 332
434 333 Usage to get the svn test repository "minimal"::
435 334
436 335 test_repo = TestRepoContainer()
437 336 repo = test_repo('minimal', 'svn')
438 337
439 338 """
440 339
441 340 dump_extractors = {
442 341 'git': utils.extract_git_repo_from_dump,
443 342 'hg': utils.extract_hg_repo_from_dump,
444 343 'svn': utils.extract_svn_repo_from_dump,
445 344 }
446 345
447 346 def __init__(self):
448 347 self._cleanup_repos = []
449 348 self._fixture = Fixture()
450 349 self._repos = {}
451 350
452 351 def __call__(self, dump_name, backend_alias, config=None):
453 352 key = (dump_name, backend_alias)
454 353 if key not in self._repos:
455 354 repo = self._create_repo(dump_name, backend_alias, config)
456 355 self._repos[key] = repo.repo_id
457 356 return Repository.get(self._repos[key])
458 357
459 358 def _create_repo(self, dump_name, backend_alias, config):
460 359 repo_name = '%s-%s' % (backend_alias, dump_name)
461 360 backend = get_backend(backend_alias)
462 361 dump_extractor = self.dump_extractors[backend_alias]
463 362 repo_path = dump_extractor(dump_name, repo_name)
464 363
465 364 vcs_repo = backend(repo_path, config=config)
466 365 repo2db_mapper({repo_name: vcs_repo})
467 366
468 367 repo = RepoModel().get_by_repo_name(repo_name)
469 368 self._cleanup_repos.append(repo_name)
470 369 return repo
471 370
472 371 def _cleanup(self):
473 372 for repo_name in reversed(self._cleanup_repos):
474 373 self._fixture.destroy_repo(repo_name)
475 374
476 375
477 376 def backend_base(request, backend_alias, baseapp, test_repo):
478 377 if backend_alias not in request.config.getoption('--backends'):
479 378 pytest.skip("Backend %s not selected." % (backend_alias, ))
480 379
481 380 utils.check_xfail_backends(request.node, backend_alias)
482 381 utils.check_skip_backends(request.node, backend_alias)
483 382
484 383 repo_name = 'vcs_test_%s' % (backend_alias, )
485 384 backend = Backend(
486 385 alias=backend_alias,
487 386 repo_name=repo_name,
488 387 test_name=request.node.name,
489 388 test_repo_container=test_repo)
490 389 request.addfinalizer(backend.cleanup)
491 390 return backend
492 391
493 392
494 393 @pytest.fixture()
495 394 def backend(request, backend_alias, baseapp, test_repo):
496 395 """
497 396 Parametrized fixture which represents a single backend implementation.
498 397
499 398 It respects the option `--backends` to focus the test run on specific
500 399 backend implementations.
501 400
502 401 It also supports `pytest.mark.xfail_backends` to mark tests as failing
503 402 for specific backends. This is intended as a utility for incremental
504 403 development of a new backend implementation.
505 404 """
506 405 return backend_base(request, backend_alias, baseapp, test_repo)
507 406
508 407
509 408 @pytest.fixture()
510 409 def backend_git(request, baseapp, test_repo):
511 410 return backend_base(request, 'git', baseapp, test_repo)
512 411
513 412
514 413 @pytest.fixture()
515 414 def backend_hg(request, baseapp, test_repo):
516 415 return backend_base(request, 'hg', baseapp, test_repo)
517 416
518 417
519 418 @pytest.fixture()
520 419 def backend_svn(request, baseapp, test_repo):
521 420 return backend_base(request, 'svn', baseapp, test_repo)
522 421
523 422
524 423 @pytest.fixture()
525 424 def backend_random(backend_git):
526 425 """
527 426 Use this to express that your tests need a backend.
528 427
529 428 A few of our tests need a backend, so that we can run the code. This
530 429 fixture is intended to be used for such cases. It will pick one of the
531 430 backends and run the tests.
532 431
533 432 The fixture `backend` would run the test multiple times for each
534 433 available backend which is a pure waste of time if the test is
535 434 independent of the backend type.
536 435 """
537 436 # TODO: johbo: Change this to pick a random backend
538 437 return backend_git
539 438
540 439
541 440 @pytest.fixture()
542 441 def backend_stub(backend_git):
543 442 """
544 443 Use this to express that your tests need a backend stub
545 444
546 445 TODO: mikhail: Implement a real stub logic instead of returning
547 446 a git backend
548 447 """
549 448 return backend_git
550 449
551 450
552 451 @pytest.fixture()
553 452 def repo_stub(backend_stub):
554 453 """
555 454 Use this to express that your tests need a repository stub
556 455 """
557 456 return backend_stub.create_repo()
558 457
559 458
560 459 class Backend(object):
561 460 """
562 461 Represents the test configuration for one supported backend
563 462
564 463 Provides easy access to different test repositories based on
565 464 `__getitem__`. Such repositories will only be created once per test
566 465 session.
567 466 """
568 467
569 468 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
570 469 _master_repo = None
571 470 _master_repo_path = ''
572 471 _commit_ids = {}
573 472
574 473 def __init__(self, alias, repo_name, test_name, test_repo_container):
575 474 self.alias = alias
576 475 self.repo_name = repo_name
577 476 self._cleanup_repos = []
578 477 self._test_name = test_name
579 478 self._test_repo_container = test_repo_container
580 479 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
581 480 # Fixture will survive in the end.
582 481 self._fixture = Fixture()
583 482
584 483 def __getitem__(self, key):
585 484 return self._test_repo_container(key, self.alias)
586 485
587 486 def create_test_repo(self, key, config=None):
588 487 return self._test_repo_container(key, self.alias, config)
589 488
590 489 @property
591 490 def repo(self):
592 491 """
593 492 Returns the "current" repository. This is the vcs_test repo or the
594 493 last repo which has been created with `create_repo`.
595 494 """
596 495 from rhodecode.model.db import Repository
597 496 return Repository.get_by_repo_name(self.repo_name)
598 497
599 498 @property
600 499 def default_branch_name(self):
601 500 VcsRepository = get_backend(self.alias)
602 501 return VcsRepository.DEFAULT_BRANCH_NAME
603 502
604 503 @property
605 504 def default_head_id(self):
606 505 """
607 506 Returns the default head id of the underlying backend.
608 507
609 508 This will be the default branch name in case the backend does have a
610 509 default branch. In the other cases it will point to a valid head
611 510 which can serve as the base to create a new commit on top of it.
612 511 """
613 512 vcsrepo = self.repo.scm_instance()
614 513 head_id = (
615 514 vcsrepo.DEFAULT_BRANCH_NAME or
616 515 vcsrepo.commit_ids[-1])
617 516 return head_id
618 517
619 518 @property
620 519 def commit_ids(self):
621 520 """
622 521 Returns the list of commits for the last created repository
623 522 """
624 523 return self._commit_ids
625 524
626 525 def create_master_repo(self, commits):
627 526 """
628 527 Create a repository and remember it as a template.
629 528
630 529 This allows to easily create derived repositories to construct
631 530 more complex scenarios for diff, compare and pull requests.
632 531
633 532 Returns a commit map which maps from commit message to raw_id.
634 533 """
635 534 self._master_repo = self.create_repo(commits=commits)
636 535 self._master_repo_path = self._master_repo.repo_full_path
637 536
638 537 return self._commit_ids
639 538
640 539 def create_repo(
641 540 self, commits=None, number_of_commits=0, heads=None,
642 541 name_suffix=u'', bare=False, **kwargs):
643 542 """
644 543 Create a repository and record it for later cleanup.
645 544
646 545 :param commits: Optional. A sequence of dict instances.
647 546 Will add a commit per entry to the new repository.
648 547 :param number_of_commits: Optional. If set to a number, this number of
649 548 commits will be added to the new repository.
650 549 :param heads: Optional. Can be set to a sequence of commit
651 550 names which shall be pulled in from the master repository.
652 551 :param name_suffix: adds special suffix to generated repo name
653 552 :param bare: set a repo as bare (no checkout)
654 553 """
655 554 self.repo_name = self._next_repo_name() + name_suffix
656 555 repo = self._fixture.create_repo(
657 556 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
658 557 self._cleanup_repos.append(repo.repo_name)
659 558
660 559 commits = commits or [
661 560 {'message': 'Commit %s of %s' % (x, self.repo_name)}
662 561 for x in range(number_of_commits)]
663 562 vcs_repo = repo.scm_instance()
664 563 vcs_repo.count()
665 564 self._add_commits_to_repo(vcs_repo, commits)
666 565 if heads:
667 566 self.pull_heads(repo, heads)
668 567
669 568 return repo
670 569
671 570 def pull_heads(self, repo, heads):
672 571 """
673 572 Make sure that repo contains all commits mentioned in `heads`
674 573 """
675 574 vcsrepo = repo.scm_instance()
676 575 vcsrepo.config.clear_section('hooks')
677 576 commit_ids = [self._commit_ids[h] for h in heads]
678 577 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
679 578
680 579 def create_fork(self):
681 580 repo_to_fork = self.repo_name
682 581 self.repo_name = self._next_repo_name()
683 582 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
684 583 self._cleanup_repos.append(self.repo_name)
685 584 return repo
686 585
687 586 def new_repo_name(self, suffix=u''):
688 587 self.repo_name = self._next_repo_name() + suffix
689 588 self._cleanup_repos.append(self.repo_name)
690 589 return self.repo_name
691 590
692 591 def _next_repo_name(self):
693 592 return u"%s_%s" % (
694 593 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
695 594
696 595 def ensure_file(self, filename, content='Test content\n'):
697 596 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
698 597 commits = [
699 598 {'added': [
700 599 FileNode(filename, content=content),
701 600 ]},
702 601 ]
703 602 self._add_commits_to_repo(self.repo.scm_instance(), commits)
704 603
705 604 def enable_downloads(self):
706 605 repo = self.repo
707 606 repo.enable_downloads = True
708 607 Session().add(repo)
709 608 Session().commit()
710 609
711 610 def cleanup(self):
712 611 for repo_name in reversed(self._cleanup_repos):
713 612 self._fixture.destroy_repo(repo_name)
714 613
715 614 def _add_commits_to_repo(self, repo, commits):
716 615 commit_ids = _add_commits_to_repo(repo, commits)
717 616 if not commit_ids:
718 617 return
719 618 self._commit_ids = commit_ids
720 619
721 620 # Creating refs for Git to allow fetching them from remote repository
722 621 if self.alias == 'git':
723 622 refs = {}
724 623 for message in self._commit_ids:
725 624 # TODO: mikhail: do more special chars replacements
726 625 ref_name = 'refs/test-refs/{}'.format(
727 626 message.replace(' ', ''))
728 627 refs[ref_name] = self._commit_ids[message]
729 628 self._create_refs(repo, refs)
730 629
731 630 def _create_refs(self, repo, refs):
732 631 for ref_name in refs:
733 632 repo.set_refs(ref_name, refs[ref_name])
734 633
735 634
736 635 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
737 636 if backend_alias not in request.config.getoption('--backends'):
738 637 pytest.skip("Backend %s not selected." % (backend_alias, ))
739 638
740 639 utils.check_xfail_backends(request.node, backend_alias)
741 640 utils.check_skip_backends(request.node, backend_alias)
742 641
743 642 repo_name = 'vcs_test_%s' % (backend_alias, )
744 643 repo_path = os.path.join(tests_tmp_path, repo_name)
745 644 backend = VcsBackend(
746 645 alias=backend_alias,
747 646 repo_path=repo_path,
748 647 test_name=request.node.name,
749 648 test_repo_container=test_repo)
750 649 request.addfinalizer(backend.cleanup)
751 650 return backend
752 651
753 652
754 653 @pytest.fixture()
755 654 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
756 655 """
757 656 Parametrized fixture which represents a single vcs backend implementation.
758 657
759 658 See the fixture `backend` for more details. This one implements the same
760 659 concept, but on vcs level. So it does not provide model instances etc.
761 660
762 661 Parameters are generated dynamically, see :func:`pytest_generate_tests`
763 662 for how this works.
764 663 """
765 664 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
766 665
767 666
768 667 @pytest.fixture()
769 668 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
770 669 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
771 670
772 671
773 672 @pytest.fixture()
774 673 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
775 674 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
776 675
777 676
778 677 @pytest.fixture()
779 678 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
780 679 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
781 680
782 681
783 682 @pytest.fixture()
784 683 def vcsbackend_stub(vcsbackend_git):
785 684 """
786 685 Use this to express that your test just needs a stub of a vcsbackend.
787 686
788 687 Plan is to eventually implement an in-memory stub to speed tests up.
789 688 """
790 689 return vcsbackend_git
791 690
792 691
793 692 class VcsBackend(object):
794 693 """
795 694 Represents the test configuration for one supported vcs backend.
796 695 """
797 696
798 697 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
799 698
800 699 def __init__(self, alias, repo_path, test_name, test_repo_container):
801 700 self.alias = alias
802 701 self._repo_path = repo_path
803 702 self._cleanup_repos = []
804 703 self._test_name = test_name
805 704 self._test_repo_container = test_repo_container
806 705
807 706 def __getitem__(self, key):
808 707 return self._test_repo_container(key, self.alias).scm_instance()
809 708
810 709 @property
811 710 def repo(self):
812 711 """
813 712 Returns the "current" repository. This is the vcs_test repo or the last
814 713 repo which has been created.
815 714 """
816 715 Repository = get_backend(self.alias)
817 716 return Repository(self._repo_path)
818 717
819 718 @property
820 719 def backend(self):
821 720 """
822 721 Returns the backend implementation class.
823 722 """
824 723 return get_backend(self.alias)
825 724
826 725 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
827 726 bare=False):
828 727 repo_name = self._next_repo_name()
829 728 self._repo_path = get_new_dir(repo_name)
830 729 repo_class = get_backend(self.alias)
831 730 src_url = None
832 731 if _clone_repo:
833 732 src_url = _clone_repo.path
834 733 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
835 734 self._cleanup_repos.append(repo)
836 735
837 736 commits = commits or [
838 737 {'message': 'Commit %s of %s' % (x, repo_name)}
839 738 for x in range(number_of_commits)]
840 739 _add_commits_to_repo(repo, commits)
841 740 return repo
842 741
843 742 def clone_repo(self, repo):
844 743 return self.create_repo(_clone_repo=repo)
845 744
846 745 def cleanup(self):
847 746 for repo in self._cleanup_repos:
848 747 shutil.rmtree(repo.path)
849 748
850 749 def new_repo_path(self):
851 750 repo_name = self._next_repo_name()
852 751 self._repo_path = get_new_dir(repo_name)
853 752 return self._repo_path
854 753
855 754 def _next_repo_name(self):
856 755 return "%s_%s" % (
857 756 self.invalid_repo_name.sub('_', self._test_name),
858 757 len(self._cleanup_repos))
859 758
860 759 def add_file(self, repo, filename, content='Test content\n'):
861 760 imc = repo.in_memory_commit
862 761 imc.add(FileNode(filename, content=content))
863 762 imc.commit(
864 763 message=u'Automatic commit from vcsbackend fixture',
865 764 author=u'Automatic <automatic@rhodecode.com>')
866 765
867 766 def ensure_file(self, filename, content='Test content\n'):
868 767 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
869 768 self.add_file(self.repo, filename, content)
870 769
871 770
872 771 def _add_commits_to_repo(vcs_repo, commits):
873 772 commit_ids = {}
874 773 if not commits:
875 774 return commit_ids
876 775
877 776 imc = vcs_repo.in_memory_commit
878 777 commit = None
879 778
880 779 for idx, commit in enumerate(commits):
881 780 message = str(commit.get('message', 'Commit %s' % idx))
882 781
883 782 for node in commit.get('added', []):
884 783 imc.add(FileNode(node.path, content=node.content))
885 784 for node in commit.get('changed', []):
886 785 imc.change(FileNode(node.path, content=node.content))
887 786 for node in commit.get('removed', []):
888 787 imc.remove(FileNode(node.path))
889 788
890 789 parents = [
891 790 vcs_repo.get_commit(commit_id=commit_ids[p])
892 791 for p in commit.get('parents', [])]
893 792
894 793 operations = ('added', 'changed', 'removed')
895 794 if not any((commit.get(o) for o in operations)):
896 795 imc.add(FileNode('file_%s' % idx, content=message))
897 796
898 797 commit = imc.commit(
899 798 message=message,
900 799 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
901 800 date=commit.get('date'),
902 801 branch=commit.get('branch'),
903 802 parents=parents)
904 803
905 804 commit_ids[commit.message] = commit.raw_id
906 805
907 806 return commit_ids
908 807
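# A minimal sketch (not used by the fixtures themselves) of the commit-spec format
# consumed by _add_commits_to_repo and create_repo: plain dicts whose node entries
# only need `path` and `content`, so FileNode (imported above) works. The messages
# and file names below are made up for illustration.
_example_commit_specs = [
    {'message': 'Add readme',
     'added': [FileNode('README.rst', content='docs\n')]},
    {'message': 'Tweak readme',
     'parents': ['Add readme'],  # parents are referenced by earlier commit messages
     'changed': [FileNode('README.rst', content='docs v2\n')]},
]
# _add_commits_to_repo(vcs_repo, _example_commit_specs) would return a dict mapping
# each commit message to the created raw commit id.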
909 808
910 809 @pytest.fixture()
911 810 def reposerver(request):
912 811 """
913 812 Allows serving a backend repository
914 813 """
915 814
916 815 repo_server = RepoServer()
917 816 request.addfinalizer(repo_server.cleanup)
918 817 return repo_server
919 818
920 819
921 820 class RepoServer(object):
922 821 """
923 822 Utility to serve a local repository for the duration of a test case.
924 823
925 824 Supports only Subversion so far.
926 825 """
927 826
928 827 url = None
929 828
930 829 def __init__(self):
931 830 self._cleanup_servers = []
932 831
933 832 def serve(self, vcsrepo):
934 833 if vcsrepo.alias != 'svn':
935 834 raise TypeError("Backend %s not supported" % vcsrepo.alias)
936 835
937 836 proc = subprocess.Popen(
938 837 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
939 838 '--root', vcsrepo.path])
940 839 self._cleanup_servers.append(proc)
941 840 self.url = 'svn://localhost'
942 841
943 842 def cleanup(self):
944 843 for proc in self._cleanup_servers:
945 844 proc.terminate()
946 845
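# Minimal usage sketch for the `reposerver` fixture above. It assumes an SVN-backed
# test repository is available, here through a hypothetical `vcsbackend_svn` flavour
# of the vcsbackend fixture family:
def test_serving_a_svn_repo(reposerver, vcsbackend_svn):
    repo = vcsbackend_svn.create_repo(number_of_commits=1)
    reposerver.serve(repo)
    assert reposerver.url == 'svn://localhost'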
947 846
948 847 @pytest.fixture()
949 848 def pr_util(backend, request, config_stub):
950 849 """
951 850 Utility for tests of models and for functional tests around pull requests.
952 851
953 852 It gives an instance of :class:`PRTestUtility` which provides various
954 853 utility methods around one pull request.
955 854
956 855 This fixture uses `backend` and inherits its parameterization.
957 856 """
958 857
959 858 util = PRTestUtility(backend)
960 859 request.addfinalizer(util.cleanup)
961 860
962 861 return util
963 862
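# Minimal usage sketch for `pr_util`, relying only on methods of PRTestUtility
# defined below; the test name is made up:
def test_pull_request_roundtrip(pr_util):
    pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
    assert pull_request.revisions
    comment = pr_util.create_comment()
    assert comment.pull_request_version_id is None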
964 863
965 864 class PRTestUtility(object):
966 865
967 866 pull_request = None
968 867 pull_request_id = None
969 868 mergeable_patcher = None
970 869 mergeable_mock = None
971 870 notification_patcher = None
972 871
973 872 def __init__(self, backend):
974 873 self.backend = backend
975 874
976 875 def create_pull_request(
977 876 self, commits=None, target_head=None, source_head=None,
978 877 revisions=None, approved=False, author=None, mergeable=False,
979 878 enable_notifications=True, name_suffix=u'', reviewers=None, observers=None,
980 879 title=u"Test", description=u"Description"):
981 880 self.set_mergeable(mergeable)
982 881 if not enable_notifications:
983 882 # mock notification side effect
984 883 self.notification_patcher = mock.patch(
985 884 'rhodecode.model.notification.NotificationModel.create')
986 885 self.notification_patcher.start()
987 886
988 887 if not self.pull_request:
989 888 if not commits:
990 889 commits = [
991 890 {'message': 'c1'},
992 891 {'message': 'c2'},
993 892 {'message': 'c3'},
994 893 ]
995 894 target_head = 'c1'
996 895 source_head = 'c2'
997 896 revisions = ['c2']
998 897
999 898 self.commit_ids = self.backend.create_master_repo(commits)
1000 899 self.target_repository = self.backend.create_repo(
1001 900 heads=[target_head], name_suffix=name_suffix)
1002 901 self.source_repository = self.backend.create_repo(
1003 902 heads=[source_head], name_suffix=name_suffix)
1004 903 self.author = author or UserModel().get_by_username(
1005 904 TEST_USER_ADMIN_LOGIN)
1006 905
1007 906 model = PullRequestModel()
1008 907 self.create_parameters = {
1009 908 'created_by': self.author,
1010 909 'source_repo': self.source_repository.repo_name,
1011 910 'source_ref': self._default_branch_reference(source_head),
1012 911 'target_repo': self.target_repository.repo_name,
1013 912 'target_ref': self._default_branch_reference(target_head),
1014 913 'revisions': [self.commit_ids[r] for r in revisions],
1015 914 'reviewers': reviewers or self._get_reviewers(),
1016 915 'observers': observers or self._get_observers(),
1017 916 'title': title,
1018 917 'description': description,
1019 918 }
1020 919 self.pull_request = model.create(**self.create_parameters)
1021 920 assert model.get_versions(self.pull_request) == []
1022 921
1023 922 self.pull_request_id = self.pull_request.pull_request_id
1024 923
1025 924 if approved:
1026 925 self.approve()
1027 926
1028 927 Session().add(self.pull_request)
1029 928 Session().commit()
1030 929
1031 930 return self.pull_request
1032 931
1033 932 def approve(self):
1034 933 self.create_status_votes(
1035 934 ChangesetStatus.STATUS_APPROVED,
1036 935 *self.pull_request.reviewers)
1037 936
1038 937 def close(self):
1039 938 PullRequestModel().close_pull_request(self.pull_request, self.author)
1040 939
1041 940 def _default_branch_reference(self, commit_message):
1042 941 reference = '%s:%s:%s' % (
1043 942 'branch',
1044 943 self.backend.default_branch_name,
1045 944 self.commit_ids[commit_message])
1046 945 return reference
1047 946
1048 947 def _get_reviewers(self):
1049 948 role = PullRequestReviewers.ROLE_REVIEWER
1050 949 return [
1051 950 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
1052 951 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
1053 952 ]
1054 953
1055 954 def _get_observers(self):
1056 955 return [
1057 956
1058 957 ]
1059 958
1060 959 def update_source_repository(self, head=None):
1061 960 heads = [head or 'c3']
1062 961 self.backend.pull_heads(self.source_repository, heads=heads)
1063 962
1064 963 def add_one_commit(self, head=None):
1065 964 self.update_source_repository(head=head)
1066 965 old_commit_ids = set(self.pull_request.revisions)
1067 966 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1068 967 commit_ids = set(self.pull_request.revisions)
1069 968 new_commit_ids = commit_ids - old_commit_ids
1070 969 assert len(new_commit_ids) == 1
1071 970 return new_commit_ids.pop()
1072 971
1073 972 def remove_one_commit(self):
1074 973 assert len(self.pull_request.revisions) == 2
1075 974 source_vcs = self.source_repository.scm_instance()
1076 975 removed_commit_id = source_vcs.commit_ids[-1]
1077 976
1078 977 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1079 978 # remove the if once that's sorted out.
1080 979 if self.backend.alias == "git":
1081 980 kwargs = {'branch_name': self.backend.default_branch_name}
1082 981 else:
1083 982 kwargs = {}
1084 983 source_vcs.strip(removed_commit_id, **kwargs)
1085 984
1086 985 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1087 986 assert len(self.pull_request.revisions) == 1
1088 987 return removed_commit_id
1089 988
1090 989 def create_comment(self, linked_to=None):
1091 990 comment = CommentsModel().create(
1092 991 text=u"Test comment",
1093 992 repo=self.target_repository.repo_name,
1094 993 user=self.author,
1095 994 pull_request=self.pull_request)
1096 995 assert comment.pull_request_version_id is None
1097 996
1098 997 if linked_to:
1099 998 PullRequestModel()._link_comments_to_version(linked_to)
1100 999
1101 1000 return comment
1102 1001
1103 1002 def create_inline_comment(
1104 1003 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1105 1004 comment = CommentsModel().create(
1106 1005 text=u"Test comment",
1107 1006 repo=self.target_repository.repo_name,
1108 1007 user=self.author,
1109 1008 line_no=line_no,
1110 1009 f_path=file_path,
1111 1010 pull_request=self.pull_request)
1112 1011 assert comment.pull_request_version_id is None
1113 1012
1114 1013 if linked_to:
1115 1014 PullRequestModel()._link_comments_to_version(linked_to)
1116 1015
1117 1016 return comment
1118 1017
1119 1018 def create_version_of_pull_request(self):
1120 1019 pull_request = self.create_pull_request()
1121 1020 version = PullRequestModel()._create_version_from_snapshot(
1122 1021 pull_request)
1123 1022 return version
1124 1023
1125 1024 def create_status_votes(self, status, *reviewers):
1126 1025 for reviewer in reviewers:
1127 1026 ChangesetStatusModel().set_status(
1128 1027 repo=self.pull_request.target_repo,
1129 1028 status=status,
1130 1029 user=reviewer.user_id,
1131 1030 pull_request=self.pull_request)
1132 1031
1133 1032 def set_mergeable(self, value):
1134 1033 if not self.mergeable_patcher:
1135 1034 self.mergeable_patcher = mock.patch.object(
1136 1035 VcsSettingsModel, 'get_general_settings')
1137 1036 self.mergeable_mock = self.mergeable_patcher.start()
1138 1037 self.mergeable_mock.return_value = {
1139 1038 'rhodecode_pr_merge_enabled': value}
1140 1039
1141 1040 def cleanup(self):
1142 1041 # In case the source repository is already cleaned up, the pull
1143 1042 # request will already be deleted.
1144 1043 pull_request = PullRequest().get(self.pull_request_id)
1145 1044 if pull_request:
1146 1045 PullRequestModel().delete(pull_request, pull_request.author)
1147 1046 Session().commit()
1148 1047
1149 1048 if self.notification_patcher:
1150 1049 self.notification_patcher.stop()
1151 1050
1152 1051 if self.mergeable_patcher:
1153 1052 self.mergeable_patcher.stop()
1154 1053
1155 1054
1156 1055 @pytest.fixture()
1157 1056 def user_admin(baseapp):
1158 1057 """
1159 1058 Provides the default admin test user as an instance of `db.User`.
1160 1059 """
1161 1060 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1162 1061 return user
1163 1062
1164 1063
1165 1064 @pytest.fixture()
1166 1065 def user_regular(baseapp):
1167 1066 """
1168 1067 Provides the default regular test user as an instance of `db.User`.
1169 1068 """
1170 1069 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1171 1070 return user
1172 1071
1173 1072
1174 1073 @pytest.fixture()
1175 1074 def user_util(request, db_connection):
1176 1075 """
1177 1076 Provides a wired instance of `UserUtility` with integrated cleanup.
1178 1077 """
1179 1078 utility = UserUtility(test_name=request.node.name)
1180 1079 request.addfinalizer(utility.cleanup)
1181 1080 return utility
1182 1081
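# Minimal usage sketch for `user_util`; everything created here is removed again by
# the fixture finalizer. 'repository.read' is RhodeCode's standard read permission:
def test_user_can_be_granted_repo_access(user_util):
    user = user_util.create_user()
    repo = user_util.create_repo()
    user_util.grant_user_permission_to_repo(repo, user, 'repository.read')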
1183 1082
1184 1083 # TODO: johbo: Split this up into utilities per domain or something similar
1185 1084 class UserUtility(object):
1186 1085
1187 1086 def __init__(self, test_name="test"):
1188 1087 self._test_name = self._sanitize_name(test_name)
1189 1088 self.fixture = Fixture()
1190 1089 self.repo_group_ids = []
1191 1090 self.repos_ids = []
1192 1091 self.user_ids = []
1193 1092 self.user_group_ids = []
1194 1093 self.user_repo_permission_ids = []
1195 1094 self.user_group_repo_permission_ids = []
1196 1095 self.user_repo_group_permission_ids = []
1197 1096 self.user_group_repo_group_permission_ids = []
1198 1097 self.user_user_group_permission_ids = []
1199 1098 self.user_group_user_group_permission_ids = []
1200 1099 self.user_permissions = []
1201 1100
1202 1101 def _sanitize_name(self, name):
1203 1102 for char in ['[', ']']:
1204 1103 name = name.replace(char, '_')
1205 1104 return name
1206 1105
1207 1106 def create_repo_group(
1208 1107 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1209 1108 group_name = "{prefix}_repogroup_{count}".format(
1210 1109 prefix=self._test_name,
1211 1110 count=len(self.repo_group_ids))
1212 1111 repo_group = self.fixture.create_repo_group(
1213 1112 group_name, cur_user=owner)
1214 1113 if auto_cleanup:
1215 1114 self.repo_group_ids.append(repo_group.group_id)
1216 1115 return repo_group
1217 1116
1218 1117 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1219 1118 auto_cleanup=True, repo_type='hg', bare=False):
1220 1119 repo_name = "{prefix}_repository_{count}".format(
1221 1120 prefix=self._test_name,
1222 1121 count=len(self.repos_ids))
1223 1122
1224 1123 repository = self.fixture.create_repo(
1225 1124 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1226 1125 if auto_cleanup:
1227 1126 self.repos_ids.append(repository.repo_id)
1228 1127 return repository
1229 1128
1230 1129 def create_user(self, auto_cleanup=True, **kwargs):
1231 1130 user_name = "{prefix}_user_{count}".format(
1232 1131 prefix=self._test_name,
1233 1132 count=len(self.user_ids))
1234 1133 user = self.fixture.create_user(user_name, **kwargs)
1235 1134 if auto_cleanup:
1236 1135 self.user_ids.append(user.user_id)
1237 1136 return user
1238 1137
1239 1138 def create_additional_user_email(self, user, email):
1240 1139 uem = self.fixture.create_additional_user_email(user=user, email=email)
1241 1140 return uem
1242 1141
1243 1142 def create_user_with_group(self):
1244 1143 user = self.create_user()
1245 1144 user_group = self.create_user_group(members=[user])
1246 1145 return user, user_group
1247 1146
1248 1147 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1249 1148 auto_cleanup=True, **kwargs):
1250 1149 group_name = "{prefix}_usergroup_{count}".format(
1251 1150 prefix=self._test_name,
1252 1151 count=len(self.user_group_ids))
1253 1152 user_group = self.fixture.create_user_group(
1254 1153 group_name, cur_user=owner, **kwargs)
1255 1154
1256 1155 if auto_cleanup:
1257 1156 self.user_group_ids.append(user_group.users_group_id)
1258 1157 if members:
1259 1158 for user in members:
1260 1159 UserGroupModel().add_user_to_group(user_group, user)
1261 1160 return user_group
1262 1161
1263 1162 def grant_user_permission(self, user_name, permission_name):
1264 1163 self.inherit_default_user_permissions(user_name, False)
1265 1164 self.user_permissions.append((user_name, permission_name))
1266 1165
1267 1166 def grant_user_permission_to_repo_group(
1268 1167 self, repo_group, user, permission_name):
1269 1168 permission = RepoGroupModel().grant_user_permission(
1270 1169 repo_group, user, permission_name)
1271 1170 self.user_repo_group_permission_ids.append(
1272 1171 (repo_group.group_id, user.user_id))
1273 1172 return permission
1274 1173
1275 1174 def grant_user_group_permission_to_repo_group(
1276 1175 self, repo_group, user_group, permission_name):
1277 1176 permission = RepoGroupModel().grant_user_group_permission(
1278 1177 repo_group, user_group, permission_name)
1279 1178 self.user_group_repo_group_permission_ids.append(
1280 1179 (repo_group.group_id, user_group.users_group_id))
1281 1180 return permission
1282 1181
1283 1182 def grant_user_permission_to_repo(
1284 1183 self, repo, user, permission_name):
1285 1184 permission = RepoModel().grant_user_permission(
1286 1185 repo, user, permission_name)
1287 1186 self.user_repo_permission_ids.append(
1288 1187 (repo.repo_id, user.user_id))
1289 1188 return permission
1290 1189
1291 1190 def grant_user_group_permission_to_repo(
1292 1191 self, repo, user_group, permission_name):
1293 1192 permission = RepoModel().grant_user_group_permission(
1294 1193 repo, user_group, permission_name)
1295 1194 self.user_group_repo_permission_ids.append(
1296 1195 (repo.repo_id, user_group.users_group_id))
1297 1196 return permission
1298 1197
1299 1198 def grant_user_permission_to_user_group(
1300 1199 self, target_user_group, user, permission_name):
1301 1200 permission = UserGroupModel().grant_user_permission(
1302 1201 target_user_group, user, permission_name)
1303 1202 self.user_user_group_permission_ids.append(
1304 1203 (target_user_group.users_group_id, user.user_id))
1305 1204 return permission
1306 1205
1307 1206 def grant_user_group_permission_to_user_group(
1308 1207 self, target_user_group, user_group, permission_name):
1309 1208 permission = UserGroupModel().grant_user_group_permission(
1310 1209 target_user_group, user_group, permission_name)
1311 1210 self.user_group_user_group_permission_ids.append(
1312 1211 (target_user_group.users_group_id, user_group.users_group_id))
1313 1212 return permission
1314 1213
1315 1214 def revoke_user_permission(self, user_name, permission_name):
1316 1215 self.inherit_default_user_permissions(user_name, True)
1317 1216 UserModel().revoke_perm(user_name, permission_name)
1318 1217
1319 1218 def inherit_default_user_permissions(self, user_name, value):
1320 1219 user = UserModel().get_by_username(user_name)
1321 1220 user.inherit_default_permissions = value
1322 1221 Session().add(user)
1323 1222 Session().commit()
1324 1223
1325 1224 def cleanup(self):
1326 1225 self._cleanup_permissions()
1327 1226 self._cleanup_repos()
1328 1227 self._cleanup_repo_groups()
1329 1228 self._cleanup_user_groups()
1330 1229 self._cleanup_users()
1331 1230
1332 1231 def _cleanup_permissions(self):
1333 1232 if self.user_permissions:
1334 1233 for user_name, permission_name in self.user_permissions:
1335 1234 self.revoke_user_permission(user_name, permission_name)
1336 1235
1337 1236 for permission in self.user_repo_permission_ids:
1338 1237 RepoModel().revoke_user_permission(*permission)
1339 1238
1340 1239 for permission in self.user_group_repo_permission_ids:
1341 1240 RepoModel().revoke_user_group_permission(*permission)
1342 1241
1343 1242 for permission in self.user_repo_group_permission_ids:
1344 1243 RepoGroupModel().revoke_user_permission(*permission)
1345 1244
1346 1245 for permission in self.user_group_repo_group_permission_ids:
1347 1246 RepoGroupModel().revoke_user_group_permission(*permission)
1348 1247
1349 1248 for permission in self.user_user_group_permission_ids:
1350 1249 UserGroupModel().revoke_user_permission(*permission)
1351 1250
1352 1251 for permission in self.user_group_user_group_permission_ids:
1353 1252 UserGroupModel().revoke_user_group_permission(*permission)
1354 1253
1355 1254 def _cleanup_repo_groups(self):
1356 1255 def _repo_group_key(group_id):
1357 1256 """
1358 1257 Gives higher priority to the groups with the most complex paths
1359 1258 """
1360 1259 group = RepoGroup.get(group_id)
1361 1260 return len(group.group_name.split('/')) if group else 0
1362 1261
1363 1262 # Python 3 removed cmp() and the cmp= argument; sort by key, deepest paths first
1364 1263 sorted_repo_group_ids = sorted(
1365 1264 self.repo_group_ids, key=_repo_group_key, reverse=True)
1370 1269 for repo_group_id in sorted_repo_group_ids:
1371 1270 self.fixture.destroy_repo_group(repo_group_id)
1372 1271
1373 1272 def _cleanup_repos(self):
1374 1273 sorted_repos_ids = sorted(self.repos_ids)
1375 1274 for repo_id in sorted_repos_ids:
1376 1275 self.fixture.destroy_repo(repo_id)
1377 1276
1378 1277 def _cleanup_user_groups(self):
1379 1278 def _user_group_key(group_id):
1380 1279 """
1381 1280 Gives higher priority to the groups with the most complex paths
1382 1281 """
1383 1282 group = UserGroup.get(group_id)
1384 1283 return len(group.users_group_name.split('/')) if group else 0
1385 1284
1386 1285 # Python 3 removed cmp() and the cmp= argument; sort by key, deepest paths first
1387 1286 sorted_user_group_ids = sorted(
1388 1287 self.user_group_ids, key=_user_group_key, reverse=True)
1395 1294 for user_group_id in sorted_user_group_ids:
1396 1295 self.fixture.destroy_user_group(user_group_id)
1397 1296
1398 1297 def _cleanup_users(self):
1399 1298 for user_id in self.user_ids:
1400 1299 self.fixture.destroy_user(user_id)
1401 1300
1402 1301
1403 # TODO: Think about moving this into a pytest-pyro package and make it a
1404 # pytest plugin
1405 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1406 def pytest_runtest_makereport(item, call):
1407 """
1408 Adds the remote traceback to the test report if the exception carries it.
1409
1410 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1411 to the exception instance.
1412 """
1413 outcome = yield
1414 report = outcome.get_result()
1415 if call.excinfo:
1416 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1417
1418
1419 def _add_vcsserver_remote_traceback(report, exc):
1420 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1421
1422 if vcsserver_traceback:
1423 section = 'VCSServer remote traceback ' + report.when
1424 report.sections.append((section, vcsserver_traceback))
1425
1426
1427 1302 @pytest.fixture(scope='session')
1428 1303 def testrun():
1429 1304 return {
1430 1305 'uuid': uuid.uuid4(),
1431 1306 'start': datetime.datetime.utcnow().isoformat(),
1432 1307 'timestamp': int(time.time()),
1433 1308 }
1434 1309
1435 1310
1436 1311 class AppenlightClient(object):
1437 1312
1438 1313 url_template = '{url}?protocol_version=0.5'
1439 1314
1440 1315 def __init__(
1441 1316 self, url, api_key, add_server=True, add_timestamp=True,
1442 1317 namespace=None, request=None, testrun=None):
1443 1318 self.url = self.url_template.format(url=url)
1444 1319 self.api_key = api_key
1445 1320 self.add_server = add_server
1446 1321 self.add_timestamp = add_timestamp
1447 1322 self.namespace = namespace
1448 1323 self.request = request
1449 1324 self.server = socket.getfqdn(socket.gethostname())
1450 1325 self.tags_before = {}
1451 1326 self.tags_after = {}
1452 1327 self.stats = []
1453 1328 self.testrun = testrun or {}
1454 1329
1455 1330 def tag_before(self, tag, value):
1456 1331 self.tags_before[tag] = value
1457 1332
1458 1333 def tag_after(self, tag, value):
1459 1334 self.tags_after[tag] = value
1460 1335
1461 1336 def collect(self, data):
1462 1337 if self.add_server:
1463 1338 data.setdefault('server', self.server)
1464 1339 if self.add_timestamp:
1465 1340 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1466 1341 if self.namespace:
1467 1342 data.setdefault('namespace', self.namespace)
1468 1343 if self.request:
1469 1344 data.setdefault('request', self.request)
1470 1345 self.stats.append(data)
1471 1346
1472 1347 def send_stats(self):
1473 1348 tags = [
1474 1349 ('testrun', self.request),
1475 1350 ('testrun.start', self.testrun['start']),
1476 1351 ('testrun.timestamp', self.testrun['timestamp']),
1477 1352 ('test', self.namespace),
1478 1353 ]
1479 1354 for key, value in self.tags_before.items():
1480 1355 tags.append((key + '.before', value))
1481 1356 try:
1482 1357 delta = self.tags_after[key] - value
1483 1358 tags.append((key + '.delta', delta))
1484 1359 except Exception:
1485 1360 pass
1486 1361 for key, value in self.tags_after.items():
1487 1362 tags.append((key + '.after', value))
1488 1363 self.collect({
1489 1364 'message': "Collected tags",
1490 1365 'tags': tags,
1491 1366 })
1492 1367
1493 1368 response = requests.post(
1494 1369 self.url,
1495 1370 headers={
1496 1371 'X-appenlight-api-key': self.api_key},
1497 1372 json=self.stats,
1498 1373 )
1499 1374
1500 1375 if response.status_code != 200:
1501 1376 pprint.pprint(self.stats)
1502 1377 print(response.headers)
1503 1378 print(response.text)
1504 1379 raise Exception('Sending to appenlight failed')
1505 1380
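# Minimal sketch of driving the AppenlightClient defined above. The URL, API key
# and testrun data are placeholders and no real service is assumed:
def _appenlight_usage_sketch():
    client = AppenlightClient(
        url='https://ae.example.com/api/general_metrics',
        api_key='PLACEHOLDER_KEY',
        namespace='test_vcs_operations',
        request='testrun-0001',
        testrun={'start': '2020-01-01T00:00:00', 'timestamp': 1577836800})
    client.tag_before('repo_count', 10)
    client.tag_after('repo_count', 12)  # send_stats() derives a `.delta` tag from these
    client.send_stats()                 # POSTs the collected stats, raises on a non-200 reply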
1506 1381
1507 1382 @pytest.fixture()
1508 1383 def gist_util(request, db_connection):
1509 1384 """
1510 1385 Provides a wired instance of `GistUtility` with integrated cleanup.
1511 1386 """
1512 1387 utility = GistUtility()
1513 1388 request.addfinalizer(utility.cleanup)
1514 1389 return utility
1515 1390
1516 1391
1517 1392 class GistUtility(object):
1518 1393 def __init__(self):
1519 1394 self.fixture = Fixture()
1520 1395 self.gist_ids = []
1521 1396
1522 1397 def create_gist(self, **kwargs):
1523 1398 gist = self.fixture.create_gist(**kwargs)
1524 1399 self.gist_ids.append(gist.gist_id)
1525 1400 return gist
1526 1401
1527 1402 def cleanup(self):
1528 1403 for id_ in self.gist_ids:
1529 1404 self.fixture.destroy_gists(str(id_))
1530 1405
1531 1406
1532 1407 @pytest.fixture()
1533 1408 def enabled_backends(request):
1534 1409 backends = request.config.option.backends
1535 1410 return backends[:]
1536 1411
1537 1412
1538 1413 @pytest.fixture()
1539 1414 def settings_util(request, db_connection):
1540 1415 """
1541 1416 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1542 1417 """
1543 1418 utility = SettingsUtility()
1544 1419 request.addfinalizer(utility.cleanup)
1545 1420 return utility
1546 1421
1547 1422
1548 1423 class SettingsUtility(object):
1549 1424 def __init__(self):
1550 1425 self.rhodecode_ui_ids = []
1551 1426 self.rhodecode_setting_ids = []
1552 1427 self.repo_rhodecode_ui_ids = []
1553 1428 self.repo_rhodecode_setting_ids = []
1554 1429
1555 1430 def create_repo_rhodecode_ui(
1556 1431 self, repo, section, value, key=None, active=True, cleanup=True):
1557 1432 key = key or hashlib.sha1(
1558 1433 '{}{}{}'.format(section, value, repo.repo_id).encode('utf-8')).hexdigest()
1559 1434
1560 1435 setting = RepoRhodeCodeUi()
1561 1436 setting.repository_id = repo.repo_id
1562 1437 setting.ui_section = section
1563 1438 setting.ui_value = value
1564 1439 setting.ui_key = key
1565 1440 setting.ui_active = active
1566 1441 Session().add(setting)
1567 1442 Session().commit()
1568 1443
1569 1444 if cleanup:
1570 1445 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1571 1446 return setting
1572 1447
1573 1448 def create_rhodecode_ui(
1574 1449 self, section, value, key=None, active=True, cleanup=True):
1575 1450 key = key or hashlib.sha1('{}{}'.format(section, value).encode('utf-8')).hexdigest()
1576 1451
1577 1452 setting = RhodeCodeUi()
1578 1453 setting.ui_section = section
1579 1454 setting.ui_value = value
1580 1455 setting.ui_key = key
1581 1456 setting.ui_active = active
1582 1457 Session().add(setting)
1583 1458 Session().commit()
1584 1459
1585 1460 if cleanup:
1586 1461 self.rhodecode_ui_ids.append(setting.ui_id)
1587 1462 return setting
1588 1463
1589 1464 def create_repo_rhodecode_setting(
1590 1465 self, repo, name, value, type_, cleanup=True):
1591 1466 setting = RepoRhodeCodeSetting(
1592 1467 repo.repo_id, key=name, val=value, type=type_)
1593 1468 Session().add(setting)
1594 1469 Session().commit()
1595 1470
1596 1471 if cleanup:
1597 1472 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1598 1473 return setting
1599 1474
1600 1475 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1601 1476 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1602 1477 Session().add(setting)
1603 1478 Session().commit()
1604 1479
1605 1480 if cleanup:
1606 1481 self.rhodecode_setting_ids.append(setting.app_settings_id)
1607 1482
1608 1483 return setting
1609 1484
1610 1485 def cleanup(self):
1611 1486 for id_ in self.rhodecode_ui_ids:
1612 1487 setting = RhodeCodeUi.get(id_)
1613 1488 Session().delete(setting)
1614 1489
1615 1490 for id_ in self.rhodecode_setting_ids:
1616 1491 setting = RhodeCodeSetting.get(id_)
1617 1492 Session().delete(setting)
1618 1493
1619 1494 for id_ in self.repo_rhodecode_ui_ids:
1620 1495 setting = RepoRhodeCodeUi.get(id_)
1621 1496 Session().delete(setting)
1622 1497
1623 1498 for id_ in self.repo_rhodecode_setting_ids:
1624 1499 setting = RepoRhodeCodeSetting.get(id_)
1625 1500 Session().delete(setting)
1626 1501
1627 1502 Session().commit()
1628 1503
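# Minimal usage sketch for `settings_util`; the section, value and key below are
# illustrative only:
def test_custom_ui_entry_is_stored(settings_util):
    setting = settings_util.create_rhodecode_ui(
        section='hooks', value='python:example.module.hook', key='example_hook_key')
    assert setting.ui_section == 'hooks'
    assert setting.ui_active
    # the entry is deleted again by settings_util.cleanup() via the request finalizer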
1629 1504
1630 1505 @pytest.fixture()
1631 1506 def no_notifications(request):
1632 1507 notification_patcher = mock.patch(
1633 1508 'rhodecode.model.notification.NotificationModel.create')
1634 1509 notification_patcher.start()
1635 1510 request.addfinalizer(notification_patcher.stop)
1636 1511
1637 1512
1638 1513 @pytest.fixture(scope='session')
1639 1514 def repeat(request):
1640 1515 """
1641 1516 Performance tests base their number of repetitions on this fixture.
1642 1517
1643 1518 Slower calls may divide it by 10 or 100. The value is chosen so that the
1644 1519 tests are not too slow in our default test suite.
1645 1520 """
1646 1521 return request.config.getoption('--repeat')
1647 1522
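# Sketch of how `repeat` is meant to be used in a performance test; the loop body
# is a stand-in for the operation actually being measured:
def test_operation_is_fast_enough(repeat):
    def operation():
        return sum(range(100))  # stand-in for the code under test
    for _ in range(repeat):
        operation()
    # slower operations typically loop repeat // 10 or repeat // 100 times instead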
1648 1523
1649 1524 @pytest.fixture()
1650 1525 def rhodecode_fixtures():
1651 1526 return Fixture()
1652 1527
1653 1528
1654 1529 @pytest.fixture()
1655 1530 def context_stub():
1656 1531 """
1657 1532 Stub context object.
1658 1533 """
1659 1534 context = pyramid.testing.DummyResource()
1660 1535 return context
1661 1536
1662 1537
1663 1538 @pytest.fixture()
1664 1539 def request_stub():
1665 1540 """
1666 1541 Stub request object.
1667 1542 """
1668 1543 from rhodecode.lib.base import bootstrap_request
1669 1544 request = bootstrap_request(scheme='https')
1670 1545 return request
1671 1546
1672 1547
1673 1548 @pytest.fixture()
1674 1549 def config_stub(request, request_stub):
1675 1550 """
1676 1551 Set up pyramid.testing and return the Configurator.
1677 1552 """
1678 1553 from rhodecode.lib.base import bootstrap_config
1679 1554 config = bootstrap_config(request=request_stub)
1680 1555
1681 1556 @request.addfinalizer
1682 1557 def cleanup():
1683 1558 pyramid.testing.tearDown()
1684 1559
1685 1560 return config
1686 1561
1687 1562
1688 1563 @pytest.fixture()
1689 1564 def StubIntegrationType():
1690 1565 class _StubIntegrationType(IntegrationTypeBase):
1691 1566 """ Test integration type class """
1692 1567
1693 1568 key = 'test'
1694 1569 display_name = 'Test integration type'
1695 1570 description = 'A test integration type for testing'
1696 1571
1697 1572 @classmethod
1698 1573 def icon(cls):
1699 1574 return 'test_icon_html_image'
1700 1575
1701 1576 def __init__(self, settings):
1702 1577 super(_StubIntegrationType, self).__init__(settings)
1703 1578 self.sent_events = [] # for testing
1704 1579
1705 1580 def send_event(self, event):
1706 1581 self.sent_events.append(event)
1707 1582
1708 1583 def settings_schema(self):
1709 1584 class SettingsSchema(colander.Schema):
1710 1585 test_string_field = colander.SchemaNode(
1711 1586 colander.String(),
1712 1587 missing=colander.required,
1713 1588 title='test string field',
1714 1589 )
1715 1590 test_int_field = colander.SchemaNode(
1716 1591 colander.Int(),
1717 1592 title='some integer setting',
1718 1593 )
1719 1594 return SettingsSchema()
1720 1595
1721 1596
1722 1597 integration_type_registry.register_integration_type(_StubIntegrationType)
1723 1598 return _StubIntegrationType
1724 1599
1600
1725 1601 @pytest.fixture()
1726 1602 def stub_integration_settings():
1727 1603 return {
1728 1604 'test_string_field': 'some data',
1729 1605 'test_int_field': 100,
1730 1606 }
1731 1607
1732 1608
1733 1609 @pytest.fixture()
1734 1610 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1735 1611 stub_integration_settings):
1736 1612 integration = IntegrationModel().create(
1737 1613 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1738 1614 name='test repo integration',
1739 1615 repo=repo_stub, repo_group=None, child_repos_only=None)
1740 1616
1741 1617 @request.addfinalizer
1742 1618 def cleanup():
1743 1619 IntegrationModel().delete(integration)
1744 1620
1745 1621 return integration
1746 1622
1747 1623
1748 1624 @pytest.fixture()
1749 1625 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1750 1626 stub_integration_settings):
1751 1627 integration = IntegrationModel().create(
1752 1628 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1753 1629 name='test repogroup integration',
1754 1630 repo=None, repo_group=test_repo_group, child_repos_only=True)
1755 1631
1756 1632 @request.addfinalizer
1757 1633 def cleanup():
1758 1634 IntegrationModel().delete(integration)
1759 1635
1760 1636 return integration
1761 1637
1762 1638
1763 1639 @pytest.fixture()
1764 1640 def repogroup_recursive_integration_stub(request, test_repo_group,
1765 1641 StubIntegrationType, stub_integration_settings):
1766 1642 integration = IntegrationModel().create(
1767 1643 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1768 1644 name='test recursive repogroup integration',
1769 1645 repo=None, repo_group=test_repo_group, child_repos_only=False)
1770 1646
1771 1647 @request.addfinalizer
1772 1648 def cleanup():
1773 1649 IntegrationModel().delete(integration)
1774 1650
1775 1651 return integration
1776 1652
1777 1653
1778 1654 @pytest.fixture()
1779 1655 def global_integration_stub(request, StubIntegrationType,
1780 1656 stub_integration_settings):
1781 1657 integration = IntegrationModel().create(
1782 1658 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1783 1659 name='test global integration',
1784 1660 repo=None, repo_group=None, child_repos_only=None)
1785 1661
1786 1662 @request.addfinalizer
1787 1663 def cleanup():
1788 1664 IntegrationModel().delete(integration)
1789 1665
1790 1666 return integration
1791 1667
1792 1668
1793 1669 @pytest.fixture()
1794 1670 def root_repos_integration_stub(request, StubIntegrationType,
1795 1671 stub_integration_settings):
1796 1672 integration = IntegrationModel().create(
1797 1673 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1798 1674 name='test root repos integration',
1799 1675 repo=None, repo_group=None, child_repos_only=True)
1800 1676
1801 1677 @request.addfinalizer
1802 1678 def cleanup():
1803 1679 IntegrationModel().delete(integration)
1804 1680
1805 1681 return integration
1806 1682
1807 1683
1808 1684 @pytest.fixture()
1809 1685 def local_dt_to_utc():
1810 1686 def _factory(dt):
1811 1687 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1812 1688 dateutil.tz.tzutc()).replace(tzinfo=None)
1813 1689 return _factory
1814 1690
1815 1691
1816 1692 @pytest.fixture()
1817 1693 def disable_anonymous_user(request, baseapp):
1818 1694 set_anonymous_access(False)
1819 1695
1820 1696 @request.addfinalizer
1821 1697 def cleanup():
1822 1698 set_anonymous_access(True)
1823 1699
1824 1700
1825 1701 @pytest.fixture(scope='module')
1826 1702 def rc_fixture(request):
1827 1703 return Fixture()
1828 1704
1829 1705
1830 1706 @pytest.fixture()
1831 1707 def repo_groups(request):
1832 1708 fixture = Fixture()
1833 1709
1834 1710 session = Session()
1835 1711 zombie_group = fixture.create_repo_group('zombie')
1836 1712 parent_group = fixture.create_repo_group('parent')
1837 1713 child_group = fixture.create_repo_group('parent/child')
1838 1714 groups_in_db = session.query(RepoGroup).all()
1839 1715 assert len(groups_in_db) == 3
1840 1716 assert child_group.group_parent_id == parent_group.group_id
1841 1717
1842 1718 @request.addfinalizer
1843 1719 def cleanup():
1844 1720 fixture.destroy_repo_group(zombie_group)
1845 1721 fixture.destroy_repo_group(child_group)
1846 1722 fixture.destroy_repo_group(parent_group)
1847 1723
1848 1724 return zombie_group, parent_group, child_group
@@ -1,467 +1,472 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import threading
22 22 import time
23 23 import logging
24 24 import os.path
25 25 import subprocess
26 26 import tempfile
27 27 import urllib.request, urllib.error, urllib.parse
28 28 from lxml.html import fromstring, tostring
29 29 from lxml.cssselect import CSSSelector
30 30 from urllib.parse import unquote_plus
31 31 import webob
32 32
33 33 from webtest.app import TestResponse, TestApp
34 34 from webtest.compat import print_stderr
35 35
36 36 import pytest
37
38 try:
37 39 import rc_testdata
40 except ImportError:
41 raise ImportError('Failed to import rc_testdata, '
42 'please make sure this package is installed from requirements_test.txt')
38 43
39 44 from rhodecode.model.db import User, Repository
40 45 from rhodecode.model.meta import Session
41 46 from rhodecode.model.scm import ScmModel
42 47 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
43 48 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 49 from rhodecode.tests import login_user_session
45 50
46 51 log = logging.getLogger(__name__)
47 52
48 53
49 54 class CustomTestResponse(TestResponse):
50 55
51 56 def _save_output(self, out):
52 57 f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html')
53 58 f.write(out)
54 59 return f.name
55 60
56 61 def mustcontain(self, *strings, **kw):
57 62 """
58 63 Assert that the response contains all of the strings passed
59 64 in as arguments.
60 65
61 66 Equivalent to::
62 67
63 68 assert string in res
64 69 """
65 70 print_body = kw.pop('print_body', False)
66 71 if 'no' in kw:
67 72 no = kw['no']
68 73 del kw['no']
69 74 if isinstance(no, str):
70 75 no = [no]
71 76 else:
72 77 no = []
73 78 if kw:
74 79 raise TypeError(
75 80 "The only keyword argument allowed is 'no' got %s" % kw)
76 81
77 82 f = self._save_output(str(self))
78 83
79 84 for s in strings:
80 85 if s not in self:
81 86 print_stderr("Actual response (no %r):" % s)
82 87 print_stderr("body output saved as `%s`" % f)
83 88 if print_body:
84 89 print_stderr(str(self))
85 90 raise IndexError(
86 91 "Body does not contain string %r, body output saved as %s" % (s, f))
87 92
88 93 for no_s in no:
89 94 if no_s in self:
90 95 print_stderr("Actual response (has %r)" % no_s)
91 96 print_stderr("body output saved as `%s`" % f)
92 97 if print_body:
93 98 print_stderr(str(self))
94 99 raise IndexError(
95 100 "Body contains bad string %r, body output saved as %s" % (no_s, f))
96 101
97 102 def assert_response(self):
98 103 return AssertResponse(self)
99 104
100 105 def get_session_from_response(self):
101 106 """
102 107 This returns the session from a response object.
103 108 """
104 109 from rhodecode.lib.rc_beaker import session_factory_from_settings
105 110 session = session_factory_from_settings(self.test_app._pyramid_settings)
106 111 return session(self.request)
107 112
108 113
109 114 class TestRequest(webob.BaseRequest):
110 115
111 116 # for py.test
112 117 disabled = True
113 118 ResponseClass = CustomTestResponse
114 119
115 120 def add_response_callback(self, callback):
116 121 pass
117 122
118 123
119 124 class CustomTestApp(TestApp):
120 125 """
121 126 Custom app that makes mustcontain more useful and adds login/CSRF helper methods
122 127 """
123 128 RequestClass = TestRequest
124 129 rc_login_data = {}
125 130 rc_current_session = None
126 131
127 132 def login(self, username=None, password=None):
128 133 from rhodecode.lib import auth
129 134
130 135 if username and password:
131 136 session = login_user_session(self, username, password)
132 137 else:
133 138 session = login_user_session(self)
134 139
135 140 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
136 141 self.rc_current_session = session
137 142 return session['rhodecode_user']
138 143
139 144 @property
140 145 def csrf_token(self):
141 146 return self.rc_login_data['csrf_token']
142 147
143 148 @property
144 149 def _pyramid_registry(self):
145 150 return self.app.config.registry
146 151
147 152 @property
148 153 def _pyramid_settings(self):
149 154 return self._pyramid_registry.settings
150 155
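# Minimal sketch of how CustomTestApp is used in functional tests. It assumes the
# usual `app` fixture yields a CustomTestApp instance; credentials and route are
# placeholders:
def test_login_and_post_with_csrf(app):
    app.login(username='test_admin', password='test12')
    response = app.post('/illustrative/route', params={'csrf_token': app.csrf_token})
    response.mustcontain('expected text', no='error text')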
151 156
152 157 def set_anonymous_access(enabled):
153 158 """(Dis)allows anonymous access depending on parameter `enabled`"""
154 159 user = User.get_default_user()
155 160 user.active = enabled
156 161 Session().add(user)
157 162 Session().commit()
158 163 time.sleep(1.5) # must sleep for cache (1s to expire)
159 164 log.info('anonymous access is now: %s', enabled)
160 165 assert enabled == User.get_default_user().active, (
161 166 'Cannot set anonymous access')
162 167
163 168
164 169 def check_xfail_backends(node, backend_alias):
165 170 # Using "xfail_backends" here intentionally, since this marks work
166 171 # which is "to be done" soon.
167 172 skip_marker = node.get_closest_marker('xfail_backends')
168 173 if skip_marker and backend_alias in skip_marker.args:
169 174 msg = "Support for backend %s to be developed." % (backend_alias, )
170 175 msg = skip_marker.kwargs.get('reason', msg)
171 176 pytest.xfail(msg)
172 177
173 178
174 179 def check_skip_backends(node, backend_alias):
175 180 # Using "skip_backends" here intentionally, since this marks work which is
176 181 # not supported.
177 182 skip_marker = node.get_closest_marker('skip_backends')
178 183 if skip_marker and backend_alias in skip_marker.args:
179 184 msg = "Feature not supported for backend %s." % (backend_alias, )
180 185 msg = skip_marker.kwargs.get('reason', msg)
181 186 pytest.skip(msg)
182 187
183 188
184 189 def extract_git_repo_from_dump(dump_name, repo_name):
185 190 """Create git repo `repo_name` from dump `dump_name`."""
186 191 repos_path = ScmModel().repos_path
187 192 target_path = os.path.join(repos_path, repo_name)
188 193 rc_testdata.extract_git_dump(dump_name, target_path)
189 194 return target_path
190 195
191 196
192 197 def extract_hg_repo_from_dump(dump_name, repo_name):
193 198 """Create hg repo `repo_name` from dump `dump_name`."""
194 199 repos_path = ScmModel().repos_path
195 200 target_path = os.path.join(repos_path, repo_name)
196 201 rc_testdata.extract_hg_dump(dump_name, target_path)
197 202 return target_path
198 203
199 204
200 205 def extract_svn_repo_from_dump(dump_name, repo_name):
201 206 """Create a svn repo `repo_name` from dump `dump_name`."""
202 207 repos_path = ScmModel().repos_path
203 208 target_path = os.path.join(repos_path, repo_name)
204 209 SubversionRepository(target_path, create=True)
205 210 _load_svn_dump_into_repo(dump_name, target_path)
206 211 return target_path
207 212
208 213
209 214 def assert_message_in_log(log_records, message, levelno, module):
210 215 messages = [
211 216 r.message for r in log_records
212 217 if r.module == module and r.levelno == levelno
213 218 ]
214 219 assert message in messages
215 220
216 221
217 222 def _load_svn_dump_into_repo(dump_name, repo_path):
218 223 """
219 224 Utility to populate a svn repository with a named dump
220 225
221 226 Currently the dumps are in rc_testdata. They might later on be
222 227 integrated with the main repository once they stabilize more.
223 228 """
224 229 dump = rc_testdata.load_svn_dump(dump_name)
225 230 load_dump = subprocess.Popen(
226 231 ['svnadmin', 'load', repo_path],
227 232 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
228 233 stderr=subprocess.PIPE)
229 234 out, err = load_dump.communicate(dump)
230 235 if load_dump.returncode != 0:
231 236 log.error("Output of load_dump command: %s", out)
232 237 log.error("Error output of load_dump command: %s", err)
233 238 raise Exception(
234 239 'Failed to load dump "%s" into repository at path "%s".'
235 240 % (dump_name, repo_path))
236 241
237 242
238 243 class AssertResponse(object):
239 244 """
240 245 Utility that helps to assert things about a given HTML response.
241 246 """
242 247
243 248 def __init__(self, response):
244 249 self.response = response
245 250
246 251 def get_imports(self):
247 252 return fromstring, tostring, CSSSelector
248 253
249 254 def one_element_exists(self, css_selector):
250 255 self.get_element(css_selector)
251 256
252 257 def no_element_exists(self, css_selector):
253 258 assert not self._get_elements(css_selector)
254 259
255 260 def element_equals_to(self, css_selector, expected_content):
256 261 element = self.get_element(css_selector)
257 262 element_text = self._element_to_string(element)
258 263 assert expected_content in element_text
259 264
260 265 def element_contains(self, css_selector, expected_content):
261 266 element = self.get_element(css_selector)
262 267 assert expected_content in element.text_content()
263 268
264 269 def element_value_contains(self, css_selector, expected_content):
265 270 element = self.get_element(css_selector)
266 271 assert expected_content in element.value
267 272
268 273 def contains_one_link(self, link_text, href):
269 274 fromstring, tostring, CSSSelector = self.get_imports()
270 275 doc = fromstring(self.response.body)
271 276 sel = CSSSelector('a[href]')
272 277 elements = [
273 278 e for e in sel(doc) if e.text_content().strip() == link_text]
274 279 assert len(elements) == 1, "Did not find link or found multiple links"
275 280 self._ensure_url_equal(elements[0].attrib.get('href'), href)
276 281
277 282 def contains_one_anchor(self, anchor_id):
278 283 fromstring, tostring, CSSSelector = self.get_imports()
279 284 doc = fromstring(self.response.body)
280 285 sel = CSSSelector('#' + anchor_id)
281 286 elements = sel(doc)
282 287 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
283 288
284 289 def _ensure_url_equal(self, found, expected):
285 290 assert _Url(found) == _Url(expected)
286 291
287 292 def get_element(self, css_selector):
288 293 elements = self._get_elements(css_selector)
289 294 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
290 295 return elements[0]
291 296
292 297 def get_elements(self, css_selector):
293 298 return self._get_elements(css_selector)
294 299
295 300 def _get_elements(self, css_selector):
296 301 fromstring, tostring, CSSSelector = self.get_imports()
297 302 doc = fromstring(self.response.body)
298 303 sel = CSSSelector(css_selector)
299 304 elements = sel(doc)
300 305 return elements
301 306
302 307 def _element_to_string(self, element):
303 308 fromstring, tostring, CSSSelector = self.get_imports()
304 309 return tostring(element)
305 310
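# Minimal sketch of AssertResponse in action; the route and selectors are
# illustrative and `app` is assumed to be the functional-test application fixture:
def test_rendered_page_structure(app):
    response = app.get('/_admin/repos')
    assert_response = response.assert_response()
    assert_response.one_element_exists('#content')
    assert_response.no_element_exists('.alert-danger')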
306 311
307 312 class _Url(object):
308 313 """
309 314 A url object that can be compared with other url objects
310 315 without regard to the vagaries of encoding, escaping, and ordering
311 316 of parameters in query strings.
312 317
313 318 Inspired by
314 319 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
315 320 """
316 321
317 322 def __init__(self, url):
318 323 parts = urllib.parse.urlparse(url)
319 324 _query = frozenset(urllib.parse.parse_qsl(parts.query))
320 325 _path = unquote_plus(parts.path)
321 326 parts = parts._replace(query=_query, path=_path)
322 327 self.parts = parts
323 328
324 329 def __eq__(self, other):
325 330 return self.parts == other.parts
326 331
327 332 def __hash__(self):
328 333 return hash(self.parts)
329 334
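# _Url makes URL comparisons insensitive to query-parameter order and escaping;
# a quick sanity check:
assert _Url('/repo_name?page=1&sort=name') == _Url('/repo_name?sort=name&page=1')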
330 335
331 336 def run_test_concurrently(times, raise_catched_exc=True):
332 337 """
333 338 Add this decorator to small pieces of code that you want to test
334 339 concurrently
335 340
336 341 ex:
337 342
338 343 @run_test_concurrently(25)
339 344 def my_test_function():
340 345 ...
341 346 """
342 347 def test_concurrently_decorator(test_func):
343 348 def wrapper(*args, **kwargs):
344 349 exceptions = []
345 350
346 351 def call_test_func():
347 352 try:
348 353 test_func(*args, **kwargs)
349 354 except Exception as e:
350 355 exceptions.append(e)
351 356 if raise_catched_exc:
352 357 raise
353 358 threads = []
354 359 for i in range(times):
355 360 threads.append(threading.Thread(target=call_test_func))
356 361 for t in threads:
357 362 t.start()
358 363 for t in threads:
359 364 t.join()
360 365 if exceptions:
361 366 raise Exception(
362 367 'test_concurrently intercepted %s exceptions: %s' % (
363 368 len(exceptions), exceptions))
364 369 return wrapper
365 370 return test_concurrently_decorator
366 371
367 372
368 373 def wait_for_url(url, timeout=10):
369 374 """
370 375 Wait until URL becomes reachable.
371 376
372 377 It polls the URL until the timeout is reached or it became reachable.
373 378 If will call to `py.test.fail` in case the URL is not reachable.
374 379 """
375 380 timeout = time.time() + timeout
376 381 last = 0
377 382 wait = 0.1
378 383
379 384 while timeout > last:
380 385 last = time.time()
381 386 if is_url_reachable(url):
382 387 break
383 388 elif (last + wait) > time.time():
384 389 # Go to sleep because not enough time has passed since last check.
385 390 time.sleep(wait)
386 391 else:
387 392 pytest.fail("Timeout while waiting for URL {}".format(url))
388 393
389 394
390 395 def is_url_reachable(url):
391 396 try:
392 397 urllib.request.urlopen(url)
393 398 except urllib.error.URLError:
394 399 log.exception('URL `{}` is not reachable'.format(url))
395 400 return False
396 401 return True
397 402
398 403
399 404 def repo_on_filesystem(repo_name):
400 405 from rhodecode.lib import vcs
401 406 from rhodecode.tests import TESTS_TMP_PATH
402 407 repo = vcs.get_vcs_instance(
403 408 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
404 409 return repo is not None
405 410
406 411
407 412 def commit_change(
408 413 repo, filename, content, message, vcs_type, parent=None, newfile=False):
409 414 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
410 415
411 416 repo = Repository.get_by_repo_name(repo)
412 417 _commit = parent
413 418 if not parent:
414 419 _commit = EmptyCommit(alias=vcs_type)
415 420
416 421 if newfile:
417 422 nodes = {
418 423 filename: {
419 424 'content': content
420 425 }
421 426 }
422 427 commit = ScmModel().create_nodes(
423 428 user=TEST_USER_ADMIN_LOGIN, repo=repo,
424 429 message=message,
425 430 nodes=nodes,
426 431 parent_commit=_commit,
427 author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN),
432 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
428 433 )
429 434 else:
430 435 commit = ScmModel().commit_change(
431 436 repo=repo.scm_instance(), repo_name=repo.repo_name,
432 437 commit=parent, user=TEST_USER_ADMIN_LOGIN,
433 author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN),
438 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
434 439 message=message,
435 440 content=content,
436 441 f_path=filename
437 442 )
438 443 return commit
439 444
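# Illustrative use of commit_change inside a backend test; `backend_git` is assumed
# to be one of the parameterized backend fixtures and the file name is made up:
def test_commit_change_adds_a_file(backend_git):
    commit = commit_change(
        repo=backend_git.repo_name, filename='README.md', content='hello\n',
        message='add readme', vcs_type='git', newfile=True)
    assert commit.message == 'add readme'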
440 445
441 446 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
442 447 if not default:
443 448 raise ValueError('Permission for default user must be given')
444 449 form_data = [(
445 450 'csrf_token', csrf_token
446 451 )]
447 452 # add default
448 453 form_data.extend([
449 454 ('u_perm_1', default)
450 455 ])
451 456
452 457 if grant:
453 458 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
454 459 form_data.extend([
455 460 ('perm_new_member_perm_new{}'.format(cnt), perm),
456 461 ('perm_new_member_id_new{}'.format(cnt), obj_id),
457 462 ('perm_new_member_name_new{}'.format(cnt), obj_name),
458 463 ('perm_new_member_type_new{}'.format(cnt), obj_type),
459 464
460 465 ])
461 466 if revoke:
462 467 for obj_id, obj_type in revoke:
463 468 form_data.extend([
464 469 ('perm_del_member_id_{}'.format(obj_id), obj_id),
465 470 ('perm_del_member_type_{}'.format(obj_id), obj_type),
466 471 ])
467 472 return form_data
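
# Sketch of the form tuples the helper above builds; the ids, names and permissions
# are made up for illustration:
form_data = permission_update_data_generator(
    csrf_token='csrf-token-stub',
    default='repository.read',
    grant=[(5, 'repository.write', 'some_user', 'user')],
    revoke=[(7, 'user')])
# -> [('csrf_token', 'csrf-token-stub'), ('u_perm_1', 'repository.read'),
#     ('perm_new_member_perm_new1', 'repository.write'), ...,
#     ('perm_del_member_id_7', 7), ('perm_del_member_type_7', 'user')]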
@@ -1,208 +1,204 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 # Import early to make sure things are patched up properly
22 22 from setuptools import setup, find_packages
23 23
24 24 import os
25 25 import re
26 26 import sys
27 27 import pkgutil
28 28 import platform
29 29 import codecs
30 30
31 31 import pip
32 32
33 33 pip_major_version = int(pip.__version__.split(".")[0])
34 34 if pip_major_version >= 20:
35 35 from pip._internal.req import parse_requirements
36 36 from pip._internal.network.session import PipSession
37 37 elif pip_major_version >= 10:
38 38 from pip._internal.req import parse_requirements
39 39 from pip._internal.download import PipSession
40 40 else:
41 41 from pip.req import parse_requirements
42 42 from pip.download import PipSession
43 43
44 44
45 45 def get_package_name(req_object):
46 46 package_name = None
47 47 try:
48 48 from pip._internal.req.constructors import install_req_from_parsed_requirement
49 49 except ImportError:
50 50 install_req_from_parsed_requirement = None
51 51
52 52 # In 20.1 of pip, the requirements object changed
53 53 if hasattr(req_object, 'req'):
54 54 package_name = req_object.req.name
55 55
56 56 if package_name is None:
57 57 if install_req_from_parsed_requirement:
58 58 package = install_req_from_parsed_requirement(req_object)
59 59 package_name = package.req.name
60 60
61 61 if package_name is None:
62 62 # fallback for older pip
63 63 package_name = re.split('===|<=|!=|==|>=|~=|<|>', req_object.requirement)[0]
64 64
65 65 return package_name
66 66
67 67
68 68 if sys.version_info < (3, 10):
69 69 raise Exception('RhodeCode requires Python 3.10 or later')
70 70
71 71 here = os.path.abspath(os.path.dirname(__file__))
72 72
73 73 # defines current platform
74 74 __platform__ = platform.system()
75 75 __license__ = 'AGPLv3, and Commercial License'
76 76 __author__ = 'RhodeCode GmbH'
77 77 __url__ = 'https://code.rhodecode.com'
78 78 is_windows = __platform__ in ('Windows',)
79 79
80 80
81 81 def _get_requirements(req_filename, exclude=None, extras=None):
82 82 extras = extras or []
83 83 exclude = exclude or []
84 84
85 85 try:
86 86 parsed = parse_requirements(
87 87 os.path.join(here, req_filename), session=PipSession())
88 88 except TypeError:
89 89 # try pip < 6.0.0, that doesn't support session
90 90 parsed = parse_requirements(os.path.join(here, req_filename))
91 91
92 92 requirements = []
93 93 for int_req in parsed:
94 94 req_name = get_package_name(int_req)
95 95 if req_name not in exclude:
96 96 requirements.append(req_name)
97 97 return requirements + extras
98 98
99 99
100 100 # requirements extract
101 101 setup_requirements = ['PasteScript']
102 102 install_requirements = _get_requirements(
103 103 'requirements.txt', exclude=['setuptools', 'entrypoints'])
104 104 test_requirements = _get_requirements(
105 105 'requirements_test.txt')
106 106
107 107
108 108 def get_version():
109 109 version = pkgutil.get_data('rhodecode', 'VERSION')
110 110 return version.decode().strip()
111 111
112 112
113 113 # additional files that goes into package itself
114 114 package_data = {
115 115 '': ['*.txt', '*.rst'],
116 116 'configs': ['*.ini'],
117 117 'rhodecode': ['VERSION', 'i18n/*/LC_MESSAGES/*.mo', ],
118 118 }
119 119
120 120 description = 'Source Code Management Platform'
121 121 keywords = ' '.join([
122 122 'rhodecode', 'mercurial', 'git', 'svn',
123 123 'code review',
124 124 'repo groups', 'ldap', 'repository management', 'hgweb',
125 125 'hgwebdir', 'gitweb', 'serving hgweb',
126 126 ])
127 127
128 128
129 129 # README/DESCRIPTION generation
130 130 readme_file = 'README.rst'
131 131 changelog_file = 'CHANGES.rst'
132 132 try:
133 133 long_description = codecs.open(readme_file).read() + '\n\n' + \
134 134 codecs.open(changelog_file).read()
135 135 except IOError as err:
136 136 sys.stderr.write(
137 137 "[WARNING] Cannot find file specified as long_description (%s)\n "
138 138 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
139 139 long_description = description
140 140
141 141
142 142 setup(
143 143 name='rhodecode-enterprise-ce',
144 144 version=get_version(),
145 145 description=description,
146 146 long_description=long_description,
147 147 keywords=keywords,
148 148 license=__license__,
149 149 author=__author__,
150 150 author_email='support@rhodecode.com',
151 151 url=__url__,
152 152 setup_requires=setup_requirements,
153 153 install_requires=install_requirements,
154 154 tests_require=test_requirements,
155 155 zip_safe=False,
156 156 packages=find_packages(exclude=["docs", "tests*"]),
157 157 package_data=package_data,
158 158 include_package_data=True,
159 159 classifiers=[
160 160 'Development Status :: 6 - Mature',
161 161 'Environment :: Web Environment',
162 162 'Intended Audience :: Developers',
163 163 'Operating System :: OS Independent',
164 164 'Topic :: Software Development :: Version Control',
165 165 'License :: OSI Approved :: Affero GNU General Public License v3 or later (AGPLv3+)',
166 166 'Programming Language :: Python :: 3.10',
167 167 ],
168 168 message_extractors={
169 169 'rhodecode': [
170 170 ('**.py', 'python', None),
171 171 ('**.js', 'javascript', None),
172 172 ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
173 173 ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
174 174 ('public/**', 'ignore', None),
175 175 ]
176 176 },
177 177 paster_plugins=['PasteScript'],
178 178 entry_points={
179 179 'paste.app_factory': [
180 180 'main=rhodecode.config.middleware:make_pyramid_app',
181 181 ],
182 182 'paste.global_paster_command': [
183 183 'ishell=rhodecode.lib.paster_commands.ishell:Command',
184 184 'upgrade-db=rhodecode.lib.paster_commands.upgrade_db:UpgradeDb',
185 185
186 186 'setup-rhodecode=rhodecode.lib.paster_commands.deprecated.setup_rhodecode:Command',
187 187 'celeryd=rhodecode.lib.paster_commands.deprecated.celeryd:Command',
188 188 ],
189 189 'pyramid.pshell_runner': [
190 190 'ipython = rhodecode.lib.pyramid_shell:ipython_shell_runner',
191 191 ],
192 'pytest11': [
193 'pylons=rhodecode.tests.pylons_plugin',
194 'enterprise=rhodecode.tests.plugin',
195 ],
196 192 'console_scripts': [
197 193 'rc-setup-app=rhodecode.lib.rc_commands.setup_rc:main',
198 194 'rc-upgrade-db=rhodecode.lib.rc_commands.upgrade_db:main',
199 195 'rc-ishell=rhodecode.lib.rc_commands.ishell:main',
200 196 'rc-add-artifact=rhodecode.lib.rc_commands.add_artifact:main',
201 197 'rc-ssh-wrapper=rhodecode.apps.ssh_support.lib.ssh_wrapper:main',
202 198 ],
203 199 'beaker.backends': [
204 200 'memorylru_base=rhodecode.lib.memory_lru_dict:MemoryLRUNamespaceManagerBase',
205 201 'memorylru_debug=rhodecode.lib.memory_lru_dict:MemoryLRUNamespaceManagerDebug'
206 202 ]
207 203 },
208 204 )
1 NO CONTENT: file was removed