pytest: restructure fixtures/plugins to fix problems with pkg_resources and "Can't perform this operation for unregistered loader type" errors...
super-admin - r4986:8782a2c5 default
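As context for the diff below, a minimal sketch (a hypothetical test module, not part of this commit) of how the relocated fixtures are consumed after the restructuring: the fixture modules are registered once via pytest_plugins in the new root conftest.py, and helpers that previously lived in rhodecode.tests.plugin are imported from the rhodecode.tests.fixture_mods package, as the updated test file further down shows.

# Hypothetical test module; the import path and the 'example.com' value come
# from the fixture_utils helpers shown in this commit, the test name is made up.
from rhodecode.tests.fixture_mods.fixture_utils import plain_http_host_only_stub


def test_host_only_stub_has_no_port():
    # plain_http_host_only_stub() strips the port from 'example.com:80'
    assert plain_http_host_only_stub() == 'example.com'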
@@ -0,0 +1,199 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import pytest
22 from rhodecode.lib import ext_json
23
24
25 pytest_plugins = [
26 "rhodecode.tests.fixture_mods.fixture_pyramid",
27 "rhodecode.tests.fixture_mods.fixture_utils",
28 ]
29
30
31 def pytest_configure(config):
32 from rhodecode.config import patches
33
34
35 def pytest_addoption(parser):
36
37 def _parse_json(value):
38 return ext_json.str_json(value) if value else None
39
40 def _split_comma(value):
41 return value.split(',')
42
43 parser.addoption(
44 '--keep-tmp-path', action='store_true',
45 help="Keep the test temporary directories")
46 parser.addoption(
47 '--backends', action='store', type=_split_comma,
48 default=['git', 'hg', 'svn'],
49 help="Select which backends to test for backend specific tests.")
50 parser.addoption(
51 '--dbs', action='store', type=_split_comma,
52 default=['sqlite'],
53 help="Select which database to test for database specific tests. "
54 "Possible options are sqlite,postgres,mysql")
55 parser.addoption(
56 '--appenlight', '--ae', action='store_true',
57 help="Track statistics in appenlight.")
58 parser.addoption(
59 '--appenlight-api-key', '--ae-key',
60 help="API key for Appenlight.")
61 parser.addoption(
62 '--appenlight-url', '--ae-url',
63 default="https://ae.rhodecode.com",
64 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
65 parser.addoption(
66 '--sqlite-connection-string', action='store',
67 default='', help="Connection string for the dbs tests with SQLite")
68 parser.addoption(
69 '--postgres-connection-string', action='store',
70 default='', help="Connection string for the dbs tests with Postgres")
71 parser.addoption(
72 '--mysql-connection-string', action='store',
73 default='', help="Connection string for the dbs tests with MySQL")
74 parser.addoption(
75 '--repeat', type=int, default=100,
76 help="Number of repetitions in performance tests.")
77
78 parser.addoption(
79 '--test-loglevel', dest='test_loglevel',
80 help="Set the default logging level for tests: critical (default), error, warn, info, debug")
81 group = parser.getgroup('pylons')
82 group.addoption(
83 '--with-pylons', dest='pyramid_config',
84 help="Set up a Pylons environment with the specified config file.")
85 group.addoption(
86 '--ini-config-override', action='store', type=_parse_json,
87 default=None, dest='pyramid_config_override', help=(
88 "Overrides the .ini file settings. Should be specified in JSON"
89 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
90 )
91 )
92 parser.addini(
93 'pyramid_config',
94 "Set up a Pyramid environment with the specified config file.")
95
96 vcsgroup = parser.getgroup('vcs')
97 vcsgroup.addoption(
98 '--without-vcsserver', dest='with_vcsserver', action='store_false',
99 help="Do not start the VCSServer in a background process.")
100 vcsgroup.addoption(
101 '--with-vcsserver-http', dest='vcsserver_config_http',
102 help="Start the HTTP VCSServer with the specified config file.")
103 vcsgroup.addoption(
104 '--vcsserver-protocol', dest='vcsserver_protocol',
105 help="Start the VCSServer with HTTP protocol support.")
106 vcsgroup.addoption(
107 '--vcsserver-config-override', action='store', type=_parse_json,
108 default=None, dest='vcsserver_config_override', help=(
109 "Overrides the .ini file settings for the VCSServer. "
110 "Should be specified in JSON "
111 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
112 )
113 )
114 vcsgroup.addoption(
115 '--vcsserver-port', action='store', type=int,
116 default=None, help=(
117 "Allows to set the port of the vcsserver. Useful when testing "
118 "against an already running server and random ports cause "
119 "trouble."))
120 parser.addini(
121 'vcsserver_config_http',
122 "Start the HTTP VCSServer with the specified config file.")
123 parser.addini(
124 'vcsserver_protocol',
125 "Start the VCSServer with HTTP protocol support.")
126
127
128 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
129 def pytest_runtest_makereport(item, call):
130 """
131 Adding the remote traceback if the exception has this information.
132
133 VCSServer attaches this information as the attribute `_vcs_server_traceback`
134 to the exception instance.
135 """
136 outcome = yield
137 report = outcome.get_result()
138 if call.excinfo:
139 exc = call.excinfo.value
140 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
141
142 if vcsserver_traceback:
143 section = 'VCSServer remote traceback ' + report.when
144 report.sections.append((section, vcsserver_traceback))
145
146
147 def pytest_collection_modifyitems(session, config, items):
148 # drop items marked as non-tests (__test__ = False, nose's nottest), kept for the nose-to-pytest transition
149 remaining = [
150 i for i in items if getattr(i.obj, '__test__', True)]
151 items[:] = remaining
152
153 # NOTE(marcink): custom test ordering; db tests and vcs tests are the slowest and should
154 # be executed at the end for faster test feedback
155 def sorter(item):
156 pos = 0
157 key = item._nodeid
158 if key.startswith('rhodecode/tests/database'):
159 pos = 1
160 elif key.startswith('rhodecode/tests/vcs_operations'):
161 pos = 2
162
163 return pos
164
165 items.sort(key=sorter)
166
167
168 def get_backends_from_metafunc(metafunc):
169 requested_backends = set(metafunc.config.getoption('--backends'))
170 backend_mark = metafunc.definition.get_closest_marker('backends')
171 if backend_mark:
172 # Supported backends by this test function, created from
173 # pytest.mark.backends
174 backends = backend_mark.args
175 elif hasattr(metafunc.cls, 'backend_alias'):
176 # Support class attribute "backend_alias", this is mainly
177 # for legacy reasons for tests not yet using pytest.mark.backends
178 backends = [metafunc.cls.backend_alias]
179 else:
180 backends = metafunc.config.getoption('--backends')
181 return requested_backends.intersection(backends)
182
183
184 def pytest_generate_tests(metafunc):
185
186 # Support test generation based on --backend parameter
187 if 'backend_alias' in metafunc.fixturenames:
188 backends = get_backends_from_metafunc(metafunc)
189 scope = None
190 if not backends:
191 pytest.skip("Not enabled for any of selected backends")
192
193 metafunc.parametrize('backend_alias', backends, scope=scope)
194
195 backend_mark = metafunc.definition.get_closest_marker('backends')
196 if backend_mark:
197 backends = get_backends_from_metafunc(metafunc)
198 if not backends:
199 pytest.skip("Not enabled for any of selected backends")
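A minimal sketch (hypothetical test, not part of this commit) of how the backend parametrization above is consumed: the backends marker restricts a test to specific backends, pytest_generate_tests() parametrizes the backend_alias argument with the intersection of the marker arguments and the --backends command line option, and a run such as pytest --backends=git narrows the selection further. This assumes the backends marker is registered in the project's pytest configuration.

import pytest


# Hypothetical test; it runs once per backend that is both named in the marker
# and enabled via --backends (default: git, hg, svn).
@pytest.mark.backends('git', 'hg')
def test_runs_once_per_selected_backend(backend_alias):
    assert backend_alias in ('git', 'hg')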
NO CONTENT: new file 100644
NO CONTENT: new file 100644
@@ -1,210 +1,210 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23
23
24 from rhodecode.model.repo import RepoModel
24 from rhodecode.model.repo import RepoModel
25 from rhodecode.model.scm import ScmModel
25 from rhodecode.model.scm import ScmModel
26 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
26 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
27 from rhodecode.api.tests.utils import (
27 from rhodecode.api.tests.utils import (
28 build_data, api_call, assert_error, assert_ok, crash, jsonify)
28 build_data, api_call, assert_error, assert_ok, crash, jsonify)
29 from rhodecode.tests.fixture import Fixture
29 from rhodecode.tests.fixture import Fixture
30 from rhodecode.tests.plugin import plain_http_host_only_stub
30 from rhodecode.tests.fixture_mods.fixture_utils import plain_http_host_only_stub
31
31
32 fixture = Fixture()
32 fixture = Fixture()
33
33
34 UPDATE_REPO_NAME = 'api_update_me'
34 UPDATE_REPO_NAME = 'api_update_me'
35
35
36
36
37 class SAME_AS_UPDATES(object):
37 class SAME_AS_UPDATES(object):
38 """ Constant used for tests below """
38 """ Constant used for tests below """
39
39
40
40
41 @pytest.mark.usefixtures("testuser_api", "app")
41 @pytest.mark.usefixtures("testuser_api", "app")
42 class TestApiUpdateRepo(object):
42 class TestApiUpdateRepo(object):
43
43
44 @pytest.mark.parametrize("updates, expected", [
44 @pytest.mark.parametrize("updates, expected", [
45 ({'owner': TEST_USER_REGULAR_LOGIN},
45 ({'owner': TEST_USER_REGULAR_LOGIN},
46 SAME_AS_UPDATES),
46 SAME_AS_UPDATES),
47
47
48 ({'description': 'new description'},
48 ({'description': 'new description'},
49 SAME_AS_UPDATES),
49 SAME_AS_UPDATES),
50
50
51 ({'clone_uri': 'http://foo.com/repo'},
51 ({'clone_uri': 'http://foo.com/repo'},
52 SAME_AS_UPDATES),
52 SAME_AS_UPDATES),
53
53
54 ({'clone_uri': None},
54 ({'clone_uri': None},
55 {'clone_uri': ''}),
55 {'clone_uri': ''}),
56
56
57 ({'clone_uri': ''},
57 ({'clone_uri': ''},
58 {'clone_uri': ''}),
58 {'clone_uri': ''}),
59
59
60 ({'clone_uri': 'http://example.com/repo_pull'},
60 ({'clone_uri': 'http://example.com/repo_pull'},
61 {'clone_uri': 'http://example.com/repo_pull'}),
61 {'clone_uri': 'http://example.com/repo_pull'}),
62
62
63 ({'push_uri': ''},
63 ({'push_uri': ''},
64 {'push_uri': ''}),
64 {'push_uri': ''}),
65
65
66 ({'push_uri': 'http://example.com/repo_push'},
66 ({'push_uri': 'http://example.com/repo_push'},
67 {'push_uri': 'http://example.com/repo_push'}),
67 {'push_uri': 'http://example.com/repo_push'}),
68
68
69 ({'landing_rev': None}, # auto-updated based on type of repo
69 ({'landing_rev': None}, # auto-updated based on type of repo
70 {'landing_rev': [None, None]}),
70 {'landing_rev': [None, None]}),
71
71
72 ({'enable_statistics': True},
72 ({'enable_statistics': True},
73 SAME_AS_UPDATES),
73 SAME_AS_UPDATES),
74
74
75 ({'enable_locking': True},
75 ({'enable_locking': True},
76 SAME_AS_UPDATES),
76 SAME_AS_UPDATES),
77
77
78 ({'enable_downloads': True},
78 ({'enable_downloads': True},
79 SAME_AS_UPDATES),
79 SAME_AS_UPDATES),
80
80
81 ({'repo_name': 'new_repo_name'},
81 ({'repo_name': 'new_repo_name'},
82 {
82 {
83 'repo_name': 'new_repo_name',
83 'repo_name': 'new_repo_name',
84 'url': 'http://{}/new_repo_name'.format(plain_http_host_only_stub())
84 'url': 'http://{}/new_repo_name'.format(plain_http_host_only_stub())
85 }),
85 }),
86
86
87 ({'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
87 ({'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
88 '_group': 'test_group_for_update'},
88 '_group': 'test_group_for_update'},
89 {
89 {
90 'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
90 'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
91 'url': 'http://{}/test_group_for_update/{}'.format(
91 'url': 'http://{}/test_group_for_update/{}'.format(
92 plain_http_host_only_stub(), UPDATE_REPO_NAME)
92 plain_http_host_only_stub(), UPDATE_REPO_NAME)
93 }),
93 }),
94 ])
94 ])
95 def test_api_update_repo(self, updates, expected, backend):
95 def test_api_update_repo(self, updates, expected, backend):
96 repo_name = UPDATE_REPO_NAME
96 repo_name = UPDATE_REPO_NAME
97 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
97 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
98 if updates.get('_group'):
98 if updates.get('_group'):
99 fixture.create_repo_group(updates['_group'])
99 fixture.create_repo_group(updates['_group'])
100
100
101 if 'landing_rev' in updates:
101 if 'landing_rev' in updates:
102 default_landing_ref, _lbl = ScmModel.backend_landing_ref(backend.alias)
102 default_landing_ref, _lbl = ScmModel.backend_landing_ref(backend.alias)
103 _type, _name = default_landing_ref.split(':')
103 _type, _name = default_landing_ref.split(':')
104 updates['landing_rev'] = default_landing_ref
104 updates['landing_rev'] = default_landing_ref
105 expected['landing_rev'] = [_type, _name]
105 expected['landing_rev'] = [_type, _name]
106
106
107 expected_api_data = repo.get_api_data(include_secrets=True)
107 expected_api_data = repo.get_api_data(include_secrets=True)
108 if expected is SAME_AS_UPDATES:
108 if expected is SAME_AS_UPDATES:
109 expected_api_data.update(updates)
109 expected_api_data.update(updates)
110 else:
110 else:
111 expected_api_data.update(expected)
111 expected_api_data.update(expected)
112
112
113 id_, params = build_data(
113 id_, params = build_data(
114 self.apikey, 'update_repo', repoid=repo_name, **updates)
114 self.apikey, 'update_repo', repoid=repo_name, **updates)
115
115
116 with mock.patch('rhodecode.model.validation_schema.validators.url_validator'):
116 with mock.patch('rhodecode.model.validation_schema.validators.url_validator'):
117 response = api_call(self.app, params)
117 response = api_call(self.app, params)
118
118
119 if updates.get('repo_name'):
119 if updates.get('repo_name'):
120 repo_name = updates['repo_name']
120 repo_name = updates['repo_name']
121
121
122 try:
122 try:
123 expected = {
123 expected = {
124 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
124 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
125 'repository': jsonify(expected_api_data)
125 'repository': jsonify(expected_api_data)
126 }
126 }
127 assert_ok(id_, expected, given=response.body)
127 assert_ok(id_, expected, given=response.body)
128 finally:
128 finally:
129 fixture.destroy_repo(repo_name)
129 fixture.destroy_repo(repo_name)
130 if updates.get('_group'):
130 if updates.get('_group'):
131 fixture.destroy_repo_group(updates['_group'])
131 fixture.destroy_repo_group(updates['_group'])
132
132
133 def test_api_update_repo_fork_of_field(self, backend):
133 def test_api_update_repo_fork_of_field(self, backend):
134 master_repo = backend.create_repo()
134 master_repo = backend.create_repo()
135 repo = backend.create_repo()
135 repo = backend.create_repo()
136 updates = {
136 updates = {
137 'fork_of': master_repo.repo_name,
137 'fork_of': master_repo.repo_name,
138 'fork_of_id': master_repo.repo_id
138 'fork_of_id': master_repo.repo_id
139 }
139 }
140 expected_api_data = repo.get_api_data(include_secrets=True)
140 expected_api_data = repo.get_api_data(include_secrets=True)
141 expected_api_data.update(updates)
141 expected_api_data.update(updates)
142
142
143 id_, params = build_data(
143 id_, params = build_data(
144 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
144 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
145 response = api_call(self.app, params)
145 response = api_call(self.app, params)
146 expected = {
146 expected = {
147 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
147 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
148 'repository': jsonify(expected_api_data)
148 'repository': jsonify(expected_api_data)
149 }
149 }
150 assert_ok(id_, expected, given=response.body)
150 assert_ok(id_, expected, given=response.body)
151 result = response.json['result']['repository']
151 result = response.json['result']['repository']
152 assert result['fork_of'] == master_repo.repo_name
152 assert result['fork_of'] == master_repo.repo_name
153 assert result['fork_of_id'] == master_repo.repo_id
153 assert result['fork_of_id'] == master_repo.repo_id
154
154
155 def test_api_update_repo_fork_of_not_found(self, backend):
155 def test_api_update_repo_fork_of_not_found(self, backend):
156 master_repo_name = 'fake-parent-repo'
156 master_repo_name = 'fake-parent-repo'
157 repo = backend.create_repo()
157 repo = backend.create_repo()
158 updates = {
158 updates = {
159 'fork_of': master_repo_name
159 'fork_of': master_repo_name
160 }
160 }
161 id_, params = build_data(
161 id_, params = build_data(
162 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
162 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
163 response = api_call(self.app, params)
163 response = api_call(self.app, params)
164 expected = {
164 expected = {
165 'repo_fork_of': 'Fork with id `{}` does not exists'.format(
165 'repo_fork_of': 'Fork with id `{}` does not exists'.format(
166 master_repo_name)}
166 master_repo_name)}
167 assert_error(id_, expected, given=response.body)
167 assert_error(id_, expected, given=response.body)
168
168
169 def test_api_update_repo_with_repo_group_not_existing(self):
169 def test_api_update_repo_with_repo_group_not_existing(self):
170 repo_name = 'admin_owned'
170 repo_name = 'admin_owned'
171 fake_repo_group = 'test_group_for_update'
171 fake_repo_group = 'test_group_for_update'
172 fixture.create_repo(repo_name)
172 fixture.create_repo(repo_name)
173 updates = {'repo_name': '{}/{}'.format(fake_repo_group, repo_name)}
173 updates = {'repo_name': '{}/{}'.format(fake_repo_group, repo_name)}
174 id_, params = build_data(
174 id_, params = build_data(
175 self.apikey, 'update_repo', repoid=repo_name, **updates)
175 self.apikey, 'update_repo', repoid=repo_name, **updates)
176 response = api_call(self.app, params)
176 response = api_call(self.app, params)
177 try:
177 try:
178 expected = {
178 expected = {
179 'repo_group': 'Repository group `{}` does not exist'.format(fake_repo_group)
179 'repo_group': 'Repository group `{}` does not exist'.format(fake_repo_group)
180 }
180 }
181 assert_error(id_, expected, given=response.body)
181 assert_error(id_, expected, given=response.body)
182 finally:
182 finally:
183 fixture.destroy_repo(repo_name)
183 fixture.destroy_repo(repo_name)
184
184
185 def test_api_update_repo_regular_user_not_allowed(self):
185 def test_api_update_repo_regular_user_not_allowed(self):
186 repo_name = 'admin_owned'
186 repo_name = 'admin_owned'
187 fixture.create_repo(repo_name)
187 fixture.create_repo(repo_name)
188 updates = {'active': False}
188 updates = {'active': False}
189 id_, params = build_data(
189 id_, params = build_data(
190 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
190 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
191 response = api_call(self.app, params)
191 response = api_call(self.app, params)
192 try:
192 try:
193 expected = 'repository `%s` does not exist' % (repo_name,)
193 expected = 'repository `%s` does not exist' % (repo_name,)
194 assert_error(id_, expected, given=response.body)
194 assert_error(id_, expected, given=response.body)
195 finally:
195 finally:
196 fixture.destroy_repo(repo_name)
196 fixture.destroy_repo(repo_name)
197
197
198 @mock.patch.object(RepoModel, 'update', crash)
198 @mock.patch.object(RepoModel, 'update', crash)
199 def test_api_update_repo_exception_occurred(self, backend):
199 def test_api_update_repo_exception_occurred(self, backend):
200 repo_name = UPDATE_REPO_NAME
200 repo_name = UPDATE_REPO_NAME
201 fixture.create_repo(repo_name, repo_type=backend.alias)
201 fixture.create_repo(repo_name, repo_type=backend.alias)
202 id_, params = build_data(
202 id_, params = build_data(
203 self.apikey, 'update_repo', repoid=repo_name,
203 self.apikey, 'update_repo', repoid=repo_name,
204 owner=TEST_USER_ADMIN_LOGIN,)
204 owner=TEST_USER_ADMIN_LOGIN,)
205 response = api_call(self.app, params)
205 response = api_call(self.app, params)
206 try:
206 try:
207 expected = 'failed to update repo `%s`' % (repo_name,)
207 expected = 'failed to update repo `%s`' % (repo_name,)
208 assert_error(id_, expected, given=response.body)
208 assert_error(id_, expected, given=response.body)
209 finally:
209 finally:
210 fixture.destroy_repo(repo_name)
210 fixture.destroy_repo(repo_name)
@@ -1,285 +1,222 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
22 import platform
23 import socket
24 import random
25 import pytest
21 import pytest
26
22
27 from rhodecode.lib.pyramid_utils import get_app_config
23 from rhodecode.lib.pyramid_utils import get_app_config
28 from rhodecode.tests.fixture import TestINI
24 from rhodecode.tests.fixture import TestINI
29 from rhodecode.tests.server_utils import RcVCSServer
25 from rhodecode.tests.server_utils import RcVCSServer
30
26
31
27
32 def _parse_json(value):
33 return json.loads(value) if value else None
34
35
36 def pytest_addoption(parser):
37 parser.addoption(
38 '--test-loglevel', dest='test_loglevel',
39 help="Set default Logging level for tests, critical(default), error, warn , info, debug")
40 group = parser.getgroup('pylons')
41 group.addoption(
42 '--with-pylons', dest='pyramid_config',
43 help="Set up a Pylons environment with the specified config file.")
44 group.addoption(
45 '--ini-config-override', action='store', type=_parse_json,
46 default=None, dest='pyramid_config_override', help=(
47 "Overrides the .ini file settings. Should be specified in JSON"
48 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
49 )
50 )
51 parser.addini(
52 'pyramid_config',
53 "Set up a Pyramid environment with the specified config file.")
54
55 vcsgroup = parser.getgroup('vcs')
56 vcsgroup.addoption(
57 '--without-vcsserver', dest='with_vcsserver', action='store_false',
58 help="Do not start the VCSServer in a background process.")
59 vcsgroup.addoption(
60 '--with-vcsserver-http', dest='vcsserver_config_http',
61 help="Start the HTTP VCSServer with the specified config file.")
62 vcsgroup.addoption(
63 '--vcsserver-protocol', dest='vcsserver_protocol',
64 help="Start the VCSServer with HTTP protocol support.")
65 vcsgroup.addoption(
66 '--vcsserver-config-override', action='store', type=_parse_json,
67 default=None, dest='vcsserver_config_override', help=(
68 "Overrides the .ini file settings for the VCSServer. "
69 "Should be specified in JSON "
70 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
71 )
72 )
73 vcsgroup.addoption(
74 '--vcsserver-port', action='store', type=int,
75 default=None, help=(
76 "Allows to set the port of the vcsserver. Useful when testing "
77 "against an already running server and random ports cause "
78 "trouble."))
79 parser.addini(
80 'vcsserver_config_http',
81 "Start the HTTP VCSServer with the specified config file.")
82 parser.addini(
83 'vcsserver_protocol',
84 "Start the VCSServer with HTTP protocol support.")
85
86
87 @pytest.fixture(scope='session')
28 @pytest.fixture(scope='session')
88 def vcsserver(request, vcsserver_port, vcsserver_factory):
29 def vcsserver(request, vcsserver_port, vcsserver_factory):
89 """
30 """
90 Session scope VCSServer.
31 Session scope VCSServer.
91
32
92 Tests which need the VCSServer have to rely on this fixture in order
33 Tests which need the VCSServer have to rely on this fixture in order
93 to ensure it will be running.
34 to ensure it will be running.
94
35
95 For specific needs, the fixture vcsserver_factory can be used. It allows
36 For specific needs, the fixture vcsserver_factory can be used. It allows
96 adjusting the configuration file for the test run.
37 adjusting the configuration file for the test run.
97
38
98 Command line args:
39 Command line args:
99
40
100 --without-vcsserver: Allows switching this fixture off. You then have to
41 --without-vcsserver: Allows switching this fixture off. You then have to
101 start the server manually.
42 start the server manually.
102
43
103 --vcsserver-port: Will expect the VCSServer to listen on this port.
44 --vcsserver-port: Will expect the VCSServer to listen on this port.
104 """
45 """
105
46
106 if not request.config.getoption('with_vcsserver'):
47 if not request.config.getoption('with_vcsserver'):
107 return None
48 return None
108
49
109 return vcsserver_factory(
50 return vcsserver_factory(
110 request, vcsserver_port=vcsserver_port)
51 request, vcsserver_port=vcsserver_port)
111
52
112
53
113 @pytest.fixture(scope='session')
54 @pytest.fixture(scope='session')
114 def vcsserver_factory(tmpdir_factory):
55 def vcsserver_factory(tmpdir_factory):
115 """
56 """
116 Use this if you need a running vcsserver with a special configuration.
57 Use this if you need a running vcsserver with a special configuration.
117 """
58 """
118
59
119 def factory(request, overrides=(), vcsserver_port=None,
60 def factory(request, overrides=(), vcsserver_port=None,
120 log_file=None):
61 log_file=None):
121
62
122 if vcsserver_port is None:
63 if vcsserver_port is None:
123 vcsserver_port = get_available_port()
64 vcsserver_port = get_available_port()
124
65
125 overrides = list(overrides)
66 overrides = list(overrides)
126 overrides.append({'server:main': {'port': vcsserver_port}})
67 overrides.append({'server:main': {'port': vcsserver_port}})
127
68
128 option_name = 'vcsserver_config_http'
69 option_name = 'vcsserver_config_http'
129 override_option_name = 'vcsserver_config_override'
70 override_option_name = 'vcsserver_config_override'
130 config_file = get_config(
71 config_file = get_config(
131 request.config, option_name=option_name,
72 request.config, option_name=option_name,
132 override_option_name=override_option_name, overrides=overrides,
73 override_option_name=override_option_name, overrides=overrides,
133 basetemp=tmpdir_factory.getbasetemp().strpath,
74 basetemp=tmpdir_factory.getbasetemp().strpath,
134 prefix='test_vcs_')
75 prefix='test_vcs_')
135
76
136 server = RcVCSServer(config_file, log_file)
77 server = RcVCSServer(config_file, log_file)
137 server.start()
78 server.start()
138
79
139 @request.addfinalizer
80 @request.addfinalizer
140 def cleanup():
81 def cleanup():
141 server.shutdown()
82 server.shutdown()
142
83
143 server.wait_until_ready()
84 server.wait_until_ready()
144 return server
85 return server
145
86
146 return factory
87 return factory
147
88
148
89
149 def is_cygwin():
150 return 'cygwin' in platform.system().lower()
151
152
153 def _use_log_level(config):
90 def _use_log_level(config):
154 level = config.getoption('test_loglevel') or 'critical'
91 level = config.getoption('test_loglevel') or 'critical'
155 return level.upper()
92 return level.upper()
156
93
157
94
158 @pytest.fixture(scope='session')
95 @pytest.fixture(scope='session')
159 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
96 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
160 option_name = 'pyramid_config'
97 option_name = 'pyramid_config'
161 log_level = _use_log_level(request.config)
98 log_level = _use_log_level(request.config)
162
99
163 overrides = [
100 overrides = [
164 {'server:main': {'port': rcserver_port}},
101 {'server:main': {'port': rcserver_port}},
165 {'app:main': {
102 {'app:main': {
166 'vcs.server': 'localhost:%s' % vcsserver_port,
103 'vcs.server': 'localhost:%s' % vcsserver_port,
167 # johbo: We will always start the VCSServer on our own based on the
104 # johbo: We will always start the VCSServer on our own based on the
168 # fixtures of the test cases. For the test run it must always be
105 # fixtures of the test cases. For the test run it must always be
169 # off in the INI file.
106 # off in the INI file.
170 'vcs.start_server': 'false',
107 'vcs.start_server': 'false',
171
108
172 'vcs.server.protocol': 'http',
109 'vcs.server.protocol': 'http',
173 'vcs.scm_app_implementation': 'http',
110 'vcs.scm_app_implementation': 'http',
174 'vcs.hooks.protocol': 'http',
111 'vcs.hooks.protocol': 'http',
175 'vcs.hooks.host': '127.0.0.1',
112 'vcs.hooks.host': '127.0.0.1',
176 }},
113 }},
177
114
178 {'handler_console': {
115 {'handler_console': {
179 'class': 'StreamHandler',
116 'class': 'StreamHandler',
180 'args': '(sys.stderr,)',
117 'args': '(sys.stderr,)',
181 'level': log_level,
118 'level': log_level,
182 }},
119 }},
183
120
184 ]
121 ]
185
122
186 filename = get_config(
123 filename = get_config(
187 request.config, option_name=option_name,
124 request.config, option_name=option_name,
188 override_option_name='{}_override'.format(option_name),
125 override_option_name='{}_override'.format(option_name),
189 overrides=overrides,
126 overrides=overrides,
190 basetemp=tmpdir_factory.getbasetemp().strpath,
127 basetemp=tmpdir_factory.getbasetemp().strpath,
191 prefix='test_rce_')
128 prefix='test_rce_')
192 return filename
129 return filename
193
130
194
131
195 @pytest.fixture(scope='session')
132 @pytest.fixture(scope='session')
196 def ini_settings(ini_config):
133 def ini_settings(ini_config):
197 ini_path = ini_config
134 ini_path = ini_config
198 return get_app_config(ini_path)
135 return get_app_config(ini_path)
199
136
200
137
201 def get_available_port(min_port=40000, max_port=55555):
138 def get_available_port(min_port=40000, max_port=55555):
202 from rhodecode.lib.utils2 import get_available_port as _get_port
139 from rhodecode.lib.utils2 import get_available_port as _get_port
203 return _get_port(min_port, max_port)
140 return _get_port(min_port, max_port)
204
141
205
142
206 @pytest.fixture(scope='session')
143 @pytest.fixture(scope='session')
207 def rcserver_port(request):
144 def rcserver_port(request):
208 port = get_available_port()
145 port = get_available_port()
209 print('Using rhodecode port {}'.format(port))
146 print('Using rhodecode port {}'.format(port))
210 return port
147 return port
211
148
212
149
213 @pytest.fixture(scope='session')
150 @pytest.fixture(scope='session')
214 def vcsserver_port(request):
151 def vcsserver_port(request):
215 port = request.config.getoption('--vcsserver-port')
152 port = request.config.getoption('--vcsserver-port')
216 if port is None:
153 if port is None:
217 port = get_available_port()
154 port = get_available_port()
218 print('Using vcsserver port {}'.format(port))
155 print('Using vcsserver port {}'.format(port))
219 return port
156 return port
220
157
221
158
222 @pytest.fixture(scope='session')
159 @pytest.fixture(scope='session')
223 def available_port_factory():
160 def available_port_factory():
224 """
161 """
225 Returns a callable which returns free port numbers.
162 Returns a callable which returns free port numbers.
226 """
163 """
227 return get_available_port
164 return get_available_port
228
165
229
166
230 @pytest.fixture()
167 @pytest.fixture()
231 def available_port(available_port_factory):
168 def available_port(available_port_factory):
232 """
169 """
233 Gives you one free port for the current test.
170 Gives you one free port for the current test.
234
171
235 Uses "available_port_factory" to retrieve the port.
172 Uses "available_port_factory" to retrieve the port.
236 """
173 """
237 return available_port_factory()
174 return available_port_factory()
238
175
239
176
240 @pytest.fixture(scope='session')
177 @pytest.fixture(scope='session')
241 def testini_factory(tmpdir_factory, ini_config):
178 def testini_factory(tmpdir_factory, ini_config):
242 """
179 """
243 Factory to create an INI file based on TestINI.
180 Factory to create an INI file based on TestINI.
244
181
245 It will make sure to place the INI file in the correct directory.
182 It will make sure to place the INI file in the correct directory.
246 """
183 """
247 basetemp = tmpdir_factory.getbasetemp().strpath
184 basetemp = tmpdir_factory.getbasetemp().strpath
248 return TestIniFactory(basetemp, ini_config)
185 return TestIniFactory(basetemp, ini_config)
249
186
250
187
251 class TestIniFactory(object):
188 class TestIniFactory(object):
252
189
253 def __init__(self, basetemp, template_ini):
190 def __init__(self, basetemp, template_ini):
254 self._basetemp = basetemp
191 self._basetemp = basetemp
255 self._template_ini = template_ini
192 self._template_ini = template_ini
256
193
257 def __call__(self, ini_params, new_file_prefix='test'):
194 def __call__(self, ini_params, new_file_prefix='test'):
258 ini_file = TestINI(
195 ini_file = TestINI(
259 self._template_ini, ini_params=ini_params,
196 self._template_ini, ini_params=ini_params,
260 new_file_prefix=new_file_prefix, dir=self._basetemp)
197 new_file_prefix=new_file_prefix, dir=self._basetemp)
261 result = ini_file.create()
198 result = ini_file.create()
262 return result
199 return result
263
200
264
201
265 def get_config(
202 def get_config(
266 config, option_name, override_option_name, overrides=None,
203 config, option_name, override_option_name, overrides=None,
267 basetemp=None, prefix='test'):
204 basetemp=None, prefix='test'):
268 """
205 """
269 Find a configuration file and apply overrides for the given `prefix`.
206 Find a configuration file and apply overrides for the given `prefix`.
270 """
207 """
271 config_file = (
208 config_file = (
272 config.getoption(option_name) or config.getini(option_name))
209 config.getoption(option_name) or config.getini(option_name))
273 if not config_file:
210 if not config_file:
274 pytest.exit(
211 pytest.exit(
275 "Configuration error, could not extract {}.".format(option_name))
212 "Configuration error, could not extract {}.".format(option_name))
276
213
277 overrides = overrides or []
214 overrides = overrides or []
278 config_override = config.getoption(override_option_name)
215 config_override = config.getoption(override_option_name)
279 if config_override:
216 if config_override:
280 overrides.append(config_override)
217 overrides.append(config_override)
281 temp_ini_file = TestINI(
218 temp_ini_file = TestINI(
282 config_file, ini_params=overrides, new_file_prefix=prefix,
219 config_file, ini_params=overrides, new_file_prefix=prefix,
283 dir=basetemp)
220 dir=basetemp)
284
221
285 return temp_ini_file.create()
222 return temp_ini_file.create()
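A minimal sketch (assumed usage, not part of this commit) of the vcsserver_factory fixture defined above: it lets a test session start an extra VCSServer on a free port with .ini overrides in the same nested-dict format the factory uses internally for the port override; the section and parameter names below are placeholders for illustration.

import pytest


@pytest.fixture(scope='session')
def custom_vcsserver(request, vcsserver_factory, available_port_factory):
    # Starts a dedicated VCSServer and registers its shutdown on the request;
    # 'app:main' / 'some.setting' are hypothetical placeholder values.
    return vcsserver_factory(
        request,
        vcsserver_port=available_port_factory(),
        overrides=[{'app:main': {'some.setting': 'value'}}],
    )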
@@ -1,1848 +1,1724 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess
29 import subprocess
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33 import logging
33 import logging
34
34
35 import mock
35 import mock
36 import pyramid.testing
36 import pyramid.testing
37 import pytest
37 import pytest
38 import colander
38 import colander
39 import requests
39 import requests
40 import pyramid.paster
40 import pyramid.paster
41
41
42 import rhodecode
42 import rhodecode
43 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.lib.utils2 import AttributeDict
44 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.db import (
46 from rhodecode.model.db import (
47 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
48 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.repo_group import RepoGroupModel
53 from rhodecode.model.user import UserModel
53 from rhodecode.model.user import UserModel
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.user_group import UserGroupModel
56 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.model.integration import IntegrationModel
57 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations import integration_type_registry
58 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.vcs.backends import get_backend
60 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.nodes import FileNode
61 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.tests import (
62 from rhodecode.tests import (
63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_REGULAR_PASS)
65 TEST_USER_REGULAR_PASS)
66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.fixture import Fixture
67 from rhodecode.tests.fixture import Fixture
68 from rhodecode.config import utils as config_utils
68 from rhodecode.config import utils as config_utils
69
69
70 log = logging.getLogger(__name__)
70 log = logging.getLogger(__name__)
71
71
72
72
73 def cmp(a, b):
73 def cmp(a, b):
74 # backport cmp from python2 so we can still use it in the custom code in this module
74 # backport cmp from python2 so we can still use it in the custom code in this module
75 return (a > b) - (a < b)
75 return (a > b) - (a < b)
76
76
77
78 def _split_comma(value):
79 return value.split(',')
80
81
82 def pytest_addoption(parser):
83 parser.addoption(
84 '--keep-tmp-path', action='store_true',
85 help="Keep the test temporary directories")
86 parser.addoption(
87 '--backends', action='store', type=_split_comma,
88 default=['git', 'hg', 'svn'],
89 help="Select which backends to test for backend specific tests.")
90 parser.addoption(
91 '--dbs', action='store', type=_split_comma,
92 default=['sqlite'],
93 help="Select which database to test for database specific tests. "
94 "Possible options are sqlite,postgres,mysql")
95 parser.addoption(
96 '--appenlight', '--ae', action='store_true',
97 help="Track statistics in appenlight.")
98 parser.addoption(
99 '--appenlight-api-key', '--ae-key',
100 help="API key for Appenlight.")
101 parser.addoption(
102 '--appenlight-url', '--ae-url',
103 default="https://ae.rhodecode.com",
104 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
105 parser.addoption(
106 '--sqlite-connection-string', action='store',
107 default='', help="Connection string for the dbs tests with SQLite")
108 parser.addoption(
109 '--postgres-connection-string', action='store',
110 default='', help="Connection string for the dbs tests with Postgres")
111 parser.addoption(
112 '--mysql-connection-string', action='store',
113 default='', help="Connection string for the dbs tests with MySQL")
114 parser.addoption(
115 '--repeat', type=int, default=100,
116 help="Number of repetitions in performance tests.")
117
118
119 def pytest_configure(config):
120 from rhodecode.config import patches
121
122
123 def pytest_collection_modifyitems(session, config, items):
124 # nottest marked, compare nose, used for transition from nose to pytest
125 remaining = [
126 i for i in items if getattr(i.obj, '__test__', True)]
127 items[:] = remaining
128
129 # NOTE(marcink): custom test ordering, db tests and vcstests are slowes and should
130 # be executed at the end for faster test feedback
131 def sorter(item):
132 pos = 0
133 key = item._nodeid
134 if key.startswith('rhodecode/tests/database'):
135 pos = 1
136 elif key.startswith('rhodecode/tests/vcs_operations'):
137 pos = 2
138
139 return pos
140
141 items.sort(key=sorter)
142
143
144 def pytest_generate_tests(metafunc):
145
146 # Support test generation based on --backend parameter
147 if 'backend_alias' in metafunc.fixturenames:
148 backends = get_backends_from_metafunc(metafunc)
149 scope = None
150 if not backends:
151 pytest.skip("Not enabled for any of selected backends")
152
153 metafunc.parametrize('backend_alias', backends, scope=scope)
154
155 backend_mark = metafunc.definition.get_closest_marker('backends')
156 if backend_mark:
157 backends = get_backends_from_metafunc(metafunc)
158 if not backends:
159 pytest.skip("Not enabled for any of selected backends")
160
161
162 def get_backends_from_metafunc(metafunc):
163 requested_backends = set(metafunc.config.getoption('--backends'))
164 backend_mark = metafunc.definition.get_closest_marker('backends')
165 if backend_mark:
166 # Supported backends by this test function, created from
167 # pytest.mark.backends
168 backends = backend_mark.args
169 elif hasattr(metafunc.cls, 'backend_alias'):
170 # Support class attribute "backend_alias", this is mainly
171 # for legacy reasons for tests not yet using pytest.mark.backends
172 backends = [metafunc.cls.backend_alias]
173 else:
174 backends = metafunc.config.getoption('--backends')
175 return requested_backends.intersection(backends)
176
177
178 @pytest.fixture(scope='session', autouse=True)
77 @pytest.fixture(scope='session', autouse=True)
179 def activate_example_rcextensions(request):
78 def activate_example_rcextensions(request):
180 """
79 """
181 Patch in an example rcextensions module which verifies passed in kwargs.
80 Patch in an example rcextensions module which verifies passed in kwargs.
182 """
81 """
183 from rhodecode.config import rcextensions
82 from rhodecode.config import rcextensions
184
83
185 old_extensions = rhodecode.EXTENSIONS
84 old_extensions = rhodecode.EXTENSIONS
186 rhodecode.EXTENSIONS = rcextensions
85 rhodecode.EXTENSIONS = rcextensions
187 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
86 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
188
87
189 @request.addfinalizer
88 @request.addfinalizer
190 def cleanup():
89 def cleanup():
191 rhodecode.EXTENSIONS = old_extensions
90 rhodecode.EXTENSIONS = old_extensions
192
91
193
92
194 @pytest.fixture()
93 @pytest.fixture()
195 def capture_rcextensions():
94 def capture_rcextensions():
196 """
95 """
197 Returns the recorded calls to entry points in rcextensions.
96 Returns the recorded calls to entry points in rcextensions.
198 """
97 """
199 calls = rhodecode.EXTENSIONS.calls
98 calls = rhodecode.EXTENSIONS.calls
200 calls.clear()
99 calls.clear()
201 # Note: At this moment, it is still the empty dict, but that will
100 # Note: At this moment, it is still the empty dict, but that will
202 # be filled during the test run and since it is a reference this
101 # be filled during the test run and since it is a reference this
203 # is enough to make it work.
102 # is enough to make it work.
204 return calls
103 return calls
205
104
206
105
207 @pytest.fixture(scope='session')
106 @pytest.fixture(scope='session')
208 def http_environ_session():
107 def http_environ_session():
209 """
108 """
210 Allow to use "http_environ" in session scope.
109 Allow to use "http_environ" in session scope.
211 """
110 """
212 return plain_http_environ()
111 return plain_http_environ()
213
112
214
113
215 def plain_http_host_stub():
114 def plain_http_host_stub():
216 """
115 """
217 Value of HTTP_HOST in the test run.
116 Value of HTTP_HOST in the test run.
218 """
117 """
219 return 'example.com:80'
118 return 'example.com:80'
220
119
221
120
222 @pytest.fixture()
121 @pytest.fixture()
223 def http_host_stub():
122 def http_host_stub():
224 """
123 """
225 Value of HTTP_HOST in the test run.
124 Value of HTTP_HOST in the test run.
226 """
125 """
227 return plain_http_host_stub()
126 return plain_http_host_stub()
228
127
229
128
230 def plain_http_host_only_stub():
129 def plain_http_host_only_stub():
231 """
130 """
232 Value of HTTP_HOST in the test run.
131 Value of HTTP_HOST in the test run.
233 """
132 """
234 return plain_http_host_stub().split(':')[0]
133 return plain_http_host_stub().split(':')[0]
235
134
236
135
237 @pytest.fixture()
136 @pytest.fixture()
238 def http_host_only_stub():
137 def http_host_only_stub():
239 """
138 """
240 Value of HTTP_HOST in the test run.
139 Value of HTTP_HOST in the test run.
241 """
140 """
242 return plain_http_host_only_stub()
141 return plain_http_host_only_stub()
243
142
244
143
245 def plain_http_environ():
144 def plain_http_environ():
246 """
145 """
247 HTTP extra environ keys.
146 HTTP extra environ keys.
248
147
249 Used by the test application and also for setting up the pylons
148 Used by the test application and also for setting up the pylons
250 environment. In the case of the fixture "app" it should be possible
149 environment. In the case of the fixture "app" it should be possible
251 to override this for a specific test case.
150 to override this for a specific test case.
252 """
151 """
253 return {
152 return {
254 'SERVER_NAME': plain_http_host_only_stub(),
153 'SERVER_NAME': plain_http_host_only_stub(),
255 'SERVER_PORT': plain_http_host_stub().split(':')[1],
154 'SERVER_PORT': plain_http_host_stub().split(':')[1],
256 'HTTP_HOST': plain_http_host_stub(),
155 'HTTP_HOST': plain_http_host_stub(),
257 'HTTP_USER_AGENT': 'rc-test-agent',
156 'HTTP_USER_AGENT': 'rc-test-agent',
258 'REQUEST_METHOD': 'GET'
157 'REQUEST_METHOD': 'GET'
259 }
158 }
260
159
261
160
262 @pytest.fixture()
161 @pytest.fixture()
263 def http_environ():
162 def http_environ():
264 """
163 """
265 HTTP extra environ keys.
164 HTTP extra environ keys.
266
165
267 Used by the test application and also for setting up the pylons
166 Used by the test application and also for setting up the pylons
268 environment. In the case of the fixture "app" it should be possible
167 environment. In the case of the fixture "app" it should be possible
269 to override this for a specific test case.
168 to override this for a specific test case.
270 """
169 """
271 return plain_http_environ()
170 return plain_http_environ()
272
171
273
172
274 @pytest.fixture(scope='session')
173 @pytest.fixture(scope='session')
275 def baseapp(ini_config, vcsserver, http_environ_session):
174 def baseapp(ini_config, vcsserver, http_environ_session):
276 from rhodecode.lib.pyramid_utils import get_app_config
175 from rhodecode.lib.pyramid_utils import get_app_config
277 from rhodecode.config.middleware import make_pyramid_app
176 from rhodecode.config.middleware import make_pyramid_app
278
177
279 log.info("Using the RhodeCode configuration:{}".format(ini_config))
178 log.info("Using the RhodeCode configuration:{}".format(ini_config))
280 pyramid.paster.setup_logging(ini_config)
179 pyramid.paster.setup_logging(ini_config)
281
180
282 settings = get_app_config(ini_config)
181 settings = get_app_config(ini_config)
283 app = make_pyramid_app({'__file__': ini_config}, **settings)
182 app = make_pyramid_app({'__file__': ini_config}, **settings)
284
183
285 return app
184 return app
286
185
287
186
288 @pytest.fixture(scope='function')
187 @pytest.fixture(scope='function')
289 def app(request, config_stub, baseapp, http_environ):
188 def app(request, config_stub, baseapp, http_environ):
290 app = CustomTestApp(
189 app = CustomTestApp(
291 baseapp,
190 baseapp,
292 extra_environ=http_environ)
191 extra_environ=http_environ)
293 if request.cls:
192 if request.cls:
294 request.cls.app = app
193 request.cls.app = app
295 return app
194 return app
296
195
297
196
298 @pytest.fixture(scope='session')
197 @pytest.fixture(scope='session')
299 def app_settings(baseapp, ini_config):
198 def app_settings(baseapp, ini_config):
300 """
199 """
301 Settings dictionary used to create the app.
200 Settings dictionary used to create the app.
302
201
303 Parses the ini file and passes the result through the sanitize and apply
202 Parses the ini file and passes the result through the sanitize and apply
304 defaults mechanism in `rhodecode.config.middleware`.
203 defaults mechanism in `rhodecode.config.middleware`.
305 """
204 """
306 return baseapp.config.get_settings()
205 return baseapp.config.get_settings()
307
206
308
207
309 @pytest.fixture(scope='session')
208 @pytest.fixture(scope='session')
310 def db_connection(ini_settings):
209 def db_connection(ini_settings):
311 # Initialize the database connection.
210 # Initialize the database connection.
312 config_utils.initialize_database(ini_settings)
211 config_utils.initialize_database(ini_settings)
313
212
314
213
315 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
214 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
316
215
317
216
318 def _autologin_user(app, *args):
217 def _autologin_user(app, *args):
319 session = login_user_session(app, *args)
218 session = login_user_session(app, *args)
320 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
219 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
321 return LoginData(csrf_token, session['rhodecode_user'])
220 return LoginData(csrf_token, session['rhodecode_user'])
322
221
323
222
324 @pytest.fixture()
223 @pytest.fixture()
325 def autologin_user(app):
224 def autologin_user(app):
326 """
225 """
327 Utility fixture which makes sure that the admin user is logged in
226 Utility fixture which makes sure that the admin user is logged in
328 """
227 """
329 return _autologin_user(app)
228 return _autologin_user(app)
330
229
331
230
332 @pytest.fixture()
231 @pytest.fixture()
333 def autologin_regular_user(app):
232 def autologin_regular_user(app):
334 """
233 """
335 Utility fixture which makes sure that the regular user is logged in
234 Utility fixture which makes sure that the regular user is logged in
336 """
235 """
337 return _autologin_user(
236 return _autologin_user(
338 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
237 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
339
238
340
239
341 @pytest.fixture(scope='function')
240 @pytest.fixture(scope='function')
342 def csrf_token(request, autologin_user):
241 def csrf_token(request, autologin_user):
343 return autologin_user.csrf_token
242 return autologin_user.csrf_token
344
243
345
244
346 @pytest.fixture(scope='function')
245 @pytest.fixture(scope='function')
347 def xhr_header(request):
246 def xhr_header(request):
348 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
247 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
349
248
350
249
@pytest.fixture()
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)


@pytest.fixture(scope='class')
def index_location(request, baseapp):
    index_location = baseapp.config.get_settings()['search.location']
    if request.cls:
        request.cls.index_location = index_location
    return index_location


@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create a temporary directory to be used during the test session.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    if not request.config.getoption('--keep-tmp-path'):
        @request.addfinalizer
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)

    return TESTS_TMP_PATH


@pytest.fixture()
def test_repo_group(request):
    """
    Create a temporary repository group and destroy it automatically
    after use.
    """
    fixture = Fixture()
    repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(repogroupid)

    def _cleanup():
        fixture.destroy_repo_group(repogroupid)

    request.addfinalizer(_cleanup)
    return repo_group


@pytest.fixture()
def test_user_group(request):
    """
    Create a temporary user group and destroy it automatically
    after use.
    """
    fixture = Fixture()
    usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(usergroupid)

    def _cleanup():
        fixture.destroy_user_group(user_group)

    request.addfinalizer(_cleanup)
    return user_group


@pytest.fixture(scope='session')
def test_repo(request):
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container


class TestRepoContainer(object):
    """
    Container for test repositories which are used read-only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestRepoContainer()
        repo = test_repo('minimal', 'svn')

    """

    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias, config)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias, config):
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)

        vcs_repo = backend(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)


def backend_base(request, backend_alias, baseapp, test_repo):
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = Backend(
        alias=backend_alias,
        repo_name=repo_name,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend


@pytest.fixture()
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)


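# Usage sketch: a test that takes `backend` runs once per backend selected via
# `--backends` (git, hg, svn by default). The marker arguments and test bodies
# below are illustrative assumptions, not part of this module:
#
#     def test_repo_has_two_commits(backend):
#         backend.create_repo(number_of_commits=2)
#         assert len(backend.commit_ids) == 2
#
#     @pytest.mark.xfail_backends('svn')
#     def test_feature_not_done_for_svn(backend):
#         ...
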
@pytest.fixture()
def backend_git(request, baseapp, test_repo):
    return backend_base(request, 'git', baseapp, test_repo)


@pytest.fixture()
def backend_hg(request, baseapp, test_repo):
    return backend_base(request, 'hg', baseapp, test_repo)


@pytest.fixture()
def backend_svn(request, baseapp, test_repo):
    return backend_base(request, 'svn', baseapp, test_repo)


@pytest.fixture()
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend".

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times, once for each
    available backend, which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git


@pytest.fixture()
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub.

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git


@pytest.fixture()
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub.
    """
    return backend_stub.create_repo()


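# Usage sketch: `repo_stub` is the quickest way to get "some repository" when
# the backend type does not matter. Illustrative only:
#
#     def test_repo_stub_is_a_repository(repo_stub):
#         assert repo_stub.repo_name
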
class Backend(object):
    """
    Represents the test configuration for one supported backend.

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    _master_repo = None
    _master_repo_path = ''
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the commit ids of the last created repository, as a map of
        commit message to raw_id.
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This makes it easy to create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        self._master_repo_path = self._master_repo.repo_full_path

        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsrepo = repo.scm_instance()
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)

    def create_fork(self):
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])


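# Usage sketch: `create_master_repo` plus `heads=` on `create_repo` is how the
# pull-request helpers further down build source and target repositories. The
# commit messages double as keys into the returned commit map. Commit names
# and the derived repositories below are illustrative only:
#
#     commits = [
#         {'message': 'base'},
#         {'message': 'feature', 'parents': ['base']},
#     ]
#     commit_map = backend.create_master_repo(commits)  # {'base': <raw_id>, ...}
#     target = backend.create_repo(heads=['base'])
#     source = backend.create_repo(heads=['base', 'feature'])
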
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    repo_path = os.path.join(tests_tmp_path, repo_name)
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=repo_path,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend


@pytest.fixture()
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)


@pytest.fixture()
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git


class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in range(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic <automatic@rhodecode.com>')

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)


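# Usage sketch: `vcsbackend` works with plain vcs repository objects rather
# than database models, which keeps low-level vcs tests independent of the
# application layer. Illustrative only:
#
#     def test_initial_commit_count(vcsbackend):
#         repo = vcsbackend.create_repo(number_of_commits=3)
#         assert len(repo.commit_ids) == 3
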
def _add_commits_to_repo(vcs_repo, commits):
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        message = str(commit.get('message', 'Commit %s' % idx))

        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids


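# Commit-dict schema accepted by `_add_commits_to_repo` above, and therefore by
# `Backend.create_repo(commits=...)` and `VcsBackend.create_repo(commits=...)`.
# All keys are optional; a commit without file operations gets an auto-generated
# file. A sketch with illustrative values:
#
#     commits = [
#         {
#             'message': 'Add readme',
#             'added': [FileNode('README.rst', content='docs\n')],
#             'changed': [],   # FileNode instances to modify
#             'removed': [],   # FileNode instances to delete
#             'parents': [],   # messages of earlier commits in this list
#             'author': 'Automatic <automatic@rhodecode.com>',
#             'date': None,    # optional commit date
#             'branch': None,  # optional branch name
#         },
#     ]
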
@pytest.fixture()
def reposerver(request):
    """
    Allows serving a backend repository for the duration of a test.
    """

    repo_server = RepoServer()
    request.addfinalizer(repo_server.cleanup)
    return repo_server


class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        for proc in self._cleanup_servers:
            proc.terminate()


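# Usage sketch: combine `reposerver` with `vcsbackend_svn` to exercise code
# that talks to a repository over svn://. Illustrative only:
#
#     def test_pull_from_served_repo(vcsbackend_svn, reposerver):
#         source = vcsbackend_svn.create_repo(number_of_commits=1)
#         reposerver.serve(source)
#         # reposerver.url is now 'svn://localhost'; point clone/pull code at it
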
@pytest.fixture()
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """

    util = PRTestUtility(backend)
    request.addfinalizer(util.cleanup)

    return util


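# Usage sketch: with no arguments, `create_pull_request` builds a three-commit
# master repository ('c1', 'c2', 'c3'), derives target and source repositories
# from 'c1' and 'c2', and opens a pull request for 'c2'. Illustrative only:
#
#     def test_pr_can_be_closed(pr_util):
#         pull_request = pr_util.create_pull_request(mergeable=True)
#         assert pull_request.pull_request_id
#         pr_util.close()
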
class PRTestUtility(object):

    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None, observers=None,
            title=u"Test", description=u"Description"):
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'observers': observers or self._get_observers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        role = PullRequestReviewers.ROLE_REVIEWER
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
        ]

    def _get_observers(self):
        return []

    def update_source_repository(self, head=None):
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()


@pytest.fixture()
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
    return user


@pytest.fixture()
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
    return user


@pytest.fixture()
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    utility = UserUtility(test_name=request.node.name)
    request.addfinalizer(utility.cleanup)
    return utility


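# Usage sketch: `user_util` creates throwaway users, groups, repositories and
# permissions which its finalizer removes again. The permission name below is
# an illustrative assumption:
#
#     def test_permission_flow(user_util, backend):
#         user, user_group = user_util.create_user_with_group()
#         repo = user_util.create_repo(repo_type=backend.alias)
#         user_util.grant_user_permission_to_repo(repo, user, 'repository.read')
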
# TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
1207 self, target_user_group, user_group, permission_name):
1309 permission = UserGroupModel().grant_user_group_permission(
1208 permission = UserGroupModel().grant_user_group_permission(
1310 target_user_group, user_group, permission_name)
1209 target_user_group, user_group, permission_name)
1311 self.user_group_user_group_permission_ids.append(
1210 self.user_group_user_group_permission_ids.append(
1312 (target_user_group.users_group_id, user_group.users_group_id))
1211 (target_user_group.users_group_id, user_group.users_group_id))
1313 return permission
1212 return permission
1314
1213
1315 def revoke_user_permission(self, user_name, permission_name):
1214 def revoke_user_permission(self, user_name, permission_name):
1316 self.inherit_default_user_permissions(user_name, True)
1215 self.inherit_default_user_permissions(user_name, True)
1317 UserModel().revoke_perm(user_name, permission_name)
1216 UserModel().revoke_perm(user_name, permission_name)
1318
1217
1319 def inherit_default_user_permissions(self, user_name, value):
1218 def inherit_default_user_permissions(self, user_name, value):
1320 user = UserModel().get_by_username(user_name)
1219 user = UserModel().get_by_username(user_name)
1321 user.inherit_default_permissions = value
1220 user.inherit_default_permissions = value
1322 Session().add(user)
1221 Session().add(user)
1323 Session().commit()
1222 Session().commit()
1324
1223
1325 def cleanup(self):
1224 def cleanup(self):
1326 self._cleanup_permissions()
1225 self._cleanup_permissions()
1327 self._cleanup_repos()
1226 self._cleanup_repos()
1328 self._cleanup_repo_groups()
1227 self._cleanup_repo_groups()
1329 self._cleanup_user_groups()
1228 self._cleanup_user_groups()
1330 self._cleanup_users()
1229 self._cleanup_users()
1331
1230
1332 def _cleanup_permissions(self):
1231 def _cleanup_permissions(self):
1333 if self.user_permissions:
1232 if self.user_permissions:
1334 for user_name, permission_name in self.user_permissions:
1233 for user_name, permission_name in self.user_permissions:
1335 self.revoke_user_permission(user_name, permission_name)
1234 self.revoke_user_permission(user_name, permission_name)
1336
1235
1337 for permission in self.user_repo_permission_ids:
1236 for permission in self.user_repo_permission_ids:
1338 RepoModel().revoke_user_permission(*permission)
1237 RepoModel().revoke_user_permission(*permission)
1339
1238
1340 for permission in self.user_group_repo_permission_ids:
1239 for permission in self.user_group_repo_permission_ids:
1341 RepoModel().revoke_user_group_permission(*permission)
1240 RepoModel().revoke_user_group_permission(*permission)
1342
1241
1343 for permission in self.user_repo_group_permission_ids:
1242 for permission in self.user_repo_group_permission_ids:
1344 RepoGroupModel().revoke_user_permission(*permission)
1243 RepoGroupModel().revoke_user_permission(*permission)
1345
1244
1346 for permission in self.user_group_repo_group_permission_ids:
1245 for permission in self.user_group_repo_group_permission_ids:
1347 RepoGroupModel().revoke_user_group_permission(*permission)
1246 RepoGroupModel().revoke_user_group_permission(*permission)
1348
1247
1349 for permission in self.user_user_group_permission_ids:
1248 for permission in self.user_user_group_permission_ids:
1350 UserGroupModel().revoke_user_permission(*permission)
1249 UserGroupModel().revoke_user_permission(*permission)
1351
1250
1352 for permission in self.user_group_user_group_permission_ids:
1251 for permission in self.user_group_user_group_permission_ids:
1353 UserGroupModel().revoke_user_group_permission(*permission)
1252 UserGroupModel().revoke_user_group_permission(*permission)
1354
1253
1355 def _cleanup_repo_groups(self):
1254 def _cleanup_repo_groups(self):
1356 def _repo_group_compare(first_group_id, second_group_id):
1255 def _repo_group_compare(first_group_id, second_group_id):
1357 """
1256 """
1358 Gives higher priority to the groups with the most complex paths
1257 Gives higher priority to the groups with the most complex paths
1359 """
1258 """
1360 first_group = RepoGroup.get(first_group_id)
1259 first_group = RepoGroup.get(first_group_id)
1361 second_group = RepoGroup.get(second_group_id)
1260 second_group = RepoGroup.get(second_group_id)
1362 first_group_parts = (
1261 first_group_parts = (
1363 len(first_group.group_name.split('/')) if first_group else 0)
1262 len(first_group.group_name.split('/')) if first_group else 0)
1364 second_group_parts = (
1263 second_group_parts = (
1365 len(second_group.group_name.split('/')) if second_group else 0)
1264 len(second_group.group_name.split('/')) if second_group else 0)
1366 return cmp(second_group_parts, first_group_parts)
1265 return cmp(second_group_parts, first_group_parts)
1367
1266
1368 sorted_repo_group_ids = sorted(
1267 sorted_repo_group_ids = sorted(
1369 self.repo_group_ids, cmp=_repo_group_compare)
1268 self.repo_group_ids, cmp=_repo_group_compare)
1370 for repo_group_id in sorted_repo_group_ids:
1269 for repo_group_id in sorted_repo_group_ids:
1371 self.fixture.destroy_repo_group(repo_group_id)
1270 self.fixture.destroy_repo_group(repo_group_id)
1372
1271
1373 def _cleanup_repos(self):
1272 def _cleanup_repos(self):
1374 sorted_repos_ids = sorted(self.repos_ids)
1273 sorted_repos_ids = sorted(self.repos_ids)
1375 for repo_id in sorted_repos_ids:
1274 for repo_id in sorted_repos_ids:
1376 self.fixture.destroy_repo(repo_id)
1275 self.fixture.destroy_repo(repo_id)
1377
1276
1378 def _cleanup_user_groups(self):
1277 def _cleanup_user_groups(self):
1379 def _user_group_compare(first_group_id, second_group_id):
1278 def _user_group_compare(first_group_id, second_group_id):
1380 """
1279 """
1381 Gives higher priority to the groups with the most complex paths
1280 Gives higher priority to the groups with the most complex paths
1382 """
1281 """
1383 first_group = UserGroup.get(first_group_id)
1282 first_group = UserGroup.get(first_group_id)
1384 second_group = UserGroup.get(second_group_id)
1283 second_group = UserGroup.get(second_group_id)
1385 first_group_parts = (
1284 first_group_parts = (
1386 len(first_group.users_group_name.split('/'))
1285 len(first_group.users_group_name.split('/'))
1387 if first_group else 0)
1286 if first_group else 0)
1388 second_group_parts = (
1287 second_group_parts = (
1389 len(second_group.users_group_name.split('/'))
1288 len(second_group.users_group_name.split('/'))
1390 if second_group else 0)
1289 if second_group else 0)
1391 return cmp(second_group_parts, first_group_parts)
1290 return cmp(second_group_parts, first_group_parts)
1392
1291
1393 sorted_user_group_ids = sorted(
1292 sorted_user_group_ids = sorted(
1394 self.user_group_ids, cmp=_user_group_compare)
1293 self.user_group_ids, cmp=_user_group_compare)
1395 for user_group_id in sorted_user_group_ids:
1294 for user_group_id in sorted_user_group_ids:
1396 self.fixture.destroy_user_group(user_group_id)
1295 self.fixture.destroy_user_group(user_group_id)
1397
1296
1398 def _cleanup_users(self):
1297 def _cleanup_users(self):
1399 for user_id in self.user_ids:
1298 for user_id in self.user_ids:
1400 self.fixture.destroy_user(user_id)
1299 self.fixture.destroy_user(user_id)
1401
1300
1402
1301
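The two cleanup helpers above order group ids with the Python 2 `cmp()` builtin and the `cmp=` keyword of `sorted()`, neither of which exists on Python 3. A minimal sketch of an equivalent ordering with a `key` function, usable inside `_cleanup_repo_groups` (the helper name `_group_depth` is invented here for illustration):

    def _group_depth(group_id):
        # deeper paths first, so child groups are destroyed before their parents
        group = RepoGroup.get(group_id)
        return len(group.group_name.split('/')) if group else 0

    sorted_repo_group_ids = sorted(
        self.repo_group_ids, key=_group_depth, reverse=True)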
1403 # TODO: Think about moving this into a pytest-pyro package and making it a
1404 # pytest plugin
1405 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1406 def pytest_runtest_makereport(item, call):
1407 """
1408 Adds the remote traceback to the test report if the exception carries this information.
1409
1410 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1411 to the exception instance.
1412 """
1413 outcome = yield
1414 report = outcome.get_result()
1415 if call.excinfo:
1416 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1417
1418
1419 def _add_vcsserver_remote_traceback(report, exc):
1420 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1421
1422 if vcsserver_traceback:
1423 section = 'VCSServer remote traceback ' + report.when
1424 report.sections.append((section, vcsserver_traceback))
1425
1426
1427 @pytest.fixture(scope='session')
1302 @pytest.fixture(scope='session')
1428 def testrun():
1303 def testrun():
1429 return {
1304 return {
1430 'uuid': uuid.uuid4(),
1305 'uuid': uuid.uuid4(),
1431 'start': datetime.datetime.utcnow().isoformat(),
1306 'start': datetime.datetime.utcnow().isoformat(),
1432 'timestamp': int(time.time()),
1307 'timestamp': int(time.time()),
1433 }
1308 }
1434
1309
1435
1310
1436 class AppenlightClient(object):
1311 class AppenlightClient(object):
1437
1312
1438 url_template = '{url}?protocol_version=0.5'
1313 url_template = '{url}?protocol_version=0.5'
1439
1314
1440 def __init__(
1315 def __init__(
1441 self, url, api_key, add_server=True, add_timestamp=True,
1316 self, url, api_key, add_server=True, add_timestamp=True,
1442 namespace=None, request=None, testrun=None):
1317 namespace=None, request=None, testrun=None):
1443 self.url = self.url_template.format(url=url)
1318 self.url = self.url_template.format(url=url)
1444 self.api_key = api_key
1319 self.api_key = api_key
1445 self.add_server = add_server
1320 self.add_server = add_server
1446 self.add_timestamp = add_timestamp
1321 self.add_timestamp = add_timestamp
1447 self.namespace = namespace
1322 self.namespace = namespace
1448 self.request = request
1323 self.request = request
1449 self.server = socket.getfqdn(socket.gethostname())
1324 self.server = socket.getfqdn(socket.gethostname())
1450 self.tags_before = {}
1325 self.tags_before = {}
1451 self.tags_after = {}
1326 self.tags_after = {}
1452 self.stats = []
1327 self.stats = []
1453 self.testrun = testrun or {}
1328 self.testrun = testrun or {}
1454
1329
1455 def tag_before(self, tag, value):
1330 def tag_before(self, tag, value):
1456 self.tags_before[tag] = value
1331 self.tags_before[tag] = value
1457
1332
1458 def tag_after(self, tag, value):
1333 def tag_after(self, tag, value):
1459 self.tags_after[tag] = value
1334 self.tags_after[tag] = value
1460
1335
1461 def collect(self, data):
1336 def collect(self, data):
1462 if self.add_server:
1337 if self.add_server:
1463 data.setdefault('server', self.server)
1338 data.setdefault('server', self.server)
1464 if self.add_timestamp:
1339 if self.add_timestamp:
1465 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1340 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1466 if self.namespace:
1341 if self.namespace:
1467 data.setdefault('namespace', self.namespace)
1342 data.setdefault('namespace', self.namespace)
1468 if self.request:
1343 if self.request:
1469 data.setdefault('request', self.request)
1344 data.setdefault('request', self.request)
1470 self.stats.append(data)
1345 self.stats.append(data)
1471
1346
1472 def send_stats(self):
1347 def send_stats(self):
1473 tags = [
1348 tags = [
1474 ('testrun', self.request),
1349 ('testrun', self.request),
1475 ('testrun.start', self.testrun['start']),
1350 ('testrun.start', self.testrun['start']),
1476 ('testrun.timestamp', self.testrun['timestamp']),
1351 ('testrun.timestamp', self.testrun['timestamp']),
1477 ('test', self.namespace),
1352 ('test', self.namespace),
1478 ]
1353 ]
1479 for key, value in self.tags_before.items():
1354 for key, value in self.tags_before.items():
1480 tags.append((key + '.before', value))
1355 tags.append((key + '.before', value))
1481 try:
1356 try:
1482 delta = self.tags_after[key] - value
1357 delta = self.tags_after[key] - value
1483 tags.append((key + '.delta', delta))
1358 tags.append((key + '.delta', delta))
1484 except Exception:
1359 except Exception:
1485 pass
1360 pass
1486 for key, value in self.tags_after.items():
1361 for key, value in self.tags_after.items():
1487 tags.append((key + '.after', value))
1362 tags.append((key + '.after', value))
1488 self.collect({
1363 self.collect({
1489 'message': "Collected tags",
1364 'message': "Collected tags",
1490 'tags': tags,
1365 'tags': tags,
1491 })
1366 })
1492
1367
1493 response = requests.post(
1368 response = requests.post(
1494 self.url,
1369 self.url,
1495 headers={
1370 headers={
1496 'X-appenlight-api-key': self.api_key},
1371 'X-appenlight-api-key': self.api_key},
1497 json=self.stats,
1372 json=self.stats,
1498 )
1373 )
1499
1374
1500 if response.status_code != 200:
1375 if response.status_code != 200:
1501 pprint.pprint(self.stats)
1376 pprint.pprint(self.stats)
1502 print(response.headers)
1377 print(response.headers)
1503 print(response.text)
1378 print(response.text)
1504 raise Exception('Sending to appenlight failed')
1379 raise Exception('Sending to appenlight failed')
1505
1380
1506
1381
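A hedged usage sketch for AppenlightClient, assuming the session-scoped `testrun` fixture above and placeholder credentials; `send_stats` posts the collected entries in one batch and raises when the service does not answer with HTTP 200:

    def test_send_perf_stats(request, testrun):
        client = AppenlightClient(
            url='https://ae.rhodecode.com',   # placeholder service URL
            api_key='dummy-key',              # placeholder API key
            namespace=request.node.nodeid,
            request=str(testrun['uuid']),
            testrun=testrun)
        client.tag_before('repo_count', 10)
        client.tag_after('repo_count', 12)    # a 'repo_count.delta' tag of 2 is derived
        client.send_stats()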
1507 @pytest.fixture()
1382 @pytest.fixture()
1508 def gist_util(request, db_connection):
1383 def gist_util(request, db_connection):
1509 """
1384 """
1510 Provides a wired instance of `GistUtility` with integrated cleanup.
1385 Provides a wired instance of `GistUtility` with integrated cleanup.
1511 """
1386 """
1512 utility = GistUtility()
1387 utility = GistUtility()
1513 request.addfinalizer(utility.cleanup)
1388 request.addfinalizer(utility.cleanup)
1514 return utility
1389 return utility
1515
1390
1516
1391
1517 class GistUtility(object):
1392 class GistUtility(object):
1518 def __init__(self):
1393 def __init__(self):
1519 self.fixture = Fixture()
1394 self.fixture = Fixture()
1520 self.gist_ids = []
1395 self.gist_ids = []
1521
1396
1522 def create_gist(self, **kwargs):
1397 def create_gist(self, **kwargs):
1523 gist = self.fixture.create_gist(**kwargs)
1398 gist = self.fixture.create_gist(**kwargs)
1524 self.gist_ids.append(gist.gist_id)
1399 self.gist_ids.append(gist.gist_id)
1525 return gist
1400 return gist
1526
1401
1527 def cleanup(self):
1402 def cleanup(self):
1528 for id_ in self.gist_ids:
1403 for id_ in self.gist_ids:
1529 self.fixture.destroy_gists(str(id_))
1404 self.fixture.destroy_gists(str(id_))
1530
1405
1531
1406
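A small usage sketch for the `gist_util` fixture; created gists are tracked and destroyed by the finalizer, so the test itself does not need to clean up:

    def test_gist_is_tracked_for_cleanup(gist_util):
        gist = gist_util.create_gist()
        # the id is recorded so GistUtility.cleanup() destroys it after the test
        assert gist.gist_id in gist_util.gist_ids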
1532 @pytest.fixture()
1407 @pytest.fixture()
1533 def enabled_backends(request):
1408 def enabled_backends(request):
1534 backends = request.config.option.backends
1409 backends = request.config.option.backends
1535 return backends[:]
1410 return backends[:]
1536
1411
1537
1412
1538 @pytest.fixture()
1413 @pytest.fixture()
1539 def settings_util(request, db_connection):
1414 def settings_util(request, db_connection):
1540 """
1415 """
1541 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1416 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1542 """
1417 """
1543 utility = SettingsUtility()
1418 utility = SettingsUtility()
1544 request.addfinalizer(utility.cleanup)
1419 request.addfinalizer(utility.cleanup)
1545 return utility
1420 return utility
1546
1421
1547
1422
1548 class SettingsUtility(object):
1423 class SettingsUtility(object):
1549 def __init__(self):
1424 def __init__(self):
1550 self.rhodecode_ui_ids = []
1425 self.rhodecode_ui_ids = []
1551 self.rhodecode_setting_ids = []
1426 self.rhodecode_setting_ids = []
1552 self.repo_rhodecode_ui_ids = []
1427 self.repo_rhodecode_ui_ids = []
1553 self.repo_rhodecode_setting_ids = []
1428 self.repo_rhodecode_setting_ids = []
1554
1429
1555 def create_repo_rhodecode_ui(
1430 def create_repo_rhodecode_ui(
1556 self, repo, section, value, key=None, active=True, cleanup=True):
1431 self, repo, section, value, key=None, active=True, cleanup=True):
1557 key = key or hashlib.sha1(
1432 key = key or hashlib.sha1(
1558 '{}{}{}'.format(section, value, repo.repo_id).encode('utf8')).hexdigest()
1433 '{}{}{}'.format(section, value, repo.repo_id).encode('utf8')).hexdigest()
1559
1434
1560 setting = RepoRhodeCodeUi()
1435 setting = RepoRhodeCodeUi()
1561 setting.repository_id = repo.repo_id
1436 setting.repository_id = repo.repo_id
1562 setting.ui_section = section
1437 setting.ui_section = section
1563 setting.ui_value = value
1438 setting.ui_value = value
1564 setting.ui_key = key
1439 setting.ui_key = key
1565 setting.ui_active = active
1440 setting.ui_active = active
1566 Session().add(setting)
1441 Session().add(setting)
1567 Session().commit()
1442 Session().commit()
1568
1443
1569 if cleanup:
1444 if cleanup:
1570 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1445 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1571 return setting
1446 return setting
1572
1447
1573 def create_rhodecode_ui(
1448 def create_rhodecode_ui(
1574 self, section, value, key=None, active=True, cleanup=True):
1449 self, section, value, key=None, active=True, cleanup=True):
1575 key = key or hashlib.sha1('{}{}'.format(section, value).encode('utf8')).hexdigest()
1450 key = key or hashlib.sha1('{}{}'.format(section, value).encode('utf8')).hexdigest()
1576
1451
1577 setting = RhodeCodeUi()
1452 setting = RhodeCodeUi()
1578 setting.ui_section = section
1453 setting.ui_section = section
1579 setting.ui_value = value
1454 setting.ui_value = value
1580 setting.ui_key = key
1455 setting.ui_key = key
1581 setting.ui_active = active
1456 setting.ui_active = active
1582 Session().add(setting)
1457 Session().add(setting)
1583 Session().commit()
1458 Session().commit()
1584
1459
1585 if cleanup:
1460 if cleanup:
1586 self.rhodecode_ui_ids.append(setting.ui_id)
1461 self.rhodecode_ui_ids.append(setting.ui_id)
1587 return setting
1462 return setting
1588
1463
1589 def create_repo_rhodecode_setting(
1464 def create_repo_rhodecode_setting(
1590 self, repo, name, value, type_, cleanup=True):
1465 self, repo, name, value, type_, cleanup=True):
1591 setting = RepoRhodeCodeSetting(
1466 setting = RepoRhodeCodeSetting(
1592 repo.repo_id, key=name, val=value, type=type_)
1467 repo.repo_id, key=name, val=value, type=type_)
1593 Session().add(setting)
1468 Session().add(setting)
1594 Session().commit()
1469 Session().commit()
1595
1470
1596 if cleanup:
1471 if cleanup:
1597 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1472 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1598 return setting
1473 return setting
1599
1474
1600 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1475 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1601 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1476 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1602 Session().add(setting)
1477 Session().add(setting)
1603 Session().commit()
1478 Session().commit()
1604
1479
1605 if cleanup:
1480 if cleanup:
1606 self.rhodecode_setting_ids.append(setting.app_settings_id)
1481 self.rhodecode_setting_ids.append(setting.app_settings_id)
1607
1482
1608 return setting
1483 return setting
1609
1484
1610 def cleanup(self):
1485 def cleanup(self):
1611 for id_ in self.rhodecode_ui_ids:
1486 for id_ in self.rhodecode_ui_ids:
1612 setting = RhodeCodeUi.get(id_)
1487 setting = RhodeCodeUi.get(id_)
1613 Session().delete(setting)
1488 Session().delete(setting)
1614
1489
1615 for id_ in self.rhodecode_setting_ids:
1490 for id_ in self.rhodecode_setting_ids:
1616 setting = RhodeCodeSetting.get(id_)
1491 setting = RhodeCodeSetting.get(id_)
1617 Session().delete(setting)
1492 Session().delete(setting)
1618
1493
1619 for id_ in self.repo_rhodecode_ui_ids:
1494 for id_ in self.repo_rhodecode_ui_ids:
1620 setting = RepoRhodeCodeUi.get(id_)
1495 setting = RepoRhodeCodeUi.get(id_)
1621 Session().delete(setting)
1496 Session().delete(setting)
1622
1497
1623 for id_ in self.repo_rhodecode_setting_ids:
1498 for id_ in self.repo_rhodecode_setting_ids:
1624 setting = RepoRhodeCodeSetting.get(id_)
1499 setting = RepoRhodeCodeSetting.get(id_)
1625 Session().delete(setting)
1500 Session().delete(setting)
1626
1501
1627 Session().commit()
1502 Session().commit()
1628
1503
1629
1504
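A hedged sketch of how a test might use `settings_util`; the section, value and setting type below are illustrative only:

    def test_custom_settings_are_cleaned_up(settings_util):
        # both records are registered for deletion in SettingsUtility.cleanup()
        ui_entry = settings_util.create_rhodecode_ui(
            'hooks', 'python:example.hook', active=True)
        setting = settings_util.create_rhodecode_setting(
            'example_key', 'example_value', 'unicode')
        assert ui_entry.ui_id in settings_util.rhodecode_ui_ids
        assert setting.app_settings_id in settings_util.rhodecode_setting_ids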
1630 @pytest.fixture()
1505 @pytest.fixture()
1631 def no_notifications(request):
1506 def no_notifications(request):
1632 notification_patcher = mock.patch(
1507 notification_patcher = mock.patch(
1633 'rhodecode.model.notification.NotificationModel.create')
1508 'rhodecode.model.notification.NotificationModel.create')
1634 notification_patcher.start()
1509 notification_patcher.start()
1635 request.addfinalizer(notification_patcher.stop)
1510 request.addfinalizer(notification_patcher.stop)
1636
1511
1637
1512
1638 @pytest.fixture(scope='session')
1513 @pytest.fixture(scope='session')
1639 def repeat(request):
1514 def repeat(request):
1640 """
1515 """
1641 The number of repetitions is based on this fixture.
1516 The number of repetitions is based on this fixture.
1642
1517
1643 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1518 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1644 tests are not too slow in our default test suite.
1519 tests are not too slow in our default test suite.
1645 """
1520 """
1646 return request.config.getoption('--repeat')
1521 return request.config.getoption('--repeat')
1647
1522
1648
1523
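A sketch of a performance-style test driven by the `repeat` fixture; `perform_expensive_call` is a made-up placeholder:

    def test_expensive_operation_stays_fast(repeat):
        # slower operations conventionally divide the configured count by 10 or 100
        for _ in range(repeat // 10):
            perform_expensive_call()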
1649 @pytest.fixture()
1524 @pytest.fixture()
1650 def rhodecode_fixtures():
1525 def rhodecode_fixtures():
1651 return Fixture()
1526 return Fixture()
1652
1527
1653
1528
1654 @pytest.fixture()
1529 @pytest.fixture()
1655 def context_stub():
1530 def context_stub():
1656 """
1531 """
1657 Stub context object.
1532 Stub context object.
1658 """
1533 """
1659 context = pyramid.testing.DummyResource()
1534 context = pyramid.testing.DummyResource()
1660 return context
1535 return context
1661
1536
1662
1537
1663 @pytest.fixture()
1538 @pytest.fixture()
1664 def request_stub():
1539 def request_stub():
1665 """
1540 """
1666 Stub request object.
1541 Stub request object.
1667 """
1542 """
1668 from rhodecode.lib.base import bootstrap_request
1543 from rhodecode.lib.base import bootstrap_request
1669 request = bootstrap_request(scheme='https')
1544 request = bootstrap_request(scheme='https')
1670 return request
1545 return request
1671
1546
1672
1547
1673 @pytest.fixture()
1548 @pytest.fixture()
1674 def config_stub(request, request_stub):
1549 def config_stub(request, request_stub):
1675 """
1550 """
1676 Set up pyramid.testing and return the Configurator.
1551 Set up pyramid.testing and return the Configurator.
1677 """
1552 """
1678 from rhodecode.lib.base import bootstrap_config
1553 from rhodecode.lib.base import bootstrap_config
1679 config = bootstrap_config(request=request_stub)
1554 config = bootstrap_config(request=request_stub)
1680
1555
1681 @request.addfinalizer
1556 @request.addfinalizer
1682 def cleanup():
1557 def cleanup():
1683 pyramid.testing.tearDown()
1558 pyramid.testing.tearDown()
1684
1559
1685 return config
1560 return config
1686
1561
1687
1562
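A sketch combining the three stubs above for code that needs a Pyramid environment; the asserted scheme is an assumption based on `bootstrap_request(scheme='https')`:

    def test_with_stubbed_pyramid(config_stub, request_stub, context_stub):
        # config_stub wires pyramid.testing and tears it down via the finalizer
        request_stub.context = context_stub
        assert request_stub.scheme == 'https'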
1688 @pytest.fixture()
1563 @pytest.fixture()
1689 def StubIntegrationType():
1564 def StubIntegrationType():
1690 class _StubIntegrationType(IntegrationTypeBase):
1565 class _StubIntegrationType(IntegrationTypeBase):
1691 """ Test integration type class """
1566 """ Test integration type class """
1692
1567
1693 key = 'test'
1568 key = 'test'
1694 display_name = 'Test integration type'
1569 display_name = 'Test integration type'
1695 description = 'A test integration type for testing'
1570 description = 'A test integration type for testing'
1696
1571
1697 @classmethod
1572 @classmethod
1698 def icon(cls):
1573 def icon(cls):
1699 return 'test_icon_html_image'
1574 return 'test_icon_html_image'
1700
1575
1701 def __init__(self, settings):
1576 def __init__(self, settings):
1702 super(_StubIntegrationType, self).__init__(settings)
1577 super(_StubIntegrationType, self).__init__(settings)
1703 self.sent_events = [] # for testing
1578 self.sent_events = [] # for testing
1704
1579
1705 def send_event(self, event):
1580 def send_event(self, event):
1706 self.sent_events.append(event)
1581 self.sent_events.append(event)
1707
1582
1708 def settings_schema(self):
1583 def settings_schema(self):
1709 class SettingsSchema(colander.Schema):
1584 class SettingsSchema(colander.Schema):
1710 test_string_field = colander.SchemaNode(
1585 test_string_field = colander.SchemaNode(
1711 colander.String(),
1586 colander.String(),
1712 missing=colander.required,
1587 missing=colander.required,
1713 title='test string field',
1588 title='test string field',
1714 )
1589 )
1715 test_int_field = colander.SchemaNode(
1590 test_int_field = colander.SchemaNode(
1716 colander.Int(),
1591 colander.Int(),
1717 title='some integer setting',
1592 title='some integer setting',
1718 )
1593 )
1719 return SettingsSchema()
1594 return SettingsSchema()
1720
1595
1721
1596
1722 integration_type_registry.register_integration_type(_StubIntegrationType)
1597 integration_type_registry.register_integration_type(_StubIntegrationType)
1723 return _StubIntegrationType
1598 return _StubIntegrationType
1724
1599
1600
1725 @pytest.fixture()
1601 @pytest.fixture()
1726 def stub_integration_settings():
1602 def stub_integration_settings():
1727 return {
1603 return {
1728 'test_string_field': 'some data',
1604 'test_string_field': 'some data',
1729 'test_int_field': 100,
1605 'test_int_field': 100,
1730 }
1606 }
1731
1607
1732
1608
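A sketch showing how the stub integration type records events, which is what the integration fixtures below rely on:

    def test_stub_integration_records_events(StubIntegrationType, stub_integration_settings):
        integration = StubIntegrationType(stub_integration_settings)
        integration.send_event('fake-event')
        # events are only collected for inspection, never delivered anywhere
        assert integration.sent_events == ['fake-event']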
1733 @pytest.fixture()
1609 @pytest.fixture()
1734 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1610 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1735 stub_integration_settings):
1611 stub_integration_settings):
1736 integration = IntegrationModel().create(
1612 integration = IntegrationModel().create(
1737 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1613 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1738 name='test repo integration',
1614 name='test repo integration',
1739 repo=repo_stub, repo_group=None, child_repos_only=None)
1615 repo=repo_stub, repo_group=None, child_repos_only=None)
1740
1616
1741 @request.addfinalizer
1617 @request.addfinalizer
1742 def cleanup():
1618 def cleanup():
1743 IntegrationModel().delete(integration)
1619 IntegrationModel().delete(integration)
1744
1620
1745 return integration
1621 return integration
1746
1622
1747
1623
1748 @pytest.fixture()
1624 @pytest.fixture()
1749 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1625 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1750 stub_integration_settings):
1626 stub_integration_settings):
1751 integration = IntegrationModel().create(
1627 integration = IntegrationModel().create(
1752 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1628 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1753 name='test repogroup integration',
1629 name='test repogroup integration',
1754 repo=None, repo_group=test_repo_group, child_repos_only=True)
1630 repo=None, repo_group=test_repo_group, child_repos_only=True)
1755
1631
1756 @request.addfinalizer
1632 @request.addfinalizer
1757 def cleanup():
1633 def cleanup():
1758 IntegrationModel().delete(integration)
1634 IntegrationModel().delete(integration)
1759
1635
1760 return integration
1636 return integration
1761
1637
1762
1638
1763 @pytest.fixture()
1639 @pytest.fixture()
1764 def repogroup_recursive_integration_stub(request, test_repo_group,
1640 def repogroup_recursive_integration_stub(request, test_repo_group,
1765 StubIntegrationType, stub_integration_settings):
1641 StubIntegrationType, stub_integration_settings):
1766 integration = IntegrationModel().create(
1642 integration = IntegrationModel().create(
1767 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1643 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1768 name='test recursive repogroup integration',
1644 name='test recursive repogroup integration',
1769 repo=None, repo_group=test_repo_group, child_repos_only=False)
1645 repo=None, repo_group=test_repo_group, child_repos_only=False)
1770
1646
1771 @request.addfinalizer
1647 @request.addfinalizer
1772 def cleanup():
1648 def cleanup():
1773 IntegrationModel().delete(integration)
1649 IntegrationModel().delete(integration)
1774
1650
1775 return integration
1651 return integration
1776
1652
1777
1653
1778 @pytest.fixture()
1654 @pytest.fixture()
1779 def global_integration_stub(request, StubIntegrationType,
1655 def global_integration_stub(request, StubIntegrationType,
1780 stub_integration_settings):
1656 stub_integration_settings):
1781 integration = IntegrationModel().create(
1657 integration = IntegrationModel().create(
1782 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1658 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1783 name='test global integration',
1659 name='test global integration',
1784 repo=None, repo_group=None, child_repos_only=None)
1660 repo=None, repo_group=None, child_repos_only=None)
1785
1661
1786 @request.addfinalizer
1662 @request.addfinalizer
1787 def cleanup():
1663 def cleanup():
1788 IntegrationModel().delete(integration)
1664 IntegrationModel().delete(integration)
1789
1665
1790 return integration
1666 return integration
1791
1667
1792
1668
1793 @pytest.fixture()
1669 @pytest.fixture()
1794 def root_repos_integration_stub(request, StubIntegrationType,
1670 def root_repos_integration_stub(request, StubIntegrationType,
1795 stub_integration_settings):
1671 stub_integration_settings):
1796 integration = IntegrationModel().create(
1672 integration = IntegrationModel().create(
1797 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1673 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1798 name='test global integration',
1674 name='test global integration',
1799 repo=None, repo_group=None, child_repos_only=True)
1675 repo=None, repo_group=None, child_repos_only=True)
1800
1676
1801 @request.addfinalizer
1677 @request.addfinalizer
1802 def cleanup():
1678 def cleanup():
1803 IntegrationModel().delete(integration)
1679 IntegrationModel().delete(integration)
1804
1680
1805 return integration
1681 return integration
1806
1682
1807
1683
1808 @pytest.fixture()
1684 @pytest.fixture()
1809 def local_dt_to_utc():
1685 def local_dt_to_utc():
1810 def _factory(dt):
1686 def _factory(dt):
1811 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1687 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1812 dateutil.tz.tzutc()).replace(tzinfo=None)
1688 dateutil.tz.tzutc()).replace(tzinfo=None)
1813 return _factory
1689 return _factory
1814
1690
1815
1691
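A usage sketch for `local_dt_to_utc`; the factory shifts a local datetime to UTC and strips the timezone again:

    def test_local_date_is_converted_to_utc(local_dt_to_utc):
        local_date = datetime.datetime(2020, 1, 1, 12, 0)
        utc_date = local_dt_to_utc(local_date)
        # the result is naive again, i.e. carries no tzinfo
        assert utc_date.tzinfo is None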
1816 @pytest.fixture()
1692 @pytest.fixture()
1817 def disable_anonymous_user(request, baseapp):
1693 def disable_anonymous_user(request, baseapp):
1818 set_anonymous_access(False)
1694 set_anonymous_access(False)
1819
1695
1820 @request.addfinalizer
1696 @request.addfinalizer
1821 def cleanup():
1697 def cleanup():
1822 set_anonymous_access(True)
1698 set_anonymous_access(True)
1823
1699
1824
1700
1825 @pytest.fixture(scope='module')
1701 @pytest.fixture(scope='module')
1826 def rc_fixture(request):
1702 def rc_fixture(request):
1827 return Fixture()
1703 return Fixture()
1828
1704
1829
1705
1830 @pytest.fixture()
1706 @pytest.fixture()
1831 def repo_groups(request):
1707 def repo_groups(request):
1832 fixture = Fixture()
1708 fixture = Fixture()
1833
1709
1834 session = Session()
1710 session = Session()
1835 zombie_group = fixture.create_repo_group('zombie')
1711 zombie_group = fixture.create_repo_group('zombie')
1836 parent_group = fixture.create_repo_group('parent')
1712 parent_group = fixture.create_repo_group('parent')
1837 child_group = fixture.create_repo_group('parent/child')
1713 child_group = fixture.create_repo_group('parent/child')
1838 groups_in_db = session.query(RepoGroup).all()
1714 groups_in_db = session.query(RepoGroup).all()
1839 assert len(groups_in_db) == 3
1715 assert len(groups_in_db) == 3
1840 assert child_group.group_parent_id == parent_group.group_id
1716 assert child_group.group_parent_id == parent_group.group_id
1841
1717
1842 @request.addfinalizer
1718 @request.addfinalizer
1843 def cleanup():
1719 def cleanup():
1844 fixture.destroy_repo_group(zombie_group)
1720 fixture.destroy_repo_group(zombie_group)
1845 fixture.destroy_repo_group(child_group)
1721 fixture.destroy_repo_group(child_group)
1846 fixture.destroy_repo_group(parent_group)
1722 fixture.destroy_repo_group(parent_group)
1847
1723
1848 return zombie_group, parent_group, child_group
1724 return zombie_group, parent_group, child_group
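A sketch of a test consuming the `repo_groups` fixture; all three groups are destroyed again by the finalizer:

    def test_child_group_is_nested_under_parent(repo_groups):
        zombie_group, parent_group, child_group = repo_groups
        assert child_group.group_parent_id == parent_group.group_id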
@@ -1,467 +1,472 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import threading
21 import threading
22 import time
22 import time
23 import logging
23 import logging
24 import os.path
24 import os.path
25 import subprocess
25 import subprocess
26 import tempfile
26 import tempfile
27 import urllib.request, urllib.error, urllib.parse
27 import urllib.request, urllib.error, urllib.parse
28 from lxml.html import fromstring, tostring
28 from lxml.html import fromstring, tostring
29 from lxml.cssselect import CSSSelector
29 from lxml.cssselect import CSSSelector
30 from urllib.parse import unquote_plus
30 from urllib.parse import unquote_plus
31 import webob
31 import webob
32
32
33 from webtest.app import TestResponse, TestApp
33 from webtest.app import TestResponse, TestApp
34 from webtest.compat import print_stderr
34 from webtest.compat import print_stderr
35
35
36 import pytest
36 import pytest
37 import rc_testdata
37
38 try:
39 import rc_testdata
40 except ImportError:
41 raise ImportError('Failed to import rc_testdata, '
42 'please make sure this package is installed from requirements_test.txt')
38
43
39 from rhodecode.model.db import User, Repository
44 from rhodecode.model.db import User, Repository
40 from rhodecode.model.meta import Session
45 from rhodecode.model.meta import Session
41 from rhodecode.model.scm import ScmModel
46 from rhodecode.model.scm import ScmModel
42 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
47 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
43 from rhodecode.lib.vcs.backends.base import EmptyCommit
48 from rhodecode.lib.vcs.backends.base import EmptyCommit
44 from rhodecode.tests import login_user_session
49 from rhodecode.tests import login_user_session
45
50
46 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
47
52
48
53
49 class CustomTestResponse(TestResponse):
54 class CustomTestResponse(TestResponse):
50
55
51 def _save_output(self, out):
56 def _save_output(self, out):
52 f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html')
57 f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html')
53 f.write(out)
58 f.write(out)
54 return f.name
59 return f.name
55
60
56 def mustcontain(self, *strings, **kw):
61 def mustcontain(self, *strings, **kw):
57 """
62 """
58 Assert that the response contains all of the strings passed
63 Assert that the response contains all of the strings passed
59 in as arguments.
64 in as arguments.
60
65
61 Equivalent to::
66 Equivalent to::
62
67
63 assert string in res
68 assert string in res
64 """
69 """
65 print_body = kw.pop('print_body', False)
70 print_body = kw.pop('print_body', False)
66 if 'no' in kw:
71 if 'no' in kw:
67 no = kw['no']
72 no = kw['no']
68 del kw['no']
73 del kw['no']
69 if isinstance(no, str):
74 if isinstance(no, str):
70 no = [no]
75 no = [no]
71 else:
76 else:
72 no = []
77 no = []
73 if kw:
78 if kw:
74 raise TypeError(
79 raise TypeError(
75 "The only keyword argument allowed is 'no' got %s" % kw)
80 "The only keyword argument allowed is 'no' got %s" % kw)
76
81
77 f = self._save_output(str(self))
82 f = self._save_output(str(self))
78
83
79 for s in strings:
84 for s in strings:
80 if s not in self:
85 if s not in self:
81 print_stderr("Actual response (no %r):" % s)
86 print_stderr("Actual response (no %r):" % s)
82 print_stderr("body output saved as `%s`" % f)
87 print_stderr("body output saved as `%s`" % f)
83 if print_body:
88 if print_body:
84 print_stderr(str(self))
89 print_stderr(str(self))
85 raise IndexError(
90 raise IndexError(
86 "Body does not contain string %r, body output saved as %s" % (s, f))
91 "Body does not contain string %r, body output saved as %s" % (s, f))
87
92
88 for no_s in no:
93 for no_s in no:
89 if no_s in self:
94 if no_s in self:
90 print_stderr("Actual response (has %r)" % no_s)
95 print_stderr("Actual response (has %r)" % no_s)
91 print_stderr("body output saved as `%s`" % f)
96 print_stderr("body output saved as `%s`" % f)
92 if print_body:
97 if print_body:
93 print_stderr(str(self))
98 print_stderr(str(self))
94 raise IndexError(
99 raise IndexError(
95 "Body contains bad string %r, body output saved as %s" % (no_s, f))
100 "Body contains bad string %r, body output saved as %s" % (no_s, f))
96
101
97 def assert_response(self):
102 def assert_response(self):
98 return AssertResponse(self)
103 return AssertResponse(self)
99
104
100 def get_session_from_response(self):
105 def get_session_from_response(self):
101 """
106 """
102 This returns the session from a response object.
107 This returns the session from a response object.
103 """
108 """
104 from rhodecode.lib.rc_beaker import session_factory_from_settings
109 from rhodecode.lib.rc_beaker import session_factory_from_settings
105 session = session_factory_from_settings(self.test_app._pyramid_settings)
110 session = session_factory_from_settings(self.test_app._pyramid_settings)
106 return session(self.request)
111 return session(self.request)
107
112
108
113
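A hedged sketch of `mustcontain` in a functional test; `app` and the requested path are assumptions, not part of this module:

    response = app.get('/some/path', status=200)
    # on mismatch this fails with an IndexError and saves the body to a temp .html file
    response.mustcontain('Repositories', no='Traceback')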
109 class TestRequest(webob.BaseRequest):
114 class TestRequest(webob.BaseRequest):
110
115
111 # for py.test
116 # for py.test
112 disabled = True
117 disabled = True
113 ResponseClass = CustomTestResponse
118 ResponseClass = CustomTestResponse
114
119
115 def add_response_callback(self, callback):
120 def add_response_callback(self, callback):
116 pass
121 pass
117
122
118
123
119 class CustomTestApp(TestApp):
124 class CustomTestApp(TestApp):
120 """
125 """
121 Custom app to make mustcontain more useful and to extract special methods
126 Custom app to make mustcontain more useful and to extract special methods
122 """
127 """
123 RequestClass = TestRequest
128 RequestClass = TestRequest
124 rc_login_data = {}
129 rc_login_data = {}
125 rc_current_session = None
130 rc_current_session = None
126
131
127 def login(self, username=None, password=None):
132 def login(self, username=None, password=None):
128 from rhodecode.lib import auth
133 from rhodecode.lib import auth
129
134
130 if username and password:
135 if username and password:
131 session = login_user_session(self, username, password)
136 session = login_user_session(self, username, password)
132 else:
137 else:
133 session = login_user_session(self)
138 session = login_user_session(self)
134
139
135 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
140 self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
136 self.rc_current_session = session
141 self.rc_current_session = session
137 return session['rhodecode_user']
142 return session['rhodecode_user']
138
143
139 @property
144 @property
140 def csrf_token(self):
145 def csrf_token(self):
141 return self.rc_login_data['csrf_token']
146 return self.rc_login_data['csrf_token']
142
147
143 @property
148 @property
144 def _pyramid_registry(self):
149 def _pyramid_registry(self):
145 return self.app.config.registry
150 return self.app.config.registry
146
151
147 @property
152 @property
148 def _pyramid_settings(self):
153 def _pyramid_settings(self):
149 return self._pyramid_registry.settings
154 return self._pyramid_registry.settings
150
155
151
156
152 def set_anonymous_access(enabled):
157 def set_anonymous_access(enabled):
153 """(Dis)allows anonymous access depending on parameter `enabled`"""
158 """(Dis)allows anonymous access depending on parameter `enabled`"""
154 user = User.get_default_user()
159 user = User.get_default_user()
155 user.active = enabled
160 user.active = enabled
156 Session().add(user)
161 Session().add(user)
157 Session().commit()
162 Session().commit()
158 time.sleep(1.5) # must sleep for cache (1s to expire)
163 time.sleep(1.5) # must sleep for cache (1s to expire)
159 log.info('anonymous access is now: %s', enabled)
164 log.info('anonymous access is now: %s', enabled)
160 assert enabled == User.get_default_user().active, (
165 assert enabled == User.get_default_user().active, (
161 'Cannot set anonymous access')
166 'Cannot set anonymous access')
162
167
163
168
164 def check_xfail_backends(node, backend_alias):
169 def check_xfail_backends(node, backend_alias):
165 # Using "xfail_backends" here intentionally, since this marks work
170 # Using "xfail_backends" here intentionally, since this marks work
166 # which is "to be done" soon.
171 # which is "to be done" soon.
167 skip_marker = node.get_closest_marker('xfail_backends')
172 skip_marker = node.get_closest_marker('xfail_backends')
168 if skip_marker and backend_alias in skip_marker.args:
173 if skip_marker and backend_alias in skip_marker.args:
169 msg = "Support for backend %s to be developed." % (backend_alias, )
174 msg = "Support for backend %s to be developed." % (backend_alias, )
170 msg = skip_marker.kwargs.get('reason', msg)
175 msg = skip_marker.kwargs.get('reason', msg)
171 pytest.xfail(msg)
176 pytest.xfail(msg)
172
177
173
178
174 def check_skip_backends(node, backend_alias):
179 def check_skip_backends(node, backend_alias):
175 # Using "skip_backends" here intentionally, since this marks work which is
180 # Using "skip_backends" here intentionally, since this marks work which is
176 # not supported.
181 # not supported.
177 skip_marker = node.get_closest_marker('skip_backends')
182 skip_marker = node.get_closest_marker('skip_backends')
178 if skip_marker and backend_alias in skip_marker.args:
183 if skip_marker and backend_alias in skip_marker.args:
179 msg = "Feature not supported for backend %s." % (backend_alias, )
184 msg = "Feature not supported for backend %s." % (backend_alias, )
180 msg = skip_marker.kwargs.get('reason', msg)
185 msg = skip_marker.kwargs.get('reason', msg)
181 pytest.skip(msg)
186 pytest.skip(msg)
182
187
183
188
184 def extract_git_repo_from_dump(dump_name, repo_name):
189 def extract_git_repo_from_dump(dump_name, repo_name):
185 """Create git repo `repo_name` from dump `dump_name`."""
190 """Create git repo `repo_name` from dump `dump_name`."""
186 repos_path = ScmModel().repos_path
191 repos_path = ScmModel().repos_path
187 target_path = os.path.join(repos_path, repo_name)
192 target_path = os.path.join(repos_path, repo_name)
188 rc_testdata.extract_git_dump(dump_name, target_path)
193 rc_testdata.extract_git_dump(dump_name, target_path)
189 return target_path
194 return target_path
190
195
191
196
192 def extract_hg_repo_from_dump(dump_name, repo_name):
197 def extract_hg_repo_from_dump(dump_name, repo_name):
193 """Create hg repo `repo_name` from dump `dump_name`."""
198 """Create hg repo `repo_name` from dump `dump_name`."""
194 repos_path = ScmModel().repos_path
199 repos_path = ScmModel().repos_path
195 target_path = os.path.join(repos_path, repo_name)
200 target_path = os.path.join(repos_path, repo_name)
196 rc_testdata.extract_hg_dump(dump_name, target_path)
201 rc_testdata.extract_hg_dump(dump_name, target_path)
197 return target_path
202 return target_path
198
203
199
204
200 def extract_svn_repo_from_dump(dump_name, repo_name):
205 def extract_svn_repo_from_dump(dump_name, repo_name):
201 """Create a svn repo `repo_name` from dump `dump_name`."""
206 """Create a svn repo `repo_name` from dump `dump_name`."""
202 repos_path = ScmModel().repos_path
207 repos_path = ScmModel().repos_path
203 target_path = os.path.join(repos_path, repo_name)
208 target_path = os.path.join(repos_path, repo_name)
204 SubversionRepository(target_path, create=True)
209 SubversionRepository(target_path, create=True)
205 _load_svn_dump_into_repo(dump_name, target_path)
210 _load_svn_dump_into_repo(dump_name, target_path)
206 return target_path
211 return target_path
207
212
208
213
209 def assert_message_in_log(log_records, message, levelno, module):
214 def assert_message_in_log(log_records, message, levelno, module):
210 messages = [
215 messages = [
211 r.message for r in log_records
216 r.message for r in log_records
212 if r.module == module and r.levelno == levelno
217 if r.module == module and r.levelno == levelno
213 ]
218 ]
214 assert message in messages
219 assert message in messages
215
220
216
221
217 def _load_svn_dump_into_repo(dump_name, repo_path):
222 def _load_svn_dump_into_repo(dump_name, repo_path):
218 """
223 """
219 Utility to populate a svn repository with a named dump
224 Utility to populate a svn repository with a named dump
220
225
221 Currently the dumps are in rc_testdata. They might later on be
226 Currently the dumps are in rc_testdata. They might later on be
222 integrated with the main repository once they stabilize more.
227 integrated with the main repository once they stabilize more.
223 """
228 """
224 dump = rc_testdata.load_svn_dump(dump_name)
229 dump = rc_testdata.load_svn_dump(dump_name)
225 load_dump = subprocess.Popen(
230 load_dump = subprocess.Popen(
226 ['svnadmin', 'load', repo_path],
231 ['svnadmin', 'load', repo_path],
227 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
232 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
228 stderr=subprocess.PIPE)
233 stderr=subprocess.PIPE)
229 out, err = load_dump.communicate(dump)
234 out, err = load_dump.communicate(dump)
230 if load_dump.returncode != 0:
235 if load_dump.returncode != 0:
231 log.error("Output of load_dump command: %s", out)
236 log.error("Output of load_dump command: %s", out)
232 log.error("Error output of load_dump command: %s", err)
237 log.error("Error output of load_dump command: %s", err)
233 raise Exception(
238 raise Exception(
234 'Failed to load dump "%s" into repository at path "%s".'
239 'Failed to load dump "%s" into repository at path "%s".'
235 % (dump_name, repo_path))
240 % (dump_name, repo_path))
236
241
237
242
238 class AssertResponse(object):
243 class AssertResponse(object):
239 """
244 """
240 Utility that helps to assert things about a given HTML response.
245 Utility that helps to assert things about a given HTML response.
241 """
246 """
242
247
243 def __init__(self, response):
248 def __init__(self, response):
244 self.response = response
249 self.response = response
245
250
246 def get_imports(self):
251 def get_imports(self):
247 return fromstring, tostring, CSSSelector
252 return fromstring, tostring, CSSSelector
248
253
249 def one_element_exists(self, css_selector):
254 def one_element_exists(self, css_selector):
250 self.get_element(css_selector)
255 self.get_element(css_selector)
251
256
252 def no_element_exists(self, css_selector):
257 def no_element_exists(self, css_selector):
253 assert not self._get_elements(css_selector)
258 assert not self._get_elements(css_selector)
254
259
255 def element_equals_to(self, css_selector, expected_content):
260 def element_equals_to(self, css_selector, expected_content):
256 element = self.get_element(css_selector)
261 element = self.get_element(css_selector)
257 element_text = self._element_to_string(element)
262 element_text = self._element_to_string(element)
258 assert expected_content in element_text
263 assert expected_content in element_text
259
264
260 def element_contains(self, css_selector, expected_content):
265 def element_contains(self, css_selector, expected_content):
261 element = self.get_element(css_selector)
266 element = self.get_element(css_selector)
262 assert expected_content in element.text_content()
267 assert expected_content in element.text_content()
263
268
264 def element_value_contains(self, css_selector, expected_content):
269 def element_value_contains(self, css_selector, expected_content):
265 element = self.get_element(css_selector)
270 element = self.get_element(css_selector)
266 assert expected_content in element.value
271 assert expected_content in element.value
267
272
268 def contains_one_link(self, link_text, href):
273 def contains_one_link(self, link_text, href):
269 fromstring, tostring, CSSSelector = self.get_imports()
274 fromstring, tostring, CSSSelector = self.get_imports()
270 doc = fromstring(self.response.body)
275 doc = fromstring(self.response.body)
271 sel = CSSSelector('a[href]')
276 sel = CSSSelector('a[href]')
272 elements = [
277 elements = [
273 e for e in sel(doc) if e.text_content().strip() == link_text]
278 e for e in sel(doc) if e.text_content().strip() == link_text]
274 assert len(elements) == 1, "Did not find link or found multiple links"
279 assert len(elements) == 1, "Did not find link or found multiple links"
275 self._ensure_url_equal(elements[0].attrib.get('href'), href)
280 self._ensure_url_equal(elements[0].attrib.get('href'), href)
276
281
277 def contains_one_anchor(self, anchor_id):
282 def contains_one_anchor(self, anchor_id):
278 fromstring, tostring, CSSSelector = self.get_imports()
283 fromstring, tostring, CSSSelector = self.get_imports()
279 doc = fromstring(self.response.body)
284 doc = fromstring(self.response.body)
280 sel = CSSSelector('#' + anchor_id)
285 sel = CSSSelector('#' + anchor_id)
281 elements = sel(doc)
286 elements = sel(doc)
282 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
287 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
283
288
284 def _ensure_url_equal(self, found, expected):
289 def _ensure_url_equal(self, found, expected):
285 assert _Url(found) == _Url(expected)
290 assert _Url(found) == _Url(expected)
286
291
287 def get_element(self, css_selector):
292 def get_element(self, css_selector):
288 elements = self._get_elements(css_selector)
293 elements = self._get_elements(css_selector)
289 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
294 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
290 return elements[0]
295 return elements[0]
291
296
292 def get_elements(self, css_selector):
297 def get_elements(self, css_selector):
293 return self._get_elements(css_selector)
298 return self._get_elements(css_selector)
294
299
295 def _get_elements(self, css_selector):
300 def _get_elements(self, css_selector):
296 fromstring, tostring, CSSSelector = self.get_imports()
301 fromstring, tostring, CSSSelector = self.get_imports()
297 doc = fromstring(self.response.body)
302 doc = fromstring(self.response.body)
298 sel = CSSSelector(css_selector)
303 sel = CSSSelector(css_selector)
299 elements = sel(doc)
304 elements = sel(doc)
300 return elements
305 return elements
301
306
302 def _element_to_string(self, element):
307 def _element_to_string(self, element):
303 fromstring, tostring, CSSSelector = self.get_imports()
308 fromstring, tostring, CSSSelector = self.get_imports()
304 return tostring(element)
309 return tostring(element)
305
310
306
311
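A usage sketch for `AssertResponse`, typically obtained via `CustomTestResponse.assert_response()`; the selectors and link target are illustrative:

    assert_response = response.assert_response()  # `response` is a CustomTestResponse
    assert_response.one_element_exists('#quick_login')
    assert_response.no_element_exists('.alert-danger')
    assert_response.contains_one_link('Admin', '/_admin')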
307 class _Url(object):
312 class _Url(object):
308 """
313 """
309 A url object that can be compared with other url objects
314 A url object that can be compared with other url objects
310 without regard to the vagaries of encoding, escaping, and ordering
315 without regard to the vagaries of encoding, escaping, and ordering
311 of parameters in query strings.
316 of parameters in query strings.
312
317
313 Inspired by
318 Inspired by
314 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
319 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
315 """
320 """
316
321
317 def __init__(self, url):
322 def __init__(self, url):
318 parts = urllib.parse.urlparse(url)
323 parts = urllib.parse.urlparse(url)
319 _query = frozenset(urllib.parse.parse_qsl(parts.query))
324 _query = frozenset(urllib.parse.parse_qsl(parts.query))
320 _path = unquote_plus(parts.path)
325 _path = unquote_plus(parts.path)
321 parts = parts._replace(query=_query, path=_path)
326 parts = parts._replace(query=_query, path=_path)
322 self.parts = parts
327 self.parts = parts
323
328
324 def __eq__(self, other):
329 def __eq__(self, other):
325 return self.parts == other.parts
330 return self.parts == other.parts
326
331
327 def __hash__(self):
332 def __hash__(self):
328 return hash(self.parts)
333 return hash(self.parts)
329
334
330
335
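As a minimal sketch of the comparison semantics described in the docstring above (the URLs are hypothetical), two links that differ only in query-parameter order and path escaping compare equal:

# assumes _Url and the urllib imports from this module are available
assert _Url('http://example.com/a%20b?x=1&y=2') == _Url('http://example.com/a+b?y=2&x=1')
assert _Url('http://example.com/?x=1') != _Url('http://example.com/?x=2')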
331 def run_test_concurrently(times, raise_catched_exc=True):
336 def run_test_concurrently(times, raise_catched_exc=True):
332 """
337 """
333 Add this decorator to small pieces of code that you want to test
338 Add this decorator to small pieces of code that you want to test
334 concurrently
339 concurrently
335
340
336 ex:
341 ex:
337
342
338 @run_test_concurrently(25)
343 @run_test_concurrently(25)
339 def my_test_function():
344 def my_test_function():
340 ...
345 ...
341 """
346 """
342 def test_concurrently_decorator(test_func):
347 def test_concurrently_decorator(test_func):
343 def wrapper(*args, **kwargs):
348 def wrapper(*args, **kwargs):
344 exceptions = []
349 exceptions = []
345
350
346 def call_test_func():
351 def call_test_func():
347 try:
352 try:
348 test_func(*args, **kwargs)
353 test_func(*args, **kwargs)
349 except Exception as e:
354 except Exception as e:
350 exceptions.append(e)
355 exceptions.append(e)
351 if raise_catched_exc:
356 if raise_catched_exc:
352 raise
357 raise
353 threads = []
358 threads = []
354 for i in range(times):
359 for i in range(times):
355 threads.append(threading.Thread(target=call_test_func))
360 threads.append(threading.Thread(target=call_test_func))
356 for t in threads:
361 for t in threads:
357 t.start()
362 t.start()
358 for t in threads:
363 for t in threads:
359 t.join()
364 t.join()
360 if exceptions:
365 if exceptions:
361 raise Exception(
366 raise Exception(
362 'run_test_concurrently intercepted %s exceptions: %s' % (
367 'run_test_concurrently intercepted %s exceptions: %s' % (
363 len(exceptions), exceptions))
368 len(exceptions), exceptions))
364 return wrapper
369 return wrapper
365 return test_concurrently_decorator
370 return test_concurrently_decorator
366
371
367
372
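A sketch of how the decorator might be used in a test; the test name and the shared list are hypothetical:

results = []

@run_test_concurrently(5)
def test_append_runs_in_every_thread():
    # each of the 5 threads runs this body; exceptions are collected
    # and re-raised as one summary exception after all threads join
    results.append(1)

test_append_runs_in_every_thread()
assert len(results) == 5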
368 def wait_for_url(url, timeout=10):
373 def wait_for_url(url, timeout=10):
369 """
374 """
370 Wait until URL becomes reachable.
375 Wait until URL becomes reachable.
371
376
372 It polls the URL until the timeout is reached or it becomes reachable.
377 It polls the URL until the timeout is reached or it becomes reachable.
373 It will call `pytest.fail` if the URL is not reachable within the timeout.
378 It will call `pytest.fail` if the URL is not reachable within the timeout.
374 """
379 """
375 timeout = time.time() + timeout
380 timeout = time.time() + timeout
376 last = 0
381 last = 0
377 wait = 0.1
382 wait = 0.1
378
383
379 while timeout > last:
384 while timeout > last:
380 last = time.time()
385 last = time.time()
381 if is_url_reachable(url):
386 if is_url_reachable(url):
382 break
387 break
383 elif (last + wait) > time.time():
388 elif (last + wait) > time.time():
384 # Go to sleep because not enough time has passed since last check.
389 # Go to sleep because not enough time has passed since last check.
385 time.sleep(wait)
390 time.sleep(wait)
386 else:
391 else:
387 pytest.fail("Timeout while waiting for URL {}".format(url))
392 pytest.fail("Timeout while waiting for URL {}".format(url))
388
393
389
394
390 def is_url_reachable(url):
395 def is_url_reachable(url):
391 try:
396 try:
392 urllib.request.urlopen(url)
397 urllib.request.urlopen(url)
393 except urllib.error.URLError:
398 except urllib.error.URLError:
394 log.exception('URL `{}` is not reachable'.format(url))
399 log.exception('URL `{}` is not reachable'.format(url))
395 return False
400 return False
396 return True
401 return True
397
402
398
403
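For context, a hedged example of combining the two helpers when a test needs a live HTTP endpoint; the URL is hypothetical:

# poll a locally started test server; pytest.fail() is triggered
# if it does not come up within 30 seconds
server_url = 'http://127.0.0.1:10020/_admin/ops/ping'  # hypothetical endpoint
wait_for_url(server_url, timeout=30)
assert is_url_reachable(server_url)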
399 def repo_on_filesystem(repo_name):
404 def repo_on_filesystem(repo_name):
400 from rhodecode.lib import vcs
405 from rhodecode.lib import vcs
401 from rhodecode.tests import TESTS_TMP_PATH
406 from rhodecode.tests import TESTS_TMP_PATH
402 repo = vcs.get_vcs_instance(
407 repo = vcs.get_vcs_instance(
403 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
408 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
404 return repo is not None
409 return repo is not None
405
410
406
411
407 def commit_change(
412 def commit_change(
408 repo, filename, content, message, vcs_type, parent=None, newfile=False):
413 repo, filename, content, message, vcs_type, parent=None, newfile=False):
409 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
414 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
410
415
411 repo = Repository.get_by_repo_name(repo)
416 repo = Repository.get_by_repo_name(repo)
412 _commit = parent
417 _commit = parent
413 if not parent:
418 if not parent:
414 _commit = EmptyCommit(alias=vcs_type)
419 _commit = EmptyCommit(alias=vcs_type)
415
420
416 if newfile:
421 if newfile:
417 nodes = {
422 nodes = {
418 filename: {
423 filename: {
419 'content': content
424 'content': content
420 }
425 }
421 }
426 }
422 commit = ScmModel().create_nodes(
427 commit = ScmModel().create_nodes(
423 user=TEST_USER_ADMIN_LOGIN, repo=repo,
428 user=TEST_USER_ADMIN_LOGIN, repo=repo,
424 message=message,
429 message=message,
425 nodes=nodes,
430 nodes=nodes,
426 parent_commit=_commit,
431 parent_commit=_commit,
427 author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN),
432 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
428 )
433 )
429 else:
434 else:
430 commit = ScmModel().commit_change(
435 commit = ScmModel().commit_change(
431 repo=repo.scm_instance(), repo_name=repo.repo_name,
436 repo=repo.scm_instance(), repo_name=repo.repo_name,
432 commit=parent, user=TEST_USER_ADMIN_LOGIN,
437 commit=parent, user=TEST_USER_ADMIN_LOGIN,
433 author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN),
438 author=f'{TEST_USER_ADMIN_LOGIN} <admin@rhodecode.com>',
434 message=message,
439 message=message,
435 content=content,
440 content=content,
436 f_path=filename
441 f_path=filename
437 )
442 )
438 return commit
443 return commit
439
444
440
445
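A hedged usage sketch of `commit_change` above; the repository name, file, and content are illustrative, and the repository must already exist in the test environment:

commit = commit_change(
    repo='vcs_test_git',            # hypothetical existing test repository
    filename='docs/example.txt',
    content='example content\n',    # illustrative; real tests may pass bytes
    message='add example file',
    vcs_type='git',
    newfile=True)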
441 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
446 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
442 if not default:
447 if not default:
443 raise ValueError('Permission for default user must be given')
448 raise ValueError('Permission for default user must be given')
444 form_data = [(
449 form_data = [(
445 'csrf_token', csrf_token
450 'csrf_token', csrf_token
446 )]
451 )]
447 # add default
452 # add default
448 form_data.extend([
453 form_data.extend([
449 ('u_perm_1', default)
454 ('u_perm_1', default)
450 ])
455 ])
451
456
452 if grant:
457 if grant:
453 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
458 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
454 form_data.extend([
459 form_data.extend([
455 ('perm_new_member_perm_new{}'.format(cnt), perm),
460 ('perm_new_member_perm_new{}'.format(cnt), perm),
456 ('perm_new_member_id_new{}'.format(cnt), obj_id),
461 ('perm_new_member_id_new{}'.format(cnt), obj_id),
457 ('perm_new_member_name_new{}'.format(cnt), obj_name),
462 ('perm_new_member_name_new{}'.format(cnt), obj_name),
458 ('perm_new_member_type_new{}'.format(cnt), obj_type),
463 ('perm_new_member_type_new{}'.format(cnt), obj_type),
459
464
460 ])
465 ])
461 if revoke:
466 if revoke:
462 for obj_id, obj_type in revoke:
467 for obj_id, obj_type in revoke:
463 form_data.extend([
468 form_data.extend([
464 ('perm_del_member_id_{}'.format(obj_id), obj_id),
469 ('perm_del_member_id_{}'.format(obj_id), obj_id),
465 ('perm_del_member_type_{}'.format(obj_id), obj_type),
470 ('perm_del_member_type_{}'.format(obj_id), obj_type),
466 ])
471 ])
467 return form_data
472 return form_data
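To make the field-naming scheme concrete, a worked example of the form data this helper produces; the token, ids, and permission names are illustrative:

form_data = permission_update_data_generator(
    csrf_token='abcd1234',
    default='repository.read',
    grant=[(5, 'repository.write', 'some-user', 'user')],
    revoke=[(7, 'user_group')])
# yields, in order:
#   ('csrf_token', 'abcd1234'), ('u_perm_1', 'repository.read'),
#   ('perm_new_member_perm_new1', 'repository.write'),
#   ('perm_new_member_id_new1', 5),
#   ('perm_new_member_name_new1', 'some-user'),
#   ('perm_new_member_type_new1', 'user'),
#   ('perm_del_member_id_7', 7), ('perm_del_member_type_7', 'user_group')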
@@ -1,208 +1,204 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 # Import early to make sure things are patched up properly
21 # Import early to make sure things are patched up properly
22 from setuptools import setup, find_packages
22 from setuptools import setup, find_packages
23
23
24 import os
24 import os
25 import re
25 import re
26 import sys
26 import sys
27 import pkgutil
27 import pkgutil
28 import platform
28 import platform
29 import codecs
29 import codecs
30
30
31 import pip
31 import pip
32
32
33 pip_major_version = int(pip.__version__.split(".")[0])
33 pip_major_version = int(pip.__version__.split(".")[0])
34 if pip_major_version >= 20:
34 if pip_major_version >= 20:
35 from pip._internal.req import parse_requirements
35 from pip._internal.req import parse_requirements
36 from pip._internal.network.session import PipSession
36 from pip._internal.network.session import PipSession
37 elif pip_major_version >= 10:
37 elif pip_major_version >= 10:
38 from pip._internal.req import parse_requirements
38 from pip._internal.req import parse_requirements
39 from pip._internal.download import PipSession
39 from pip._internal.download import PipSession
40 else:
40 else:
41 from pip.req import parse_requirements
41 from pip.req import parse_requirements
42 from pip.download import PipSession
42 from pip.download import PipSession
43
43
44
44
45 def get_package_name(req_object):
45 def get_package_name(req_object):
46 package_name = None
46 package_name = None
47 try:
47 try:
48 from pip._internal.req.constructors import install_req_from_parsed_requirement
48 from pip._internal.req.constructors import install_req_from_parsed_requirement
49 except ImportError:
49 except ImportError:
50 install_req_from_parsed_requirement = None
50 install_req_from_parsed_requirement = None
51
51
52 # In pip 20.1, the requirements object changed
52 # In pip 20.1, the requirements object changed
53 if hasattr(req_object, 'req'):
53 if hasattr(req_object, 'req'):
54 package_name = req_object.req.name
54 package_name = req_object.req.name
55
55
56 if package_name is None:
56 if package_name is None:
57 if install_req_from_parsed_requirement:
57 if install_req_from_parsed_requirement:
58 package = install_req_from_parsed_requirement(req_object)
58 package = install_req_from_parsed_requirement(req_object)
59 package_name = package.req.name
59 package_name = package.req.name
60
60
61 if package_name is None:
61 if package_name is None:
62 # fallback for older pip
62 # fallback for older pip
63 package_name = re.split('===|<=|!=|==|>=|~=|<|>', req_object.requirement)[0]
63 package_name = re.split('===|<=|!=|==|>=|~=|<|>', req_object.requirement)[0]
64
64
65 return package_name
65 return package_name
66
66
67
67
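A quick illustration of the regex fallback in `get_package_name`; the requirement strings are hypothetical:

import re

# the fallback strips any version specifier and keeps only the package name
assert re.split('===|<=|!=|==|>=|~=|<|>', 'requests>=2.25.0')[0] == 'requests'
assert re.split('===|<=|!=|==|>=|~=|<|>', 'simplejson')[0] == 'simplejson'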
68 if sys.version_info < (3, 10):
68 if sys.version_info < (3, 10):
69 raise Exception('RhodeCode requires Python 3.10 or later')
69 raise Exception('RhodeCode requires Python 3.10 or later')
70
70
71 here = os.path.abspath(os.path.dirname(__file__))
71 here = os.path.abspath(os.path.dirname(__file__))
72
72
73 # defines current platform
73 # defines current platform
74 __platform__ = platform.system()
74 __platform__ = platform.system()
75 __license__ = 'AGPLv3, and Commercial License'
75 __license__ = 'AGPLv3, and Commercial License'
76 __author__ = 'RhodeCode GmbH'
76 __author__ = 'RhodeCode GmbH'
77 __url__ = 'https://code.rhodecode.com'
77 __url__ = 'https://code.rhodecode.com'
78 is_windows = __platform__ in ('Windows',)
78 is_windows = __platform__ in ('Windows',)
79
79
80
80
81 def _get_requirements(req_filename, exclude=None, extras=None):
81 def _get_requirements(req_filename, exclude=None, extras=None):
82 extras = extras or []
82 extras = extras or []
83 exclude = exclude or []
83 exclude = exclude or []
84
84
85 try:
85 try:
86 parsed = parse_requirements(
86 parsed = parse_requirements(
87 os.path.join(here, req_filename), session=PipSession())
87 os.path.join(here, req_filename), session=PipSession())
88 except TypeError:
88 except TypeError:
89 # fall back for pip < 6.0.0, which doesn't support the session argument
89 # fall back for pip < 6.0.0, which doesn't support the session argument
90 parsed = parse_requirements(os.path.join(here, req_filename))
90 parsed = parse_requirements(os.path.join(here, req_filename))
91
91
92 requirements = []
92 requirements = []
93 for int_req in parsed:
93 for int_req in parsed:
94 req_name = get_package_name(int_req)
94 req_name = get_package_name(int_req)
95 if req_name not in exclude:
95 if req_name not in exclude:
96 requirements.append(req_name)
96 requirements.append(req_name)
97 return requirements + extras
97 return requirements + extras
98
98
99
99
100 # extract requirements
100 # extract requirements
101 setup_requirements = ['PasteScript']
101 setup_requirements = ['PasteScript']
102 install_requirements = _get_requirements(
102 install_requirements = _get_requirements(
103 'requirements.txt', exclude=['setuptools', 'entrypoints'])
103 'requirements.txt', exclude=['setuptools', 'entrypoints'])
104 test_requirements = _get_requirements(
104 test_requirements = _get_requirements(
105 'requirements_test.txt')
105 'requirements_test.txt')
106
106
107
107
108 def get_version():
108 def get_version():
109 version = pkgutil.get_data('rhodecode', 'VERSION')
109 version = pkgutil.get_data('rhodecode', 'VERSION')
110 return version.decode().strip()
110 return version.decode().strip()
111
111
112
112
113 # additional files that go into the package itself
113 # additional files that go into the package itself
114 package_data = {
114 package_data = {
115 '': ['*.txt', '*.rst'],
115 '': ['*.txt', '*.rst'],
116 'configs': ['*.ini'],
116 'configs': ['*.ini'],
117 'rhodecode': ['VERSION', 'i18n/*/LC_MESSAGES/*.mo', ],
117 'rhodecode': ['VERSION', 'i18n/*/LC_MESSAGES/*.mo', ],
118 }
118 }
119
119
120 description = 'Source Code Management Platform'
120 description = 'Source Code Management Platform'
121 keywords = ' '.join([
121 keywords = ' '.join([
122 'rhodecode', 'mercurial', 'git', 'svn',
122 'rhodecode', 'mercurial', 'git', 'svn',
123 'code review',
123 'code review',
124 'repo groups', 'ldap', 'repository management', 'hgweb',
124 'repo groups', 'ldap', 'repository management', 'hgweb',
125 'hgwebdir', 'gitweb', 'serving hgweb',
125 'hgwebdir', 'gitweb', 'serving hgweb',
126 ])
126 ])
127
127
128
128
129 # README/DESCRIPTION generation
129 # README/DESCRIPTION generation
130 readme_file = 'README.rst'
130 readme_file = 'README.rst'
131 changelog_file = 'CHANGES.rst'
131 changelog_file = 'CHANGES.rst'
132 try:
132 try:
133 long_description = codecs.open(readme_file).read() + '\n\n' + \
133 long_description = codecs.open(readme_file).read() + '\n\n' + \
134 codecs.open(changelog_file).read()
134 codecs.open(changelog_file).read()
135 except IOError as err:
135 except IOError as err:
136 sys.stderr.write(
136 sys.stderr.write(
137 "[WARNING] Cannot find file specified as long_description (%s)\n "
137 "[WARNING] Cannot find file specified as long_description (%s)\n "
138 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
138 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
139 long_description = description
139 long_description = description
140
140
141
141
142 setup(
142 setup(
143 name='rhodecode-enterprise-ce',
143 name='rhodecode-enterprise-ce',
144 version=get_version(),
144 version=get_version(),
145 description=description,
145 description=description,
146 long_description=long_description,
146 long_description=long_description,
147 keywords=keywords,
147 keywords=keywords,
148 license=__license__,
148 license=__license__,
149 author=__author__,
149 author=__author__,
150 author_email='support@rhodecode.com',
150 author_email='support@rhodecode.com',
151 url=__url__,
151 url=__url__,
152 setup_requires=setup_requirements,
152 setup_requires=setup_requirements,
153 install_requires=install_requirements,
153 install_requires=install_requirements,
154 tests_require=test_requirements,
154 tests_require=test_requirements,
155 zip_safe=False,
155 zip_safe=False,
156 packages=find_packages(exclude=["docs", "tests*"]),
156 packages=find_packages(exclude=["docs", "tests*"]),
157 package_data=package_data,
157 package_data=package_data,
158 include_package_data=True,
158 include_package_data=True,
159 classifiers=[
159 classifiers=[
160 'Development Status :: 6 - Mature',
160 'Development Status :: 6 - Mature',
161 'Environment :: Web Environment',
161 'Environment :: Web Environment',
162 'Intended Audience :: Developers',
162 'Intended Audience :: Developers',
163 'Operating System :: OS Independent',
163 'Operating System :: OS Independent',
164 'Topic :: Software Development :: Version Control',
164 'Topic :: Software Development :: Version Control',
165 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
165 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
166 'Programming Language :: Python :: 3.10',
166 'Programming Language :: Python :: 3.10',
167 ],
167 ],
168 message_extractors={
168 message_extractors={
169 'rhodecode': [
169 'rhodecode': [
170 ('**.py', 'python', None),
170 ('**.py', 'python', None),
171 ('**.js', 'javascript', None),
171 ('**.js', 'javascript', None),
172 ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
172 ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
173 ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
173 ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}),
174 ('public/**', 'ignore', None),
174 ('public/**', 'ignore', None),
175 ]
175 ]
176 },
176 },
177 paster_plugins=['PasteScript'],
177 paster_plugins=['PasteScript'],
178 entry_points={
178 entry_points={
179 'paste.app_factory': [
179 'paste.app_factory': [
180 'main=rhodecode.config.middleware:make_pyramid_app',
180 'main=rhodecode.config.middleware:make_pyramid_app',
181 ],
181 ],
182 'paste.global_paster_command': [
182 'paste.global_paster_command': [
183 'ishell=rhodecode.lib.paster_commands.ishell:Command',
183 'ishell=rhodecode.lib.paster_commands.ishell:Command',
184 'upgrade-db=rhodecode.lib.paster_commands.upgrade_db:UpgradeDb',
184 'upgrade-db=rhodecode.lib.paster_commands.upgrade_db:UpgradeDb',
185
185
186 'setup-rhodecode=rhodecode.lib.paster_commands.deprecated.setup_rhodecode:Command',
186 'setup-rhodecode=rhodecode.lib.paster_commands.deprecated.setup_rhodecode:Command',
187 'celeryd=rhodecode.lib.paster_commands.deprecated.celeryd:Command',
187 'celeryd=rhodecode.lib.paster_commands.deprecated.celeryd:Command',
188 ],
188 ],
189 'pyramid.pshell_runner': [
189 'pyramid.pshell_runner': [
190 'ipython = rhodecode.lib.pyramid_shell:ipython_shell_runner',
190 'ipython = rhodecode.lib.pyramid_shell:ipython_shell_runner',
191 ],
191 ],
192 'pytest11': [
193 'pylons=rhodecode.tests.pylons_plugin',
194 'enterprise=rhodecode.tests.plugin',
195 ],
196 'console_scripts': [
192 'console_scripts': [
197 'rc-setup-app=rhodecode.lib.rc_commands.setup_rc:main',
193 'rc-setup-app=rhodecode.lib.rc_commands.setup_rc:main',
198 'rc-upgrade-db=rhodecode.lib.rc_commands.upgrade_db:main',
194 'rc-upgrade-db=rhodecode.lib.rc_commands.upgrade_db:main',
199 'rc-ishell=rhodecode.lib.rc_commands.ishell:main',
195 'rc-ishell=rhodecode.lib.rc_commands.ishell:main',
200 'rc-add-artifact=rhodecode.lib.rc_commands.add_artifact:main',
196 'rc-add-artifact=rhodecode.lib.rc_commands.add_artifact:main',
201 'rc-ssh-wrapper=rhodecode.apps.ssh_support.lib.ssh_wrapper:main',
197 'rc-ssh-wrapper=rhodecode.apps.ssh_support.lib.ssh_wrapper:main',
202 ],
198 ],
203 'beaker.backends': [
199 'beaker.backends': [
204 'memorylru_base=rhodecode.lib.memory_lru_dict:MemoryLRUNamespaceManagerBase',
200 'memorylru_base=rhodecode.lib.memory_lru_dict:MemoryLRUNamespaceManagerBase',
205 'memorylru_debug=rhodecode.lib.memory_lru_dict:MemoryLRUNamespaceManagerDebug'
201 'memorylru_debug=rhodecode.lib.memory_lru_dict:MemoryLRUNamespaceManagerDebug'
206 ]
202 ]
207 },
203 },
208 )
204 )
NO CONTENT: file was removed