pytest: added db_connection fixture.
marcink
r2372:2173e0ba default
@@ -1,1851 +1,1858 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import collections
import datetime
import hashlib
import os
import re
import pprint
import shutil
import socket
import subprocess32
import time
import uuid
import dateutil.tz
import functools

import mock
import pyramid.testing
import pytest
import colander
import requests
import pyramid.paster

import rhodecode
from rhodecode.lib.utils2 import AttributeDict
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.comment import CommentsModel
from rhodecode.model.db import (
    PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
    UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
from rhodecode.model.meta import Session
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.model.repo import RepoModel
from rhodecode.model.repo_group import RepoGroupModel
from rhodecode.model.user import UserModel
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.model.user_group import UserGroupModel
from rhodecode.model.integration import IntegrationModel
from rhodecode.integrations import integration_type_registry
from rhodecode.integrations.types.base import IntegrationTypeBase
from rhodecode.lib.utils import repo2db_mapper
from rhodecode.lib.vcs import create_vcsserver_proxy
from rhodecode.lib.vcs.backends import get_backend
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.tests import (
    login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
    TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
    TEST_USER_REGULAR_PASS)
from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
from rhodecode.tests.fixture import Fixture
from rhodecode.config import utils as config_utils

def _split_comma(value):
    return value.split(',')


def pytest_addoption(parser):
    parser.addoption(
        '--keep-tmp-path', action='store_true',
        help="Keep the test temporary directories")
    parser.addoption(
        '--backends', action='store', type=_split_comma,
        default=['git', 'hg', 'svn'],
        help="Select which backends to test for backend specific tests.")
    parser.addoption(
        '--dbs', action='store', type=_split_comma,
        default=['sqlite'],
        help="Select which database to test for database specific tests. "
             "Possible options are sqlite,postgres,mysql")
    parser.addoption(
        '--appenlight', '--ae', action='store_true',
        help="Track statistics in appenlight.")
    parser.addoption(
        '--appenlight-api-key', '--ae-key',
        help="API key for Appenlight.")
    parser.addoption(
        '--appenlight-url', '--ae-url',
        default="https://ae.rhodecode.com",
        help="Appenlight service URL, defaults to https://ae.rhodecode.com")
    parser.addoption(
        '--sqlite-connection-string', action='store',
        default='', help="Connection string for the dbs tests with SQLite")
    parser.addoption(
        '--postgres-connection-string', action='store',
        default='', help="Connection string for the dbs tests with Postgres")
    parser.addoption(
        '--mysql-connection-string', action='store',
        default='', help="Connection string for the dbs tests with MySQL")
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")
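
# Example (illustrative, not part of the original file): the options registered
# above drive the fixtures below, so a run narrowed to specific backends and
# databases could look roughly like
#
#   py.test --backends=git,hg --dbs=sqlite --keep-tmp-path rhodecode/tests/
#
# The exact invocation and test path are assumptions; only the option names
# come from pytest_addoption above.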


def pytest_configure(config):
    from rhodecode.config import patches


def pytest_collection_modifyitems(session, config, items):
    # nottest marked, compare nose, used for transition from nose to pytest
    remaining = [
        i for i in items if getattr(i.obj, '__test__', True)]
    items[:] = remaining


def pytest_generate_tests(metafunc):
    # Support test generation based on --backend parameter
    if 'backend_alias' in metafunc.fixturenames:
        backends = get_backends_from_metafunc(metafunc)
        scope = None
        if not backends:
            pytest.skip("Not enabled for any of selected backends")
        metafunc.parametrize('backend_alias', backends, scope=scope)
    elif hasattr(metafunc.function, 'backends'):
        backends = get_backends_from_metafunc(metafunc)
        if not backends:
            pytest.skip("Not enabled for any of selected backends")


def get_backends_from_metafunc(metafunc):
    requested_backends = set(metafunc.config.getoption('--backends'))
    if hasattr(metafunc.function, 'backends'):
        # Supported backends by this test function, created from
        # pytest.mark.backends
        backends = metafunc.function.backends.args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # Support class attribute "backend_alias", this is mainly
        # for legacy reasons for tests not yet using pytest.mark.backends
        backends = [metafunc.cls.backend_alias]
    else:
        backends = metafunc.config.getoption('--backends')
    return requested_backends.intersection(backends)
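
# Example (illustrative, not part of the original file): a test opts into
# specific backends with the marker consumed by pytest_generate_tests and
# get_backends_from_metafunc above, e.g.
#
#   @pytest.mark.backends("git", "hg")
#   def test_something(backend_alias):
#       assert backend_alias in ("git", "hg")
#
# The test name and body are assumptions; the marker and the `backend_alias`
# fixture name come from the code above.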


@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in an example rcextensions module which verifies passed in kwargs.
    """
    from rhodecode.tests.other import example_rcextensions

    old_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = example_rcextensions

    @request.addfinalizer
    def cleanup():
        rhodecode.EXTENSIONS = old_extensions


@pytest.fixture
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.
    """
    calls = rhodecode.EXTENSIONS.calls
    calls.clear()
    # Note: At this moment, it is still the empty dict, but that will
    # be filled during the test run and since it is a reference this
    # is enough to make it work.
    return calls


@pytest.fixture(scope='session')
def http_environ_session():
    """
    Allow to use "http_environ" in session scope.
    """
    return http_environ(
        http_host_stub=http_host_stub())


@pytest.fixture
def http_host_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    return 'example.com:80'


@pytest.fixture
def http_host_only_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    return http_host_stub().split(':')[0]


@pytest.fixture
def http_environ(http_host_stub):
    """
    HTTP extra environ keys.

    User by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return {
        'SERVER_NAME': http_host_only_stub(),
        'SERVER_PORT': http_host_stub.split(':')[1],
        'HTTP_HOST': http_host_stub,
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET'
    }


@pytest.fixture(scope='session')
def baseapp(ini_config, vcsserver, http_environ_session):
    from rhodecode.lib.pyramid_utils import get_app_config
    from rhodecode.config.middleware import make_pyramid_app

    print("Using the RhodeCode configuration:{}".format(ini_config))
    pyramid.paster.setup_logging(ini_config)

    settings = get_app_config(ini_config)
    app = make_pyramid_app({'__file__': ini_config}, **settings)

    return app


@pytest.fixture(scope='function')
def app(request, config_stub, baseapp, http_environ):
    app = CustomTestApp(
        baseapp,
        extra_environ=http_environ)
    if request.cls:
        request.cls.app = app
    return app


@pytest.fixture(scope='session')
def app_settings(baseapp, ini_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    return baseapp.config.get_settings()


@pytest.fixture(scope='session')
def db_connection(ini_settings):
    # Initialize the database connection.
    config_utils.initialize_database(ini_settings)
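
# Example (illustrative, not part of the original changeset): a test that only
# needs an initialized database layer, rather than the full `baseapp`, could
# request the new session-scoped fixture directly, e.g.
#
#   def test_queries_the_db(db_connection):
#       ...
#
# The test name is an assumption; `db_connection` simply ensures
# config_utils.initialize_database() has run for the test session.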


LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))


def _autologin_user(app, *args):
    session = login_user_session(app, *args)
    csrf_token = rhodecode.lib.auth.get_csrf_token(session)
    return LoginData(csrf_token, session['rhodecode_user'])


@pytest.fixture
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in
    """
    return _autologin_user(app)


@pytest.fixture
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)


@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    return autologin_user.csrf_token


@pytest.fixture(scope='function')
def xhr_header(request):
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}


@pytest.fixture
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)


@pytest.fixture(scope='class')
def index_location(request, baseapp):
    index_location = baseapp.config.get_settings()['search.location']
    if request.cls:
        request.cls.index_location = index_location
    return index_location


@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    if not request.config.getoption('--keep-tmp-path'):
        @request.addfinalizer
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)

    return TESTS_TMP_PATH


@pytest.fixture
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(repogroupid)

    def _cleanup():
        fixture.destroy_repo_group(repogroupid)

    request.addfinalizer(_cleanup)
    return repo_group


@pytest.fixture
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(usergroupid)

    def _cleanup():
        fixture.destroy_user_group(user_group)

    request.addfinalizer(_cleanup)
    return user_group


@pytest.fixture(scope='session')
def test_repo(request):
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container


class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestContainer()
        repo = test_repo('minimal', 'svn')

    """

    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias, config)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias, config):
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend_class = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)

        vcs_repo = backend_class(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)


@pytest.fixture
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = Backend(
        alias=backend_alias,
        repo_name=repo_name,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
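
# Example (illustrative, not part of the original file) of the parametrized
# `backend` fixture; the test name is an assumption, while `create_repo` and
# `commit_ids` are provided by the Backend helper class defined below:
#
#   def test_repo_gets_commits(backend):
#       repo = backend.create_repo(number_of_commits=2)
#       assert len(backend.commit_ids) == 2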


@pytest.fixture
def backend_git(request, baseapp, test_repo):
    return backend(request, 'git', baseapp, test_repo)


@pytest.fixture
def backend_hg(request, baseapp, test_repo):
    return backend(request, 'hg', baseapp, test_repo)


@pytest.fixture
def backend_svn(request, baseapp, test_repo):
    return backend(request, 'svn', baseapp, test_repo)


@pytest.fixture
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend.

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git


@pytest.fixture
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git


@pytest.fixture
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    return backend_stub.create_repo()


class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    _master_repo = None
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
            names which shall be pulled in from the master repository.

        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in xrange(number_of_commits)]
        self._add_commits_to_repo(repo.scm_instance(), commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name),
            len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])


@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    repo_path = os.path.join(tests_tmp_path, repo_name)
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=repo_path,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
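
# Example (illustrative, not part of the original file): `vcsbackend` exercises
# the plain vcs layer, without database models; the test name is an assumption:
#
#   def test_low_level_repo(vcsbackend):
#       vcs_repo = vcsbackend.create_repo(number_of_commits=1)
#       assert vcs_repo.commit_ids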


@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend(request, 'git', tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend(request, 'hg', tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend(request, 'svn', tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_random(vcsbackend_git):
    """
    Use this to express that your tests need "a vcsbackend".

    The fixture `vcsbackend` would run the test multiple times for each
    available vcs backend which is a pure waste of time if the test is
    independent of the vcs backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return vcsbackend_git


@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git


class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)


def _add_commits_to_repo(vcs_repo, commits):
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        message = unicode(commit.get('message', 'Commit %s' % idx))

        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=unicode(commit.get('author', 'Automatic')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids
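
# Example (illustrative, not part of the original file) of the commit
# descriptions accepted by _add_commits_to_repo above and by the
# `create_repo(commits=...)` helpers; file names and contents are assumptions:
#
#   commits = [
#       {'message': 'Add readme',
#        'added': [FileNode('README.rst', content='docs\n')]},
#       {'message': 'Update readme',
#        'changed': [FileNode('README.rst', content='docs v2\n')]},
#   ]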


@pytest.fixture
def reposerver(request):
    """
    Allows to serve a backend repository
    """

    repo_server = RepoServer()
    request.addfinalizer(repo_server.cleanup)
    return repo_server


class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        for proc in self._cleanup_servers:
            proc.terminate()


@pytest.fixture
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """

    util = PRTestUtility(backend)
    request.addfinalizer(util.cleanup)

    return util
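
# Example (illustrative, not part of the original file): a functional test can
# ask `pr_util` for a ready-made pull request; the test name is an assumption
# and the returned pull request is assumed from the helper's behaviour:
#
#   def test_pr_flow(pr_util):
#       pull_request = pr_util.create_pull_request(mergeable=True)
#       # ... exercise PullRequestModel against `pull_request` here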


class PRTestUtility(object):

    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
960 }
967 }
961 self.pull_request = model.create(**self.create_parameters)
968 self.pull_request = model.create(**self.create_parameters)
962 assert model.get_versions(self.pull_request) == []
969 assert model.get_versions(self.pull_request) == []
963
970
964 self.pull_request_id = self.pull_request.pull_request_id
971 self.pull_request_id = self.pull_request.pull_request_id
965
972
966 if approved:
973 if approved:
967 self.approve()
974 self.approve()
968
975
969 Session().add(self.pull_request)
976 Session().add(self.pull_request)
970 Session().commit()
977 Session().commit()
971
978
972 return self.pull_request
979 return self.pull_request
973
980
974 def approve(self):
981 def approve(self):
975 self.create_status_votes(
982 self.create_status_votes(
976 ChangesetStatus.STATUS_APPROVED,
983 ChangesetStatus.STATUS_APPROVED,
977 *self.pull_request.reviewers)
984 *self.pull_request.reviewers)
978
985
979 def close(self):
986 def close(self):
980 PullRequestModel().close_pull_request(self.pull_request, self.author)
987 PullRequestModel().close_pull_request(self.pull_request, self.author)
981
988
982 def _default_branch_reference(self, commit_message):
989 def _default_branch_reference(self, commit_message):
983 reference = '%s:%s:%s' % (
990 reference = '%s:%s:%s' % (
984 'branch',
991 'branch',
985 self.backend.default_branch_name,
992 self.backend.default_branch_name,
986 self.commit_ids[commit_message])
993 self.commit_ids[commit_message])
987 return reference
994 return reference
988
995
989 def _get_reviewers(self):
996 def _get_reviewers(self):
990 return [
997 return [
991 (TEST_USER_REGULAR_LOGIN, ['default1'], False),
998 (TEST_USER_REGULAR_LOGIN, ['default1'], False),
992 (TEST_USER_REGULAR2_LOGIN, ['default2'], False),
999 (TEST_USER_REGULAR2_LOGIN, ['default2'], False),
993 ]
1000 ]
994
1001
995 def update_source_repository(self, head=None):
1002 def update_source_repository(self, head=None):
996 heads = [head or 'c3']
1003 heads = [head or 'c3']
997 self.backend.pull_heads(self.source_repository, heads=heads)
1004 self.backend.pull_heads(self.source_repository, heads=heads)
998
1005
999 def add_one_commit(self, head=None):
1006 def add_one_commit(self, head=None):
1000 self.update_source_repository(head=head)
1007 self.update_source_repository(head=head)
1001 old_commit_ids = set(self.pull_request.revisions)
1008 old_commit_ids = set(self.pull_request.revisions)
1002 PullRequestModel().update_commits(self.pull_request)
1009 PullRequestModel().update_commits(self.pull_request)
1003 commit_ids = set(self.pull_request.revisions)
1010 commit_ids = set(self.pull_request.revisions)
1004 new_commit_ids = commit_ids - old_commit_ids
1011 new_commit_ids = commit_ids - old_commit_ids
1005 assert len(new_commit_ids) == 1
1012 assert len(new_commit_ids) == 1
1006 return new_commit_ids.pop()
1013 return new_commit_ids.pop()
1007
1014
1008 def remove_one_commit(self):
1015 def remove_one_commit(self):
1009 assert len(self.pull_request.revisions) == 2
1016 assert len(self.pull_request.revisions) == 2
1010 source_vcs = self.source_repository.scm_instance()
1017 source_vcs = self.source_repository.scm_instance()
1011 removed_commit_id = source_vcs.commit_ids[-1]
1018 removed_commit_id = source_vcs.commit_ids[-1]
1012
1019
1013 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1020 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1014 # remove the if once that's sorted out.
1021 # remove the if once that's sorted out.
1015 if self.backend.alias == "git":
1022 if self.backend.alias == "git":
1016 kwargs = {'branch_name': self.backend.default_branch_name}
1023 kwargs = {'branch_name': self.backend.default_branch_name}
1017 else:
1024 else:
1018 kwargs = {}
1025 kwargs = {}
1019 source_vcs.strip(removed_commit_id, **kwargs)
1026 source_vcs.strip(removed_commit_id, **kwargs)
1020
1027
1021 PullRequestModel().update_commits(self.pull_request)
1028 PullRequestModel().update_commits(self.pull_request)
1022 assert len(self.pull_request.revisions) == 1
1029 assert len(self.pull_request.revisions) == 1
1023 return removed_commit_id
1030 return removed_commit_id
1024
1031
1025 def create_comment(self, linked_to=None):
1032 def create_comment(self, linked_to=None):
1026 comment = CommentsModel().create(
1033 comment = CommentsModel().create(
1027 text=u"Test comment",
1034 text=u"Test comment",
1028 repo=self.target_repository.repo_name,
1035 repo=self.target_repository.repo_name,
1029 user=self.author,
1036 user=self.author,
1030 pull_request=self.pull_request)
1037 pull_request=self.pull_request)
1031 assert comment.pull_request_version_id is None
1038 assert comment.pull_request_version_id is None
1032
1039
1033 if linked_to:
1040 if linked_to:
1034 PullRequestModel()._link_comments_to_version(linked_to)
1041 PullRequestModel()._link_comments_to_version(linked_to)
1035
1042
1036 return comment
1043 return comment
1037
1044
1038 def create_inline_comment(
1045 def create_inline_comment(
1039 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1046 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1040 comment = CommentsModel().create(
1047 comment = CommentsModel().create(
1041 text=u"Test comment",
1048 text=u"Test comment",
1042 repo=self.target_repository.repo_name,
1049 repo=self.target_repository.repo_name,
1043 user=self.author,
1050 user=self.author,
1044 line_no=line_no,
1051 line_no=line_no,
1045 f_path=file_path,
1052 f_path=file_path,
1046 pull_request=self.pull_request)
1053 pull_request=self.pull_request)
1047 assert comment.pull_request_version_id is None
1054 assert comment.pull_request_version_id is None
1048
1055
1049 if linked_to:
1056 if linked_to:
1050 PullRequestModel()._link_comments_to_version(linked_to)
1057 PullRequestModel()._link_comments_to_version(linked_to)
1051
1058
1052 return comment
1059 return comment
1053
1060
1054 def create_version_of_pull_request(self):
1061 def create_version_of_pull_request(self):
1055 pull_request = self.create_pull_request()
1062 pull_request = self.create_pull_request()
1056 version = PullRequestModel()._create_version_from_snapshot(
1063 version = PullRequestModel()._create_version_from_snapshot(
1057 pull_request)
1064 pull_request)
1058 return version
1065 return version
1059
1066
1060 def create_status_votes(self, status, *reviewers):
1067 def create_status_votes(self, status, *reviewers):
1061 for reviewer in reviewers:
1068 for reviewer in reviewers:
1062 ChangesetStatusModel().set_status(
1069 ChangesetStatusModel().set_status(
1063 repo=self.pull_request.target_repo,
1070 repo=self.pull_request.target_repo,
1064 status=status,
1071 status=status,
1065 user=reviewer.user_id,
1072 user=reviewer.user_id,
1066 pull_request=self.pull_request)
1073 pull_request=self.pull_request)
1067
1074
1068 def set_mergeable(self, value):
1075 def set_mergeable(self, value):
1069 if not self.mergeable_patcher:
1076 if not self.mergeable_patcher:
1070 self.mergeable_patcher = mock.patch.object(
1077 self.mergeable_patcher = mock.patch.object(
1071 VcsSettingsModel, 'get_general_settings')
1078 VcsSettingsModel, 'get_general_settings')
1072 self.mergeable_mock = self.mergeable_patcher.start()
1079 self.mergeable_mock = self.mergeable_patcher.start()
1073 self.mergeable_mock.return_value = {
1080 self.mergeable_mock.return_value = {
1074 'rhodecode_pr_merge_enabled': value}
1081 'rhodecode_pr_merge_enabled': value}
1075
1082
1076 def cleanup(self):
1083 def cleanup(self):
1077 # In case the source repository is already cleaned up, the pull
1084 # In case the source repository is already cleaned up, the pull
1078 # request will already be deleted.
1085 # request will already be deleted.
1079 pull_request = PullRequest().get(self.pull_request_id)
1086 pull_request = PullRequest().get(self.pull_request_id)
1080 if pull_request:
1087 if pull_request:
1081 PullRequestModel().delete(pull_request, pull_request.author)
1088 PullRequestModel().delete(pull_request, pull_request.author)
1082 Session().commit()
1089 Session().commit()
1083
1090
1084 if self.notification_patcher:
1091 if self.notification_patcher:
1085 self.notification_patcher.stop()
1092 self.notification_patcher.stop()
1086
1093
1087 if self.mergeable_patcher:
1094 if self.mergeable_patcher:
1088 self.mergeable_patcher.stop()
1095 self.mergeable_patcher.stop()
1089
1096
1090
1097
1091 @pytest.fixture
1098 @pytest.fixture
1092 def user_admin(baseapp):
1099 def user_admin(baseapp):
1093 """
1100 """
1094 Provides the default admin test user as an instance of `db.User`.
1101 Provides the default admin test user as an instance of `db.User`.
1095 """
1102 """
1096 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1103 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1097 return user
1104 return user
1098
1105
1099
1106
1100 @pytest.fixture
1107 @pytest.fixture
1101 def user_regular(baseapp):
1108 def user_regular(baseapp):
1102 """
1109 """
1103 Provides the default regular test user as an instance of `db.User`.
1110 Provides the default regular test user as an instance of `db.User`.
1104 """
1111 """
1105 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1112 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1106 return user
1113 return user
1107
1114
1108
1115
1109 @pytest.fixture
1116 @pytest.fixture
1110 def user_util(request, baseapp):
1117 def user_util(request, baseapp):
1111 """
1118 """
1112 Provides a wired instance of `UserUtility` with integrated cleanup.
1119 Provides a wired instance of `UserUtility` with integrated cleanup.
1113 """
1120 """
1114 utility = UserUtility(test_name=request.node.name)
1121 utility = UserUtility(test_name=request.node.name)
1115 request.addfinalizer(utility.cleanup)
1122 request.addfinalizer(utility.cleanup)
1116 return utility
1123 return utility
1117
1124
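# Illustrative sketch (not collected as a test): `user_util` creates throwaway
# users, repositories, repo groups and permission grants which its finalizer
# removes again. The permission names shown are examples.
def _example_user_util_usage(user_util):
    user = user_util.create_user()
    repo = user_util.create_repo(repo_type='hg')
    repo_group = user_util.create_repo_group()
    user_util.grant_user_permission_to_repo(repo, user, 'repository.read')
    user_util.grant_user_permission_to_repo_group(
        repo_group, user, 'group.read')
    # everything created above is destroyed again in UserUtility.cleanup()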
1118
1125
1119 # TODO: johbo: Split this up into utilities per domain or something similar
1126 # TODO: johbo: Split this up into utilities per domain or something similar
1120 class UserUtility(object):
1127 class UserUtility(object):
1121
1128
1122 def __init__(self, test_name="test"):
1129 def __init__(self, test_name="test"):
1123 self._test_name = self._sanitize_name(test_name)
1130 self._test_name = self._sanitize_name(test_name)
1124 self.fixture = Fixture()
1131 self.fixture = Fixture()
1125 self.repo_group_ids = []
1132 self.repo_group_ids = []
1126 self.repos_ids = []
1133 self.repos_ids = []
1127 self.user_ids = []
1134 self.user_ids = []
1128 self.user_group_ids = []
1135 self.user_group_ids = []
1129 self.user_repo_permission_ids = []
1136 self.user_repo_permission_ids = []
1130 self.user_group_repo_permission_ids = []
1137 self.user_group_repo_permission_ids = []
1131 self.user_repo_group_permission_ids = []
1138 self.user_repo_group_permission_ids = []
1132 self.user_group_repo_group_permission_ids = []
1139 self.user_group_repo_group_permission_ids = []
1133 self.user_user_group_permission_ids = []
1140 self.user_user_group_permission_ids = []
1134 self.user_group_user_group_permission_ids = []
1141 self.user_group_user_group_permission_ids = []
1135 self.user_permissions = []
1142 self.user_permissions = []
1136
1143
1137 def _sanitize_name(self, name):
1144 def _sanitize_name(self, name):
1138 for char in ['[', ']']:
1145 for char in ['[', ']']:
1139 name = name.replace(char, '_')
1146 name = name.replace(char, '_')
1140 return name
1147 return name
1141
1148
1142 def create_repo_group(
1149 def create_repo_group(
1143 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1150 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1144 group_name = "{prefix}_repogroup_{count}".format(
1151 group_name = "{prefix}_repogroup_{count}".format(
1145 prefix=self._test_name,
1152 prefix=self._test_name,
1146 count=len(self.repo_group_ids))
1153 count=len(self.repo_group_ids))
1147 repo_group = self.fixture.create_repo_group(
1154 repo_group = self.fixture.create_repo_group(
1148 group_name, cur_user=owner)
1155 group_name, cur_user=owner)
1149 if auto_cleanup:
1156 if auto_cleanup:
1150 self.repo_group_ids.append(repo_group.group_id)
1157 self.repo_group_ids.append(repo_group.group_id)
1151 return repo_group
1158 return repo_group
1152
1159
1153 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1160 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1154 auto_cleanup=True, repo_type='hg'):
1161 auto_cleanup=True, repo_type='hg'):
1155 repo_name = "{prefix}_repository_{count}".format(
1162 repo_name = "{prefix}_repository_{count}".format(
1156 prefix=self._test_name,
1163 prefix=self._test_name,
1157 count=len(self.repos_ids))
1164 count=len(self.repos_ids))
1158
1165
1159 repository = self.fixture.create_repo(
1166 repository = self.fixture.create_repo(
1160 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type)
1167 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type)
1161 if auto_cleanup:
1168 if auto_cleanup:
1162 self.repos_ids.append(repository.repo_id)
1169 self.repos_ids.append(repository.repo_id)
1163 return repository
1170 return repository
1164
1171
1165 def create_user(self, auto_cleanup=True, **kwargs):
1172 def create_user(self, auto_cleanup=True, **kwargs):
1166 user_name = "{prefix}_user_{count}".format(
1173 user_name = "{prefix}_user_{count}".format(
1167 prefix=self._test_name,
1174 prefix=self._test_name,
1168 count=len(self.user_ids))
1175 count=len(self.user_ids))
1169 user = self.fixture.create_user(user_name, **kwargs)
1176 user = self.fixture.create_user(user_name, **kwargs)
1170 if auto_cleanup:
1177 if auto_cleanup:
1171 self.user_ids.append(user.user_id)
1178 self.user_ids.append(user.user_id)
1172 return user
1179 return user
1173
1180
1174 def create_user_with_group(self):
1181 def create_user_with_group(self):
1175 user = self.create_user()
1182 user = self.create_user()
1176 user_group = self.create_user_group(members=[user])
1183 user_group = self.create_user_group(members=[user])
1177 return user, user_group
1184 return user, user_group
1178
1185
1179 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1186 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1180 auto_cleanup=True, **kwargs):
1187 auto_cleanup=True, **kwargs):
1181 group_name = "{prefix}_usergroup_{count}".format(
1188 group_name = "{prefix}_usergroup_{count}".format(
1182 prefix=self._test_name,
1189 prefix=self._test_name,
1183 count=len(self.user_group_ids))
1190 count=len(self.user_group_ids))
1184 user_group = self.fixture.create_user_group(
1191 user_group = self.fixture.create_user_group(
1185 group_name, cur_user=owner, **kwargs)
1192 group_name, cur_user=owner, **kwargs)
1186
1193
1187 if auto_cleanup:
1194 if auto_cleanup:
1188 self.user_group_ids.append(user_group.users_group_id)
1195 self.user_group_ids.append(user_group.users_group_id)
1189 if members:
1196 if members:
1190 for user in members:
1197 for user in members:
1191 UserGroupModel().add_user_to_group(user_group, user)
1198 UserGroupModel().add_user_to_group(user_group, user)
1192 return user_group
1199 return user_group
1193
1200
1194 def grant_user_permission(self, user_name, permission_name):
1201 def grant_user_permission(self, user_name, permission_name):
1195 self._inherit_default_user_permissions(user_name, False)
1202 self._inherit_default_user_permissions(user_name, False)
1196 self.user_permissions.append((user_name, permission_name))
1203 self.user_permissions.append((user_name, permission_name))
1197
1204
1198 def grant_user_permission_to_repo_group(
1205 def grant_user_permission_to_repo_group(
1199 self, repo_group, user, permission_name):
1206 self, repo_group, user, permission_name):
1200 permission = RepoGroupModel().grant_user_permission(
1207 permission = RepoGroupModel().grant_user_permission(
1201 repo_group, user, permission_name)
1208 repo_group, user, permission_name)
1202 self.user_repo_group_permission_ids.append(
1209 self.user_repo_group_permission_ids.append(
1203 (repo_group.group_id, user.user_id))
1210 (repo_group.group_id, user.user_id))
1204 return permission
1211 return permission
1205
1212
1206 def grant_user_group_permission_to_repo_group(
1213 def grant_user_group_permission_to_repo_group(
1207 self, repo_group, user_group, permission_name):
1214 self, repo_group, user_group, permission_name):
1208 permission = RepoGroupModel().grant_user_group_permission(
1215 permission = RepoGroupModel().grant_user_group_permission(
1209 repo_group, user_group, permission_name)
1216 repo_group, user_group, permission_name)
1210 self.user_group_repo_group_permission_ids.append(
1217 self.user_group_repo_group_permission_ids.append(
1211 (repo_group.group_id, user_group.users_group_id))
1218 (repo_group.group_id, user_group.users_group_id))
1212 return permission
1219 return permission
1213
1220
1214 def grant_user_permission_to_repo(
1221 def grant_user_permission_to_repo(
1215 self, repo, user, permission_name):
1222 self, repo, user, permission_name):
1216 permission = RepoModel().grant_user_permission(
1223 permission = RepoModel().grant_user_permission(
1217 repo, user, permission_name)
1224 repo, user, permission_name)
1218 self.user_repo_permission_ids.append(
1225 self.user_repo_permission_ids.append(
1219 (repo.repo_id, user.user_id))
1226 (repo.repo_id, user.user_id))
1220 return permission
1227 return permission
1221
1228
1222 def grant_user_group_permission_to_repo(
1229 def grant_user_group_permission_to_repo(
1223 self, repo, user_group, permission_name):
1230 self, repo, user_group, permission_name):
1224 permission = RepoModel().grant_user_group_permission(
1231 permission = RepoModel().grant_user_group_permission(
1225 repo, user_group, permission_name)
1232 repo, user_group, permission_name)
1226 self.user_group_repo_permission_ids.append(
1233 self.user_group_repo_permission_ids.append(
1227 (repo.repo_id, user_group.users_group_id))
1234 (repo.repo_id, user_group.users_group_id))
1228 return permission
1235 return permission
1229
1236
1230 def grant_user_permission_to_user_group(
1237 def grant_user_permission_to_user_group(
1231 self, target_user_group, user, permission_name):
1238 self, target_user_group, user, permission_name):
1232 permission = UserGroupModel().grant_user_permission(
1239 permission = UserGroupModel().grant_user_permission(
1233 target_user_group, user, permission_name)
1240 target_user_group, user, permission_name)
1234 self.user_user_group_permission_ids.append(
1241 self.user_user_group_permission_ids.append(
1235 (target_user_group.users_group_id, user.user_id))
1242 (target_user_group.users_group_id, user.user_id))
1236 return permission
1243 return permission
1237
1244
1238 def grant_user_group_permission_to_user_group(
1245 def grant_user_group_permission_to_user_group(
1239 self, target_user_group, user_group, permission_name):
1246 self, target_user_group, user_group, permission_name):
1240 permission = UserGroupModel().grant_user_group_permission(
1247 permission = UserGroupModel().grant_user_group_permission(
1241 target_user_group, user_group, permission_name)
1248 target_user_group, user_group, permission_name)
1242 self.user_group_user_group_permission_ids.append(
1249 self.user_group_user_group_permission_ids.append(
1243 (target_user_group.users_group_id, user_group.users_group_id))
1250 (target_user_group.users_group_id, user_group.users_group_id))
1244 return permission
1251 return permission
1245
1252
1246 def revoke_user_permission(self, user_name, permission_name):
1253 def revoke_user_permission(self, user_name, permission_name):
1247 self._inherit_default_user_permissions(user_name, True)
1254 self._inherit_default_user_permissions(user_name, True)
1248 UserModel().revoke_perm(user_name, permission_name)
1255 UserModel().revoke_perm(user_name, permission_name)
1249
1256
1250 def _inherit_default_user_permissions(self, user_name, value):
1257 def _inherit_default_user_permissions(self, user_name, value):
1251 user = UserModel().get_by_username(user_name)
1258 user = UserModel().get_by_username(user_name)
1252 user.inherit_default_permissions = value
1259 user.inherit_default_permissions = value
1253 Session().add(user)
1260 Session().add(user)
1254 Session().commit()
1261 Session().commit()
1255
1262
1256 def cleanup(self):
1263 def cleanup(self):
1257 self._cleanup_permissions()
1264 self._cleanup_permissions()
1258 self._cleanup_repos()
1265 self._cleanup_repos()
1259 self._cleanup_repo_groups()
1266 self._cleanup_repo_groups()
1260 self._cleanup_user_groups()
1267 self._cleanup_user_groups()
1261 self._cleanup_users()
1268 self._cleanup_users()
1262
1269
1263 def _cleanup_permissions(self):
1270 def _cleanup_permissions(self):
1264 if self.user_permissions:
1271 if self.user_permissions:
1265 for user_name, permission_name in self.user_permissions:
1272 for user_name, permission_name in self.user_permissions:
1266 self.revoke_user_permission(user_name, permission_name)
1273 self.revoke_user_permission(user_name, permission_name)
1267
1274
1268 for permission in self.user_repo_permission_ids:
1275 for permission in self.user_repo_permission_ids:
1269 RepoModel().revoke_user_permission(*permission)
1276 RepoModel().revoke_user_permission(*permission)
1270
1277
1271 for permission in self.user_group_repo_permission_ids:
1278 for permission in self.user_group_repo_permission_ids:
1272 RepoModel().revoke_user_group_permission(*permission)
1279 RepoModel().revoke_user_group_permission(*permission)
1273
1280
1274 for permission in self.user_repo_group_permission_ids:
1281 for permission in self.user_repo_group_permission_ids:
1275 RepoGroupModel().revoke_user_permission(*permission)
1282 RepoGroupModel().revoke_user_permission(*permission)
1276
1283
1277 for permission in self.user_group_repo_group_permission_ids:
1284 for permission in self.user_group_repo_group_permission_ids:
1278 RepoGroupModel().revoke_user_group_permission(*permission)
1285 RepoGroupModel().revoke_user_group_permission(*permission)
1279
1286
1280 for permission in self.user_user_group_permission_ids:
1287 for permission in self.user_user_group_permission_ids:
1281 UserGroupModel().revoke_user_permission(*permission)
1288 UserGroupModel().revoke_user_permission(*permission)
1282
1289
1283 for permission in self.user_group_user_group_permission_ids:
1290 for permission in self.user_group_user_group_permission_ids:
1284 UserGroupModel().revoke_user_group_permission(*permission)
1291 UserGroupModel().revoke_user_group_permission(*permission)
1285
1292
1286 def _cleanup_repo_groups(self):
1293 def _cleanup_repo_groups(self):
1287 def _repo_group_compare(first_group_id, second_group_id):
1294 def _repo_group_compare(first_group_id, second_group_id):
1288 """
1295 """
1289 Gives higher priority to the groups with the most complex paths
1296 Gives higher priority to the groups with the most complex paths
1290 """
1297 """
1291 first_group = RepoGroup.get(first_group_id)
1298 first_group = RepoGroup.get(first_group_id)
1292 second_group = RepoGroup.get(second_group_id)
1299 second_group = RepoGroup.get(second_group_id)
1293 first_group_parts = (
1300 first_group_parts = (
1294 len(first_group.group_name.split('/')) if first_group else 0)
1301 len(first_group.group_name.split('/')) if first_group else 0)
1295 second_group_parts = (
1302 second_group_parts = (
1296 len(second_group.group_name.split('/')) if second_group else 0)
1303 len(second_group.group_name.split('/')) if second_group else 0)
1297 return cmp(second_group_parts, first_group_parts)
1304 return cmp(second_group_parts, first_group_parts)
1298
1305
1299 sorted_repo_group_ids = sorted(
1306 sorted_repo_group_ids = sorted(
1300 self.repo_group_ids, cmp=_repo_group_compare)
1307 self.repo_group_ids, cmp=_repo_group_compare)
1301 for repo_group_id in sorted_repo_group_ids:
1308 for repo_group_id in sorted_repo_group_ids:
1302 self.fixture.destroy_repo_group(repo_group_id)
1309 self.fixture.destroy_repo_group(repo_group_id)
1303
1310
1304 def _cleanup_repos(self):
1311 def _cleanup_repos(self):
1305 sorted_repos_ids = sorted(self.repos_ids)
1312 sorted_repos_ids = sorted(self.repos_ids)
1306 for repo_id in sorted_repos_ids:
1313 for repo_id in sorted_repos_ids:
1307 self.fixture.destroy_repo(repo_id)
1314 self.fixture.destroy_repo(repo_id)
1308
1315
1309 def _cleanup_user_groups(self):
1316 def _cleanup_user_groups(self):
1310 def _user_group_compare(first_group_id, second_group_id):
1317 def _user_group_compare(first_group_id, second_group_id):
1311 """
1318 """
1312 Gives higher priority to the groups with the most complex paths
1319 Gives higher priority to the groups with the most complex paths
1313 """
1320 """
1314 first_group = UserGroup.get(first_group_id)
1321 first_group = UserGroup.get(first_group_id)
1315 second_group = UserGroup.get(second_group_id)
1322 second_group = UserGroup.get(second_group_id)
1316 first_group_parts = (
1323 first_group_parts = (
1317 len(first_group.users_group_name.split('/'))
1324 len(first_group.users_group_name.split('/'))
1318 if first_group else 0)
1325 if first_group else 0)
1319 second_group_parts = (
1326 second_group_parts = (
1320 len(second_group.users_group_name.split('/'))
1327 len(second_group.users_group_name.split('/'))
1321 if second_group else 0)
1328 if second_group else 0)
1322 return cmp(second_group_parts, first_group_parts)
1329 return cmp(second_group_parts, first_group_parts)
1323
1330
1324 sorted_user_group_ids = sorted(
1331 sorted_user_group_ids = sorted(
1325 self.user_group_ids, cmp=_user_group_compare)
1332 self.user_group_ids, cmp=_user_group_compare)
1326 for user_group_id in sorted_user_group_ids:
1333 for user_group_id in sorted_user_group_ids:
1327 self.fixture.destroy_user_group(user_group_id)
1334 self.fixture.destroy_user_group(user_group_id)
1328
1335
1329 def _cleanup_users(self):
1336 def _cleanup_users(self):
1330 for user_id in self.user_ids:
1337 for user_id in self.user_ids:
1331 self.fixture.destroy_user(user_id)
1338 self.fixture.destroy_user(user_id)
1332
1339
1333
1340
1334 # TODO: Think about moving this into a pytest-pyro package and make it a
1341 # TODO: Think about moving this into a pytest-pyro package and make it a
1335 # pytest plugin
1342 # pytest plugin
1336 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1343 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1337 def pytest_runtest_makereport(item, call):
1344 def pytest_runtest_makereport(item, call):
1338 """
1345 """
1339 Adds the remote traceback to the test report if the exception carries this information.
1346 Adds the remote traceback to the test report if the exception carries this information.
1340
1347
1341 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1348 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1342 to the exception instance.
1349 to the exception instance.
1343 """
1350 """
1344 outcome = yield
1351 outcome = yield
1345 report = outcome.get_result()
1352 report = outcome.get_result()
1346 if call.excinfo:
1353 if call.excinfo:
1347 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1354 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1348
1355
1349
1356
1350 def _add_vcsserver_remote_traceback(report, exc):
1357 def _add_vcsserver_remote_traceback(report, exc):
1351 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1358 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1352
1359
1353 if vcsserver_traceback:
1360 if vcsserver_traceback:
1354 section = 'VCSServer remote traceback ' + report.when
1361 section = 'VCSServer remote traceback ' + report.when
1355 report.sections.append((section, vcsserver_traceback))
1362 report.sections.append((section, vcsserver_traceback))
1356
1363
1357
1364
1358 @pytest.fixture(scope='session')
1365 @pytest.fixture(scope='session')
1359 def testrun():
1366 def testrun():
1360 return {
1367 return {
1361 'uuid': uuid.uuid4(),
1368 'uuid': uuid.uuid4(),
1362 'start': datetime.datetime.utcnow().isoformat(),
1369 'start': datetime.datetime.utcnow().isoformat(),
1363 'timestamp': int(time.time()),
1370 'timestamp': int(time.time()),
1364 }
1371 }
1365
1372
1366
1373
1367 @pytest.fixture(autouse=True)
1374 @pytest.fixture(autouse=True)
1368 def collect_appenlight_stats(request, testrun):
1375 def collect_appenlight_stats(request, testrun):
1369 """
1376 """
1370 This fixture reports memory consumption of individual tests.
1377 This fixture reports memory consumption of individual tests.
1371
1378
1372 It gathers data based on `psutil` and sends it to Appenlight. The option
1379 It gathers data based on `psutil` and sends it to Appenlight. The option
1373 ``--appenlight`` has to be used to enable this fixture and the API key for
1380 ``--appenlight`` has to be used to enable this fixture and the API key for
1374 your application has to be provided in ``--appenlight-api-key``.
1381 your application has to be provided in ``--appenlight-api-key``.
1375 """
1382 """
1376 try:
1383 try:
1377 # psutil is not yet available on cygwin.
1384 # psutil is not yet available on cygwin.
1378 import psutil
1385 import psutil
1379 except ImportError:
1386 except ImportError:
1380 return
1387 return
1381
1388
1382 if not request.config.getoption('--appenlight'):
1389 if not request.config.getoption('--appenlight'):
1383 return
1390 return
1384 else:
1391 else:
1385 # Only request the baseapp fixture if appenlight tracking is
1392 # Only request the baseapp fixture if appenlight tracking is
1386 # enabled. This will speed up a test run of unit tests by 2 to 3
1393 # enabled. This will speed up a test run of unit tests by 2 to 3
1387 # seconds if appenlight is not enabled.
1394 # seconds if appenlight is not enabled.
1388 baseapp = request.getfuncargvalue("baseapp")
1395 baseapp = request.getfuncargvalue("baseapp")
1389 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1396 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1390 client = AppenlightClient(
1397 client = AppenlightClient(
1391 url=url,
1398 url=url,
1392 api_key=request.config.getoption('--appenlight-api-key'),
1399 api_key=request.config.getoption('--appenlight-api-key'),
1393 namespace=request.node.nodeid,
1400 namespace=request.node.nodeid,
1394 request=str(testrun['uuid']),
1401 request=str(testrun['uuid']),
1395 testrun=testrun)
1402 testrun=testrun)
1396
1403
1397 client.collect({
1404 client.collect({
1398 'message': "Starting",
1405 'message': "Starting",
1399 })
1406 })
1400
1407
1401 server_and_port = baseapp.config.get_settings()['vcs.server']
1408 server_and_port = baseapp.config.get_settings()['vcs.server']
1402 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1409 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1403 server = create_vcsserver_proxy(server_and_port, protocol)
1410 server = create_vcsserver_proxy(server_and_port, protocol)
1404 with server:
1411 with server:
1405 vcs_pid = server.get_pid()
1412 vcs_pid = server.get_pid()
1406 server.run_gc()
1413 server.run_gc()
1407 vcs_process = psutil.Process(vcs_pid)
1414 vcs_process = psutil.Process(vcs_pid)
1408 mem = vcs_process.memory_info()
1415 mem = vcs_process.memory_info()
1409 client.tag_before('vcsserver.rss', mem.rss)
1416 client.tag_before('vcsserver.rss', mem.rss)
1410 client.tag_before('vcsserver.vms', mem.vms)
1417 client.tag_before('vcsserver.vms', mem.vms)
1411
1418
1412 test_process = psutil.Process()
1419 test_process = psutil.Process()
1413 mem = test_process.memory_info()
1420 mem = test_process.memory_info()
1414 client.tag_before('test.rss', mem.rss)
1421 client.tag_before('test.rss', mem.rss)
1415 client.tag_before('test.vms', mem.vms)
1422 client.tag_before('test.vms', mem.vms)
1416
1423
1417 client.tag_before('time', time.time())
1424 client.tag_before('time', time.time())
1418
1425
1419 @request.addfinalizer
1426 @request.addfinalizer
1420 def send_stats():
1427 def send_stats():
1421 client.tag_after('time', time.time())
1428 client.tag_after('time', time.time())
1422 with server:
1429 with server:
1423 gc_stats = server.run_gc()
1430 gc_stats = server.run_gc()
1424 for tag, value in gc_stats.items():
1431 for tag, value in gc_stats.items():
1425 client.tag_after(tag, value)
1432 client.tag_after(tag, value)
1426 mem = vcs_process.memory_info()
1433 mem = vcs_process.memory_info()
1427 client.tag_after('vcsserver.rss', mem.rss)
1434 client.tag_after('vcsserver.rss', mem.rss)
1428 client.tag_after('vcsserver.vms', mem.vms)
1435 client.tag_after('vcsserver.vms', mem.vms)
1429
1436
1430 mem = test_process.memory_info()
1437 mem = test_process.memory_info()
1431 client.tag_after('test.rss', mem.rss)
1438 client.tag_after('test.rss', mem.rss)
1432 client.tag_after('test.vms', mem.vms)
1439 client.tag_after('test.vms', mem.vms)
1433
1440
1434 client.collect({
1441 client.collect({
1435 'message': "Finished",
1442 'message': "Finished",
1436 })
1443 })
1437 client.send_stats()
1444 client.send_stats()
1438
1445
1439 return client
1446 return client
1440
1447
1441
1448
1442 class AppenlightClient(object):
1449 class AppenlightClient(object):
1443
1450
1444 url_template = '{url}?protocol_version=0.5'
1451 url_template = '{url}?protocol_version=0.5'
1445
1452
1446 def __init__(
1453 def __init__(
1447 self, url, api_key, add_server=True, add_timestamp=True,
1454 self, url, api_key, add_server=True, add_timestamp=True,
1448 namespace=None, request=None, testrun=None):
1455 namespace=None, request=None, testrun=None):
1449 self.url = self.url_template.format(url=url)
1456 self.url = self.url_template.format(url=url)
1450 self.api_key = api_key
1457 self.api_key = api_key
1451 self.add_server = add_server
1458 self.add_server = add_server
1452 self.add_timestamp = add_timestamp
1459 self.add_timestamp = add_timestamp
1453 self.namespace = namespace
1460 self.namespace = namespace
1454 self.request = request
1461 self.request = request
1455 self.server = socket.getfqdn(socket.gethostname())
1462 self.server = socket.getfqdn(socket.gethostname())
1456 self.tags_before = {}
1463 self.tags_before = {}
1457 self.tags_after = {}
1464 self.tags_after = {}
1458 self.stats = []
1465 self.stats = []
1459 self.testrun = testrun or {}
1466 self.testrun = testrun or {}
1460
1467
1461 def tag_before(self, tag, value):
1468 def tag_before(self, tag, value):
1462 self.tags_before[tag] = value
1469 self.tags_before[tag] = value
1463
1470
1464 def tag_after(self, tag, value):
1471 def tag_after(self, tag, value):
1465 self.tags_after[tag] = value
1472 self.tags_after[tag] = value
1466
1473
1467 def collect(self, data):
1474 def collect(self, data):
1468 if self.add_server:
1475 if self.add_server:
1469 data.setdefault('server', self.server)
1476 data.setdefault('server', self.server)
1470 if self.add_timestamp:
1477 if self.add_timestamp:
1471 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1478 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1472 if self.namespace:
1479 if self.namespace:
1473 data.setdefault('namespace', self.namespace)
1480 data.setdefault('namespace', self.namespace)
1474 if self.request:
1481 if self.request:
1475 data.setdefault('request', self.request)
1482 data.setdefault('request', self.request)
1476 self.stats.append(data)
1483 self.stats.append(data)
1477
1484
1478 def send_stats(self):
1485 def send_stats(self):
1479 tags = [
1486 tags = [
1480 ('testrun', self.request),
1487 ('testrun', self.request),
1481 ('testrun.start', self.testrun['start']),
1488 ('testrun.start', self.testrun['start']),
1482 ('testrun.timestamp', self.testrun['timestamp']),
1489 ('testrun.timestamp', self.testrun['timestamp']),
1483 ('test', self.namespace),
1490 ('test', self.namespace),
1484 ]
1491 ]
1485 for key, value in self.tags_before.items():
1492 for key, value in self.tags_before.items():
1486 tags.append((key + '.before', value))
1493 tags.append((key + '.before', value))
1487 try:
1494 try:
1488 delta = self.tags_after[key] - value
1495 delta = self.tags_after[key] - value
1489 tags.append((key + '.delta', delta))
1496 tags.append((key + '.delta', delta))
1490 except Exception:
1497 except Exception:
1491 pass
1498 pass
1492 for key, value in self.tags_after.items():
1499 for key, value in self.tags_after.items():
1493 tags.append((key + '.after', value))
1500 tags.append((key + '.after', value))
1494 self.collect({
1501 self.collect({
1495 'message': "Collected tags",
1502 'message': "Collected tags",
1496 'tags': tags,
1503 'tags': tags,
1497 })
1504 })
1498
1505
1499 response = requests.post(
1506 response = requests.post(
1500 self.url,
1507 self.url,
1501 headers={
1508 headers={
1502 'X-appenlight-api-key': self.api_key},
1509 'X-appenlight-api-key': self.api_key},
1503 json=self.stats,
1510 json=self.stats,
1504 )
1511 )
1505
1512
1506 if response.status_code != 200:
1513 if response.status_code != 200:
1507 pprint.pprint(self.stats)
1514 pprint.pprint(self.stats)
1508 print(response.headers)
1515 print(response.headers)
1509 print(response.text)
1516 print(response.text)
1510 raise Exception('Sending to appenlight failed')
1517 raise Exception('Sending to appenlight failed')
1511
1518
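# Illustrative sketch (not collected as a test): the AppenlightClient protocol
# as used by `collect_appenlight_stats` above - record "before" tags, record
# "after" tags, then push everything in a single POST. The URL and API key are
# placeholders.
def _example_appenlight_client_usage():
    client = AppenlightClient(
        url='http://appenlight.example.com/api/logs',
        api_key='PLACEHOLDER-API-KEY',
        namespace='rhodecode/tests/example::test_something',
        testrun={
            'start': datetime.datetime.utcnow().isoformat(),
            'timestamp': int(time.time()),
        })
    client.tag_before('time', time.time())
    # ... run the measured code here ...
    client.tag_after('time', time.time())
    client.collect({'message': "Finished"})
    client.send_stats()  # raises if Appenlight does not answer with HTTP 200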
1512
1519
1513 @pytest.fixture
1520 @pytest.fixture
1514 def gist_util(request, baseapp):
1521 def gist_util(request, baseapp):
1515 """
1522 """
1516 Provides a wired instance of `GistUtility` with integrated cleanup.
1523 Provides a wired instance of `GistUtility` with integrated cleanup.
1517 """
1524 """
1518 utility = GistUtility()
1525 utility = GistUtility()
1519 request.addfinalizer(utility.cleanup)
1526 request.addfinalizer(utility.cleanup)
1520 return utility
1527 return utility
1521
1528
1522
1529
1523 class GistUtility(object):
1530 class GistUtility(object):
1524 def __init__(self):
1531 def __init__(self):
1525 self.fixture = Fixture()
1532 self.fixture = Fixture()
1526 self.gist_ids = []
1533 self.gist_ids = []
1527
1534
1528 def create_gist(self, **kwargs):
1535 def create_gist(self, **kwargs):
1529 gist = self.fixture.create_gist(**kwargs)
1536 gist = self.fixture.create_gist(**kwargs)
1530 self.gist_ids.append(gist.gist_id)
1537 self.gist_ids.append(gist.gist_id)
1531 return gist
1538 return gist
1532
1539
1533 def cleanup(self):
1540 def cleanup(self):
1534 for id_ in self.gist_ids:
1541 for id_ in self.gist_ids:
1535 self.fixture.destroy_gists(str(id_))
1542 self.fixture.destroy_gists(str(id_))
1536
1543
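# Illustrative sketch (not collected as a test): gists created through
# `gist_util` are tracked by id and destroyed again by its finalizer.
def _example_gist_util_usage(gist_util):
    gist = gist_util.create_gist()
    assert gist.gist_id in gist_util.gist_ids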
1537
1544
1538 @pytest.fixture
1545 @pytest.fixture
1539 def enabled_backends(request):
1546 def enabled_backends(request):
1540 backends = request.config.option.backends
1547 backends = request.config.option.backends
1541 return backends[:]
1548 return backends[:]
1542
1549
1543
1550
1544 @pytest.fixture
1551 @pytest.fixture
1545 def settings_util(request):
1552 def settings_util(request):
1546 """
1553 """
1547 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1554 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1548 """
1555 """
1549 utility = SettingsUtility()
1556 utility = SettingsUtility()
1550 request.addfinalizer(utility.cleanup)
1557 request.addfinalizer(utility.cleanup)
1551 return utility
1558 return utility
1552
1559
1553
1560
1554 class SettingsUtility(object):
1561 class SettingsUtility(object):
1555 def __init__(self):
1562 def __init__(self):
1556 self.rhodecode_ui_ids = []
1563 self.rhodecode_ui_ids = []
1557 self.rhodecode_setting_ids = []
1564 self.rhodecode_setting_ids = []
1558 self.repo_rhodecode_ui_ids = []
1565 self.repo_rhodecode_ui_ids = []
1559 self.repo_rhodecode_setting_ids = []
1566 self.repo_rhodecode_setting_ids = []
1560
1567
1561 def create_repo_rhodecode_ui(
1568 def create_repo_rhodecode_ui(
1562 self, repo, section, value, key=None, active=True, cleanup=True):
1569 self, repo, section, value, key=None, active=True, cleanup=True):
1563 key = key or hashlib.sha1(
1570 key = key or hashlib.sha1(
1564 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1571 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1565
1572
1566 setting = RepoRhodeCodeUi()
1573 setting = RepoRhodeCodeUi()
1567 setting.repository_id = repo.repo_id
1574 setting.repository_id = repo.repo_id
1568 setting.ui_section = section
1575 setting.ui_section = section
1569 setting.ui_value = value
1576 setting.ui_value = value
1570 setting.ui_key = key
1577 setting.ui_key = key
1571 setting.ui_active = active
1578 setting.ui_active = active
1572 Session().add(setting)
1579 Session().add(setting)
1573 Session().commit()
1580 Session().commit()
1574
1581
1575 if cleanup:
1582 if cleanup:
1576 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1583 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1577 return setting
1584 return setting
1578
1585
1579 def create_rhodecode_ui(
1586 def create_rhodecode_ui(
1580 self, section, value, key=None, active=True, cleanup=True):
1587 self, section, value, key=None, active=True, cleanup=True):
1581 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1588 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1582
1589
1583 setting = RhodeCodeUi()
1590 setting = RhodeCodeUi()
1584 setting.ui_section = section
1591 setting.ui_section = section
1585 setting.ui_value = value
1592 setting.ui_value = value
1586 setting.ui_key = key
1593 setting.ui_key = key
1587 setting.ui_active = active
1594 setting.ui_active = active
1588 Session().add(setting)
1595 Session().add(setting)
1589 Session().commit()
1596 Session().commit()
1590
1597
1591 if cleanup:
1598 if cleanup:
1592 self.rhodecode_ui_ids.append(setting.ui_id)
1599 self.rhodecode_ui_ids.append(setting.ui_id)
1593 return setting
1600 return setting
1594
1601
1595 def create_repo_rhodecode_setting(
1602 def create_repo_rhodecode_setting(
1596 self, repo, name, value, type_, cleanup=True):
1603 self, repo, name, value, type_, cleanup=True):
1597 setting = RepoRhodeCodeSetting(
1604 setting = RepoRhodeCodeSetting(
1598 repo.repo_id, key=name, val=value, type=type_)
1605 repo.repo_id, key=name, val=value, type=type_)
1599 Session().add(setting)
1606 Session().add(setting)
1600 Session().commit()
1607 Session().commit()
1601
1608
1602 if cleanup:
1609 if cleanup:
1603 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1610 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1604 return setting
1611 return setting
1605
1612
1606 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1613 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1607 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1614 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1608 Session().add(setting)
1615 Session().add(setting)
1609 Session().commit()
1616 Session().commit()
1610
1617
1611 if cleanup:
1618 if cleanup:
1612 self.rhodecode_setting_ids.append(setting.app_settings_id)
1619 self.rhodecode_setting_ids.append(setting.app_settings_id)
1613
1620
1614 return setting
1621 return setting
1615
1622
1616 def cleanup(self):
1623 def cleanup(self):
1617 for id_ in self.rhodecode_ui_ids:
1624 for id_ in self.rhodecode_ui_ids:
1618 setting = RhodeCodeUi.get(id_)
1625 setting = RhodeCodeUi.get(id_)
1619 Session().delete(setting)
1626 Session().delete(setting)
1620
1627
1621 for id_ in self.rhodecode_setting_ids:
1628 for id_ in self.rhodecode_setting_ids:
1622 setting = RhodeCodeSetting.get(id_)
1629 setting = RhodeCodeSetting.get(id_)
1623 Session().delete(setting)
1630 Session().delete(setting)
1624
1631
1625 for id_ in self.repo_rhodecode_ui_ids:
1632 for id_ in self.repo_rhodecode_ui_ids:
1626 setting = RepoRhodeCodeUi.get(id_)
1633 setting = RepoRhodeCodeUi.get(id_)
1627 Session().delete(setting)
1634 Session().delete(setting)
1628
1635
1629 for id_ in self.repo_rhodecode_setting_ids:
1636 for id_ in self.repo_rhodecode_setting_ids:
1630 setting = RepoRhodeCodeSetting.get(id_)
1637 setting = RepoRhodeCodeSetting.get(id_)
1631 Session().delete(setting)
1638 Session().delete(setting)
1632
1639
1633 Session().commit()
1640 Session().commit()
1634
1641
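# Illustrative sketch (not collected as a test): temporary ui and settings
# entries created through `settings_util` are deleted again in
# SettingsUtility.cleanup(). The section, value and type strings are examples;
# `repo_stub` is the repository stub fixture used elsewhere in this module.
def _example_settings_util_usage(settings_util, repo_stub):
    global_ui = settings_util.create_rhodecode_ui(
        'extensions', 'largefiles', active=True)
    repo_ui = settings_util.create_repo_rhodecode_ui(
        repo_stub, 'extensions', 'largefiles', active=False)
    setting = settings_util.create_rhodecode_setting(
        'example_key', 'example_value', 'unicode')
    assert global_ui.ui_section == repo_ui.ui_section == 'extensions'
    assert setting.app_settings_id in settings_util.rhodecode_setting_ids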
1635
1642
1636 @pytest.fixture
1643 @pytest.fixture
1637 def no_notifications(request):
1644 def no_notifications(request):
1638 notification_patcher = mock.patch(
1645 notification_patcher = mock.patch(
1639 'rhodecode.model.notification.NotificationModel.create')
1646 'rhodecode.model.notification.NotificationModel.create')
1640 notification_patcher.start()
1647 notification_patcher.start()
1641 request.addfinalizer(notification_patcher.stop)
1648 request.addfinalizer(notification_patcher.stop)
1642
1649
1643
1650
1644 @pytest.fixture(scope='session')
1651 @pytest.fixture(scope='session')
1645 def repeat(request):
1652 def repeat(request):
1646 """
1653 """
1647 Tests base their number of repetitions on this fixture.
1654 Tests base their number of repetitions on this fixture.
1648
1655
1649 Slower calls may divide it by 10 or 100. The value is chosen so that the
1656 Slower calls may divide it by 10 or 100. The value is chosen so that the
1650 tests are not too slow in our default test suite.
1657 tests are not too slow in our default test suite.
1651 """
1658 """
1652 return request.config.getoption('--repeat')
1659 return request.config.getoption('--repeat')
1653
1660
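# Illustrative sketch (not collected as a test): scaling a repeated operation
# with the `repeat` fixture. This assumes ``--repeat`` is registered as an
# integer option; a slower operation would use `repeat / 10` or `repeat / 100`.
def _example_repeat_usage(repeat):
    checksum = hashlib.sha1()
    for _ in xrange(repeat):
        checksum.update('payload')
    assert checksum.hexdigest()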
1654
1661
1655 @pytest.fixture
1662 @pytest.fixture
1656 def rhodecode_fixtures():
1663 def rhodecode_fixtures():
1657 return Fixture()
1664 return Fixture()
1658
1665
1659
1666
1660 @pytest.fixture
1667 @pytest.fixture
1661 def context_stub():
1668 def context_stub():
1662 """
1669 """
1663 Stub context object.
1670 Stub context object.
1664 """
1671 """
1665 context = pyramid.testing.DummyResource()
1672 context = pyramid.testing.DummyResource()
1666 return context
1673 return context
1667
1674
1668
1675
1669 @pytest.fixture
1676 @pytest.fixture
1670 def request_stub():
1677 def request_stub():
1671 """
1678 """
1672 Stub request object.
1679 Stub request object.
1673 """
1680 """
1674 from rhodecode.lib.base import bootstrap_request
1681 from rhodecode.lib.base import bootstrap_request
1675 request = bootstrap_request(scheme='https')
1682 request = bootstrap_request(scheme='https')
1676 return request
1683 return request
1677
1684
1678
1685
1679 @pytest.fixture
1686 @pytest.fixture
1680 def config_stub(request, request_stub):
1687 def config_stub(request, request_stub):
1681 """
1688 """
1682 Set up pyramid.testing and return the Configurator.
1689 Set up pyramid.testing and return the Configurator.
1683 """
1690 """
1684 from rhodecode.lib.base import bootstrap_config
1691 from rhodecode.lib.base import bootstrap_config
1685 config = bootstrap_config(request=request_stub)
1692 config = bootstrap_config(request=request_stub)
1686
1693
1687 @request.addfinalizer
1694 @request.addfinalizer
1688 def cleanup():
1695 def cleanup():
1689 pyramid.testing.tearDown()
1696 pyramid.testing.tearDown()
1690
1697
1691 return config
1698 return config
1692
1699
1693
1700
1694 @pytest.fixture
1701 @pytest.fixture
1695 def StubIntegrationType():
1702 def StubIntegrationType():
1696 class _StubIntegrationType(IntegrationTypeBase):
1703 class _StubIntegrationType(IntegrationTypeBase):
1697 """ Test integration type class """
1704 """ Test integration type class """
1698
1705
1699 key = 'test'
1706 key = 'test'
1700 display_name = 'Test integration type'
1707 display_name = 'Test integration type'
1701 description = 'A test integration type for testing'
1708 description = 'A test integration type for testing'
1702 icon = 'test_icon_html_image'
1709 icon = 'test_icon_html_image'
1703
1710
1704 def __init__(self, settings):
1711 def __init__(self, settings):
1705 super(_StubIntegrationType, self).__init__(settings)
1712 super(_StubIntegrationType, self).__init__(settings)
1706 self.sent_events = [] # for testing
1713 self.sent_events = [] # for testing
1707
1714
1708 def send_event(self, event):
1715 def send_event(self, event):
1709 self.sent_events.append(event)
1716 self.sent_events.append(event)
1710
1717
1711 def settings_schema(self):
1718 def settings_schema(self):
1712 class SettingsSchema(colander.Schema):
1719 class SettingsSchema(colander.Schema):
1713 test_string_field = colander.SchemaNode(
1720 test_string_field = colander.SchemaNode(
1714 colander.String(),
1721 colander.String(),
1715 missing=colander.required,
1722 missing=colander.required,
1716 title='test string field',
1723 title='test string field',
1717 )
1724 )
1718 test_int_field = colander.SchemaNode(
1725 test_int_field = colander.SchemaNode(
1719 colander.Int(),
1726 colander.Int(),
1720 title='some integer setting',
1727 title='some integer setting',
1721 )
1728 )
1722 return SettingsSchema()
1729 return SettingsSchema()
1723
1730
1724
1731
1725 integration_type_registry.register_integration_type(_StubIntegrationType)
1732 integration_type_registry.register_integration_type(_StubIntegrationType)
1726 return _StubIntegrationType
1733 return _StubIntegrationType
1727
1734
1728 @pytest.fixture
1735 @pytest.fixture
1729 def stub_integration_settings():
1736 def stub_integration_settings():
1730 return {
1737 return {
1731 'test_string_field': 'some data',
1738 'test_string_field': 'some data',
1732 'test_int_field': 100,
1739 'test_int_field': 100,
1733 }
1740 }
1734
1741
1735
1742
1736 @pytest.fixture
1743 @pytest.fixture
1737 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1744 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1738 stub_integration_settings):
1745 stub_integration_settings):
1739 integration = IntegrationModel().create(
1746 integration = IntegrationModel().create(
1740 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1747 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1741 name='test repo integration',
1748 name='test repo integration',
1742 repo=repo_stub, repo_group=None, child_repos_only=None)
1749 repo=repo_stub, repo_group=None, child_repos_only=None)
1743
1750
1744 @request.addfinalizer
1751 @request.addfinalizer
1745 def cleanup():
1752 def cleanup():
1746 IntegrationModel().delete(integration)
1753 IntegrationModel().delete(integration)
1747
1754
1748 return integration
1755 return integration
1749
1756
1750
1757
1751 @pytest.fixture
1758 @pytest.fixture
1752 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1759 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1753 stub_integration_settings):
1760 stub_integration_settings):
1754 integration = IntegrationModel().create(
1761 integration = IntegrationModel().create(
1755 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1762 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1756 name='test repogroup integration',
1763 name='test repogroup integration',
1757 repo=None, repo_group=test_repo_group, child_repos_only=True)
1764 repo=None, repo_group=test_repo_group, child_repos_only=True)
1758
1765
1759 @request.addfinalizer
1766 @request.addfinalizer
1760 def cleanup():
1767 def cleanup():
1761 IntegrationModel().delete(integration)
1768 IntegrationModel().delete(integration)
1762
1769
1763 return integration
1770 return integration
1764
1771
1765
1772
1766 @pytest.fixture
1773 @pytest.fixture
1767 def repogroup_recursive_integration_stub(request, test_repo_group,
1774 def repogroup_recursive_integration_stub(request, test_repo_group,
1768 StubIntegrationType, stub_integration_settings):
1775 StubIntegrationType, stub_integration_settings):
1769 integration = IntegrationModel().create(
1776 integration = IntegrationModel().create(
1770 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1777 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1771 name='test recursive repogroup integration',
1778 name='test recursive repogroup integration',
1772 repo=None, repo_group=test_repo_group, child_repos_only=False)
1779 repo=None, repo_group=test_repo_group, child_repos_only=False)
1773
1780
1774 @request.addfinalizer
1781 @request.addfinalizer
1775 def cleanup():
1782 def cleanup():
1776 IntegrationModel().delete(integration)
1783 IntegrationModel().delete(integration)
1777
1784
1778 return integration
1785 return integration
1779
1786
1780
1787
1781 @pytest.fixture
1788 @pytest.fixture
1782 def global_integration_stub(request, StubIntegrationType,
1789 def global_integration_stub(request, StubIntegrationType,
1783 stub_integration_settings):
1790 stub_integration_settings):
1784 integration = IntegrationModel().create(
1791 integration = IntegrationModel().create(
1785 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1792 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1786 name='test global integration',
1793 name='test global integration',
1787 repo=None, repo_group=None, child_repos_only=None)
1794 repo=None, repo_group=None, child_repos_only=None)
1788
1795
1789 @request.addfinalizer
1796 @request.addfinalizer
1790 def cleanup():
1797 def cleanup():
1791 IntegrationModel().delete(integration)
1798 IntegrationModel().delete(integration)
1792
1799
1793 return integration
1800 return integration
1794
1801
1795
1802
1796 @pytest.fixture
1803 @pytest.fixture
1797 def root_repos_integration_stub(request, StubIntegrationType,
1804 def root_repos_integration_stub(request, StubIntegrationType,
1798 stub_integration_settings):
1805 stub_integration_settings):
1799 integration = IntegrationModel().create(
1806 integration = IntegrationModel().create(
1800 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1807 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1801 name='test global integration',
1808 name='test global integration',
1802 repo=None, repo_group=None, child_repos_only=True)
1809 repo=None, repo_group=None, child_repos_only=True)
1803
1810
1804 @request.addfinalizer
1811 @request.addfinalizer
1805 def cleanup():
1812 def cleanup():
1806 IntegrationModel().delete(integration)
1813 IntegrationModel().delete(integration)
1807
1814
1808 return integration
1815 return integration
1809
1816
1810
1817
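A minimal sketch of a test consuming one of the integration stub fixtures above, assuming the Integration object returned by IntegrationModel().create() exposes the name and enabled values it was created with:

    def test_global_integration_stub(global_integration_stub):
        # Creation and cleanup are handled by the fixture; the test only
        # inspects the stored values.
        assert global_integration_stub.enabled is True
        assert global_integration_stub.name == 'test global integration'
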
1811 @pytest.fixture
1818 @pytest.fixture
1812 def local_dt_to_utc():
1819 def local_dt_to_utc():
1813 def _factory(dt):
1820 def _factory(dt):
1814 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1821 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1815 dateutil.tz.tzutc()).replace(tzinfo=None)
1822 dateutil.tz.tzutc()).replace(tzinfo=None)
1816 return _factory
1823 return _factory
1817
1824
1818
1825
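The factory above turns a naive local datetime into a naive UTC datetime. A standalone sketch of the same conversion (the result depends on the local timezone of the machine running the tests):

    import datetime
    import dateutil.tz

    def local_dt_to_utc(dt):
        # Mirrors the fixture's _factory: attach the local zone, convert to
        # UTC, then drop the tzinfo again.
        return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
            dateutil.tz.tzutc()).replace(tzinfo=None)

    # On a machine running at UTC+02:00 this prints 2017-01-01 10:00:00.
    print(local_dt_to_utc(datetime.datetime(2017, 1, 1, 12, 0)))
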
1819 @pytest.fixture
1826 @pytest.fixture
1820 def disable_anonymous_user(request, baseapp):
1827 def disable_anonymous_user(request, baseapp):
1821 set_anonymous_access(False)
1828 set_anonymous_access(False)
1822
1829
1823 @request.addfinalizer
1830 @request.addfinalizer
1824 def cleanup():
1831 def cleanup():
1825 set_anonymous_access(True)
1832 set_anonymous_access(True)
1826
1833
1827
1834
1828 @pytest.fixture(scope='module')
1835 @pytest.fixture(scope='module')
1829 def rc_fixture(request):
1836 def rc_fixture(request):
1830 return Fixture()
1837 return Fixture()
1831
1838
1832
1839
1833 @pytest.fixture
1840 @pytest.fixture
1834 def repo_groups(request):
1841 def repo_groups(request):
1835 fixture = Fixture()
1842 fixture = Fixture()
1836
1843
1837 session = Session()
1844 session = Session()
1838 zombie_group = fixture.create_repo_group('zombie')
1845 zombie_group = fixture.create_repo_group('zombie')
1839 parent_group = fixture.create_repo_group('parent')
1846 parent_group = fixture.create_repo_group('parent')
1840 child_group = fixture.create_repo_group('parent/child')
1847 child_group = fixture.create_repo_group('parent/child')
1841 groups_in_db = session.query(RepoGroup).all()
1848 groups_in_db = session.query(RepoGroup).all()
1842 assert len(groups_in_db) == 3
1849 assert len(groups_in_db) == 3
1843 assert child_group.group_parent_id == parent_group.group_id
1850 assert child_group.group_parent_id == parent_group.group_id
1844
1851
1845 @request.addfinalizer
1852 @request.addfinalizer
1846 def cleanup():
1853 def cleanup():
1847 fixture.destroy_repo_group(zombie_group)
1854 fixture.destroy_repo_group(zombie_group)
1848 fixture.destroy_repo_group(child_group)
1855 fixture.destroy_repo_group(child_group)
1849 fixture.destroy_repo_group(parent_group)
1856 fixture.destroy_repo_group(parent_group)
1850
1857
1851 return zombie_group, parent_group, child_group
1858 return zombie_group, parent_group, child_group
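A short sketch of a test using the repo_groups fixture; the unpacking order matches the fixture's return statement:

    def test_child_group_is_nested(repo_groups):
        zombie_group, parent_group, child_group = repo_groups
        # 'parent/child' was created under 'parent', so the parent id must match.
        assert child_group.group_parent_id == parent_group.group_id
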
@@ -1,372 +1,380 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import json
22 import json
23 import platform
23 import platform
24 import socket
24 import socket
25 import subprocess32
25 import subprocess32
26 import time
26 import time
27 from urllib2 import urlopen, URLError
27 from urllib2 import urlopen, URLError
28
28
29 import configobj
29 import configobj
30 import pytest
30 import pytest
31
31
32 import pyramid.paster
32 import pyramid.paster
33
34 from rhodecode.lib.pyramid_utils import get_app_config
33 from rhodecode.tests.fixture import TestINI
35 from rhodecode.tests.fixture import TestINI
34 import rhodecode
36 import rhodecode
35
37
36
38
37 def _parse_json(value):
39 def _parse_json(value):
38 return json.loads(value) if value else None
40 return json.loads(value) if value else None
39
41
40
42
41 def pytest_addoption(parser):
43 def pytest_addoption(parser):
42 parser.addoption(
44 parser.addoption(
43 '--test-loglevel', dest='test_loglevel',
45 '--test-loglevel', dest='test_loglevel',
44 help="Set default Logging level for tests, warn (default), info, debug")
46 help="Set default Logging level for tests, warn (default), info, debug")
45 group = parser.getgroup('pylons')
47 group = parser.getgroup('pylons')
46 group.addoption(
48 group.addoption(
47 '--with-pylons', dest='pyramid_config',
49 '--with-pylons', dest='pyramid_config',
48 help="Set up a Pylons environment with the specified config file.")
50 help="Set up a Pylons environment with the specified config file.")
49 group.addoption(
51 group.addoption(
50 '--ini-config-override', action='store', type=_parse_json,
52 '--ini-config-override', action='store', type=_parse_json,
51 default=None, dest='pyramid_config_override', help=(
53 default=None, dest='pyramid_config_override', help=(
52 "Overrides the .ini file settings. Should be specified in JSON"
54 "Overrides the .ini file settings. Should be specified in JSON"
53 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
55 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
54 )
56 )
55 )
57 )
56 parser.addini(
58 parser.addini(
57 'pyramid_config',
59 'pyramid_config',
58 "Set up a Pyramid environment with the specified config file.")
60 "Set up a Pyramid environment with the specified config file.")
59
61
60 vcsgroup = parser.getgroup('vcs')
62 vcsgroup = parser.getgroup('vcs')
61 vcsgroup.addoption(
63 vcsgroup.addoption(
62 '--without-vcsserver', dest='with_vcsserver', action='store_false',
64 '--without-vcsserver', dest='with_vcsserver', action='store_false',
63 help="Do not start the VCSServer in a background process.")
65 help="Do not start the VCSServer in a background process.")
64 vcsgroup.addoption(
66 vcsgroup.addoption(
65 '--with-vcsserver-http', dest='vcsserver_config_http',
67 '--with-vcsserver-http', dest='vcsserver_config_http',
66 help="Start the HTTP VCSServer with the specified config file.")
68 help="Start the HTTP VCSServer with the specified config file.")
67 vcsgroup.addoption(
69 vcsgroup.addoption(
68 '--vcsserver-protocol', dest='vcsserver_protocol',
70 '--vcsserver-protocol', dest='vcsserver_protocol',
69 help="Start the VCSServer with HTTP protocol support.")
71 help="Start the VCSServer with HTTP protocol support.")
70 vcsgroup.addoption(
72 vcsgroup.addoption(
71 '--vcsserver-config-override', action='store', type=_parse_json,
73 '--vcsserver-config-override', action='store', type=_parse_json,
72 default=None, dest='vcsserver_config_override', help=(
74 default=None, dest='vcsserver_config_override', help=(
73 "Overrides the .ini file settings for the VCSServer. "
75 "Overrides the .ini file settings for the VCSServer. "
74 "Should be specified in JSON "
76 "Should be specified in JSON "
75 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
77 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
76 )
78 )
77 )
79 )
78 vcsgroup.addoption(
80 vcsgroup.addoption(
79 '--vcsserver-port', action='store', type=int,
81 '--vcsserver-port', action='store', type=int,
80 default=None, help=(
82 default=None, help=(
81 "Allows to set the port of the vcsserver. Useful when testing "
83 "Allows to set the port of the vcsserver. Useful when testing "
82 "against an already running server and random ports cause "
84 "against an already running server and random ports cause "
83 "trouble."))
85 "trouble."))
84 parser.addini(
86 parser.addini(
85 'vcsserver_config_http',
87 'vcsserver_config_http',
86 "Start the HTTP VCSServer with the specified config file.")
88 "Start the HTTP VCSServer with the specified config file.")
87 parser.addini(
89 parser.addini(
88 'vcsserver_protocol',
90 'vcsserver_protocol',
89 "Start the VCSServer with HTTP protocol support.")
91 "Start the VCSServer with HTTP protocol support.")
90
92
91
93
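The JSON override options registered above can be passed on the py.test command line. A hedged example (the port value and the overridden setting are illustrative; 'vcs.start_server' is one of the settings the ini_config fixture later in this file also overrides, and the JSON must be quoted so the shell does not mangle it):

    py.test --vcsserver-port=9900 \
        --ini-config-override='{"app:main": {"vcs.start_server": "false"}}'
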
92 @pytest.fixture(scope='session')
94 @pytest.fixture(scope='session')
93 def vcsserver(request, vcsserver_port, vcsserver_factory):
95 def vcsserver(request, vcsserver_port, vcsserver_factory):
94 """
96 """
95 Session scope VCSServer.
97 Session scope VCSServer.
96
98
97 Tests which need the VCSServer have to rely on this fixture in order
99 Tests which need the VCSServer have to rely on this fixture in order
98 to ensure it will be running.
100 to ensure it will be running.
99
101
100 For specific needs, the fixture vcsserver_factory can be used. It allows to
102 For specific needs, the fixture vcsserver_factory can be used. It allows to
101 adjust the configuration file for the test run.
103 adjust the configuration file for the test run.
102
104
103 Command line args:
105 Command line args:
104
106
105 --without-vcsserver: Allows to switch this fixture off. You have to
107 --without-vcsserver: Allows to switch this fixture off. You have to
106 manually start the server.
108 manually start the server.
107
109
108 --vcsserver-port: Will expect the VCSServer to listen on this port.
110 --vcsserver-port: Will expect the VCSServer to listen on this port.
109 """
111 """
110
112
111 if not request.config.getoption('with_vcsserver'):
113 if not request.config.getoption('with_vcsserver'):
112 return None
114 return None
113
115
114 use_http = _use_vcs_http_server(request.config)
116 use_http = _use_vcs_http_server(request.config)
115 return vcsserver_factory(
117 return vcsserver_factory(
116 request, use_http=use_http, vcsserver_port=vcsserver_port)
118 request, use_http=use_http, vcsserver_port=vcsserver_port)
117
119
118
120
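A minimal sketch of a test that depends on the session-scoped vcsserver fixture above; it assumes the HTTP implementation (HttpVCSServer, defined further down) is in use:

    def test_needs_running_vcsserver(vcsserver):
        # With --without-vcsserver the fixture returns None and an externally
        # started server is expected; otherwise the started server is returned.
        if vcsserver is not None:
            assert vcsserver.http_url.startswith('http://')
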
119 @pytest.fixture(scope='session')
121 @pytest.fixture(scope='session')
120 def vcsserver_factory(tmpdir_factory):
122 def vcsserver_factory(tmpdir_factory):
121 """
123 """
122 Use this if you need a running vcsserver with a special configuration.
124 Use this if you need a running vcsserver with a special configuration.
123 """
125 """
124
126
125 def factory(request, use_http=True, overrides=(), vcsserver_port=None):
127 def factory(request, use_http=True, overrides=(), vcsserver_port=None):
126
128
127 if vcsserver_port is None:
129 if vcsserver_port is None:
128 vcsserver_port = get_available_port()
130 vcsserver_port = get_available_port()
129
131
130 overrides = list(overrides)
132 overrides = list(overrides)
131 if use_http:
133 if use_http:
132 overrides.append({'server:main': {'port': vcsserver_port}})
134 overrides.append({'server:main': {'port': vcsserver_port}})
133 else:
135 else:
134 overrides.append({'DEFAULT': {'port': vcsserver_port}})
136 overrides.append({'DEFAULT': {'port': vcsserver_port}})
135
137
136 if is_cygwin():
138 if is_cygwin():
137 platform_override = {'DEFAULT': {
139 platform_override = {'DEFAULT': {
138 'beaker.cache.repo_object.type': 'nocache'}}
140 'beaker.cache.repo_object.type': 'nocache'}}
139 overrides.append(platform_override)
141 overrides.append(platform_override)
140
142
141 option_name = 'vcsserver_config_http' if use_http else ''
143 option_name = 'vcsserver_config_http' if use_http else ''
142 override_option_name = 'vcsserver_config_override'
144 override_option_name = 'vcsserver_config_override'
143 config_file = get_config(
145 config_file = get_config(
144 request.config, option_name=option_name,
146 request.config, option_name=option_name,
145 override_option_name=override_option_name, overrides=overrides,
147 override_option_name=override_option_name, overrides=overrides,
146 basetemp=tmpdir_factory.getbasetemp().strpath,
148 basetemp=tmpdir_factory.getbasetemp().strpath,
147 prefix='test_vcs_')
149 prefix='test_vcs_')
148
150
149 print("Using the VCSServer configuration:{}".format(config_file))
151 print("Using the VCSServer configuration:{}".format(config_file))
150 ServerClass = HttpVCSServer if use_http else None
152 ServerClass = HttpVCSServer if use_http else None
151 server = ServerClass(config_file)
153 server = ServerClass(config_file)
152 server.start()
154 server.start()
153
155
154 @request.addfinalizer
156 @request.addfinalizer
155 def cleanup():
157 def cleanup():
156 server.shutdown()
158 server.shutdown()
157
159
158 server.wait_until_ready()
160 server.wait_until_ready()
159 return server
161 return server
160
162
161 return factory
163 return factory
162
164
163
165
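A hedged sketch of calling vcsserver_factory directly when a test needs a server with adjusted settings; the overrides use the same list-of-section-dicts shape as elsewhere in this file, and cleanup is registered by the factory itself:

    def test_with_custom_vcsserver(request, vcsserver_factory, available_port):
        server = vcsserver_factory(
            request, use_http=True, vcsserver_port=available_port,
            overrides=[{'server:main': {'host': '127.0.0.1'}}])
        # The factory adds its own finalizer, so no explicit shutdown here.
        assert server.http_url.startswith('http://')
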
164 def is_cygwin():
166 def is_cygwin():
165 return 'cygwin' in platform.system().lower()
167 return 'cygwin' in platform.system().lower()
166
168
167
169
168 def _use_vcs_http_server(config):
170 def _use_vcs_http_server(config):
169 protocol_option = 'vcsserver_protocol'
171 protocol_option = 'vcsserver_protocol'
170 protocol = (
172 protocol = (
171 config.getoption(protocol_option) or
173 config.getoption(protocol_option) or
172 config.getini(protocol_option) or
174 config.getini(protocol_option) or
173 'http')
175 'http')
174 return protocol == 'http'
176 return protocol == 'http'
175
177
176
178
177 def _use_log_level(config):
179 def _use_log_level(config):
178 level = config.getoption('test_loglevel') or 'warn'
180 level = config.getoption('test_loglevel') or 'warn'
179 return level.upper()
181 return level.upper()
180
182
181
183
182 class VCSServer(object):
184 class VCSServer(object):
183 """
185 """
184 Represents a running VCSServer instance.
186 Represents a running VCSServer instance.
185 """
187 """
186
188
187 _args = []
189 _args = []
188
190
189 def start(self):
191 def start(self):
190 print("Starting the VCSServer: {}".format(self._args))
192 print("Starting the VCSServer: {}".format(self._args))
191 self.process = subprocess32.Popen(self._args)
193 self.process = subprocess32.Popen(self._args)
192
194
193 def wait_until_ready(self, timeout=30):
195 def wait_until_ready(self, timeout=30):
194 raise NotImplementedError()
196 raise NotImplementedError()
195
197
196 def shutdown(self):
198 def shutdown(self):
197 self.process.kill()
199 self.process.kill()
198
200
199
201
200 class HttpVCSServer(VCSServer):
202 class HttpVCSServer(VCSServer):
201 """
203 """
202 Represents a running VCSServer instance.
204 Represents a running VCSServer instance.
203 """
205 """
204 def __init__(self, config_file):
206 def __init__(self, config_file):
205 config_data = configobj.ConfigObj(config_file)
207 config_data = configobj.ConfigObj(config_file)
206 self._config = config_data['server:main']
208 self._config = config_data['server:main']
207
209
208 args = ['pserve', config_file]
210 args = ['pserve', config_file]
209 self._args = args
211 self._args = args
210
212
211 @property
213 @property
212 def http_url(self):
214 def http_url(self):
213 template = 'http://{host}:{port}/'
215 template = 'http://{host}:{port}/'
214 return template.format(**self._config)
216 return template.format(**self._config)
215
217
216 def start(self):
218 def start(self):
217 self.process = subprocess32.Popen(self._args)
219 self.process = subprocess32.Popen(self._args)
218
220
219 def wait_until_ready(self, timeout=30):
221 def wait_until_ready(self, timeout=30):
220 host = self._config['host']
222 host = self._config['host']
221 port = self._config['port']
223 port = self._config['port']
222 status_url = 'http://{host}:{port}/status'.format(host=host, port=port)
224 status_url = 'http://{host}:{port}/status'.format(host=host, port=port)
223 start = time.time()
225 start = time.time()
224
226
225 while time.time() - start < timeout:
227 while time.time() - start < timeout:
226 try:
228 try:
227 urlopen(status_url)
229 urlopen(status_url)
228 break
230 break
229 except URLError:
231 except URLError:
230 time.sleep(0.2)
232 time.sleep(0.2)
231 else:
233 else:
232 pytest.exit(
234 pytest.exit(
233 "Starting the VCSServer failed or took more than {} "
235 "Starting the VCSServer failed or took more than {} "
234 "seconds. cmd: `{}`".format(timeout, ' '.join(self._args)))
236 "seconds. cmd: `{}`".format(timeout, ' '.join(self._args)))
235
237
236 def shutdown(self):
238 def shutdown(self):
237 self.process.kill()
239 self.process.kill()
238
240
239
241
240 @pytest.fixture(scope='session')
242 @pytest.fixture(scope='session')
241 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
243 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
242 option_name = 'pyramid_config'
244 option_name = 'pyramid_config'
243 log_level = _use_log_level(request.config)
245 log_level = _use_log_level(request.config)
244
246
245 overrides = [
247 overrides = [
246 {'server:main': {'port': rcserver_port}},
248 {'server:main': {'port': rcserver_port}},
247 {'app:main': {
249 {'app:main': {
248 'vcs.server': 'localhost:%s' % vcsserver_port,
250 'vcs.server': 'localhost:%s' % vcsserver_port,
249 # johbo: We will always start the VCSServer on our own based on the
251 # johbo: We will always start the VCSServer on our own based on the
250 # fixtures of the test cases. For the test run it must always be
252 # fixtures of the test cases. For the test run it must always be
251 # off in the INI file.
253 # off in the INI file.
252 'vcs.start_server': 'false',
254 'vcs.start_server': 'false',
253 }},
255 }},
254
256
255 {'handler_console': {
257 {'handler_console': {
256 'class ': 'StreamHandler',
258 'class ': 'StreamHandler',
257 'args ': '(sys.stderr,)',
259 'args ': '(sys.stderr,)',
258 'level': log_level,
260 'level': log_level,
259 }},
261 }},
260
262
261 ]
263 ]
262 if _use_vcs_http_server(request.config):
264 if _use_vcs_http_server(request.config):
263 overrides.append({
265 overrides.append({
264 'app:main': {
266 'app:main': {
265 'vcs.server.protocol': 'http',
267 'vcs.server.protocol': 'http',
266 'vcs.scm_app_implementation': 'http',
268 'vcs.scm_app_implementation': 'http',
267 'vcs.hooks.protocol': 'http',
269 'vcs.hooks.protocol': 'http',
268 }
270 }
269 })
271 })
270
272
271 filename = get_config(
273 filename = get_config(
272 request.config, option_name=option_name,
274 request.config, option_name=option_name,
273 override_option_name='{}_override'.format(option_name),
275 override_option_name='{}_override'.format(option_name),
274 overrides=overrides,
276 overrides=overrides,
275 basetemp=tmpdir_factory.getbasetemp().strpath,
277 basetemp=tmpdir_factory.getbasetemp().strpath,
276 prefix='test_rce_')
278 prefix='test_rce_')
277 return filename
279 return filename
278
280
279
281
280 @pytest.fixture(scope='session')
282 @pytest.fixture(scope='session')
283 def ini_settings(ini_config):
284 ini_path = ini_config
285 return get_app_config(ini_path)
286
287
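A small sketch of consuming the new ini_settings fixture, assuming get_app_config returns the parsed application settings as a dict-like object keyed by setting name:

    def test_vcs_server_is_configured(ini_settings):
        # 'vcs.server' is written into the generated test INI by ini_config.
        assert 'vcs.server' in ini_settings
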
288 @pytest.fixture(scope='session')
281 def rcserver_port(request):
289 def rcserver_port(request):
282 port = get_available_port()
290 port = get_available_port()
283 print('Using rcserver port {}'.format(port))
291 print('Using rcserver port {}'.format(port))
284 return port
292 return port
285
293
286
294
287 @pytest.fixture(scope='session')
295 @pytest.fixture(scope='session')
288 def vcsserver_port(request):
296 def vcsserver_port(request):
289 port = request.config.getoption('--vcsserver-port')
297 port = request.config.getoption('--vcsserver-port')
290 if port is None:
298 if port is None:
291 port = get_available_port()
299 port = get_available_port()
292 print('Using vcsserver port {}'.format(port))
300 print('Using vcsserver port {}'.format(port))
293 return port
301 return port
294
302
295
303
296 def get_available_port():
304 def get_available_port():
297 family = socket.AF_INET
305 family = socket.AF_INET
298 socktype = socket.SOCK_STREAM
306 socktype = socket.SOCK_STREAM
299 host = '127.0.0.1'
307 host = '127.0.0.1'
300
308
301 mysocket = socket.socket(family, socktype)
309 mysocket = socket.socket(family, socktype)
302 mysocket.bind((host, 0))
310 mysocket.bind((host, 0))
303 port = mysocket.getsockname()[1]
311 port = mysocket.getsockname()[1]
304 mysocket.close()
312 mysocket.close()
305 del mysocket
313 del mysocket
306 return port
314 return port
307
315
308
316
309 @pytest.fixture(scope='session')
317 @pytest.fixture(scope='session')
310 def available_port_factory():
318 def available_port_factory():
311 """
319 """
312 Returns a callable which returns free port numbers.
320 Returns a callable which returns free port numbers.
313 """
321 """
314 return get_available_port
322 return get_available_port
315
323
316
324
317 @pytest.fixture
325 @pytest.fixture
318 def available_port(available_port_factory):
326 def available_port(available_port_factory):
319 """
327 """
320 Gives you one free port for the current test.
328 Gives you one free port for the current test.
321
329
322 Uses "available_port_factory" to retrieve the port.
330 Uses "available_port_factory" to retrieve the port.
323 """
331 """
324 return available_port_factory()
332 return available_port_factory()
325
333
326
334
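A minimal example of the available_port fixture; the port was free at lookup time, so a test can bind to it (with the usual small race window between lookup and bind):

    import socket

    def test_can_bind_to_port(available_port):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.bind(('127.0.0.1', available_port))
        sock.close()
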
327 @pytest.fixture(scope='session')
335 @pytest.fixture(scope='session')
328 def testini_factory(tmpdir_factory, ini_config):
336 def testini_factory(tmpdir_factory, ini_config):
329 """
337 """
330 Factory to create an INI file based on TestINI.
338 Factory to create an INI file based on TestINI.
331
339
332 It will make sure to place the INI file in the correct directory.
340 It will make sure to place the INI file in the correct directory.
333 """
341 """
334 basetemp = tmpdir_factory.getbasetemp().strpath
342 basetemp = tmpdir_factory.getbasetemp().strpath
335 return TestIniFactory(basetemp, ini_config)
343 return TestIniFactory(basetemp, ini_config)
336
344
337
345
338 class TestIniFactory(object):
346 class TestIniFactory(object):
339
347
340 def __init__(self, basetemp, template_ini):
348 def __init__(self, basetemp, template_ini):
341 self._basetemp = basetemp
349 self._basetemp = basetemp
342 self._template_ini = template_ini
350 self._template_ini = template_ini
343
351
344 def __call__(self, ini_params, new_file_prefix='test'):
352 def __call__(self, ini_params, new_file_prefix='test'):
345 ini_file = TestINI(
353 ini_file = TestINI(
346 self._template_ini, ini_params=ini_params,
354 self._template_ini, ini_params=ini_params,
347 new_file_prefix=new_file_prefix, dir=self._basetemp)
355 new_file_prefix=new_file_prefix, dir=self._basetemp)
348 result = ini_file.create()
356 result = ini_file.create()
349 return result
357 return result
350
358
351
359
352 def get_config(
360 def get_config(
353 config, option_name, override_option_name, overrides=None,
361 config, option_name, override_option_name, overrides=None,
354 basetemp=None, prefix='test'):
362 basetemp=None, prefix='test'):
355 """
363 """
356 Find a configuration file and apply overrides for the given `prefix`.
364 Find a configuration file and apply overrides for the given `prefix`.
357 """
365 """
358 config_file = (
366 config_file = (
359 config.getoption(option_name) or config.getini(option_name))
367 config.getoption(option_name) or config.getini(option_name))
360 if not config_file:
368 if not config_file:
361 pytest.exit(
369 pytest.exit(
362 "Configuration error, could not extract {}.".format(option_name))
370 "Configuration error, could not extract {}.".format(option_name))
363
371
364 overrides = overrides or []
372 overrides = overrides or []
365 config_override = config.getoption(override_option_name)
373 config_override = config.getoption(override_option_name)
366 if config_override:
374 if config_override:
367 overrides.append(config_override)
375 overrides.append(config_override)
368 temp_ini_file = TestINI(
376 temp_ini_file = TestINI(
369 config_file, ini_params=overrides, new_file_prefix=prefix,
377 config_file, ini_params=overrides, new_file_prefix=prefix,
370 dir=basetemp)
378 dir=basetemp)
371
379
372 return temp_ini_file.create()
380 return temp_ini_file.create()