tests: added creation of repos for user-util fixture.
marcink
r1266:55bee39a default
@@ -1,1798 +1,1816 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import collections
import datetime
import hashlib
import os
import re
import pprint
import shutil
import socket
import subprocess32
import time
import uuid

import mock
import pyramid.testing
import pytest
import colander
import requests

import rhodecode
from rhodecode.lib.utils2 import AttributeDict
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.comment import ChangesetCommentsModel
from rhodecode.model.db import (
    PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
    UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
from rhodecode.model.meta import Session
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.model.repo import RepoModel
from rhodecode.model.repo_group import RepoGroupModel
from rhodecode.model.user import UserModel
from rhodecode.model.settings import VcsSettingsModel
from rhodecode.model.user_group import UserGroupModel
from rhodecode.model.integration import IntegrationModel
from rhodecode.integrations import integration_type_registry
from rhodecode.integrations.types.base import IntegrationTypeBase
from rhodecode.lib.utils import repo2db_mapper
from rhodecode.lib.vcs import create_vcsserver_proxy
from rhodecode.lib.vcs.backends import get_backend
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.tests import (
    login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
    TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
    TEST_USER_REGULAR_PASS)
from rhodecode.tests.utils import CustomTestApp
from rhodecode.tests.fixture import Fixture


def _split_comma(value):
    return value.split(',')


def pytest_addoption(parser):
    parser.addoption(
        '--keep-tmp-path', action='store_true',
        help="Keep the test temporary directories")
    parser.addoption(
        '--backends', action='store', type=_split_comma,
        default=['git', 'hg', 'svn'],
        help="Select which backends to test for backend specific tests.")
    parser.addoption(
        '--dbs', action='store', type=_split_comma,
        default=['sqlite'],
        help="Select which database to test for database specific tests. "
             "Possible options are sqlite,postgres,mysql")
    parser.addoption(
        '--appenlight', '--ae', action='store_true',
        help="Track statistics in appenlight.")
    parser.addoption(
        '--appenlight-api-key', '--ae-key',
        help="API key for Appenlight.")
    parser.addoption(
        '--appenlight-url', '--ae-url',
        default="https://ae.rhodecode.com",
        help="Appenlight service URL, defaults to https://ae.rhodecode.com")
    parser.addoption(
        '--sqlite-connection-string', action='store',
        default='', help="Connection string for the dbs tests with SQLite")
    parser.addoption(
        '--postgres-connection-string', action='store',
        default='', help="Connection string for the dbs tests with Postgres")
    parser.addoption(
        '--mysql-connection-string', action='store',
        default='', help="Connection string for the dbs tests with MySQL")
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")


def pytest_configure(config):
    # Apply the kombu patch early on, needed for test discovery on Python 2.7.11
    from rhodecode.config import patches
    patches.kombu_1_5_1_python_2_7_11()


def pytest_collection_modifyitems(session, config, items):
    # nottest marked, compare nose, used for transition from nose to pytest
    remaining = [
        i for i in items if getattr(i.obj, '__test__', True)]
    items[:] = remaining


def pytest_generate_tests(metafunc):
    # Support test generation based on --backend parameter
    if 'backend_alias' in metafunc.fixturenames:
        backends = get_backends_from_metafunc(metafunc)
        scope = None
        if not backends:
            pytest.skip("Not enabled for any of selected backends")
        metafunc.parametrize('backend_alias', backends, scope=scope)
    elif hasattr(metafunc.function, 'backends'):
        backends = get_backends_from_metafunc(metafunc)
        if not backends:
            pytest.skip("Not enabled for any of selected backends")


def get_backends_from_metafunc(metafunc):
    requested_backends = set(metafunc.config.getoption('--backends'))
    if hasattr(metafunc.function, 'backends'):
        # Supported backends by this test function, created from
        # pytest.mark.backends
        backends = metafunc.function.backends.args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # Support class attribute "backend_alias", this is mainly
        # for legacy reasons for tests not yet using pytest.mark.backends
        backends = [metafunc.cls.backend_alias]
    else:
        backends = metafunc.config.getoption('--backends')
    return requested_backends.intersection(backends)


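# Illustrative sketch, not part of the original module: how the backend
# parametrization above is typically consumed. A test marked with
# ``pytest.mark.backends`` runs only for the intersection of the marked
# backends and the ``--backends`` command line selection; the test name used
# here is hypothetical.
#
#   @pytest.mark.backends("git", "hg")
#   def test_example_only_git_and_hg(backend):
#       assert backend.alias in ("git", "hg")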
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in an example rcextensions module which verifies passed in kwargs.
    """
    from rhodecode.tests.other import example_rcextensions

    old_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = example_rcextensions

    @request.addfinalizer
    def cleanup():
        rhodecode.EXTENSIONS = old_extensions


@pytest.fixture
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.
    """
    calls = rhodecode.EXTENSIONS.calls
    calls.clear()
    # Note: At this moment, it is still the empty dict, but that will
    # be filled during the test run and since it is a reference this
    # is enough to make it work.
    return calls


@pytest.fixture(scope='session')
def http_environ_session():
    """
    Allow to use "http_environ" in session scope.
    """
    return http_environ(
        http_host_stub=http_host_stub())


@pytest.fixture
def http_host_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    return 'test.example.com:80'


@pytest.fixture
def http_environ(http_host_stub):
    """
    HTTP extra environ keys.

    Used by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return {
        'SERVER_NAME': http_host_stub.split(':')[0],
        'SERVER_PORT': http_host_stub.split(':')[1],
        'HTTP_HOST': http_host_stub,
    }


@pytest.fixture(scope='function')
def app(request, pylonsapp, http_environ):
    app = CustomTestApp(
        pylonsapp,
        extra_environ=http_environ)
    if request.cls:
        request.cls.app = app
    return app


@pytest.fixture(scope='session')
def app_settings(pylonsapp, pylons_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    from paste.deploy.loadwsgi import loadcontext, APP
    from rhodecode.config.middleware import (
        sanitize_settings_and_apply_defaults)
    context = loadcontext(APP, 'config:' + pylons_config)
    settings = sanitize_settings_and_apply_defaults(context.config())
    return settings


@pytest.fixture(scope='session')
def db(app_settings):
    """
    Initializes the database connection.

    It uses the same settings which are used to create the ``pylonsapp`` or
    ``app`` fixtures.
    """
    from rhodecode.config.utils import initialize_database
    initialize_database(app_settings)


LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))


def _autologin_user(app, *args):
    session = login_user_session(app, *args)
    csrf_token = rhodecode.lib.auth.get_csrf_token(session)
    return LoginData(csrf_token, session['rhodecode_user'])


@pytest.fixture
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in
    """
    return _autologin_user(app)


@pytest.fixture
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)


@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    return autologin_user.csrf_token


@pytest.fixture(scope='function')
def xhr_header(request):
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}


@pytest.fixture
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)


@pytest.fixture(scope='class')
def index_location(request, pylonsapp):
    index_location = pylonsapp.config['app_conf']['search.location']
    if request.cls:
        request.cls.index_location = index_location
    return index_location


@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    if not request.config.getoption('--keep-tmp-path'):
        @request.addfinalizer
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)

    return TESTS_TMP_PATH


@pytest.fixture(scope='session', autouse=True)
def patch_pyro_request_scope_proxy_factory(request):
    """
    Patch the pyro proxy factory to always use the same dummy request object
    when under test. This will return the same pyro proxy on every call.
    """
    dummy_request = pyramid.testing.DummyRequest()

    def mocked_call(self, request=None):
        return self.getProxy(request=dummy_request)

    patcher = mock.patch(
        'rhodecode.lib.vcs.client.RequestScopeProxyFactory.__call__',
        new=mocked_call)
    patcher.start()

    @request.addfinalizer
    def undo_patching():
        patcher.stop()


@pytest.fixture
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    repogroupid = 'test_repo_group_%s' % int(time.time())
    repo_group = fixture.create_repo_group(repogroupid)

    def _cleanup():
        fixture.destroy_repo_group(repogroupid)

    request.addfinalizer(_cleanup)
    return repo_group


@pytest.fixture
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    usergroupid = 'test_user_group_%s' % int(time.time())
    user_group = fixture.create_user_group(usergroupid)

    def _cleanup():
        fixture.destroy_user_group(user_group)

    request.addfinalizer(_cleanup)
    return user_group


@pytest.fixture(scope='session')
def test_repo(request):
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container


class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestRepoContainer()
        repo = test_repo('minimal', 'svn')

    """

    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        self._repos = {}

    def __call__(self, dump_name, backend_alias):
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias):
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend_class = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)
        vcs_repo = backend_class(repo_path)
        repo2db_mapper({repo_name: vcs_repo})
        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)


@pytest.fixture
def backend(request, backend_alias, pylonsapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = Backend(
        alias=backend_alias,
        repo_name=repo_name,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend


@pytest.fixture
def backend_git(request, pylonsapp, test_repo):
    return backend(request, 'git', pylonsapp, test_repo)


@pytest.fixture
def backend_hg(request, pylonsapp, test_repo):
    return backend(request, 'hg', pylonsapp, test_repo)


@pytest.fixture
def backend_svn(request, pylonsapp, test_repo):
    return backend(request, 'svn', pylonsapp, test_repo)


@pytest.fixture
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend".

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git


@pytest.fixture
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git


@pytest.fixture
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    return backend_stub.create_repo()


class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    _master_repo = None
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of commit
            names which shall be pulled in from the master repository.

        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in xrange(number_of_commits)]
        self._add_commits_to_repo(repo.scm_instance(), commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name),
            len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])


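# Illustrative sketch, not part of the original module: the shape of the
# ``commits`` argument accepted by ``Backend.create_repo`` above (and by the
# module level ``_add_commits_to_repo`` helper further below). The test name
# is hypothetical.
#
#   def test_example_repo_with_history(backend):
#       commits = [
#           {'message': 'Add readme',
#            'added': [FileNode('README.rst', content='docs\n')]},
#           {'message': 'Update readme',
#            'changed': [FileNode('README.rst', content='more docs\n')]},
#       ]
#       backend.create_repo(commits=commits)
#       assert len(backend.commit_ids) == 2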
@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    repo_path = os.path.join(tests_tmp_path, repo_name)
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=repo_path,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend


@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
    return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)


@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
    return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)


@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
    return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)


@pytest.fixture
def vcsbackend_random(vcsbackend_git):
    """
    Use this to express that your tests need "a vcsbackend".

    The fixture `vcsbackend` would run the test multiple times for each
    available vcs backend which is a pure waste of time if the test is
    independent of the vcs backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return vcsbackend_git


@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git


class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)


def _add_commits_to_repo(vcs_repo, commits):
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        message = unicode(commit.get('message', 'Commit %s' % idx))

        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=unicode(commit.get('author', 'Automatic')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids


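# Illustrative sketch, not part of the original module: using the lower level
# ``vcsbackend`` fixtures defined above, which operate on plain vcs
# repositories instead of database backed model instances. The test name is
# hypothetical.
#
#   def test_example_vcs_level(vcsbackend_git):
#       repo = vcsbackend_git.create_repo(number_of_commits=1)
#       vcsbackend_git.add_file(repo, 'setup.py', content='print("hi")\n')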
863 @pytest.fixture
863 @pytest.fixture
864 def reposerver(request):
864 def reposerver(request):
865 """
865 """
866 Allows to serve a backend repository
866 Allows to serve a backend repository
867 """
867 """
868
868
869 repo_server = RepoServer()
869 repo_server = RepoServer()
870 request.addfinalizer(repo_server.cleanup)
870 request.addfinalizer(repo_server.cleanup)
871 return repo_server
871 return repo_server
872
872
873
873
874 class RepoServer(object):
874 class RepoServer(object):
875 """
875 """
876 Utility to serve a local repository for the duration of a test case.
876 Utility to serve a local repository for the duration of a test case.
877
877
878 Supports only Subversion so far.
878 Supports only Subversion so far.
879 """
879 """
880
880
881 url = None
881 url = None
882
882
883 def __init__(self):
883 def __init__(self):
884 self._cleanup_servers = []
884 self._cleanup_servers = []
885
885
886 def serve(self, vcsrepo):
886 def serve(self, vcsrepo):
887 if vcsrepo.alias != 'svn':
887 if vcsrepo.alias != 'svn':
888 raise TypeError("Backend %s not supported" % vcsrepo.alias)
888 raise TypeError("Backend %s not supported" % vcsrepo.alias)
889
889
890 proc = subprocess32.Popen(
890 proc = subprocess32.Popen(
891 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
891 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
892 '--root', vcsrepo.path])
892 '--root', vcsrepo.path])
893 self._cleanup_servers.append(proc)
893 self._cleanup_servers.append(proc)
894 self.url = 'svn://localhost'
894 self.url = 'svn://localhost'
895
895
896 def cleanup(self):
896 def cleanup(self):
897 for proc in self._cleanup_servers:
897 for proc in self._cleanup_servers:
898 proc.terminate()
898 proc.terminate()
899
899
900
900
@pytest.fixture
def pr_util(backend, request):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """

    util = PRTestUtility(backend)

    @request.addfinalizer
    def cleanup():
        util.cleanup()

    return util


class PRTestUtility(object):

    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            assert model.get_versions(self.pull_request) == []

        self.pull_request_id = self.pull_request.pull_request_id

        if approved:
            self.approve()

        Session().add(self.pull_request)
        Session().commit()

        return self.pull_request

    def approve(self):
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        model = UserModel()
        return [
            model.get_by_username(TEST_USER_REGULAR_LOGIN),
            model.get_by_username(TEST_USER_REGULAR2_LOGIN),
        ]

    def update_source_repository(self, head=None):
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        comment = ChangesetCommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        comment = ChangesetCommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()


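# Illustrative usage sketch (not part of the original changeset): a minimal
# test built on `pr_util`. With the default commits (c1/c2/c3) the pull
# request targets c1 and carries c2 as its only revision; `add_one_commit`
# then pulls c3 into the source repository and updates the pull request.
def _example_pr_util_usage(pr_util):
    pull_request = pr_util.create_pull_request(mergeable=True)
    assert pull_request.revisions == [pr_util.commit_ids['c2']]

    new_commit = pr_util.add_one_commit()
    assert new_commit == pr_util.commit_ids['c3']

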
@pytest.fixture
def user_admin(pylonsapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
    return user


@pytest.fixture
def user_regular(pylonsapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
    return user


@pytest.fixture
def user_util(request, pylonsapp):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    utility = UserUtility(test_name=request.node.name)
    request.addfinalizer(utility.cleanup)
    return utility


# TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(
            self, owner=TEST_USER_ADMIN_LOGIN, parent=None, auto_cleanup=True):
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_user_with_group(self):
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, members=None, auto_cleanup=True, **kwargs):
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(group_name, **kwargs)
        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        self._inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        self._inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def _inherit_default_user_permissions(self, user_name, value):
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)


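# Illustrative usage sketch (not part of the original changeset): the new
# `create_repo` helper combined with `create_repo_group`. Both objects are
# registered for automatic cleanup through the `user_util` fixture finalizer.
def _example_user_util_usage(user_util):
    repo_group = user_util.create_repo_group()
    repository = user_util.create_repo(parent=repo_group)
    assert repository.repo_id in user_util.repos_ids
    assert repo_group.group_id in user_util.repo_group_ids

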
# TODO: Think about moving this into a pytest-pyro package and make it a
# pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adds the remote traceback if the exception carries this information.

    Pyro4 attaches this information as the attribute `_vcs_server_traceback`
    to the exception instance.
    """
    outcome = yield
    report = outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(report, call.excinfo.value)


def _add_vcsserver_remote_traceback(report, exc):
    vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)

    if vcsserver_traceback:
        section = 'VCSServer remote traceback ' + report.when
        report.sections.append((section, vcsserver_traceback))


@pytest.fixture(scope='session')
def testrun():
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }


@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends it to Appenlight. The option
    ``--appenlight`` has to be used to enable this fixture and the API key
    for your application has to be provided in ``--appenlight-api-key``.
    """
    try:
        # cygwin does not have psutil support yet.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the pylonsapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        pylonsapp = request.getfuncargvalue("pylonsapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    server_and_port = pylonsapp.config['vcs.server']
    server = create_vcsserver_proxy(server_and_port)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

    test_process = psutil.Process()
    mem = test_process.memory_info()
    client.tag_before('test.rss', mem.rss)
    client.tag_before('test.vms', mem.vms)

    client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client


class AppenlightClient(object):

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        self.tags_after[tag] = value

    def collect(self, data):
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print response.headers
            print response.text
            raise Exception('Sending to appenlight failed')


@pytest.fixture
def gist_util(request, pylonsapp):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    utility = GistUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class GistUtility(object):
    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(gist.gist_id)
        return gist

    def cleanup(self):
        for id_ in self.gist_ids:
            self.fixture.destroy_gists(str(id_))


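# Illustrative usage sketch (not part of the original changeset): gists
# created through `gist_util` are tracked and destroyed by the fixture
# finalizer, so tests do not have to clean up after themselves.
def _example_gist_util_usage(gist_util):
    gist = gist_util.create_gist()
    assert gist.gist_id in gist_util.gist_ids

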
@pytest.fixture
def enabled_backends(request):
    backends = request.config.option.backends
    return backends[:]


@pytest.fixture
def settings_util(request):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    utility = SettingsUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class SettingsUtility(object):
    def __init__(self):
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()


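# Illustrative usage sketch (not part of the original changeset): settings
# created through `settings_util` are deleted again in its `cleanup`. The
# 'unicode' setting type and the 'largefiles' ui section used here are
# assumptions chosen purely for demonstration.
def _example_settings_util_usage(settings_util):
    setting = settings_util.create_rhodecode_setting(
        'example_key', 'example_value', 'unicode')
    assert setting.app_settings_id in settings_util.rhodecode_setting_ids

    ui_setting = settings_util.create_rhodecode_ui('largefiles', 'usercache')
    assert ui_setting.ui_id in settings_util.rhodecode_ui_ids

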
@pytest.fixture
def no_notifications(request):
    notification_patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    notification_patcher.start()
    request.addfinalizer(notification_patcher.stop)


@pytest.fixture
def silence_action_logger(request):
    notification_patcher = mock.patch(
        'rhodecode.lib.utils.action_logger')
    notification_patcher.start()
    request.addfinalizer(notification_patcher.stop)


@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen so that the tests
    are not too slow in our default test suite.
    """
    return request.config.getoption('--repeat')


@pytest.fixture
def rhodecode_fixtures():
    return Fixture()


@pytest.fixture
def request_stub():
    """
    Stub request object.
    """
    request = pyramid.testing.DummyRequest()
    request.scheme = 'https'
    return request


@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    config = pyramid.testing.setUp(request=request_stub)

    @request.addfinalizer
    def cleanup():
        pyramid.testing.tearDown()

    return config


@pytest.fixture
def StubIntegrationType():
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'
        icon = 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            self.sent_events.append(event)

        def settings_schema(self):
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType


@pytest.fixture
def stub_integration_settings():
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }


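# Illustrative usage sketch (not part of the original changeset): the stub
# integration type records every event passed to `send_event`, so tests can
# assert on `sent_events`. The plain string event used here is a stand-in
# for a real event object.
def _example_stub_integration_usage(StubIntegrationType,
                                    stub_integration_settings):
    integration_type = StubIntegrationType(stub_integration_settings)
    integration_type.send_event('some-event')
    assert integration_type.sent_events == ['some-event']

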
@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
        StubIntegrationType, stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration