tests: Add a ``db`` fixture that initializes the database....
Martin Bornhold - r914:cf699af2 default
@@ -1,1779 +1,1791 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess
30 30 import time
31 31 import uuid
32 32
33 33 import mock
34 34 import pyramid.testing
35 35 import pytest
36 36 import colander
37 37 import requests
38 38 from webtest.app import TestApp
39 39
40 40 import rhodecode
41 41 from rhodecode.model.changeset_status import ChangesetStatusModel
42 42 from rhodecode.model.comment import ChangesetCommentsModel
43 43 from rhodecode.model.db import (
44 44 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
45 45 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, Integration)
46 46 from rhodecode.model.meta import Session
47 47 from rhodecode.model.pull_request import PullRequestModel
48 48 from rhodecode.model.repo import RepoModel
49 49 from rhodecode.model.repo_group import RepoGroupModel
50 50 from rhodecode.model.user import UserModel
51 51 from rhodecode.model.settings import VcsSettingsModel
52 52 from rhodecode.model.user_group import UserGroupModel
53 53 from rhodecode.model.integration import IntegrationModel
54 54 from rhodecode.integrations import integration_type_registry
55 55 from rhodecode.integrations.types.base import IntegrationTypeBase
56 56 from rhodecode.lib.utils import repo2db_mapper
57 57 from rhodecode.lib.vcs import create_vcsserver_proxy
58 58 from rhodecode.lib.vcs.backends import get_backend
59 59 from rhodecode.lib.vcs.nodes import FileNode
60 60 from rhodecode.tests import (
61 61 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
62 62 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
63 63 TEST_USER_REGULAR_PASS)
64 64 from rhodecode.tests.fixture import Fixture
65 65
66 66
67 67 def _split_comma(value):
68 68 return value.split(',')
69 69
70 70
71 71 def pytest_addoption(parser):
72 72 parser.addoption(
73 73 '--keep-tmp-path', action='store_true',
74 74 help="Keep the test temporary directories")
75 75 parser.addoption(
76 76 '--backends', action='store', type=_split_comma,
77 77 default=['git', 'hg', 'svn'],
78 78 help="Select which backends to test for backend specific tests.")
79 79 parser.addoption(
80 80 '--dbs', action='store', type=_split_comma,
81 81 default=['sqlite'],
82 82 help="Select which database to test for database specific tests. "
83 83 "Possible options are sqlite,postgres,mysql")
84 84 parser.addoption(
85 85 '--appenlight', '--ae', action='store_true',
86 86 help="Track statistics in appenlight.")
87 87 parser.addoption(
88 88 '--appenlight-api-key', '--ae-key',
89 89 help="API key for Appenlight.")
90 90 parser.addoption(
91 91 '--appenlight-url', '--ae-url',
92 92 default="https://ae.rhodecode.com",
93 93 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
94 94 parser.addoption(
95 95 '--sqlite-connection-string', action='store',
96 96 default='', help="Connection string for the dbs tests with SQLite")
97 97 parser.addoption(
98 98 '--postgres-connection-string', action='store',
99 99 default='', help="Connection string for the dbs tests with Postgres")
100 100 parser.addoption(
101 101 '--mysql-connection-string', action='store',
102 102 default='', help="Connection string for the dbs tests with MySQL")
103 103 parser.addoption(
104 104 '--repeat', type=int, default=100,
105 105 help="Number of repetitions in performance tests.")
106 106
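# Usage sketch (illustrative): one way the options registered above could be
# combined on the command line; the test path and the connection string are
# placeholders, not values taken from this changeset.
#
#   py.test rhodecode/tests --backends=git,hg --dbs=sqlite,postgres \
#       --postgres-connection-string='postgresql://user:secret@localhost/rc_test' \
#       --repeat=20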
107 107
108 108 def pytest_configure(config):
109 109 # Apply the kombu patch early on, needed for test discovery on Python 2.7.11
110 110 from rhodecode.config import patches
111 111 patches.kombu_1_5_1_python_2_7_11()
112 112
113 113
114 114 def pytest_collection_modifyitems(session, config, items):
115 115 # Filter out items marked with ``__test__ = False`` (nose's nottest); kept for the transition from nose to pytest
116 116 remaining = [
117 117 i for i in items if getattr(i.obj, '__test__', True)]
118 118 items[:] = remaining
119 119
120 120
121 121 def pytest_generate_tests(metafunc):
122 122 # Support test generation based on --backend parameter
123 123 if 'backend_alias' in metafunc.fixturenames:
124 124 backends = get_backends_from_metafunc(metafunc)
125 125 scope = None
126 126 if not backends:
127 127 pytest.skip("Not enabled for any of selected backends")
128 128 metafunc.parametrize('backend_alias', backends, scope=scope)
129 129 elif hasattr(metafunc.function, 'backends'):
130 130 backends = get_backends_from_metafunc(metafunc)
131 131 if not backends:
132 132 pytest.skip("Not enabled for any of selected backends")
133 133
134 134
135 135 def get_backends_from_metafunc(metafunc):
136 136 requested_backends = set(metafunc.config.getoption('--backends'))
137 137 if hasattr(metafunc.function, 'backends'):
138 138 # Supported backends by this test function, created from
139 139 # pytest.mark.backends
140 140 backends = metafunc.function.backends.args
141 141 elif hasattr(metafunc.cls, 'backend_alias'):
142 142 # Support class attribute "backend_alias", this is mainly
143 143 # for legacy reasons for tests not yet using pytest.mark.backends
144 144 backends = [metafunc.cls.backend_alias]
145 145 else:
146 146 backends = metafunc.config.getoption('--backends')
147 147 return requested_backends.intersection(backends)
148 148
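# Usage sketch (illustrative): how a test module can restrict itself to a
# subset of backends with ``pytest.mark.backends``; it is then parametrized
# only over the intersection with the ``--backends`` option handled above.
@pytest.mark.backends('git', 'hg')
def test_backend_mark_sketch(backend_alias):
    # ``backend_alias`` is injected per backend by pytest_generate_tests().
    assert backend_alias in ('git', 'hg')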
149 149
150 150 @pytest.fixture(scope='session', autouse=True)
151 151 def activate_example_rcextensions(request):
152 152 """
153 153 Patch in an example rcextensions module which verifies passed in kwargs.
154 154 """
155 155 from rhodecode.tests.other import example_rcextensions
156 156
157 157 old_extensions = rhodecode.EXTENSIONS
158 158 rhodecode.EXTENSIONS = example_rcextensions
159 159
160 160 @request.addfinalizer
161 161 def cleanup():
162 162 rhodecode.EXTENSIONS = old_extensions
163 163
164 164
165 165 @pytest.fixture
166 166 def capture_rcextensions():
167 167 """
168 168 Returns the recorded calls to entry points in rcextensions.
169 169 """
170 170 calls = rhodecode.EXTENSIONS.calls
171 171 calls.clear()
172 172 # Note: at this point ``calls`` is still an empty dict, but it will be
173 173 # filled during the test run; since we return a reference, that is
174 174 # enough for callers to see the recorded calls.
175 175 return calls
176 176
177 177
178 178 @pytest.fixture(scope='session')
179 179 def http_environ_session():
180 180 """
181 181 Allows using "http_environ" in session scope.
182 182 """
183 183 return http_environ(
184 184 http_host_stub=http_host_stub())
185 185
186 186
187 187 @pytest.fixture
188 188 def http_host_stub():
189 189 """
190 190 Value of HTTP_HOST in the test run.
191 191 """
192 192 return 'test.example.com:80'
193 193
194 194
195 195 @pytest.fixture
196 196 def http_environ(http_host_stub):
197 197 """
198 198 HTTP extra environ keys.
199 199
200 200 Used by the test application as well as for setting up the pylons
201 201 environment. In the case of the fixture "app" it should be possible
202 202 to override this for a specific test case.
203 203 """
204 204 return {
205 205 'SERVER_NAME': http_host_stub.split(':')[0],
206 206 'SERVER_PORT': http_host_stub.split(':')[1],
207 207 'HTTP_HOST': http_host_stub,
208 208 }
209 209
210 210
211 211 @pytest.fixture(scope='function')
212 212 def app(request, pylonsapp, http_environ):
213 213 app = TestApp(
214 214 pylonsapp,
215 215 extra_environ=http_environ)
216 216 if request.cls:
217 217 request.cls.app = app
218 218 return app
219 219
220 220
221 @pytest.fixture()
221 @pytest.fixture(scope='session')
222 222 def app_settings(pylonsapp, pylons_config):
223 223 """
224 224 Settings dictionary used to create the app.
225 225
226 226 Parses the ini file and passes the result through the sanitize and apply
227 227 defaults mechanism in `rhodecode.config.middleware`.
228 228 """
229 229 from paste.deploy.loadwsgi import loadcontext, APP
230 230 from rhodecode.config.middleware import (
231 231 sanitize_settings_and_apply_defaults)
232 232 context = loadcontext(APP, 'config:' + pylons_config)
233 233 settings = sanitize_settings_and_apply_defaults(context.config())
234 234 return settings
235 235
236 236
237 @pytest.fixture(scope='session')
238 def db(app_settings):
239 """
240 Initializes the database connection.
241
242 It uses the same settings which are used to create the ``pylonsapp`` or
243 ``app`` fixtures.
244 """
245 from rhodecode.config.utils import initialize_database
246 initialize_database(app_settings)
247
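# Usage sketch (illustrative): a test that only needs the database can now
# request the session-scoped ``db`` fixture instead of the full ``pylonsapp``
# or ``app`` stack. The assertion assumes the test database has already been
# created and populated by the regular test setup.
def test_db_fixture_sketch(db):
    # UserModel and TEST_USER_ADMIN_LOGIN are imported at the top of this file.
    assert UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) is not None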
248
237 249 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
238 250
239 251
240 252 def _autologin_user(app, *args):
241 253 session = login_user_session(app, *args)
242 254 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
243 255 return LoginData(csrf_token, session['rhodecode_user'])
244 256
245 257
246 258 @pytest.fixture
247 259 def autologin_user(app):
248 260 """
249 261 Utility fixture which makes sure that the admin user is logged in
250 262 """
251 263 return _autologin_user(app)
252 264
253 265
254 266 @pytest.fixture
255 267 def autologin_regular_user(app):
256 268 """
257 269 Utility fixture which makes sure that the regular user is logged in
258 270 """
259 271 return _autologin_user(
260 272 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
261 273
262 274
263 275 @pytest.fixture(scope='function')
264 276 def csrf_token(request, autologin_user):
265 277 return autologin_user.csrf_token
266 278
267 279
268 280 @pytest.fixture(scope='function')
269 281 def xhr_header(request):
270 282 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
271 283
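# Usage sketch (illustrative): wiring ``app``, ``csrf_token`` and
# ``xhr_header`` together in a functional test. The route is a placeholder;
# real tests build URLs with the url() helpers and assert on the response.
def test_csrf_post_sketch(app, csrf_token, xhr_header):
    app.post(
        '/some/endpoint',  # placeholder route, not a real RhodeCode URL
        params={'csrf_token': csrf_token},
        extra_environ=xhr_header,
        status='*')  # accept any status; this only shows the wiring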
272 284
273 285 @pytest.fixture
274 286 def real_crypto_backend(monkeypatch):
275 287 """
276 288 Switch the production crypto backend on for this test.
277 289
278 290 During the test run the crypto backend is replaced with a faster
279 291 implementation based on the MD5 algorithm.
280 292 """
281 293 monkeypatch.setattr(rhodecode, 'is_test', False)
282 294
283 295
284 296 @pytest.fixture(scope='class')
285 297 def index_location(request, pylonsapp):
286 298 index_location = pylonsapp.config['app_conf']['search.location']
287 299 if request.cls:
288 300 request.cls.index_location = index_location
289 301 return index_location
290 302
291 303
292 304 @pytest.fixture(scope='session', autouse=True)
293 305 def tests_tmp_path(request):
294 306 """
295 307 Create temporary directory to be used during the test session.
296 308 """
297 309 if not os.path.exists(TESTS_TMP_PATH):
298 310 os.makedirs(TESTS_TMP_PATH)
299 311
300 312 if not request.config.getoption('--keep-tmp-path'):
301 313 @request.addfinalizer
302 314 def remove_tmp_path():
303 315 shutil.rmtree(TESTS_TMP_PATH)
304 316
305 317 return TESTS_TMP_PATH
306 318
307 319
308 320 @pytest.fixture(scope='session', autouse=True)
309 321 def patch_pyro_request_scope_proxy_factory(request):
310 322 """
311 323 Patch the pyro proxy factory to always use the same dummy request object
312 324 when under test. This will return the same pyro proxy on every call.
313 325 """
314 326 dummy_request = pyramid.testing.DummyRequest()
315 327
316 328 def mocked_call(self, request=None):
317 329 return self.getProxy(request=dummy_request)
318 330
319 331 patcher = mock.patch(
320 332 'rhodecode.lib.vcs.client.RequestScopeProxyFactory.__call__',
321 333 new=mocked_call)
322 334 patcher.start()
323 335
324 336 @request.addfinalizer
325 337 def undo_patching():
326 338 patcher.stop()
327 339
328 340
329 341 @pytest.fixture
330 342 def test_repo_group(request):
331 343 """
332 344 Create a temporary repository group and destroy it automatically
333 345 after use.
334 346 """
335 347 fixture = Fixture()
336 348 repogroupid = 'test_repo_group_%s' % int(time.time())
337 349 repo_group = fixture.create_repo_group(repogroupid)
338 350
339 351 def _cleanup():
340 352 fixture.destroy_repo_group(repogroupid)
341 353
342 354 request.addfinalizer(_cleanup)
343 355 return repo_group
344 356
345 357
346 358 @pytest.fixture
347 359 def test_user_group(request):
348 360 """
349 361 Create a temporary user group and destroy it automatically
350 362 after use.
351 363 """
352 364 fixture = Fixture()
353 365 usergroupid = 'test_user_group_%s' % int(time.time())
354 366 user_group = fixture.create_user_group(usergroupid)
355 367
356 368 def _cleanup():
357 369 fixture.destroy_user_group(user_group)
358 370
359 371 request.addfinalizer(_cleanup)
360 372 return user_group
361 373
362 374
363 375 @pytest.fixture(scope='session')
364 376 def test_repo(request):
365 377 container = TestRepoContainer()
366 378 request.addfinalizer(container._cleanup)
367 379 return container
368 380
369 381
370 382 class TestRepoContainer(object):
371 383 """
372 384 Container for test repositories which are used read-only.
373 385
374 386 Repositories will be created on demand and re-used during the lifetime
375 387 of this object.
376 388
377 389 Usage to get the svn test repository "minimal"::
378 390
379 391 test_repo = TestRepoContainer()
380 392 repo = test_repo('minimal', 'svn')
381 393
382 394 """
383 395
384 396 dump_extractors = {
385 397 'git': utils.extract_git_repo_from_dump,
386 398 'hg': utils.extract_hg_repo_from_dump,
387 399 'svn': utils.extract_svn_repo_from_dump,
388 400 }
389 401
390 402 def __init__(self):
391 403 self._cleanup_repos = []
392 404 self._fixture = Fixture()
393 405 self._repos = {}
394 406
395 407 def __call__(self, dump_name, backend_alias):
396 408 key = (dump_name, backend_alias)
397 409 if key not in self._repos:
398 410 repo = self._create_repo(dump_name, backend_alias)
399 411 self._repos[key] = repo.repo_id
400 412 return Repository.get(self._repos[key])
401 413
402 414 def _create_repo(self, dump_name, backend_alias):
403 415 repo_name = '%s-%s' % (backend_alias, dump_name)
404 416 backend_class = get_backend(backend_alias)
405 417 dump_extractor = self.dump_extractors[backend_alias]
406 418 repo_path = dump_extractor(dump_name, repo_name)
407 419 vcs_repo = backend_class(repo_path)
408 420 repo2db_mapper({repo_name: vcs_repo})
409 421 repo = RepoModel().get_by_repo_name(repo_name)
410 422 self._cleanup_repos.append(repo_name)
411 423 return repo
412 424
413 425 def _cleanup(self):
414 426 for repo_name in reversed(self._cleanup_repos):
415 427 self._fixture.destroy_repo(repo_name)
416 428
417 429
418 430 @pytest.fixture
419 431 def backend(request, backend_alias, pylonsapp, test_repo):
420 432 """
421 433 Parametrized fixture which represents a single backend implementation.
422 434
423 435 It respects the option `--backends` to focus the test run on specific
424 436 backend implementations.
425 437
426 438 It also supports `pytest.mark.xfail_backends` to mark tests as failing
427 439 for specific backends. This is intended as a utility for incremental
428 440 development of a new backend implementation.
429 441 """
430 442 if backend_alias not in request.config.getoption('--backends'):
431 443 pytest.skip("Backend %s not selected." % (backend_alias, ))
432 444
433 445 utils.check_xfail_backends(request.node, backend_alias)
434 446 utils.check_skip_backends(request.node, backend_alias)
435 447
436 448 repo_name = 'vcs_test_%s' % (backend_alias, )
437 449 backend = Backend(
438 450 alias=backend_alias,
439 451 repo_name=repo_name,
440 452 test_name=request.node.name,
441 453 test_repo_container=test_repo)
442 454 request.addfinalizer(backend.cleanup)
443 455 return backend
444 456
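# Usage sketch (illustrative): a test using the parametrized ``backend``
# fixture, marked with ``pytest.mark.xfail_backends`` while one backend is
# still incomplete (the marker arguments are assumed to be backend aliases,
# per the docstring above).
@pytest.mark.xfail_backends('svn')
def test_backend_fixture_sketch(backend):
    repo = backend.create_repo(number_of_commits=2)
    # create_repo() updates backend.repo_name to the newly created repo.
    assert repo.repo_name == backend.repo_name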
445 457
446 458 @pytest.fixture
447 459 def backend_git(request, pylonsapp, test_repo):
448 460 return backend(request, 'git', pylonsapp, test_repo)
449 461
450 462
451 463 @pytest.fixture
452 464 def backend_hg(request, pylonsapp, test_repo):
453 465 return backend(request, 'hg', pylonsapp, test_repo)
454 466
455 467
456 468 @pytest.fixture
457 469 def backend_svn(request, pylonsapp, test_repo):
458 470 return backend(request, 'svn', pylonsapp, test_repo)
459 471
460 472
461 473 @pytest.fixture
462 474 def backend_random(backend_git):
463 475 """
464 476 Use this to express that your tests need "a backend".
465 477
466 478 A few of our tests need a backend, so that we can run the code. This
467 479 fixture is intended to be used for such cases. It will pick one of the
468 480 backends and run the tests.
469 481
470 482 The fixture `backend` would run the test multiple times for each
471 483 available backend which is a pure waste of time if the test is
472 484 independent of the backend type.
473 485 """
474 486 # TODO: johbo: Change this to pick a random backend
475 487 return backend_git
476 488
477 489
478 490 @pytest.fixture
479 491 def backend_stub(backend_git):
480 492 """
481 493 Use this to express that your tests need a backend stub
482 494
483 495 TODO: mikhail: Implement a real stub logic instead of returning
484 496 a git backend
485 497 """
486 498 return backend_git
487 499
488 500
489 501 @pytest.fixture
490 502 def repo_stub(backend_stub):
491 503 """
492 504 Use this to express that your tests need a repository stub
493 505 """
494 506 return backend_stub.create_repo()
495 507
496 508
497 509 class Backend(object):
498 510 """
499 511 Represents the test configuration for one supported backend
500 512
501 513 Provides easy access to different test repositories based on
502 514 `__getitem__`. Such repositories will only be created once per test
503 515 session.
504 516 """
505 517
506 518 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
507 519 _master_repo = None
508 520 _commit_ids = {}
509 521
510 522 def __init__(self, alias, repo_name, test_name, test_repo_container):
511 523 self.alias = alias
512 524 self.repo_name = repo_name
513 525 self._cleanup_repos = []
514 526 self._test_name = test_name
515 527 self._test_repo_container = test_repo_container
516 528 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
517 529 # Fixture will survive in the end.
518 530 self._fixture = Fixture()
519 531
520 532 def __getitem__(self, key):
521 533 return self._test_repo_container(key, self.alias)
522 534
523 535 @property
524 536 def repo(self):
525 537 """
526 538 Returns the "current" repository. This is the vcs_test repo or the
527 539 last repo which has been created with `create_repo`.
528 540 """
529 541 from rhodecode.model.db import Repository
530 542 return Repository.get_by_repo_name(self.repo_name)
531 543
532 544 @property
533 545 def default_branch_name(self):
534 546 VcsRepository = get_backend(self.alias)
535 547 return VcsRepository.DEFAULT_BRANCH_NAME
536 548
537 549 @property
538 550 def default_head_id(self):
539 551 """
540 552 Returns the default head id of the underlying backend.
541 553
542 554 This will be the default branch name in case the backend does have a
543 555 default branch. In the other cases it will point to a valid head
544 556 which can serve as the base to create a new commit on top of it.
545 557 """
546 558 vcsrepo = self.repo.scm_instance()
547 559 head_id = (
548 560 vcsrepo.DEFAULT_BRANCH_NAME or
549 561 vcsrepo.commit_ids[-1])
550 562 return head_id
551 563
552 564 @property
553 565 def commit_ids(self):
554 566 """
555 567 Returns the list of commits for the last created repository
556 568 """
557 569 return self._commit_ids
558 570
559 571 def create_master_repo(self, commits):
560 572 """
561 573 Create a repository and remember it as a template.
562 574
563 575 This makes it easy to create derived repositories to construct
564 576 more complex scenarios for diff, compare and pull requests.
565 577
566 578 Returns a commit map which maps from commit message to raw_id.
567 579 """
568 580 self._master_repo = self.create_repo(commits=commits)
569 581 return self._commit_ids
570 582
571 583 def create_repo(
572 584 self, commits=None, number_of_commits=0, heads=None,
573 585 name_suffix=u'', **kwargs):
574 586 """
575 587 Create a repository and record it for later cleanup.
576 588
577 589 :param commits: Optional. A sequence of dict instances.
578 590 Will add a commit per entry to the new repository.
579 591 :param number_of_commits: Optional. If set to a number, this number of
580 592 commits will be added to the new repository.
581 593 :param heads: Optional. Can be set to a sequence of commit
582 594 names which shall be pulled in from the master repository.
583 595
584 596 """
585 597 self.repo_name = self._next_repo_name() + name_suffix
586 598 repo = self._fixture.create_repo(
587 599 self.repo_name, repo_type=self.alias, **kwargs)
588 600 self._cleanup_repos.append(repo.repo_name)
589 601
590 602 commits = commits or [
591 603 {'message': 'Commit %s of %s' % (x, self.repo_name)}
592 604 for x in xrange(number_of_commits)]
593 605 self._add_commits_to_repo(repo.scm_instance(), commits)
594 606 if heads:
595 607 self.pull_heads(repo, heads)
596 608
597 609 return repo
598 610
599 611 def pull_heads(self, repo, heads):
600 612 """
601 613 Make sure that repo contains all commits mentioned in `heads`
602 614 """
603 615 vcsmaster = self._master_repo.scm_instance()
604 616 vcsrepo = repo.scm_instance()
605 617 vcsrepo.config.clear_section('hooks')
606 618 commit_ids = [self._commit_ids[h] for h in heads]
607 619 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
608 620
609 621 def create_fork(self):
610 622 repo_to_fork = self.repo_name
611 623 self.repo_name = self._next_repo_name()
612 624 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
613 625 self._cleanup_repos.append(self.repo_name)
614 626 return repo
615 627
616 628 def new_repo_name(self, suffix=u''):
617 629 self.repo_name = self._next_repo_name() + suffix
618 630 self._cleanup_repos.append(self.repo_name)
619 631 return self.repo_name
620 632
621 633 def _next_repo_name(self):
622 634 return u"%s_%s" % (
623 635 self.invalid_repo_name.sub(u'_', self._test_name),
624 636 len(self._cleanup_repos))
625 637
626 638 def ensure_file(self, filename, content='Test content\n'):
627 639 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
628 640 commits = [
629 641 {'added': [
630 642 FileNode(filename, content=content),
631 643 ]},
632 644 ]
633 645 self._add_commits_to_repo(self.repo.scm_instance(), commits)
634 646
635 647 def enable_downloads(self):
636 648 repo = self.repo
637 649 repo.enable_downloads = True
638 650 Session().add(repo)
639 651 Session().commit()
640 652
641 653 def cleanup(self):
642 654 for repo_name in reversed(self._cleanup_repos):
643 655 self._fixture.destroy_repo(repo_name)
644 656
645 657 def _add_commits_to_repo(self, repo, commits):
646 658 commit_ids = _add_commits_to_repo(repo, commits)
647 659 if not commit_ids:
648 660 return
649 661 self._commit_ids = commit_ids
650 662
651 663 # Creating refs for Git to allow fetching them from remote repository
652 664 if self.alias == 'git':
653 665 refs = {}
654 666 for message in self._commit_ids:
655 667 # TODO: mikhail: do more special chars replacements
656 668 ref_name = 'refs/test-refs/{}'.format(
657 669 message.replace(' ', ''))
658 670 refs[ref_name] = self._commit_ids[message]
659 671 self._create_refs(repo, refs)
660 672
661 673 def _create_refs(self, repo, refs):
662 674 for ref_name in refs:
663 675 repo.set_refs(ref_name, refs[ref_name])
664 676
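# Usage sketch (illustrative): combining ``create_master_repo`` and
# ``create_repo(heads=...)`` to derive repositories from a common template,
# the same pattern PRTestUtility uses further down. Commit messages are the
# keys of the returned commit map.
def test_master_repo_sketch(backend):
    commit_map = backend.create_master_repo(
        [{'message': 'c1'}, {'message': 'c2'}])
    target = backend.create_repo(heads=['c1'], name_suffix=u'-target')
    source = backend.create_repo(heads=['c1', 'c2'], name_suffix=u'-source')
    assert commit_map['c1'] != commit_map['c2']
    assert target.repo_name != source.repo_name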
665 677
666 678 @pytest.fixture
667 679 def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
668 680 """
669 681 Parametrized fixture which represents a single vcs backend implementation.
670 682
671 683 See the fixture `backend` for more details. This one implements the same
672 684 concept, but on the vcs level. So it does not provide model instances etc.
673 685
674 686 Parameters are generated dynamically, see :func:`pytest_generate_tests`
675 687 for how this works.
676 688 """
677 689 if backend_alias not in request.config.getoption('--backends'):
678 690 pytest.skip("Backend %s not selected." % (backend_alias, ))
679 691
680 692 utils.check_xfail_backends(request.node, backend_alias)
681 693 utils.check_skip_backends(request.node, backend_alias)
682 694
683 695 repo_name = 'vcs_test_%s' % (backend_alias, )
684 696 repo_path = os.path.join(tests_tmp_path, repo_name)
685 697 backend = VcsBackend(
686 698 alias=backend_alias,
687 699 repo_path=repo_path,
688 700 test_name=request.node.name,
689 701 test_repo_container=test_repo)
690 702 request.addfinalizer(backend.cleanup)
691 703 return backend
692 704
693 705
694 706 @pytest.fixture
695 707 def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
696 708 return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)
697 709
698 710
699 711 @pytest.fixture
700 712 def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
701 713 return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)
702 714
703 715
704 716 @pytest.fixture
705 717 def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
706 718 return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)
707 719
708 720
709 721 @pytest.fixture
710 722 def vcsbackend_random(vcsbackend_git):
711 723 """
712 724 Use this to express that your tests need "a vcsbackend".
713 725
714 726 The fixture `vcsbackend` would run the test multiple times for each
715 727 available vcs backend which is a pure waste of time if the test is
716 728 independent of the vcs backend type.
717 729 """
718 730 # TODO: johbo: Change this to pick a random backend
719 731 return vcsbackend_git
720 732
721 733
722 734 @pytest.fixture
723 735 def vcsbackend_stub(vcsbackend_git):
724 736 """
725 737 Use this to express that your test just needs a stub of a vcsbackend.
726 738
727 739 Plan is to eventually implement an in-memory stub to speed tests up.
728 740 """
729 741 return vcsbackend_git
730 742
731 743
732 744 class VcsBackend(object):
733 745 """
734 746 Represents the test configuration for one supported vcs backend.
735 747 """
736 748
737 749 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
738 750
739 751 def __init__(self, alias, repo_path, test_name, test_repo_container):
740 752 self.alias = alias
741 753 self._repo_path = repo_path
742 754 self._cleanup_repos = []
743 755 self._test_name = test_name
744 756 self._test_repo_container = test_repo_container
745 757
746 758 def __getitem__(self, key):
747 759 return self._test_repo_container(key, self.alias).scm_instance()
748 760
749 761 @property
750 762 def repo(self):
751 763 """
752 764 Returns the "current" repository. This is the vcs_test repo or the last
753 765 repo which has been created.
754 766 """
755 767 Repository = get_backend(self.alias)
756 768 return Repository(self._repo_path)
757 769
758 770 @property
759 771 def backend(self):
760 772 """
761 773 Returns the backend implementation class.
762 774 """
763 775 return get_backend(self.alias)
764 776
765 777 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
766 778 repo_name = self._next_repo_name()
767 779 self._repo_path = get_new_dir(repo_name)
768 780 repo_class = get_backend(self.alias)
769 781 src_url = None
770 782 if _clone_repo:
771 783 src_url = _clone_repo.path
772 784 repo = repo_class(self._repo_path, create=True, src_url=src_url)
773 785 self._cleanup_repos.append(repo)
774 786
775 787 commits = commits or [
776 788 {'message': 'Commit %s of %s' % (x, repo_name)}
777 789 for x in xrange(number_of_commits)]
778 790 _add_commits_to_repo(repo, commits)
779 791 return repo
780 792
781 793 def clone_repo(self, repo):
782 794 return self.create_repo(_clone_repo=repo)
783 795
784 796 def cleanup(self):
785 797 for repo in self._cleanup_repos:
786 798 shutil.rmtree(repo.path)
787 799
788 800 def new_repo_path(self):
789 801 repo_name = self._next_repo_name()
790 802 self._repo_path = get_new_dir(repo_name)
791 803 return self._repo_path
792 804
793 805 def _next_repo_name(self):
794 806 return "%s_%s" % (
795 807 self.invalid_repo_name.sub('_', self._test_name),
796 808 len(self._cleanup_repos))
797 809
798 810 def add_file(self, repo, filename, content='Test content\n'):
799 811 imc = repo.in_memory_commit
800 812 imc.add(FileNode(filename, content=content))
801 813 imc.commit(
802 814 message=u'Automatic commit from vcsbackend fixture',
803 815 author=u'Automatic')
804 816
805 817 def ensure_file(self, filename, content='Test content\n'):
806 818 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
807 819 self.add_file(self.repo, filename, content)
808 820
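# Usage sketch (illustrative): working on the plain vcs level through the
# ``vcsbackend`` fixture, without going through the model layer.
def test_vcsbackend_sketch(vcsbackend):
    repo = vcsbackend.create_repo(number_of_commits=2)
    vcsbackend.add_file(repo, 'hello.txt', content='Hello world\n')
    # ``vcsbackend.repo`` re-opens the repository at the path just created.
    assert vcsbackend.repo.path == repo.path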
809 821
810 822 def _add_commits_to_repo(vcs_repo, commits):
811 823 commit_ids = {}
812 824 if not commits:
813 825 return commit_ids
814 826
815 827 imc = vcs_repo.in_memory_commit
816 828 commit = None
817 829
818 830 for idx, commit in enumerate(commits):
819 831 message = unicode(commit.get('message', 'Commit %s' % idx))
820 832
821 833 for node in commit.get('added', []):
822 834 imc.add(FileNode(node.path, content=node.content))
823 835 for node in commit.get('changed', []):
824 836 imc.change(FileNode(node.path, content=node.content))
825 837 for node in commit.get('removed', []):
826 838 imc.remove(FileNode(node.path))
827 839
828 840 parents = [
829 841 vcs_repo.get_commit(commit_id=commit_ids[p])
830 842 for p in commit.get('parents', [])]
831 843
832 844 operations = ('added', 'changed', 'removed')
833 845 if not any((commit.get(o) for o in operations)):
834 846 imc.add(FileNode('file_%s' % idx, content=message))
835 847
836 848 commit = imc.commit(
837 849 message=message,
838 850 author=unicode(commit.get('author', 'Automatic')),
839 851 date=commit.get('date'),
840 852 branch=commit.get('branch'),
841 853 parents=parents)
842 854
843 855 commit_ids[commit.message] = commit.raw_id
844 856
845 857 return commit_ids
846 858
847 859
848 860 @pytest.fixture
849 861 def reposerver(request):
850 862 """
851 863 Allows serving a backend repository.
852 864 """
853 865
854 866 repo_server = RepoServer()
855 867 request.addfinalizer(repo_server.cleanup)
856 868 return repo_server
857 869
858 870
859 871 class RepoServer(object):
860 872 """
861 873 Utility to serve a local repository for the duration of a test case.
862 874
863 875 Supports only Subversion so far.
864 876 """
865 877
866 878 url = None
867 879
868 880 def __init__(self):
869 881 self._cleanup_servers = []
870 882
871 883 def serve(self, vcsrepo):
872 884 if vcsrepo.alias != 'svn':
873 885 raise TypeError("Backend %s not supported" % vcsrepo.alias)
874 886
875 887 proc = subprocess.Popen(
876 888 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
877 889 '--root', vcsrepo.path])
878 890 self._cleanup_servers.append(proc)
879 891 self.url = 'svn://localhost'
880 892
881 893 def cleanup(self):
882 894 for proc in self._cleanup_servers:
883 895 proc.terminate()
884 896
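# Usage sketch (illustrative): serving a freshly created Subversion test
# repository over svn:// for the duration of a test via ``reposerver``.
def test_reposerver_sketch(reposerver, vcsbackend_svn):
    vcsrepo = vcsbackend_svn.create_repo(number_of_commits=1)
    reposerver.serve(vcsrepo)
    assert reposerver.url == 'svn://localhost'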
885 897
886 898 @pytest.fixture
887 899 def pr_util(backend, request):
888 900 """
889 901 Utility for tests of models and for functional tests around pull requests.
890 902
891 903 It gives an instance of :class:`PRTestUtility` which provides various
892 904 utility methods around one pull request.
893 905
894 906 This fixture uses `backend` and inherits its parameterization.
895 907 """
896 908
897 909 util = PRTestUtility(backend)
898 910
899 911 @request.addfinalizer
900 912 def cleanup():
901 913 util.cleanup()
902 914
903 915 return util
904 916
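# Usage sketch (illustrative): a model-level test built on ``pr_util``. With
# no arguments, create_pull_request() builds master, target and source repos
# from the default commits c1..c3 and puts only 'c2' into the revisions.
def test_pr_util_sketch(pr_util):
    pull_request = pr_util.create_pull_request()
    assert len(pull_request.revisions) == 1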
905 917
906 918 class PRTestUtility(object):
907 919
908 920 pull_request = None
909 921 pull_request_id = None
910 922 mergeable_patcher = None
911 923 mergeable_mock = None
912 924 notification_patcher = None
913 925
914 926 def __init__(self, backend):
915 927 self.backend = backend
916 928
917 929 def create_pull_request(
918 930 self, commits=None, target_head=None, source_head=None,
919 931 revisions=None, approved=False, author=None, mergeable=False,
920 932 enable_notifications=True, name_suffix=u'', reviewers=None,
921 933 title=u"Test", description=u"Description"):
922 934 self.set_mergeable(mergeable)
923 935 if not enable_notifications:
924 936 # mock notification side effect
925 937 self.notification_patcher = mock.patch(
926 938 'rhodecode.model.notification.NotificationModel.create')
927 939 self.notification_patcher.start()
928 940
929 941 if not self.pull_request:
930 942 if not commits:
931 943 commits = [
932 944 {'message': 'c1'},
933 945 {'message': 'c2'},
934 946 {'message': 'c3'},
935 947 ]
936 948 target_head = 'c1'
937 949 source_head = 'c2'
938 950 revisions = ['c2']
939 951
940 952 self.commit_ids = self.backend.create_master_repo(commits)
941 953 self.target_repository = self.backend.create_repo(
942 954 heads=[target_head], name_suffix=name_suffix)
943 955 self.source_repository = self.backend.create_repo(
944 956 heads=[source_head], name_suffix=name_suffix)
945 957 self.author = author or UserModel().get_by_username(
946 958 TEST_USER_ADMIN_LOGIN)
947 959
948 960 model = PullRequestModel()
949 961 self.create_parameters = {
950 962 'created_by': self.author,
951 963 'source_repo': self.source_repository.repo_name,
952 964 'source_ref': self._default_branch_reference(source_head),
953 965 'target_repo': self.target_repository.repo_name,
954 966 'target_ref': self._default_branch_reference(target_head),
955 967 'revisions': [self.commit_ids[r] for r in revisions],
956 968 'reviewers': reviewers or self._get_reviewers(),
957 969 'title': title,
958 970 'description': description,
959 971 }
960 972 self.pull_request = model.create(**self.create_parameters)
961 973 assert model.get_versions(self.pull_request) == []
962 974
963 975 self.pull_request_id = self.pull_request.pull_request_id
964 976
965 977 if approved:
966 978 self.approve()
967 979
968 980 Session().add(self.pull_request)
969 981 Session().commit()
970 982
971 983 return self.pull_request
972 984
973 985 def approve(self):
974 986 self.create_status_votes(
975 987 ChangesetStatus.STATUS_APPROVED,
976 988 *self.pull_request.reviewers)
977 989
978 990 def close(self):
979 991 PullRequestModel().close_pull_request(self.pull_request, self.author)
980 992
981 993 def _default_branch_reference(self, commit_message):
982 994 reference = '%s:%s:%s' % (
983 995 'branch',
984 996 self.backend.default_branch_name,
985 997 self.commit_ids[commit_message])
986 998 return reference
987 999
988 1000 def _get_reviewers(self):
989 1001 model = UserModel()
990 1002 return [
991 1003 model.get_by_username(TEST_USER_REGULAR_LOGIN),
992 1004 model.get_by_username(TEST_USER_REGULAR2_LOGIN),
993 1005 ]
994 1006
995 1007 def update_source_repository(self, head=None):
996 1008 heads = [head or 'c3']
997 1009 self.backend.pull_heads(self.source_repository, heads=heads)
998 1010
999 1011 def add_one_commit(self, head=None):
1000 1012 self.update_source_repository(head=head)
1001 1013 old_commit_ids = set(self.pull_request.revisions)
1002 1014 PullRequestModel().update_commits(self.pull_request)
1003 1015 commit_ids = set(self.pull_request.revisions)
1004 1016 new_commit_ids = commit_ids - old_commit_ids
1005 1017 assert len(new_commit_ids) == 1
1006 1018 return new_commit_ids.pop()
1007 1019
1008 1020 def remove_one_commit(self):
1009 1021 assert len(self.pull_request.revisions) == 2
1010 1022 source_vcs = self.source_repository.scm_instance()
1011 1023 removed_commit_id = source_vcs.commit_ids[-1]
1012 1024
1013 1025 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1014 1026 # remove the if once that's sorted out.
1015 1027 if self.backend.alias == "git":
1016 1028 kwargs = {'branch_name': self.backend.default_branch_name}
1017 1029 else:
1018 1030 kwargs = {}
1019 1031 source_vcs.strip(removed_commit_id, **kwargs)
1020 1032
1021 1033 PullRequestModel().update_commits(self.pull_request)
1022 1034 assert len(self.pull_request.revisions) == 1
1023 1035 return removed_commit_id
1024 1036
1025 1037 def create_comment(self, linked_to=None):
1026 1038 comment = ChangesetCommentsModel().create(
1027 1039 text=u"Test comment",
1028 1040 repo=self.target_repository.repo_name,
1029 1041 user=self.author,
1030 1042 pull_request=self.pull_request)
1031 1043 assert comment.pull_request_version_id is None
1032 1044
1033 1045 if linked_to:
1034 1046 PullRequestModel()._link_comments_to_version(linked_to)
1035 1047
1036 1048 return comment
1037 1049
1038 1050 def create_inline_comment(
1039 1051 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1040 1052 comment = ChangesetCommentsModel().create(
1041 1053 text=u"Test comment",
1042 1054 repo=self.target_repository.repo_name,
1043 1055 user=self.author,
1044 1056 line_no=line_no,
1045 1057 f_path=file_path,
1046 1058 pull_request=self.pull_request)
1047 1059 assert comment.pull_request_version_id is None
1048 1060
1049 1061 if linked_to:
1050 1062 PullRequestModel()._link_comments_to_version(linked_to)
1051 1063
1052 1064 return comment
1053 1065
1054 1066 def create_version_of_pull_request(self):
1055 1067 pull_request = self.create_pull_request()
1056 1068 version = PullRequestModel()._create_version_from_snapshot(
1057 1069 pull_request)
1058 1070 return version
1059 1071
1060 1072 def create_status_votes(self, status, *reviewers):
1061 1073 for reviewer in reviewers:
1062 1074 ChangesetStatusModel().set_status(
1063 1075 repo=self.pull_request.target_repo,
1064 1076 status=status,
1065 1077 user=reviewer.user_id,
1066 1078 pull_request=self.pull_request)
1067 1079
1068 1080 def set_mergeable(self, value):
1069 1081 if not self.mergeable_patcher:
1070 1082 self.mergeable_patcher = mock.patch.object(
1071 1083 VcsSettingsModel, 'get_general_settings')
1072 1084 self.mergeable_mock = self.mergeable_patcher.start()
1073 1085 self.mergeable_mock.return_value = {
1074 1086 'rhodecode_pr_merge_enabled': value}
1075 1087
1076 1088 def cleanup(self):
1077 1089 # In case the source repository is already cleaned up, the pull
1078 1090 # request will already be deleted.
1079 1091 pull_request = PullRequest().get(self.pull_request_id)
1080 1092 if pull_request:
1081 1093 PullRequestModel().delete(pull_request)
1082 1094 Session().commit()
1083 1095
1084 1096 if self.notification_patcher:
1085 1097 self.notification_patcher.stop()
1086 1098
1087 1099 if self.mergeable_patcher:
1088 1100 self.mergeable_patcher.stop()
1089 1101
1090 1102
1091 1103 @pytest.fixture
1092 1104 def user_admin(pylonsapp):
1093 1105 """
1094 1106 Provides the default admin test user as an instance of `db.User`.
1095 1107 """
1096 1108 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1097 1109 return user
1098 1110
1099 1111
1100 1112 @pytest.fixture
1101 1113 def user_regular(pylonsapp):
1102 1114 """
1103 1115 Provides the default regular test user as an instance of `db.User`.
1104 1116 """
1105 1117 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1106 1118 return user
1107 1119
1108 1120
1109 1121 @pytest.fixture
1110 1122 def user_util(request, pylonsapp):
1111 1123 """
1112 1124 Provides a wired instance of `UserUtility` with integrated cleanup.
1113 1125 """
1114 1126 utility = UserUtility(test_name=request.node.name)
1115 1127 request.addfinalizer(utility.cleanup)
1116 1128 return utility
1117 1129
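# Usage sketch (illustrative): creating a throwaway user and granting it a
# repository permission through ``user_util``; the finalizer registered above
# revokes and deletes everything afterwards. 'repository.read' assumes the
# standard RhodeCode permission name.
def test_user_util_sketch(user_util, repo_stub):
    user = user_util.create_user()
    user_util.grant_user_permission_to_repo(repo_stub, user, 'repository.read')
    assert user.user_id in user_util.user_ids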
1118 1130
1119 1131 # TODO: johbo: Split this up into utilities per domain or something similar
1120 1132 class UserUtility(object):
1121 1133
1122 1134 def __init__(self, test_name="test"):
1123 1135 self._test_name = test_name
1124 1136 self.fixture = Fixture()
1125 1137 self.repo_group_ids = []
1126 1138 self.user_ids = []
1127 1139 self.user_group_ids = []
1128 1140 self.user_repo_permission_ids = []
1129 1141 self.user_group_repo_permission_ids = []
1130 1142 self.user_repo_group_permission_ids = []
1131 1143 self.user_group_repo_group_permission_ids = []
1132 1144 self.user_user_group_permission_ids = []
1133 1145 self.user_group_user_group_permission_ids = []
1134 1146 self.user_permissions = []
1135 1147
1136 1148 def create_repo_group(
1137 1149 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1138 1150 group_name = "{prefix}_repogroup_{count}".format(
1139 1151 prefix=self._test_name,
1140 1152 count=len(self.repo_group_ids))
1141 1153 repo_group = self.fixture.create_repo_group(
1142 1154 group_name, cur_user=owner)
1143 1155 if auto_cleanup:
1144 1156 self.repo_group_ids.append(repo_group.group_id)
1145 1157 return repo_group
1146 1158
1147 1159 def create_user(self, auto_cleanup=True, **kwargs):
1148 1160 user_name = "{prefix}_user_{count}".format(
1149 1161 prefix=self._test_name,
1150 1162 count=len(self.user_ids))
1151 1163 user = self.fixture.create_user(user_name, **kwargs)
1152 1164 if auto_cleanup:
1153 1165 self.user_ids.append(user.user_id)
1154 1166 return user
1155 1167
1156 1168 def create_user_with_group(self):
1157 1169 user = self.create_user()
1158 1170 user_group = self.create_user_group(members=[user])
1159 1171 return user, user_group
1160 1172
1161 1173 def create_user_group(self, members=None, auto_cleanup=True, **kwargs):
1162 1174 group_name = "{prefix}_usergroup_{count}".format(
1163 1175 prefix=self._test_name,
1164 1176 count=len(self.user_group_ids))
1165 1177 user_group = self.fixture.create_user_group(group_name, **kwargs)
1166 1178 if auto_cleanup:
1167 1179 self.user_group_ids.append(user_group.users_group_id)
1168 1180 if members:
1169 1181 for user in members:
1170 1182 UserGroupModel().add_user_to_group(user_group, user)
1171 1183 return user_group
1172 1184
1173 1185 def grant_user_permission(self, user_name, permission_name):
1174 1186 self._inherit_default_user_permissions(user_name, False)
1175 1187 self.user_permissions.append((user_name, permission_name))
1176 1188
1177 1189 def grant_user_permission_to_repo_group(
1178 1190 self, repo_group, user, permission_name):
1179 1191 permission = RepoGroupModel().grant_user_permission(
1180 1192 repo_group, user, permission_name)
1181 1193 self.user_repo_group_permission_ids.append(
1182 1194 (repo_group.group_id, user.user_id))
1183 1195 return permission
1184 1196
1185 1197 def grant_user_group_permission_to_repo_group(
1186 1198 self, repo_group, user_group, permission_name):
1187 1199 permission = RepoGroupModel().grant_user_group_permission(
1188 1200 repo_group, user_group, permission_name)
1189 1201 self.user_group_repo_group_permission_ids.append(
1190 1202 (repo_group.group_id, user_group.users_group_id))
1191 1203 return permission
1192 1204
1193 1205 def grant_user_permission_to_repo(
1194 1206 self, repo, user, permission_name):
1195 1207 permission = RepoModel().grant_user_permission(
1196 1208 repo, user, permission_name)
1197 1209 self.user_repo_permission_ids.append(
1198 1210 (repo.repo_id, user.user_id))
1199 1211 return permission
1200 1212
1201 1213 def grant_user_group_permission_to_repo(
1202 1214 self, repo, user_group, permission_name):
1203 1215 permission = RepoModel().grant_user_group_permission(
1204 1216 repo, user_group, permission_name)
1205 1217 self.user_group_repo_permission_ids.append(
1206 1218 (repo.repo_id, user_group.users_group_id))
1207 1219 return permission
1208 1220
1209 1221 def grant_user_permission_to_user_group(
1210 1222 self, target_user_group, user, permission_name):
1211 1223 permission = UserGroupModel().grant_user_permission(
1212 1224 target_user_group, user, permission_name)
1213 1225 self.user_user_group_permission_ids.append(
1214 1226 (target_user_group.users_group_id, user.user_id))
1215 1227 return permission
1216 1228
1217 1229 def grant_user_group_permission_to_user_group(
1218 1230 self, target_user_group, user_group, permission_name):
1219 1231 permission = UserGroupModel().grant_user_group_permission(
1220 1232 target_user_group, user_group, permission_name)
1221 1233 self.user_group_user_group_permission_ids.append(
1222 1234 (target_user_group.users_group_id, user_group.users_group_id))
1223 1235 return permission
1224 1236
1225 1237 def revoke_user_permission(self, user_name, permission_name):
1226 1238 self._inherit_default_user_permissions(user_name, True)
1227 1239 UserModel().revoke_perm(user_name, permission_name)
1228 1240
1229 1241 def _inherit_default_user_permissions(self, user_name, value):
1230 1242 user = UserModel().get_by_username(user_name)
1231 1243 user.inherit_default_permissions = value
1232 1244 Session().add(user)
1233 1245 Session().commit()
1234 1246
1235 1247 def cleanup(self):
1236 1248 self._cleanup_permissions()
1237 1249 self._cleanup_repo_groups()
1238 1250 self._cleanup_user_groups()
1239 1251 self._cleanup_users()
1240 1252
1241 1253 def _cleanup_permissions(self):
1242 1254 if self.user_permissions:
1243 1255 for user_name, permission_name in self.user_permissions:
1244 1256 self.revoke_user_permission(user_name, permission_name)
1245 1257
1246 1258 for permission in self.user_repo_permission_ids:
1247 1259 RepoModel().revoke_user_permission(*permission)
1248 1260
1249 1261 for permission in self.user_group_repo_permission_ids:
1250 1262 RepoModel().revoke_user_group_permission(*permission)
1251 1263
1252 1264 for permission in self.user_repo_group_permission_ids:
1253 1265 RepoGroupModel().revoke_user_permission(*permission)
1254 1266
1255 1267 for permission in self.user_group_repo_group_permission_ids:
1256 1268 RepoGroupModel().revoke_user_group_permission(*permission)
1257 1269
1258 1270 for permission in self.user_user_group_permission_ids:
1259 1271 UserGroupModel().revoke_user_permission(*permission)
1260 1272
1261 1273 for permission in self.user_group_user_group_permission_ids:
1262 1274 UserGroupModel().revoke_user_group_permission(*permission)
1263 1275
1264 1276 def _cleanup_repo_groups(self):
1265 1277 def _repo_group_compare(first_group_id, second_group_id):
1266 1278 """
1267 1279 Gives higher priority to the groups with the most complex paths
1268 1280 """
1269 1281 first_group = RepoGroup.get(first_group_id)
1270 1282 second_group = RepoGroup.get(second_group_id)
1271 1283 first_group_parts = (
1272 1284 len(first_group.group_name.split('/')) if first_group else 0)
1273 1285 second_group_parts = (
1274 1286 len(second_group.group_name.split('/')) if second_group else 0)
1275 1287 return cmp(second_group_parts, first_group_parts)
1276 1288
1277 1289 sorted_repo_group_ids = sorted(
1278 1290 self.repo_group_ids, cmp=_repo_group_compare)
1279 1291 for repo_group_id in sorted_repo_group_ids:
1280 1292 self.fixture.destroy_repo_group(repo_group_id)
1281 1293
1282 1294 def _cleanup_user_groups(self):
1283 1295 def _user_group_compare(first_group_id, second_group_id):
1284 1296 """
1285 1297 Gives higher priority to the groups with the most complex paths
1286 1298 """
1287 1299 first_group = UserGroup.get(first_group_id)
1288 1300 second_group = UserGroup.get(second_group_id)
1289 1301 first_group_parts = (
1290 1302 len(first_group.users_group_name.split('/'))
1291 1303 if first_group else 0)
1292 1304 second_group_parts = (
1293 1305 len(second_group.users_group_name.split('/'))
1294 1306 if second_group else 0)
1295 1307 return cmp(second_group_parts, first_group_parts)
1296 1308
1297 1309 sorted_user_group_ids = sorted(
1298 1310 self.user_group_ids, cmp=_user_group_compare)
1299 1311 for user_group_id in sorted_user_group_ids:
1300 1312 self.fixture.destroy_user_group(user_group_id)
1301 1313
1302 1314 def _cleanup_users(self):
1303 1315 for user_id in self.user_ids:
1304 1316 self.fixture.destroy_user(user_id)
1305 1317
1306 1318
1307 1319 # TODO: Think about moving this into a pytest-pyro package and make it a
1308 1320 # pytest plugin
1309 1321 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1310 1322 def pytest_runtest_makereport(item, call):
1311 1323 """
1312 1324 Adds the remote traceback to the report if the exception carries this information.
1313 1325
1314 1326 Pyro4 attaches this information as the attribute `_pyroTraceback`
1315 1327 to the exception instance.
1316 1328 """
1317 1329 outcome = yield
1318 1330 report = outcome.get_result()
1319 1331 if call.excinfo:
1320 1332 _add_pyro_remote_traceback(report, call.excinfo.value)
1321 1333
1322 1334
1323 1335 def _add_pyro_remote_traceback(report, exc):
1324 1336 pyro_traceback = getattr(exc, '_pyroTraceback', None)
1325 1337
1326 1338 if pyro_traceback:
1327 1339 traceback = ''.join(pyro_traceback)
1328 1340 section = 'Pyro4 remote traceback ' + report.when
1329 1341 report.sections.append((section, traceback))
1330 1342
1331 1343
1332 1344 @pytest.fixture(scope='session')
1333 1345 def testrun():
1334 1346 return {
1335 1347 'uuid': uuid.uuid4(),
1336 1348 'start': datetime.datetime.utcnow().isoformat(),
1337 1349 'timestamp': int(time.time()),
1338 1350 }
1339 1351
1340 1352
1341 1353 @pytest.fixture(autouse=True)
1342 1354 def collect_appenlight_stats(request, testrun):
1343 1355 """
1344 1356 This fixture reports the memory consumption of individual tests.
1345 1357
1346 1358 It gathers data based on `psutil` and sends them to Appenlight. The option
1347 1359 ``--ae`` has to be used to enable this fixture and the API key for your
1348 1360 application has to be provided in ``--ae-key``.
1349 1361 """
1350 1362 try:
1351 1363 # cygwin does not have psutil support yet.
1352 1364 import psutil
1353 1365 except ImportError:
1354 1366 return
1355 1367
1356 1368 if not request.config.getoption('--appenlight'):
1357 1369 return
1358 1370 else:
1359 1371 # Only request the pylonsapp fixture if appenlight tracking is
1360 1372 # enabled. This will speed up a test run of unit tests by 2 to 3
1361 1373 # seconds if appenlight is not enabled.
1362 1374 pylonsapp = request.getfuncargvalue("pylonsapp")
1363 1375 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1364 1376 client = AppenlightClient(
1365 1377 url=url,
1366 1378 api_key=request.config.getoption('--appenlight-api-key'),
1367 1379 namespace=request.node.nodeid,
1368 1380 request=str(testrun['uuid']),
1369 1381 testrun=testrun)
1370 1382
1371 1383 client.collect({
1372 1384 'message': "Starting",
1373 1385 })
1374 1386
1375 1387 server_and_port = pylonsapp.config['vcs.server']
1376 1388 server = create_vcsserver_proxy(server_and_port)
1377 1389 with server:
1378 1390 vcs_pid = server.get_pid()
1379 1391 server.run_gc()
1380 1392 vcs_process = psutil.Process(vcs_pid)
1381 1393 mem = vcs_process.memory_info()
1382 1394 client.tag_before('vcsserver.rss', mem.rss)
1383 1395 client.tag_before('vcsserver.vms', mem.vms)
1384 1396
1385 1397 test_process = psutil.Process()
1386 1398 mem = test_process.memory_info()
1387 1399 client.tag_before('test.rss', mem.rss)
1388 1400 client.tag_before('test.vms', mem.vms)
1389 1401
1390 1402 client.tag_before('time', time.time())
1391 1403
1392 1404 @request.addfinalizer
1393 1405 def send_stats():
1394 1406 client.tag_after('time', time.time())
1395 1407 with server:
1396 1408 gc_stats = server.run_gc()
1397 1409 for tag, value in gc_stats.items():
1398 1410 client.tag_after(tag, value)
1399 1411 mem = vcs_process.memory_info()
1400 1412 client.tag_after('vcsserver.rss', mem.rss)
1401 1413 client.tag_after('vcsserver.vms', mem.vms)
1402 1414
1403 1415 mem = test_process.memory_info()
1404 1416 client.tag_after('test.rss', mem.rss)
1405 1417 client.tag_after('test.vms', mem.vms)
1406 1418
1407 1419 client.collect({
1408 1420 'message': "Finished",
1409 1421 })
1410 1422 client.send_stats()
1411 1423
1412 1424 return client
1413 1425
1414 1426
1415 1427 class AppenlightClient():
1416 1428
1417 1429 url_template = '{url}?protocol_version=0.5'
1418 1430
1419 1431 def __init__(
1420 1432 self, url, api_key, add_server=True, add_timestamp=True,
1421 1433 namespace=None, request=None, testrun=None):
1422 1434 self.url = self.url_template.format(url=url)
1423 1435 self.api_key = api_key
1424 1436 self.add_server = add_server
1425 1437 self.add_timestamp = add_timestamp
1426 1438 self.namespace = namespace
1427 1439 self.request = request
1428 1440 self.server = socket.getfqdn(socket.gethostname())
1429 1441 self.tags_before = {}
1430 1442 self.tags_after = {}
1431 1443 self.stats = []
1432 1444 self.testrun = testrun or {}
1433 1445
1434 1446 def tag_before(self, tag, value):
1435 1447 self.tags_before[tag] = value
1436 1448
1437 1449 def tag_after(self, tag, value):
1438 1450 self.tags_after[tag] = value
1439 1451
1440 1452 def collect(self, data):
1441 1453 if self.add_server:
1442 1454 data.setdefault('server', self.server)
1443 1455 if self.add_timestamp:
1444 1456 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1445 1457 if self.namespace:
1446 1458 data.setdefault('namespace', self.namespace)
1447 1459 if self.request:
1448 1460 data.setdefault('request', self.request)
1449 1461 self.stats.append(data)
1450 1462
1451 1463 def send_stats(self):
1452 1464 tags = [
1453 1465 ('testrun', self.request),
1454 1466 ('testrun.start', self.testrun['start']),
1455 1467 ('testrun.timestamp', self.testrun['timestamp']),
1456 1468 ('test', self.namespace),
1457 1469 ]
1458 1470 for key, value in self.tags_before.items():
1459 1471 tags.append((key + '.before', value))
1460 1472 try:
1461 1473 delta = self.tags_after[key] - value
1462 1474 tags.append((key + '.delta', delta))
1463 1475 except Exception:
1464 1476 pass
1465 1477 for key, value in self.tags_after.items():
1466 1478 tags.append((key + '.after', value))
1467 1479 self.collect({
1468 1480 'message': "Collected tags",
1469 1481 'tags': tags,
1470 1482 })
1471 1483
1472 1484 response = requests.post(
1473 1485 self.url,
1474 1486 headers={
1475 1487 'X-appenlight-api-key': self.api_key},
1476 1488 json=self.stats,
1477 1489 )
1478 1490
1479 1491 if response.status_code != 200:
1480 1492 pprint.pprint(self.stats)
1481 1493 print response.headers
1482 1494 print response.text
1483 1495 raise Exception('Sending to appenlight failed')
1484 1496
1485 1497
1486 1498 @pytest.fixture
1487 1499 def gist_util(request, pylonsapp):
1488 1500 """
1489 1501 Provides a wired instance of `GistUtility` with integrated cleanup.
1490 1502 """
1491 1503 utility = GistUtility()
1492 1504 request.addfinalizer(utility.cleanup)
1493 1505 return utility
1494 1506
1495 1507
1496 1508 class GistUtility(object):
1497 1509 def __init__(self):
1498 1510 self.fixture = Fixture()
1499 1511 self.gist_ids = []
1500 1512
1501 1513 def create_gist(self, **kwargs):
1502 1514 gist = self.fixture.create_gist(**kwargs)
1503 1515 self.gist_ids.append(gist.gist_id)
1504 1516 return gist
1505 1517
1506 1518 def cleanup(self):
1507 1519 for id_ in self.gist_ids:
1508 1520 self.fixture.destroy_gists(str(id_))
1509 1521
1510 1522
1511 1523 @pytest.fixture
1512 1524 def enabled_backends(request):
1513 1525 backends = request.config.option.backends
1514 1526 return backends[:]
1515 1527
1516 1528
1517 1529 @pytest.fixture
1518 1530 def settings_util(request):
1519 1531 """
1520 1532 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1521 1533 """
1522 1534 utility = SettingsUtility()
1523 1535 request.addfinalizer(utility.cleanup)
1524 1536 return utility
1525 1537
1526 1538
1527 1539 class SettingsUtility(object):
1528 1540 def __init__(self):
1529 1541 self.rhodecode_ui_ids = []
1530 1542 self.rhodecode_setting_ids = []
1531 1543 self.repo_rhodecode_ui_ids = []
1532 1544 self.repo_rhodecode_setting_ids = []
1533 1545
1534 1546 def create_repo_rhodecode_ui(
1535 1547 self, repo, section, value, key=None, active=True, cleanup=True):
1536 1548 key = key or hashlib.sha1(
1537 1549 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1538 1550
1539 1551 setting = RepoRhodeCodeUi()
1540 1552 setting.repository_id = repo.repo_id
1541 1553 setting.ui_section = section
1542 1554 setting.ui_value = value
1543 1555 setting.ui_key = key
1544 1556 setting.ui_active = active
1545 1557 Session().add(setting)
1546 1558 Session().commit()
1547 1559
1548 1560 if cleanup:
1549 1561 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1550 1562 return setting
1551 1563
1552 1564 def create_rhodecode_ui(
1553 1565 self, section, value, key=None, active=True, cleanup=True):
1554 1566 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1555 1567
1556 1568 setting = RhodeCodeUi()
1557 1569 setting.ui_section = section
1558 1570 setting.ui_value = value
1559 1571 setting.ui_key = key
1560 1572 setting.ui_active = active
1561 1573 Session().add(setting)
1562 1574 Session().commit()
1563 1575
1564 1576 if cleanup:
1565 1577 self.rhodecode_ui_ids.append(setting.ui_id)
1566 1578 return setting
1567 1579
1568 1580 def create_repo_rhodecode_setting(
1569 1581 self, repo, name, value, type_, cleanup=True):
1570 1582 setting = RepoRhodeCodeSetting(
1571 1583 repo.repo_id, key=name, val=value, type=type_)
1572 1584 Session().add(setting)
1573 1585 Session().commit()
1574 1586
1575 1587 if cleanup:
1576 1588 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1577 1589 return setting
1578 1590
1579 1591 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1580 1592 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1581 1593 Session().add(setting)
1582 1594 Session().commit()
1583 1595
1584 1596 if cleanup:
1585 1597 self.rhodecode_setting_ids.append(setting.app_settings_id)
1586 1598
1587 1599 return setting
1588 1600
1589 1601 def cleanup(self):
1590 1602 for id_ in self.rhodecode_ui_ids:
1591 1603 setting = RhodeCodeUi.get(id_)
1592 1604 Session().delete(setting)
1593 1605
1594 1606 for id_ in self.rhodecode_setting_ids:
1595 1607 setting = RhodeCodeSetting.get(id_)
1596 1608 Session().delete(setting)
1597 1609
1598 1610 for id_ in self.repo_rhodecode_ui_ids:
1599 1611 setting = RepoRhodeCodeUi.get(id_)
1600 1612 Session().delete(setting)
1601 1613
1602 1614 for id_ in self.repo_rhodecode_setting_ids:
1603 1615 setting = RepoRhodeCodeSetting.get(id_)
1604 1616 Session().delete(setting)
1605 1617
1606 1618 Session().commit()
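
# Illustrative usage sketch (an addition): creating a temporary ui entry via
# ``settings_util``. The section and value are made-up example strings; with
# ``cleanup=True`` (the default) the entry is removed by the fixture finalizer.
def test_settings_util_usage_sketch(settings_util):
    setting = settings_util.create_rhodecode_ui(
        'example_section', 'example_value')
    assert setting.ui_active
    assert setting.ui_id in settings_util.rhodecode_ui_ids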
1607 1619
1608 1620
1609 1621 @pytest.fixture
1610 1622 def no_notifications(request):
1611 1623 notification_patcher = mock.patch(
1612 1624 'rhodecode.model.notification.NotificationModel.create')
1613 1625 notification_patcher.start()
1614 1626 request.addfinalizer(notification_patcher.stop)
1615 1627
1616 1628
1617 1629 @pytest.fixture
1618 1630 def silence_action_logger(request):
1619 1631 notification_patcher = mock.patch(
1620 1632 'rhodecode.lib.utils.action_logger')
1621 1633 notification_patcher.start()
1622 1634 request.addfinalizer(notification_patcher.stop)
1623 1635
1624 1636
1625 1637 @pytest.fixture(scope='session')
1626 1638 def repeat(request):
1627 1639 """
1628 1640 The number of repetitions for a test is based on this fixture.
1629 1641 
1630 1642 Slower calls may divide it by 10 or 100. The value is chosen so that the
1631 1643 default test suite does not become too slow.
1632 1644 """
1633 1645 return request.config.getoption('--repeat')
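
# Illustrative sketch (an addition): consuming the ``repeat`` fixture. A slower
# call divides the configured repetition count down, as the docstring above
# describes; the arithmetic here is only an example.
def test_repeat_usage_sketch(repeat):
    slow_repetitions = max(1, repeat // 100)
    assert slow_repetitions <= max(1, repeat)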
1634 1646
1635 1647
1636 1648 @pytest.fixture
1637 1649 def rhodecode_fixtures():
1638 1650 return Fixture()
1639 1651
1640 1652
1641 1653 @pytest.fixture
1642 1654 def request_stub():
1643 1655 """
1644 1656 Stub request object.
1645 1657 """
1646 1658 request = pyramid.testing.DummyRequest()
1647 1659 request.scheme = 'https'
1648 1660 return request
1649 1661
1650 1662
1651 1663 @pytest.fixture
1652 1664 def config_stub(request, request_stub):
1653 1665 """
1654 1666 Set up pyramid.testing and return the Configurator.
1655 1667 """
1656 1668 config = pyramid.testing.setUp(request=request_stub)
1657 1669
1658 1670 @request.addfinalizer
1659 1671 def cleanup():
1660 1672 pyramid.testing.tearDown()
1661 1673
1662 1674 return config
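
# Illustrative sketch (an addition): using ``config_stub`` and ``request_stub``
# together. ``add_route`` is a regular ``pyramid.config.Configurator`` method;
# the route name and pattern are arbitrary example values.
def test_pyramid_stubs_sketch(config_stub, request_stub):
    config_stub.add_route('example_home', '/example')
    assert request_stub.scheme == 'https'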
1663 1675
1664 1676
1665 1677 @pytest.fixture
1666 1678 def StubIntegrationType():
1667 1679 class _StubIntegrationType(IntegrationTypeBase):
1668 1680 """ Test integration type class """
1669 1681
1670 1682 key = 'test'
1671 1683 display_name = 'Test integration type'
1672 1684 description = 'A test integration type for testing'
1673 1685 icon = 'test_icon_html_image'
1674 1686
1675 1687 def __init__(self, settings):
1676 1688 super(_StubIntegrationType, self).__init__(settings)
1677 1689 self.sent_events = [] # for testing
1678 1690
1679 1691 def send_event(self, event):
1680 1692 self.sent_events.append(event)
1681 1693
1682 1694 def settings_schema(self):
1683 1695 class SettingsSchema(colander.Schema):
1684 1696 test_string_field = colander.SchemaNode(
1685 1697 colander.String(),
1686 1698 missing=colander.required,
1687 1699 title='test string field',
1688 1700 )
1689 1701 test_int_field = colander.SchemaNode(
1690 1702 colander.Int(),
1691 1703 title='some integer setting',
1692 1704 )
1693 1705 return SettingsSchema()
1694 1706
1695 1707
1696 1708 integration_type_registry.register_integration_type(_StubIntegrationType)
1697 1709 return _StubIntegrationType
1698 1710
1699 1711 @pytest.fixture
1700 1712 def stub_integration_settings():
1701 1713 return {
1702 1714 'test_string_field': 'some data',
1703 1715 'test_int_field': 100,
1704 1716 }
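
# Illustrative sketch (an addition): the stub settings above validate against
# the schema declared by ``_StubIntegrationType.settings_schema()``, using only
# colander's standard ``deserialize``.
def test_stub_settings_match_schema_sketch(
        StubIntegrationType, stub_integration_settings):
    integration = StubIntegrationType(settings=stub_integration_settings)
    schema = integration.settings_schema()
    deserialized = schema.deserialize(stub_integration_settings)
    assert deserialized['test_int_field'] == 100
    assert integration.sent_events == []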
1705 1717
1706 1718
1707 1719 @pytest.fixture
1708 1720 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1709 1721 stub_integration_settings):
1710 1722 integration = IntegrationModel().create(
1711 1723 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1712 1724 name='test repo integration',
1713 1725 repo=repo_stub, repo_group=None, child_repos_only=None)
1714 1726
1715 1727 @request.addfinalizer
1716 1728 def cleanup():
1717 1729 IntegrationModel().delete(integration)
1718 1730
1719 1731 return integration
1720 1732
1721 1733
1722 1734 @pytest.fixture
1723 1735 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1724 1736 stub_integration_settings):
1725 1737 integration = IntegrationModel().create(
1726 1738 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1727 1739 name='test repogroup integration',
1728 1740 repo=None, repo_group=test_repo_group, child_repos_only=True)
1729 1741
1730 1742 @request.addfinalizer
1731 1743 def cleanup():
1732 1744 IntegrationModel().delete(integration)
1733 1745
1734 1746 return integration
1735 1747
1736 1748
1737 1749 @pytest.fixture
1738 1750 def repogroup_recursive_integration_stub(request, test_repo_group,
1739 1751 StubIntegrationType, stub_integration_settings):
1740 1752 integration = IntegrationModel().create(
1741 1753 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1742 1754 name='test recursive repogroup integration',
1743 1755 repo=None, repo_group=test_repo_group, child_repos_only=False)
1744 1756
1745 1757 @request.addfinalizer
1746 1758 def cleanup():
1747 1759 IntegrationModel().delete(integration)
1748 1760
1749 1761 return integration
1750 1762
1751 1763
1752 1764 @pytest.fixture
1753 1765 def global_integration_stub(request, StubIntegrationType,
1754 1766 stub_integration_settings):
1755 1767 integration = IntegrationModel().create(
1756 1768 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1757 1769 name='test global integration',
1758 1770 repo=None, repo_group=None, child_repos_only=None)
1759 1771
1760 1772 @request.addfinalizer
1761 1773 def cleanup():
1762 1774 IntegrationModel().delete(integration)
1763 1775
1764 1776 return integration
1765 1777
1766 1778
1767 1779 @pytest.fixture
1768 1780 def root_repos_integration_stub(request, StubIntegrationType,
1769 1781 stub_integration_settings):
1770 1782 integration = IntegrationModel().create(
1771 1783 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1772 1784 name='test root repos integration',
1773 1785 repo=None, repo_group=None, child_repos_only=True)
1774 1786
1775 1787 @request.addfinalizer
1776 1788 def cleanup():
1777 1789 IntegrationModel().delete(integration)
1778 1790
1779 1791 return integration
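
# Summary note (an addition) of the integration stub fixtures above and the
# scope each one passes to ``IntegrationModel().create``:
#
#   fixture                                 repo       repo_group       child_repos_only
#   repo_integration_stub                   repo_stub  None             None
#   repogroup_integration_stub              None       test_repo_group  True
#   repogroup_recursive_integration_stub    None       test_repo_group  False
#   global_integration_stub                 None       None             None
#   root_repos_integration_stub             None       None             True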