pytest: moved baseapp to plugin
marcink
r2371:d1e6574b default
@@ -1,1836 +1,1851 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33
34 34 import mock
35 35 import pyramid.testing
36 36 import pytest
37 37 import colander
38 38 import requests
39 import pyramid.paster
39 40
40 41 import rhodecode
41 42 from rhodecode.lib.utils2 import AttributeDict
42 43 from rhodecode.model.changeset_status import ChangesetStatusModel
43 44 from rhodecode.model.comment import CommentsModel
44 45 from rhodecode.model.db import (
45 46 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
46 47 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
47 48 from rhodecode.model.meta import Session
48 49 from rhodecode.model.pull_request import PullRequestModel
49 50 from rhodecode.model.repo import RepoModel
50 51 from rhodecode.model.repo_group import RepoGroupModel
51 52 from rhodecode.model.user import UserModel
52 53 from rhodecode.model.settings import VcsSettingsModel
53 54 from rhodecode.model.user_group import UserGroupModel
54 55 from rhodecode.model.integration import IntegrationModel
55 56 from rhodecode.integrations import integration_type_registry
56 57 from rhodecode.integrations.types.base import IntegrationTypeBase
57 58 from rhodecode.lib.utils import repo2db_mapper
58 59 from rhodecode.lib.vcs import create_vcsserver_proxy
59 60 from rhodecode.lib.vcs.backends import get_backend
60 61 from rhodecode.lib.vcs.nodes import FileNode
61 62 from rhodecode.tests import (
62 63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 65 TEST_USER_REGULAR_PASS)
65 66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
66 67 from rhodecode.tests.fixture import Fixture
67 68
68 69
69 70 def _split_comma(value):
70 71 return value.split(',')
71 72
72 73
73 74 def pytest_addoption(parser):
74 75 parser.addoption(
75 76 '--keep-tmp-path', action='store_true',
76 77 help="Keep the test temporary directories")
77 78 parser.addoption(
78 79 '--backends', action='store', type=_split_comma,
79 80 default=['git', 'hg', 'svn'],
80 81 help="Select which backends to test for backend specific tests.")
81 82 parser.addoption(
82 83 '--dbs', action='store', type=_split_comma,
83 84 default=['sqlite'],
84 85 help="Select which database to test for database specific tests. "
85 86 "Possible options are sqlite,postgres,mysql")
86 87 parser.addoption(
87 88 '--appenlight', '--ae', action='store_true',
88 89 help="Track statistics in appenlight.")
89 90 parser.addoption(
90 91 '--appenlight-api-key', '--ae-key',
91 92 help="API key for Appenlight.")
92 93 parser.addoption(
93 94 '--appenlight-url', '--ae-url',
94 95 default="https://ae.rhodecode.com",
95 96 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
96 97 parser.addoption(
97 98 '--sqlite-connection-string', action='store',
98 99 default='', help="Connection string for the dbs tests with SQLite")
99 100 parser.addoption(
100 101 '--postgres-connection-string', action='store',
101 102 default='', help="Connection string for the dbs tests with Postgres")
102 103 parser.addoption(
103 104 '--mysql-connection-string', action='store',
104 105 default='', help="Connection string for the dbs tests with MySQL")
105 106 parser.addoption(
106 107 '--repeat', type=int, default=100,
107 108 help="Number of repetitions in performance tests.")
108 109
109 110
110 111 def pytest_configure(config):
111 112 from rhodecode.config import patches
112 113
113 114
114 115 def pytest_collection_modifyitems(session, config, items):
115 116 # Skip items marked as not a test (nose-style ``__test__ = False``), used during the transition from nose to pytest
116 117 remaining = [
117 118 i for i in items if getattr(i.obj, '__test__', True)]
118 119 items[:] = remaining
119 120
120 121
121 122 def pytest_generate_tests(metafunc):
122 123 # Support test generation based on the --backends parameter
123 124 if 'backend_alias' in metafunc.fixturenames:
124 125 backends = get_backends_from_metafunc(metafunc)
125 126 scope = None
126 127 if not backends:
127 128 pytest.skip("Not enabled for any of selected backends")
128 129 metafunc.parametrize('backend_alias', backends, scope=scope)
129 130 elif hasattr(metafunc.function, 'backends'):
130 131 backends = get_backends_from_metafunc(metafunc)
131 132 if not backends:
132 133 pytest.skip("Not enabled for any of selected backends")
133 134
134 135
135 136 def get_backends_from_metafunc(metafunc):
136 137 requested_backends = set(metafunc.config.getoption('--backends'))
137 138 if hasattr(metafunc.function, 'backends'):
138 139 # Backends supported by this test function, created from
139 140 # pytest.mark.backends
140 141 backends = metafunc.function.backends.args
141 142 elif hasattr(metafunc.cls, 'backend_alias'):
142 143 # Support class attribute "backend_alias", this is mainly
143 144 # for legacy reasons for tests not yet using pytest.mark.backends
144 145 backends = [metafunc.cls.backend_alias]
145 146 else:
146 147 backends = metafunc.config.getoption('--backends')
147 148 return requested_backends.intersection(backends)
148 149
149 150
150 151 @pytest.fixture(scope='session', autouse=True)
151 152 def activate_example_rcextensions(request):
152 153 """
153 154 Patch in an example rcextensions module which verifies passed in kwargs.
154 155 """
155 156 from rhodecode.tests.other import example_rcextensions
156 157
157 158 old_extensions = rhodecode.EXTENSIONS
158 159 rhodecode.EXTENSIONS = example_rcextensions
159 160
160 161 @request.addfinalizer
161 162 def cleanup():
162 163 rhodecode.EXTENSIONS = old_extensions
163 164
164 165
165 166 @pytest.fixture
166 167 def capture_rcextensions():
167 168 """
168 169 Returns the recorded calls to entry points in rcextensions.
169 170 """
170 171 calls = rhodecode.EXTENSIONS.calls
171 172 calls.clear()
172 173 # Note: At this moment it is still an empty dict, but it will be
173 174 # filled during the test run, and since it is a reference, this
174 175 # is enough to make it work.
175 176 return calls
176 177
177 178
178 179 @pytest.fixture(scope='session')
179 180 def http_environ_session():
180 181 """
181 182 Allows using "http_environ" in session scope.
182 183 """
183 184 return http_environ(
184 185 http_host_stub=http_host_stub())
185 186
186 187
187 188 @pytest.fixture
188 189 def http_host_stub():
189 190 """
190 191 Value of HTTP_HOST in the test run.
191 192 """
192 193 return 'example.com:80'
193 194
194 195
195 196 @pytest.fixture
196 197 def http_host_only_stub():
197 198 """
198 199 Value of HTTP_HOST, without the port, in the test run.
199 200 """
200 201 return http_host_stub().split(':')[0]
201 202
202 203
203 204 @pytest.fixture
204 205 def http_environ(http_host_stub):
205 206 """
206 207 HTTP extra environ keys.
207 208
208 209 Used by the test application and also for setting up the pylons
209 210 environment. In the case of the fixture "app" it should be possible
210 211 to override this for a specific test case.
211 212 """
212 213 return {
213 214 'SERVER_NAME': http_host_only_stub(),
214 215 'SERVER_PORT': http_host_stub.split(':')[1],
215 216 'HTTP_HOST': http_host_stub,
216 217 'HTTP_USER_AGENT': 'rc-test-agent',
217 218 'REQUEST_METHOD': 'GET'
218 219 }
219 220
220 221
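# Usage sketch (assumption: placed in a test module, not in this plugin): per
# the docstring above, the HTTP environ can be overridden for specific tests,
# e.g. by redefining ``http_host_stub`` locally; the host value is illustrative.
@pytest.fixture
def http_host_stub():
    return 'override.example.com:8080'


def test_http_environ_uses_override(http_environ):
    assert http_environ['HTTP_HOST'] == 'override.example.com:8080'
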
222 @pytest.fixture(scope='session')
223 def baseapp(ini_config, vcsserver, http_environ_session):
224 from rhodecode.lib.pyramid_utils import get_app_config
225 from rhodecode.config.middleware import make_pyramid_app
226
227 print("Using the RhodeCode configuration:{}".format(ini_config))
228 pyramid.paster.setup_logging(ini_config)
229
230 settings = get_app_config(ini_config)
231 app = make_pyramid_app({'__file__': ini_config}, **settings)
232
233 return app
234
235
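# Usage sketch for the new session-scoped ``baseapp`` fixture introduced in
# this changeset; the setting name checked below is taken from how settings
# are read elsewhere in this module (e.g. ``vcs.server``).
def test_baseapp_settings_example(baseapp):
    settings = baseapp.config.get_settings()
    assert 'vcs.server' in settings
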
221 236 @pytest.fixture(scope='function')
222 237 def app(request, config_stub, baseapp, http_environ):
223 238 app = CustomTestApp(
224 239 baseapp,
225 240 extra_environ=http_environ)
226 241 if request.cls:
227 242 request.cls.app = app
228 243 return app
229 244
230 245
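# Usage sketch (assumption: CustomTestApp exposes the usual WebTest
# ``get``/``post`` API; the URL is illustrative).
def test_app_fixture_example(app):
    response = app.get('/', status='*')
    # Any response proves the wrapped baseapp handled the request.
    assert response.status_int
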
231 246 @pytest.fixture(scope='session')
232 247 def app_settings(baseapp, ini_config):
233 248 """
234 249 Settings dictionary used to create the app.
235 250
236 251 Parses the ini file and passes the result through the sanitize and apply
237 252 defaults mechanism in `rhodecode.config.middleware`.
238 253 """
239 254 return baseapp.config.get_settings()
240 255
241 256
242 257 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
243 258
244 259
245 260 def _autologin_user(app, *args):
246 261 session = login_user_session(app, *args)
247 262 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
248 263 return LoginData(csrf_token, session['rhodecode_user'])
249 264
250 265
251 266 @pytest.fixture
252 267 def autologin_user(app):
253 268 """
254 269 Utility fixture which makes sure that the admin user is logged in
255 270 """
256 271 return _autologin_user(app)
257 272
258 273
259 274 @pytest.fixture
260 275 def autologin_regular_user(app):
261 276 """
262 277 Utility fixture which makes sure that the regular user is logged in
263 278 """
264 279 return _autologin_user(
265 280 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
266 281
267 282
268 283 @pytest.fixture(scope='function')
269 284 def csrf_token(request, autologin_user):
270 285 return autologin_user.csrf_token
271 286
272 287
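# Usage sketch: functional tests that submit forms send the token provided by
# the ``csrf_token`` fixture along with the POST data. The URL and the form
# field name ``csrf_token`` are assumptions used for illustration only.
def test_post_with_csrf_example(app, csrf_token):
    app.post('/_admin/my_account', params={'csrf_token': csrf_token}, status='*')
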
273 288 @pytest.fixture(scope='function')
274 289 def xhr_header(request):
275 290 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
276 291
277 292
278 293 @pytest.fixture
279 294 def real_crypto_backend(monkeypatch):
280 295 """
281 296 Switch the production crypto backend on for this test.
282 297
283 298 During the test run the crypto backend is replaced with a faster
284 299 implementation based on the MD5 algorithm.
285 300 """
286 301 monkeypatch.setattr(rhodecode, 'is_test', False)
287 302
288 303
289 304 @pytest.fixture(scope='class')
290 305 def index_location(request, baseapp):
291 306 index_location = baseapp.config.get_settings()['search.location']
292 307 if request.cls:
293 308 request.cls.index_location = index_location
294 309 return index_location
295 310
296 311
297 312 @pytest.fixture(scope='session', autouse=True)
298 313 def tests_tmp_path(request):
299 314 """
300 315 Create temporary directory to be used during the test session.
301 316 """
302 317 if not os.path.exists(TESTS_TMP_PATH):
303 318 os.makedirs(TESTS_TMP_PATH)
304 319
305 320 if not request.config.getoption('--keep-tmp-path'):
306 321 @request.addfinalizer
307 322 def remove_tmp_path():
308 323 shutil.rmtree(TESTS_TMP_PATH)
309 324
310 325 return TESTS_TMP_PATH
311 326
312 327
313 328 @pytest.fixture
314 329 def test_repo_group(request):
315 330 """
316 331 Create a temporary repository group, and destroy it after
317 332 usage automatically
318 333 """
319 334 fixture = Fixture()
320 335 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
321 336 repo_group = fixture.create_repo_group(repogroupid)
322 337
323 338 def _cleanup():
324 339 fixture.destroy_repo_group(repogroupid)
325 340
326 341 request.addfinalizer(_cleanup)
327 342 return repo_group
328 343
329 344
330 345 @pytest.fixture
331 346 def test_user_group(request):
332 347 """
333 348 Create a temporary user group, and destroy it after
334 349 usage automatically
335 350 """
336 351 fixture = Fixture()
337 352 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
338 353 user_group = fixture.create_user_group(usergroupid)
339 354
340 355 def _cleanup():
341 356 fixture.destroy_user_group(user_group)
342 357
343 358 request.addfinalizer(_cleanup)
344 359 return user_group
345 360
346 361
347 362 @pytest.fixture(scope='session')
348 363 def test_repo(request):
349 364 container = TestRepoContainer()
350 365 request.addfinalizer(container._cleanup)
351 366 return container
352 367
353 368
354 369 class TestRepoContainer(object):
355 370 """
356 371 Container for test repositories which are used read-only.
357 372
358 373 Repositories will be created on demand and re-used during the lifetime
359 374 of this object.
360 375
361 376 Usage to get the svn test repository "minimal"::
362 377
363 378 test_repo = TestRepoContainer()
364 379 repo = test_repo('minimal', 'svn')
365 380
366 381 """
367 382
368 383 dump_extractors = {
369 384 'git': utils.extract_git_repo_from_dump,
370 385 'hg': utils.extract_hg_repo_from_dump,
371 386 'svn': utils.extract_svn_repo_from_dump,
372 387 }
373 388
374 389 def __init__(self):
375 390 self._cleanup_repos = []
376 391 self._fixture = Fixture()
377 392 self._repos = {}
378 393
379 394 def __call__(self, dump_name, backend_alias, config=None):
380 395 key = (dump_name, backend_alias)
381 396 if key not in self._repos:
382 397 repo = self._create_repo(dump_name, backend_alias, config)
383 398 self._repos[key] = repo.repo_id
384 399 return Repository.get(self._repos[key])
385 400
386 401 def _create_repo(self, dump_name, backend_alias, config):
387 402 repo_name = '%s-%s' % (backend_alias, dump_name)
388 403 backend_class = get_backend(backend_alias)
389 404 dump_extractor = self.dump_extractors[backend_alias]
390 405 repo_path = dump_extractor(dump_name, repo_name)
391 406
392 407 vcs_repo = backend_class(repo_path, config=config)
393 408 repo2db_mapper({repo_name: vcs_repo})
394 409
395 410 repo = RepoModel().get_by_repo_name(repo_name)
396 411 self._cleanup_repos.append(repo_name)
397 412 return repo
398 413
399 414 def _cleanup(self):
400 415 for repo_name in reversed(self._cleanup_repos):
401 416 self._fixture.destroy_repo(repo_name)
402 417
403 418
404 419 @pytest.fixture
405 420 def backend(request, backend_alias, baseapp, test_repo):
406 421 """
407 422 Parametrized fixture which represents a single backend implementation.
408 423
409 424 It respects the option `--backends` to focus the test run on specific
410 425 backend implementations.
411 426
412 427 It also supports `pytest.mark.xfail_backends` to mark tests as failing
413 428 for specific backends. This is intended as a utility for incremental
414 429 development of a new backend implementation.
415 430 """
416 431 if backend_alias not in request.config.getoption('--backends'):
417 432 pytest.skip("Backend %s not selected." % (backend_alias, ))
418 433
419 434 utils.check_xfail_backends(request.node, backend_alias)
420 435 utils.check_skip_backends(request.node, backend_alias)
421 436
422 437 repo_name = 'vcs_test_%s' % (backend_alias, )
423 438 backend = Backend(
424 439 alias=backend_alias,
425 440 repo_name=repo_name,
426 441 test_name=request.node.name,
427 442 test_repo_container=test_repo)
428 443 request.addfinalizer(backend.cleanup)
429 444 return backend
430 445
431 446
432 447 @pytest.fixture
433 448 def backend_git(request, baseapp, test_repo):
434 449 return backend(request, 'git', baseapp, test_repo)
435 450
436 451
437 452 @pytest.fixture
438 453 def backend_hg(request, baseapp, test_repo):
439 454 return backend(request, 'hg', baseapp, test_repo)
440 455
441 456
442 457 @pytest.fixture
443 458 def backend_svn(request, baseapp, test_repo):
444 459 return backend(request, 'svn', baseapp, test_repo)
445 460
446 461
447 462 @pytest.fixture
448 463 def backend_random(backend_git):
449 464 """
450 465 Use this to express that your tests need "a backend".
451 466
452 467 A few of our tests need a backend, so that we can run the code. This
453 468 fixture is intended to be used for such cases. It will pick one of the
454 469 backends and run the tests.
455 470
456 471 The fixture `backend` would run the test multiple times for each
457 472 available backend which is a pure waste of time if the test is
458 473 independent of the backend type.
459 474 """
460 475 # TODO: johbo: Change this to pick a random backend
461 476 return backend_git
462 477
463 478
464 479 @pytest.fixture
465 480 def backend_stub(backend_git):
466 481 """
467 482 Use this to express that your tests need a backend stub
468 483
469 484 TODO: mikhail: Implement a real stub logic instead of returning
470 485 a git backend
471 486 """
472 487 return backend_git
473 488
474 489
475 490 @pytest.fixture
476 491 def repo_stub(backend_stub):
477 492 """
478 493 Use this to express that your tests need a repository stub
479 494 """
480 495 return backend_stub.create_repo()
481 496
482 497
483 498 class Backend(object):
484 499 """
485 500 Represents the test configuration for one supported backend
486 501
487 502 Provides easy access to different test repositories based on
488 503 `__getitem__`. Such repositories will only be created once per test
489 504 session.
490 505 """
491 506
492 507 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
493 508 _master_repo = None
494 509 _commit_ids = {}
495 510
496 511 def __init__(self, alias, repo_name, test_name, test_repo_container):
497 512 self.alias = alias
498 513 self.repo_name = repo_name
499 514 self._cleanup_repos = []
500 515 self._test_name = test_name
501 516 self._test_repo_container = test_repo_container
502 517 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
503 518 # Fixture will survive in the end.
504 519 self._fixture = Fixture()
505 520
506 521 def __getitem__(self, key):
507 522 return self._test_repo_container(key, self.alias)
508 523
509 524 def create_test_repo(self, key, config=None):
510 525 return self._test_repo_container(key, self.alias, config)
511 526
512 527 @property
513 528 def repo(self):
514 529 """
515 530 Returns the "current" repository. This is the vcs_test repo or the
516 531 last repo which has been created with `create_repo`.
517 532 """
518 533 from rhodecode.model.db import Repository
519 534 return Repository.get_by_repo_name(self.repo_name)
520 535
521 536 @property
522 537 def default_branch_name(self):
523 538 VcsRepository = get_backend(self.alias)
524 539 return VcsRepository.DEFAULT_BRANCH_NAME
525 540
526 541 @property
527 542 def default_head_id(self):
528 543 """
529 544 Returns the default head id of the underlying backend.
530 545
531 546 This will be the default branch name in case the backend does have a
532 547 default branch. In the other cases it will point to a valid head
533 548 which can serve as the base to create a new commit on top of it.
534 549 """
535 550 vcsrepo = self.repo.scm_instance()
536 551 head_id = (
537 552 vcsrepo.DEFAULT_BRANCH_NAME or
538 553 vcsrepo.commit_ids[-1])
539 554 return head_id
540 555
541 556 @property
542 557 def commit_ids(self):
543 558 """
544 559 Returns the map of commit message to raw_id for the last created repository
545 560 """
546 561 return self._commit_ids
547 562
548 563 def create_master_repo(self, commits):
549 564 """
550 565 Create a repository and remember it as a template.
551 566
552 567 This allows easily creating derived repositories to construct
553 568 more complex scenarios for diff, compare and pull requests.
554 569
555 570 Returns a commit map which maps from commit message to raw_id.
556 571 """
557 572 self._master_repo = self.create_repo(commits=commits)
558 573 return self._commit_ids
559 574
560 575 def create_repo(
561 576 self, commits=None, number_of_commits=0, heads=None,
562 577 name_suffix=u'', **kwargs):
563 578 """
564 579 Create a repository and record it for later cleanup.
565 580
566 581 :param commits: Optional. A sequence of dict instances.
567 582 Will add a commit per entry to the new repository.
568 583 :param number_of_commits: Optional. If set to a number, this number of
569 584 commits will be added to the new repository.
570 585 :param heads: Optional. Can be set to a sequence of commit
571 586 names which shall be pulled in from the master repository.
572 587
573 588 """
574 589 self.repo_name = self._next_repo_name() + name_suffix
575 590 repo = self._fixture.create_repo(
576 591 self.repo_name, repo_type=self.alias, **kwargs)
577 592 self._cleanup_repos.append(repo.repo_name)
578 593
579 594 commits = commits or [
580 595 {'message': 'Commit %s of %s' % (x, self.repo_name)}
581 596 for x in xrange(number_of_commits)]
582 597 self._add_commits_to_repo(repo.scm_instance(), commits)
583 598 if heads:
584 599 self.pull_heads(repo, heads)
585 600
586 601 return repo
587 602
588 603 def pull_heads(self, repo, heads):
589 604 """
590 605 Make sure that repo contains all commits mentioned in `heads`
591 606 """
592 607 vcsmaster = self._master_repo.scm_instance()
593 608 vcsrepo = repo.scm_instance()
594 609 vcsrepo.config.clear_section('hooks')
595 610 commit_ids = [self._commit_ids[h] for h in heads]
596 611 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
597 612
598 613 def create_fork(self):
599 614 repo_to_fork = self.repo_name
600 615 self.repo_name = self._next_repo_name()
601 616 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
602 617 self._cleanup_repos.append(self.repo_name)
603 618 return repo
604 619
605 620 def new_repo_name(self, suffix=u''):
606 621 self.repo_name = self._next_repo_name() + suffix
607 622 self._cleanup_repos.append(self.repo_name)
608 623 return self.repo_name
609 624
610 625 def _next_repo_name(self):
611 626 return u"%s_%s" % (
612 627 self.invalid_repo_name.sub(u'_', self._test_name),
613 628 len(self._cleanup_repos))
614 629
615 630 def ensure_file(self, filename, content='Test content\n'):
616 631 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
617 632 commits = [
618 633 {'added': [
619 634 FileNode(filename, content=content),
620 635 ]},
621 636 ]
622 637 self._add_commits_to_repo(self.repo.scm_instance(), commits)
623 638
624 639 def enable_downloads(self):
625 640 repo = self.repo
626 641 repo.enable_downloads = True
627 642 Session().add(repo)
628 643 Session().commit()
629 644
630 645 def cleanup(self):
631 646 for repo_name in reversed(self._cleanup_repos):
632 647 self._fixture.destroy_repo(repo_name)
633 648
634 649 def _add_commits_to_repo(self, repo, commits):
635 650 commit_ids = _add_commits_to_repo(repo, commits)
636 651 if not commit_ids:
637 652 return
638 653 self._commit_ids = commit_ids
639 654
640 655 # Creating refs for Git to allow fetching them from remote repository
641 656 if self.alias == 'git':
642 657 refs = {}
643 658 for message in self._commit_ids:
644 659 # TODO: mikhail: do more special chars replacements
645 660 ref_name = 'refs/test-refs/{}'.format(
646 661 message.replace(' ', ''))
647 662 refs[ref_name] = self._commit_ids[message]
648 663 self._create_refs(repo, refs)
649 664
650 665 def _create_refs(self, repo, refs):
651 666 for ref_name in refs:
652 667 repo.set_refs(ref_name, refs[ref_name])
653 668
654 669
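# Usage sketch for the ``backend`` fixture and the Backend helper above; the
# assertions follow directly from ``create_repo`` and ``commit_ids``.
def test_backend_create_repo_example(backend):
    repo = backend.create_repo(number_of_commits=2)
    assert repo.repo_name == backend.repo_name
    # ``commit_ids`` maps commit messages to raw commit ids.
    assert len(backend.commit_ids) == 2
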
655 670 @pytest.fixture
656 671 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
657 672 """
658 673 Parametrized fixture which represents a single vcs backend implementation.
659 674
660 675 See the fixture `backend` for more details. This one implements the same
661 676 concept, but on vcs level. So it does not provide model instances etc.
662 677
663 678 Parameters are generated dynamically, see :func:`pytest_generate_tests`
664 679 for how this works.
665 680 """
666 681 if backend_alias not in request.config.getoption('--backends'):
667 682 pytest.skip("Backend %s not selected." % (backend_alias, ))
668 683
669 684 utils.check_xfail_backends(request.node, backend_alias)
670 685 utils.check_skip_backends(request.node, backend_alias)
671 686
672 687 repo_name = 'vcs_test_%s' % (backend_alias, )
673 688 repo_path = os.path.join(tests_tmp_path, repo_name)
674 689 backend = VcsBackend(
675 690 alias=backend_alias,
676 691 repo_path=repo_path,
677 692 test_name=request.node.name,
678 693 test_repo_container=test_repo)
679 694 request.addfinalizer(backend.cleanup)
680 695 return backend
681 696
682 697
683 698 @pytest.fixture
684 699 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
685 700 return vcsbackend(request, 'git', tests_tmp_path, baseapp, test_repo)
686 701
687 702
688 703 @pytest.fixture
689 704 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
690 705 return vcsbackend(request, 'hg', tests_tmp_path, baseapp, test_repo)
691 706
692 707
693 708 @pytest.fixture
694 709 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
695 710 return vcsbackend(request, 'svn', tests_tmp_path, baseapp, test_repo)
696 711
697 712
698 713 @pytest.fixture
699 714 def vcsbackend_random(vcsbackend_git):
700 715 """
701 716 Use this to express that your tests need "a vcsbackend".
702 717
703 718 The fixture `vcsbackend` would run the test multiple times for each
704 719 available vcs backend which is a pure waste of time if the test is
705 720 independent of the vcs backend type.
706 721 """
707 722 # TODO: johbo: Change this to pick a random backend
708 723 return vcsbackend_git
709 724
710 725
711 726 @pytest.fixture
712 727 def vcsbackend_stub(vcsbackend_git):
713 728 """
714 729 Use this to express that your test just needs a stub of a vcsbackend.
715 730
716 731 Plan is to eventually implement an in-memory stub to speed tests up.
717 732 """
718 733 return vcsbackend_git
719 734
720 735
721 736 class VcsBackend(object):
722 737 """
723 738 Represents the test configuration for one supported vcs backend.
724 739 """
725 740
726 741 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
727 742
728 743 def __init__(self, alias, repo_path, test_name, test_repo_container):
729 744 self.alias = alias
730 745 self._repo_path = repo_path
731 746 self._cleanup_repos = []
732 747 self._test_name = test_name
733 748 self._test_repo_container = test_repo_container
734 749
735 750 def __getitem__(self, key):
736 751 return self._test_repo_container(key, self.alias).scm_instance()
737 752
738 753 @property
739 754 def repo(self):
740 755 """
741 756 Returns the "current" repository. This is the vcs_test repo or the last
742 757 repo which has been created.
743 758 """
744 759 Repository = get_backend(self.alias)
745 760 return Repository(self._repo_path)
746 761
747 762 @property
748 763 def backend(self):
749 764 """
750 765 Returns the backend implementation class.
751 766 """
752 767 return get_backend(self.alias)
753 768
754 769 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
755 770 repo_name = self._next_repo_name()
756 771 self._repo_path = get_new_dir(repo_name)
757 772 repo_class = get_backend(self.alias)
758 773 src_url = None
759 774 if _clone_repo:
760 775 src_url = _clone_repo.path
761 776 repo = repo_class(self._repo_path, create=True, src_url=src_url)
762 777 self._cleanup_repos.append(repo)
763 778
764 779 commits = commits or [
765 780 {'message': 'Commit %s of %s' % (x, repo_name)}
766 781 for x in xrange(number_of_commits)]
767 782 _add_commits_to_repo(repo, commits)
768 783 return repo
769 784
770 785 def clone_repo(self, repo):
771 786 return self.create_repo(_clone_repo=repo)
772 787
773 788 def cleanup(self):
774 789 for repo in self._cleanup_repos:
775 790 shutil.rmtree(repo.path)
776 791
777 792 def new_repo_path(self):
778 793 repo_name = self._next_repo_name()
779 794 self._repo_path = get_new_dir(repo_name)
780 795 return self._repo_path
781 796
782 797 def _next_repo_name(self):
783 798 return "%s_%s" % (
784 799 self.invalid_repo_name.sub('_', self._test_name),
785 800 len(self._cleanup_repos))
786 801
787 802 def add_file(self, repo, filename, content='Test content\n'):
788 803 imc = repo.in_memory_commit
789 804 imc.add(FileNode(filename, content=content))
790 805 imc.commit(
791 806 message=u'Automatic commit from vcsbackend fixture',
792 807 author=u'Automatic')
793 808
794 809 def ensure_file(self, filename, content='Test content\n'):
795 810 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
796 811 self.add_file(self.repo, filename, content)
797 812
798 813
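# Usage sketch for the ``vcsbackend`` fixture; it works on the plain vcs
# layer, so the returned object is a vcs repository, not a db model.
def test_vcsbackend_create_repo_example(vcsbackend):
    vcs_repo = vcsbackend.create_repo(number_of_commits=1)
    assert len(vcs_repo.commit_ids) >= 1
    vcsbackend.add_file(vcs_repo, 'README.rst', content='Example content\n')
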
799 814 def _add_commits_to_repo(vcs_repo, commits):
800 815 commit_ids = {}
801 816 if not commits:
802 817 return commit_ids
803 818
804 819 imc = vcs_repo.in_memory_commit
805 820 commit = None
806 821
807 822 for idx, commit in enumerate(commits):
808 823 message = unicode(commit.get('message', 'Commit %s' % idx))
809 824
810 825 for node in commit.get('added', []):
811 826 imc.add(FileNode(node.path, content=node.content))
812 827 for node in commit.get('changed', []):
813 828 imc.change(FileNode(node.path, content=node.content))
814 829 for node in commit.get('removed', []):
815 830 imc.remove(FileNode(node.path))
816 831
817 832 parents = [
818 833 vcs_repo.get_commit(commit_id=commit_ids[p])
819 834 for p in commit.get('parents', [])]
820 835
821 836 operations = ('added', 'changed', 'removed')
822 837 if not any((commit.get(o) for o in operations)):
823 838 imc.add(FileNode('file_%s' % idx, content=message))
824 839
825 840 commit = imc.commit(
826 841 message=message,
827 842 author=unicode(commit.get('author', 'Automatic')),
828 843 date=commit.get('date'),
829 844 branch=commit.get('branch'),
830 845 parents=parents)
831 846
832 847 commit_ids[commit.message] = commit.raw_id
833 848
834 849 return commit_ids
835 850
836 851
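# Sketch of the commit-description format consumed by ``_add_commits_to_repo``
# (and therefore by Backend.create_repo / VcsBackend.create_repo); all keys
# shown are optional and correspond to the lookups in the function above.
EXAMPLE_COMMIT_DEFINITIONS = [
    {'message': 'Add readme',
     'added': [FileNode('README.rst', content='docs\n')]},
    {'message': 'Change readme',
     'changed': [FileNode('README.rst', content='docs, updated\n')],
     'author': 'Example Author <author@example.com>'},
]
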
837 852 @pytest.fixture
838 853 def reposerver(request):
839 854 """
840 855 Allows serving a backend repository
841 856 """
842 857
843 858 repo_server = RepoServer()
844 859 request.addfinalizer(repo_server.cleanup)
845 860 return repo_server
846 861
847 862
848 863 class RepoServer(object):
849 864 """
850 865 Utility to serve a local repository for the duration of a test case.
851 866
852 867 Supports only Subversion so far.
853 868 """
854 869
855 870 url = None
856 871
857 872 def __init__(self):
858 873 self._cleanup_servers = []
859 874
860 875 def serve(self, vcsrepo):
861 876 if vcsrepo.alias != 'svn':
862 877 raise TypeError("Backend %s not supported" % vcsrepo.alias)
863 878
864 879 proc = subprocess32.Popen(
865 880 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
866 881 '--root', vcsrepo.path])
867 882 self._cleanup_servers.append(proc)
868 883 self.url = 'svn://localhost'
869 884
870 885 def cleanup(self):
871 886 for proc in self._cleanup_servers:
872 887 proc.terminate()
873 888
874 889
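# Usage sketch for the ``reposerver`` fixture; only Subversion repositories
# are supported, matching the check in RepoServer.serve above.
def test_reposerver_example(reposerver, vcsbackend_svn):
    vcs_repo = vcsbackend_svn.create_repo(number_of_commits=1)
    reposerver.serve(vcs_repo)
    assert reposerver.url == 'svn://localhost'
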
875 890 @pytest.fixture
876 891 def pr_util(backend, request, config_stub):
877 892 """
878 893 Utility for tests of models and for functional tests around pull requests.
879 894
880 895 It gives an instance of :class:`PRTestUtility` which provides various
881 896 utility methods around one pull request.
882 897
883 898 This fixture uses `backend` and inherits its parameterization.
884 899 """
885 900
886 901 util = PRTestUtility(backend)
887 902 request.addfinalizer(util.cleanup)
888 903
889 904 return util
890 905
891 906
892 907 class PRTestUtility(object):
893 908
894 909 pull_request = None
895 910 pull_request_id = None
896 911 mergeable_patcher = None
897 912 mergeable_mock = None
898 913 notification_patcher = None
899 914
900 915 def __init__(self, backend):
901 916 self.backend = backend
902 917
903 918 def create_pull_request(
904 919 self, commits=None, target_head=None, source_head=None,
905 920 revisions=None, approved=False, author=None, mergeable=False,
906 921 enable_notifications=True, name_suffix=u'', reviewers=None,
907 922 title=u"Test", description=u"Description"):
908 923 self.set_mergeable(mergeable)
909 924 if not enable_notifications:
910 925 # mock notification side effect
911 926 self.notification_patcher = mock.patch(
912 927 'rhodecode.model.notification.NotificationModel.create')
913 928 self.notification_patcher.start()
914 929
915 930 if not self.pull_request:
916 931 if not commits:
917 932 commits = [
918 933 {'message': 'c1'},
919 934 {'message': 'c2'},
920 935 {'message': 'c3'},
921 936 ]
922 937 target_head = 'c1'
923 938 source_head = 'c2'
924 939 revisions = ['c2']
925 940
926 941 self.commit_ids = self.backend.create_master_repo(commits)
927 942 self.target_repository = self.backend.create_repo(
928 943 heads=[target_head], name_suffix=name_suffix)
929 944 self.source_repository = self.backend.create_repo(
930 945 heads=[source_head], name_suffix=name_suffix)
931 946 self.author = author or UserModel().get_by_username(
932 947 TEST_USER_ADMIN_LOGIN)
933 948
934 949 model = PullRequestModel()
935 950 self.create_parameters = {
936 951 'created_by': self.author,
937 952 'source_repo': self.source_repository.repo_name,
938 953 'source_ref': self._default_branch_reference(source_head),
939 954 'target_repo': self.target_repository.repo_name,
940 955 'target_ref': self._default_branch_reference(target_head),
941 956 'revisions': [self.commit_ids[r] for r in revisions],
942 957 'reviewers': reviewers or self._get_reviewers(),
943 958 'title': title,
944 959 'description': description,
945 960 }
946 961 self.pull_request = model.create(**self.create_parameters)
947 962 assert model.get_versions(self.pull_request) == []
948 963
949 964 self.pull_request_id = self.pull_request.pull_request_id
950 965
951 966 if approved:
952 967 self.approve()
953 968
954 969 Session().add(self.pull_request)
955 970 Session().commit()
956 971
957 972 return self.pull_request
958 973
959 974 def approve(self):
960 975 self.create_status_votes(
961 976 ChangesetStatus.STATUS_APPROVED,
962 977 *self.pull_request.reviewers)
963 978
964 979 def close(self):
965 980 PullRequestModel().close_pull_request(self.pull_request, self.author)
966 981
967 982 def _default_branch_reference(self, commit_message):
968 983 reference = '%s:%s:%s' % (
969 984 'branch',
970 985 self.backend.default_branch_name,
971 986 self.commit_ids[commit_message])
972 987 return reference
973 988
974 989 def _get_reviewers(self):
975 990 return [
976 991 (TEST_USER_REGULAR_LOGIN, ['default1'], False),
977 992 (TEST_USER_REGULAR2_LOGIN, ['default2'], False),
978 993 ]
979 994
980 995 def update_source_repository(self, head=None):
981 996 heads = [head or 'c3']
982 997 self.backend.pull_heads(self.source_repository, heads=heads)
983 998
984 999 def add_one_commit(self, head=None):
985 1000 self.update_source_repository(head=head)
986 1001 old_commit_ids = set(self.pull_request.revisions)
987 1002 PullRequestModel().update_commits(self.pull_request)
988 1003 commit_ids = set(self.pull_request.revisions)
989 1004 new_commit_ids = commit_ids - old_commit_ids
990 1005 assert len(new_commit_ids) == 1
991 1006 return new_commit_ids.pop()
992 1007
993 1008 def remove_one_commit(self):
994 1009 assert len(self.pull_request.revisions) == 2
995 1010 source_vcs = self.source_repository.scm_instance()
996 1011 removed_commit_id = source_vcs.commit_ids[-1]
997 1012
998 1013 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
999 1014 # remove the if once that's sorted out.
1000 1015 if self.backend.alias == "git":
1001 1016 kwargs = {'branch_name': self.backend.default_branch_name}
1002 1017 else:
1003 1018 kwargs = {}
1004 1019 source_vcs.strip(removed_commit_id, **kwargs)
1005 1020
1006 1021 PullRequestModel().update_commits(self.pull_request)
1007 1022 assert len(self.pull_request.revisions) == 1
1008 1023 return removed_commit_id
1009 1024
1010 1025 def create_comment(self, linked_to=None):
1011 1026 comment = CommentsModel().create(
1012 1027 text=u"Test comment",
1013 1028 repo=self.target_repository.repo_name,
1014 1029 user=self.author,
1015 1030 pull_request=self.pull_request)
1016 1031 assert comment.pull_request_version_id is None
1017 1032
1018 1033 if linked_to:
1019 1034 PullRequestModel()._link_comments_to_version(linked_to)
1020 1035
1021 1036 return comment
1022 1037
1023 1038 def create_inline_comment(
1024 1039 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1025 1040 comment = CommentsModel().create(
1026 1041 text=u"Test comment",
1027 1042 repo=self.target_repository.repo_name,
1028 1043 user=self.author,
1029 1044 line_no=line_no,
1030 1045 f_path=file_path,
1031 1046 pull_request=self.pull_request)
1032 1047 assert comment.pull_request_version_id is None
1033 1048
1034 1049 if linked_to:
1035 1050 PullRequestModel()._link_comments_to_version(linked_to)
1036 1051
1037 1052 return comment
1038 1053
1039 1054 def create_version_of_pull_request(self):
1040 1055 pull_request = self.create_pull_request()
1041 1056 version = PullRequestModel()._create_version_from_snapshot(
1042 1057 pull_request)
1043 1058 return version
1044 1059
1045 1060 def create_status_votes(self, status, *reviewers):
1046 1061 for reviewer in reviewers:
1047 1062 ChangesetStatusModel().set_status(
1048 1063 repo=self.pull_request.target_repo,
1049 1064 status=status,
1050 1065 user=reviewer.user_id,
1051 1066 pull_request=self.pull_request)
1052 1067
1053 1068 def set_mergeable(self, value):
1054 1069 if not self.mergeable_patcher:
1055 1070 self.mergeable_patcher = mock.patch.object(
1056 1071 VcsSettingsModel, 'get_general_settings')
1057 1072 self.mergeable_mock = self.mergeable_patcher.start()
1058 1073 self.mergeable_mock.return_value = {
1059 1074 'rhodecode_pr_merge_enabled': value}
1060 1075
1061 1076 def cleanup(self):
1062 1077 # In case the source repository is already cleaned up, the pull
1063 1078 # request will already be deleted.
1064 1079 pull_request = PullRequest().get(self.pull_request_id)
1065 1080 if pull_request:
1066 1081 PullRequestModel().delete(pull_request, pull_request.author)
1067 1082 Session().commit()
1068 1083
1069 1084 if self.notification_patcher:
1070 1085 self.notification_patcher.stop()
1071 1086
1072 1087 if self.mergeable_patcher:
1073 1088 self.mergeable_patcher.stop()
1074 1089
1075 1090
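# Usage sketch for ``pr_util``; the helper creates the source and target
# repositories on demand, so a test only describes the pull request it needs.
def test_pr_util_example(pr_util):
    pull_request = pr_util.create_pull_request(approved=True, mergeable=True)
    assert pull_request.pull_request_id == pr_util.pull_request_id
    # add_one_commit pulls another head into the source repository, updates
    # the pull request and returns the newly added commit id.
    new_commit_id = pr_util.add_one_commit()
    assert new_commit_id in pull_request.revisions
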
1076 1091 @pytest.fixture
1077 1092 def user_admin(baseapp):
1078 1093 """
1079 1094 Provides the default admin test user as an instance of `db.User`.
1080 1095 """
1081 1096 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1082 1097 return user
1083 1098
1084 1099
1085 1100 @pytest.fixture
1086 1101 def user_regular(baseapp):
1087 1102 """
1088 1103 Provides the default regular test user as an instance of `db.User`.
1089 1104 """
1090 1105 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1091 1106 return user
1092 1107
1093 1108
1094 1109 @pytest.fixture
1095 1110 def user_util(request, baseapp):
1096 1111 """
1097 1112 Provides a wired instance of `UserUtility` with integrated cleanup.
1098 1113 """
1099 1114 utility = UserUtility(test_name=request.node.name)
1100 1115 request.addfinalizer(utility.cleanup)
1101 1116 return utility
1102 1117
1103 1118
1104 1119 # TODO: johbo: Split this up into utilities per domain or something similar
1105 1120 class UserUtility(object):
1106 1121
1107 1122 def __init__(self, test_name="test"):
1108 1123 self._test_name = self._sanitize_name(test_name)
1109 1124 self.fixture = Fixture()
1110 1125 self.repo_group_ids = []
1111 1126 self.repos_ids = []
1112 1127 self.user_ids = []
1113 1128 self.user_group_ids = []
1114 1129 self.user_repo_permission_ids = []
1115 1130 self.user_group_repo_permission_ids = []
1116 1131 self.user_repo_group_permission_ids = []
1117 1132 self.user_group_repo_group_permission_ids = []
1118 1133 self.user_user_group_permission_ids = []
1119 1134 self.user_group_user_group_permission_ids = []
1120 1135 self.user_permissions = []
1121 1136
1122 1137 def _sanitize_name(self, name):
1123 1138 for char in ['[', ']']:
1124 1139 name = name.replace(char, '_')
1125 1140 return name
1126 1141
1127 1142 def create_repo_group(
1128 1143 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1129 1144 group_name = "{prefix}_repogroup_{count}".format(
1130 1145 prefix=self._test_name,
1131 1146 count=len(self.repo_group_ids))
1132 1147 repo_group = self.fixture.create_repo_group(
1133 1148 group_name, cur_user=owner)
1134 1149 if auto_cleanup:
1135 1150 self.repo_group_ids.append(repo_group.group_id)
1136 1151 return repo_group
1137 1152
1138 1153 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1139 1154 auto_cleanup=True, repo_type='hg'):
1140 1155 repo_name = "{prefix}_repository_{count}".format(
1141 1156 prefix=self._test_name,
1142 1157 count=len(self.repos_ids))
1143 1158
1144 1159 repository = self.fixture.create_repo(
1145 1160 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type)
1146 1161 if auto_cleanup:
1147 1162 self.repos_ids.append(repository.repo_id)
1148 1163 return repository
1149 1164
1150 1165 def create_user(self, auto_cleanup=True, **kwargs):
1151 1166 user_name = "{prefix}_user_{count}".format(
1152 1167 prefix=self._test_name,
1153 1168 count=len(self.user_ids))
1154 1169 user = self.fixture.create_user(user_name, **kwargs)
1155 1170 if auto_cleanup:
1156 1171 self.user_ids.append(user.user_id)
1157 1172 return user
1158 1173
1159 1174 def create_user_with_group(self):
1160 1175 user = self.create_user()
1161 1176 user_group = self.create_user_group(members=[user])
1162 1177 return user, user_group
1163 1178
1164 1179 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1165 1180 auto_cleanup=True, **kwargs):
1166 1181 group_name = "{prefix}_usergroup_{count}".format(
1167 1182 prefix=self._test_name,
1168 1183 count=len(self.user_group_ids))
1169 1184 user_group = self.fixture.create_user_group(
1170 1185 group_name, cur_user=owner, **kwargs)
1171 1186
1172 1187 if auto_cleanup:
1173 1188 self.user_group_ids.append(user_group.users_group_id)
1174 1189 if members:
1175 1190 for user in members:
1176 1191 UserGroupModel().add_user_to_group(user_group, user)
1177 1192 return user_group
1178 1193
1179 1194 def grant_user_permission(self, user_name, permission_name):
1180 1195 self._inherit_default_user_permissions(user_name, False)
1181 1196 self.user_permissions.append((user_name, permission_name))
1182 1197
1183 1198 def grant_user_permission_to_repo_group(
1184 1199 self, repo_group, user, permission_name):
1185 1200 permission = RepoGroupModel().grant_user_permission(
1186 1201 repo_group, user, permission_name)
1187 1202 self.user_repo_group_permission_ids.append(
1188 1203 (repo_group.group_id, user.user_id))
1189 1204 return permission
1190 1205
1191 1206 def grant_user_group_permission_to_repo_group(
1192 1207 self, repo_group, user_group, permission_name):
1193 1208 permission = RepoGroupModel().grant_user_group_permission(
1194 1209 repo_group, user_group, permission_name)
1195 1210 self.user_group_repo_group_permission_ids.append(
1196 1211 (repo_group.group_id, user_group.users_group_id))
1197 1212 return permission
1198 1213
1199 1214 def grant_user_permission_to_repo(
1200 1215 self, repo, user, permission_name):
1201 1216 permission = RepoModel().grant_user_permission(
1202 1217 repo, user, permission_name)
1203 1218 self.user_repo_permission_ids.append(
1204 1219 (repo.repo_id, user.user_id))
1205 1220 return permission
1206 1221
1207 1222 def grant_user_group_permission_to_repo(
1208 1223 self, repo, user_group, permission_name):
1209 1224 permission = RepoModel().grant_user_group_permission(
1210 1225 repo, user_group, permission_name)
1211 1226 self.user_group_repo_permission_ids.append(
1212 1227 (repo.repo_id, user_group.users_group_id))
1213 1228 return permission
1214 1229
1215 1230 def grant_user_permission_to_user_group(
1216 1231 self, target_user_group, user, permission_name):
1217 1232 permission = UserGroupModel().grant_user_permission(
1218 1233 target_user_group, user, permission_name)
1219 1234 self.user_user_group_permission_ids.append(
1220 1235 (target_user_group.users_group_id, user.user_id))
1221 1236 return permission
1222 1237
1223 1238 def grant_user_group_permission_to_user_group(
1224 1239 self, target_user_group, user_group, permission_name):
1225 1240 permission = UserGroupModel().grant_user_group_permission(
1226 1241 target_user_group, user_group, permission_name)
1227 1242 self.user_group_user_group_permission_ids.append(
1228 1243 (target_user_group.users_group_id, user_group.users_group_id))
1229 1244 return permission
1230 1245
1231 1246 def revoke_user_permission(self, user_name, permission_name):
1232 1247 self._inherit_default_user_permissions(user_name, True)
1233 1248 UserModel().revoke_perm(user_name, permission_name)
1234 1249
1235 1250 def _inherit_default_user_permissions(self, user_name, value):
1236 1251 user = UserModel().get_by_username(user_name)
1237 1252 user.inherit_default_permissions = value
1238 1253 Session().add(user)
1239 1254 Session().commit()
1240 1255
1241 1256 def cleanup(self):
1242 1257 self._cleanup_permissions()
1243 1258 self._cleanup_repos()
1244 1259 self._cleanup_repo_groups()
1245 1260 self._cleanup_user_groups()
1246 1261 self._cleanup_users()
1247 1262
1248 1263 def _cleanup_permissions(self):
1249 1264 if self.user_permissions:
1250 1265 for user_name, permission_name in self.user_permissions:
1251 1266 self.revoke_user_permission(user_name, permission_name)
1252 1267
1253 1268 for permission in self.user_repo_permission_ids:
1254 1269 RepoModel().revoke_user_permission(*permission)
1255 1270
1256 1271 for permission in self.user_group_repo_permission_ids:
1257 1272 RepoModel().revoke_user_group_permission(*permission)
1258 1273
1259 1274 for permission in self.user_repo_group_permission_ids:
1260 1275 RepoGroupModel().revoke_user_permission(*permission)
1261 1276
1262 1277 for permission in self.user_group_repo_group_permission_ids:
1263 1278 RepoGroupModel().revoke_user_group_permission(*permission)
1264 1279
1265 1280 for permission in self.user_user_group_permission_ids:
1266 1281 UserGroupModel().revoke_user_permission(*permission)
1267 1282
1268 1283 for permission in self.user_group_user_group_permission_ids:
1269 1284 UserGroupModel().revoke_user_group_permission(*permission)
1270 1285
1271 1286 def _cleanup_repo_groups(self):
1272 1287 def _repo_group_compare(first_group_id, second_group_id):
1273 1288 """
1274 1289 Gives higher priority to the groups with the most complex paths
1275 1290 """
1276 1291 first_group = RepoGroup.get(first_group_id)
1277 1292 second_group = RepoGroup.get(second_group_id)
1278 1293 first_group_parts = (
1279 1294 len(first_group.group_name.split('/')) if first_group else 0)
1280 1295 second_group_parts = (
1281 1296 len(second_group.group_name.split('/')) if second_group else 0)
1282 1297 return cmp(second_group_parts, first_group_parts)
1283 1298
1284 1299 sorted_repo_group_ids = sorted(
1285 1300 self.repo_group_ids, cmp=_repo_group_compare)
1286 1301 for repo_group_id in sorted_repo_group_ids:
1287 1302 self.fixture.destroy_repo_group(repo_group_id)
1288 1303
1289 1304 def _cleanup_repos(self):
1290 1305 sorted_repos_ids = sorted(self.repos_ids)
1291 1306 for repo_id in sorted_repos_ids:
1292 1307 self.fixture.destroy_repo(repo_id)
1293 1308
1294 1309 def _cleanup_user_groups(self):
1295 1310 def _user_group_compare(first_group_id, second_group_id):
1296 1311 """
1297 1312 Gives higher priority to the groups with the most complex paths
1298 1313 """
1299 1314 first_group = UserGroup.get(first_group_id)
1300 1315 second_group = UserGroup.get(second_group_id)
1301 1316 first_group_parts = (
1302 1317 len(first_group.users_group_name.split('/'))
1303 1318 if first_group else 0)
1304 1319 second_group_parts = (
1305 1320 len(second_group.users_group_name.split('/'))
1306 1321 if second_group else 0)
1307 1322 return cmp(second_group_parts, first_group_parts)
1308 1323
1309 1324 sorted_user_group_ids = sorted(
1310 1325 self.user_group_ids, cmp=_user_group_compare)
1311 1326 for user_group_id in sorted_user_group_ids:
1312 1327 self.fixture.destroy_user_group(user_group_id)
1313 1328
1314 1329 def _cleanup_users(self):
1315 1330 for user_id in self.user_ids:
1316 1331 self.fixture.destroy_user(user_id)
1317 1332
1318 1333
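# Usage sketch for ``user_util``; everything created through it is cleaned up
# by the finalizer wired into the ``user_util`` fixture. 'repository.read' is
# a standard RhodeCode permission name, used here purely as an illustration.
def test_user_util_example(user_util):
    user = user_util.create_user()
    repo = user_util.create_repo(owner=TEST_USER_ADMIN_LOGIN)
    user_util.grant_user_permission_to_repo(repo, user, 'repository.read')
    assert (repo.repo_id, user.user_id) in user_util.user_repo_permission_ids
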
1319 1334 # TODO: Think about moving this into a pytest-pyro package and make it a
1320 1335 # pytest plugin
1321 1336 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1322 1337 def pytest_runtest_makereport(item, call):
1323 1338 """
1324 1339 Adds the remote traceback if the exception carries this information.
1325 1340
1326 1341 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1327 1342 to the exception instance.
1328 1343 """
1329 1344 outcome = yield
1330 1345 report = outcome.get_result()
1331 1346 if call.excinfo:
1332 1347 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1333 1348
1334 1349
1335 1350 def _add_vcsserver_remote_traceback(report, exc):
1336 1351 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1337 1352
1338 1353 if vcsserver_traceback:
1339 1354 section = 'VCSServer remote traceback ' + report.when
1340 1355 report.sections.append((section, vcsserver_traceback))
1341 1356
1342 1357
1343 1358 @pytest.fixture(scope='session')
1344 1359 def testrun():
1345 1360 return {
1346 1361 'uuid': uuid.uuid4(),
1347 1362 'start': datetime.datetime.utcnow().isoformat(),
1348 1363 'timestamp': int(time.time()),
1349 1364 }
1350 1365
1351 1366
1352 1367 @pytest.fixture(autouse=True)
1353 1368 def collect_appenlight_stats(request, testrun):
1354 1369 """
1355 1370 This fixture reports the memory consumption of single tests.
1356 1371
1357 1372 It gathers data based on `psutil` and sends them to Appenlight. The option
1358 1373 ``--ae`` has to be used to enable this fixture and the API key for your
1359 1374 application has to be provided in ``--ae-key``.
1360 1375 """
1361 1376 try:
1362 1377 # cygwin does not have psutil support yet.
1363 1378 import psutil
1364 1379 except ImportError:
1365 1380 return
1366 1381
1367 1382 if not request.config.getoption('--appenlight'):
1368 1383 return
1369 1384 else:
1370 1385 # Only request the baseapp fixture if appenlight tracking is
1371 1386 # enabled. This will speed up a test run of unit tests by 2 to 3
1372 1387 # seconds if appenlight is not enabled.
1373 1388 baseapp = request.getfuncargvalue("baseapp")
1374 1389 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1375 1390 client = AppenlightClient(
1376 1391 url=url,
1377 1392 api_key=request.config.getoption('--appenlight-api-key'),
1378 1393 namespace=request.node.nodeid,
1379 1394 request=str(testrun['uuid']),
1380 1395 testrun=testrun)
1381 1396
1382 1397 client.collect({
1383 1398 'message': "Starting",
1384 1399 })
1385 1400
1386 1401 server_and_port = baseapp.config.get_settings()['vcs.server']
1387 1402 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1388 1403 server = create_vcsserver_proxy(server_and_port, protocol)
1389 1404 with server:
1390 1405 vcs_pid = server.get_pid()
1391 1406 server.run_gc()
1392 1407 vcs_process = psutil.Process(vcs_pid)
1393 1408 mem = vcs_process.memory_info()
1394 1409 client.tag_before('vcsserver.rss', mem.rss)
1395 1410 client.tag_before('vcsserver.vms', mem.vms)
1396 1411
1397 1412 test_process = psutil.Process()
1398 1413 mem = test_process.memory_info()
1399 1414 client.tag_before('test.rss', mem.rss)
1400 1415 client.tag_before('test.vms', mem.vms)
1401 1416
1402 1417 client.tag_before('time', time.time())
1403 1418
1404 1419 @request.addfinalizer
1405 1420 def send_stats():
1406 1421 client.tag_after('time', time.time())
1407 1422 with server:
1408 1423 gc_stats = server.run_gc()
1409 1424 for tag, value in gc_stats.items():
1410 1425 client.tag_after(tag, value)
1411 1426 mem = vcs_process.memory_info()
1412 1427 client.tag_after('vcsserver.rss', mem.rss)
1413 1428 client.tag_after('vcsserver.vms', mem.vms)
1414 1429
1415 1430 mem = test_process.memory_info()
1416 1431 client.tag_after('test.rss', mem.rss)
1417 1432 client.tag_after('test.vms', mem.vms)
1418 1433
1419 1434 client.collect({
1420 1435 'message': "Finished",
1421 1436 })
1422 1437 client.send_stats()
1423 1438
1424 1439 return client
1425 1440
1426 1441
1427 1442 class AppenlightClient():
1428 1443
1429 1444 url_template = '{url}?protocol_version=0.5'
1430 1445
1431 1446 def __init__(
1432 1447 self, url, api_key, add_server=True, add_timestamp=True,
1433 1448 namespace=None, request=None, testrun=None):
1434 1449 self.url = self.url_template.format(url=url)
1435 1450 self.api_key = api_key
1436 1451 self.add_server = add_server
1437 1452 self.add_timestamp = add_timestamp
1438 1453 self.namespace = namespace
1439 1454 self.request = request
1440 1455 self.server = socket.getfqdn(socket.gethostname())
1441 1456 self.tags_before = {}
1442 1457 self.tags_after = {}
1443 1458 self.stats = []
1444 1459 self.testrun = testrun or {}
1445 1460
1446 1461 def tag_before(self, tag, value):
1447 1462 self.tags_before[tag] = value
1448 1463
1449 1464 def tag_after(self, tag, value):
1450 1465 self.tags_after[tag] = value
1451 1466
1452 1467 def collect(self, data):
1453 1468 if self.add_server:
1454 1469 data.setdefault('server', self.server)
1455 1470 if self.add_timestamp:
1456 1471 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1457 1472 if self.namespace:
1458 1473 data.setdefault('namespace', self.namespace)
1459 1474 if self.request:
1460 1475 data.setdefault('request', self.request)
1461 1476 self.stats.append(data)
1462 1477
1463 1478 def send_stats(self):
1464 1479 tags = [
1465 1480 ('testrun', self.request),
1466 1481 ('testrun.start', self.testrun['start']),
1467 1482 ('testrun.timestamp', self.testrun['timestamp']),
1468 1483 ('test', self.namespace),
1469 1484 ]
1470 1485 for key, value in self.tags_before.items():
1471 1486 tags.append((key + '.before', value))
1472 1487 try:
1473 1488 delta = self.tags_after[key] - value
1474 1489 tags.append((key + '.delta', delta))
1475 1490 except Exception:
1476 1491 pass
1477 1492 for key, value in self.tags_after.items():
1478 1493 tags.append((key + '.after', value))
1479 1494 self.collect({
1480 1495 'message': "Collected tags",
1481 1496 'tags': tags,
1482 1497 })
1483 1498
1484 1499 response = requests.post(
1485 1500 self.url,
1486 1501 headers={
1487 1502 'X-appenlight-api-key': self.api_key},
1488 1503 json=self.stats,
1489 1504 )
1490 1505
1491 1506 if not response.status_code == 200:
1492 1507 pprint.pprint(self.stats)
1493 1508 print(response.headers)
1494 1509 print(response.text)
1495 1510 raise Exception('Sending to appenlight failed')
1496 1511
1497 1512
1498 1513 @pytest.fixture
1499 1514 def gist_util(request, baseapp):
1500 1515 """
1501 1516 Provides a wired instance of `GistUtility` with integrated cleanup.
1502 1517 """
1503 1518 utility = GistUtility()
1504 1519 request.addfinalizer(utility.cleanup)
1505 1520 return utility
1506 1521
1507 1522
1508 1523 class GistUtility(object):
1509 1524 def __init__(self):
1510 1525 self.fixture = Fixture()
1511 1526 self.gist_ids = []
1512 1527
1513 1528 def create_gist(self, **kwargs):
1514 1529 gist = self.fixture.create_gist(**kwargs)
1515 1530 self.gist_ids.append(gist.gist_id)
1516 1531 return gist
1517 1532
1518 1533 def cleanup(self):
1519 1534 for id_ in self.gist_ids:
1520 1535 self.fixture.destroy_gists(str(id_))
1521 1536
1522 1537
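# Usage sketch for ``gist_util``; created gists are destroyed automatically
# by the fixture finalizer.
def test_gist_util_example(gist_util):
    gist = gist_util.create_gist()
    assert gist.gist_id in gist_util.gist_ids
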
1523 1538 @pytest.fixture
1524 1539 def enabled_backends(request):
1525 1540 backends = request.config.option.backends
1526 1541 return backends[:]
1527 1542
1528 1543
1529 1544 @pytest.fixture
1530 1545 def settings_util(request):
1531 1546 """
1532 1547 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1533 1548 """
1534 1549 utility = SettingsUtility()
1535 1550 request.addfinalizer(utility.cleanup)
1536 1551 return utility
1537 1552
1538 1553
1539 1554 class SettingsUtility(object):
1540 1555 def __init__(self):
1541 1556 self.rhodecode_ui_ids = []
1542 1557 self.rhodecode_setting_ids = []
1543 1558 self.repo_rhodecode_ui_ids = []
1544 1559 self.repo_rhodecode_setting_ids = []
1545 1560
1546 1561 def create_repo_rhodecode_ui(
1547 1562 self, repo, section, value, key=None, active=True, cleanup=True):
1548 1563 key = key or hashlib.sha1(
1549 1564 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1550 1565
1551 1566 setting = RepoRhodeCodeUi()
1552 1567 setting.repository_id = repo.repo_id
1553 1568 setting.ui_section = section
1554 1569 setting.ui_value = value
1555 1570 setting.ui_key = key
1556 1571 setting.ui_active = active
1557 1572 Session().add(setting)
1558 1573 Session().commit()
1559 1574
1560 1575 if cleanup:
1561 1576 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1562 1577 return setting
1563 1578
1564 1579 def create_rhodecode_ui(
1565 1580 self, section, value, key=None, active=True, cleanup=True):
1566 1581 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1567 1582
1568 1583 setting = RhodeCodeUi()
1569 1584 setting.ui_section = section
1570 1585 setting.ui_value = value
1571 1586 setting.ui_key = key
1572 1587 setting.ui_active = active
1573 1588 Session().add(setting)
1574 1589 Session().commit()
1575 1590
1576 1591 if cleanup:
1577 1592 self.rhodecode_ui_ids.append(setting.ui_id)
1578 1593 return setting
1579 1594
1580 1595 def create_repo_rhodecode_setting(
1581 1596 self, repo, name, value, type_, cleanup=True):
1582 1597 setting = RepoRhodeCodeSetting(
1583 1598 repo.repo_id, key=name, val=value, type=type_)
1584 1599 Session().add(setting)
1585 1600 Session().commit()
1586 1601
1587 1602 if cleanup:
1588 1603 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1589 1604 return setting
1590 1605
1591 1606 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1592 1607 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1593 1608 Session().add(setting)
1594 1609 Session().commit()
1595 1610
1596 1611 if cleanup:
1597 1612 self.rhodecode_setting_ids.append(setting.app_settings_id)
1598 1613
1599 1614 return setting
1600 1615
1601 1616 def cleanup(self):
1602 1617 for id_ in self.rhodecode_ui_ids:
1603 1618 setting = RhodeCodeUi.get(id_)
1604 1619 Session().delete(setting)
1605 1620
1606 1621 for id_ in self.rhodecode_setting_ids:
1607 1622 setting = RhodeCodeSetting.get(id_)
1608 1623 Session().delete(setting)
1609 1624
1610 1625 for id_ in self.repo_rhodecode_ui_ids:
1611 1626 setting = RepoRhodeCodeUi.get(id_)
1612 1627 Session().delete(setting)
1613 1628
1614 1629 for id_ in self.repo_rhodecode_setting_ids:
1615 1630 setting = RepoRhodeCodeSetting.get(id_)
1616 1631 Session().delete(setting)
1617 1632
1618 1633 Session().commit()
1619 1634
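
# Illustrative sketch (not part of this module): using `settings_util`
# to create a temporary global ui entry. The section and value are
# made-up examples; the row is deleted again by the fixture's cleanup().
def test_temporary_ui_setting(settings_util):
    setting = settings_util.create_rhodecode_ui(
        section='hooks', value='python:example.hook', active=False)
    assert setting.ui_section == 'hooks'
    assert not setting.ui_active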
1620 1635
1621 1636 @pytest.fixture
1622 1637 def no_notifications(request):
1623 1638 notification_patcher = mock.patch(
1624 1639 'rhodecode.model.notification.NotificationModel.create')
1625 1640 notification_patcher.start()
1626 1641 request.addfinalizer(notification_patcher.stop)
1627 1642
1628 1643
1629 1644 @pytest.fixture(scope='session')
1630 1645 def repeat(request):
1631 1646 """
1632 1647 The number of repetitions is based on this fixture.
1633 1648
1634 1649 Slower calls may divide it by 10 or 100. The value is chosen so that the
1635 1650 tests are not too slow in our default test suite.
1636 1651 """
1637 1652 return request.config.getoption('--repeat')
1638 1653
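
# Illustrative sketch (not part of this module): a test scaling its work
# with the session wide `repeat` fixture. Dividing by 10 for slower
# operations follows the guidance in the docstring above.
def test_fast_operation_many_times(repeat):
    iterations = max(repeat // 10, 1)
    for _ in range(iterations):
        pass  # exercise the operation under test here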
1639 1654
1640 1655 @pytest.fixture
1641 1656 def rhodecode_fixtures():
1642 1657 return Fixture()
1643 1658
1644 1659
1645 1660 @pytest.fixture
1646 1661 def context_stub():
1647 1662 """
1648 1663 Stub context object.
1649 1664 """
1650 1665 context = pyramid.testing.DummyResource()
1651 1666 return context
1652 1667
1653 1668
1654 1669 @pytest.fixture
1655 1670 def request_stub():
1656 1671 """
1657 1672 Stub request object.
1658 1673 """
1659 1674 from rhodecode.lib.base import bootstrap_request
1660 1675 request = bootstrap_request(scheme='https')
1661 1676 return request
1662 1677
1663 1678
1664 1679 @pytest.fixture
1665 1680 def config_stub(request, request_stub):
1666 1681 """
1667 1682 Set up pyramid.testing and return the Configurator.
1668 1683 """
1669 1684 from rhodecode.lib.base import bootstrap_config
1670 1685 config = bootstrap_config(request=request_stub)
1671 1686
1672 1687 @request.addfinalizer
1673 1688 def cleanup():
1674 1689 pyramid.testing.tearDown()
1675 1690
1676 1691 return config
1677 1692
1678 1693
1679 1694 @pytest.fixture
1680 1695 def StubIntegrationType():
1681 1696 class _StubIntegrationType(IntegrationTypeBase):
1682 1697 """ Test integration type class """
1683 1698
1684 1699 key = 'test'
1685 1700 display_name = 'Test integration type'
1686 1701 description = 'A test integration type for testing'
1687 1702 icon = 'test_icon_html_image'
1688 1703
1689 1704 def __init__(self, settings):
1690 1705 super(_StubIntegrationType, self).__init__(settings)
1691 1706 self.sent_events = [] # for testing
1692 1707
1693 1708 def send_event(self, event):
1694 1709 self.sent_events.append(event)
1695 1710
1696 1711 def settings_schema(self):
1697 1712 class SettingsSchema(colander.Schema):
1698 1713 test_string_field = colander.SchemaNode(
1699 1714 colander.String(),
1700 1715 missing=colander.required,
1701 1716 title='test string field',
1702 1717 )
1703 1718 test_int_field = colander.SchemaNode(
1704 1719 colander.Int(),
1705 1720 title='some integer setting',
1706 1721 )
1707 1722 return SettingsSchema()
1708 1723
1709 1724
1710 1725 integration_type_registry.register_integration_type(_StubIntegrationType)
1711 1726 return _StubIntegrationType
1712 1727
1713 1728 @pytest.fixture
1714 1729 def stub_integration_settings():
1715 1730 return {
1716 1731 'test_string_field': 'some data',
1717 1732 'test_int_field': 100,
1718 1733 }
1719 1734
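
# Illustrative sketch (not part of this module): the stub settings above
# deserialize cleanly through the colander schema that
# _StubIntegrationType defines; instantiating with empty settings is
# done only to obtain the schema object.
def test_stub_settings_match_schema(StubIntegrationType,
                                    stub_integration_settings):
    schema = StubIntegrationType(settings={}).settings_schema()
    cleaned = schema.deserialize(stub_integration_settings)
    assert cleaned['test_int_field'] == 100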
1720 1735
1721 1736 @pytest.fixture
1722 1737 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1723 1738 stub_integration_settings):
1724 1739 integration = IntegrationModel().create(
1725 1740 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1726 1741 name='test repo integration',
1727 1742 repo=repo_stub, repo_group=None, child_repos_only=None)
1728 1743
1729 1744 @request.addfinalizer
1730 1745 def cleanup():
1731 1746 IntegrationModel().delete(integration)
1732 1747
1733 1748 return integration
1734 1749
1735 1750
1736 1751 @pytest.fixture
1737 1752 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1738 1753 stub_integration_settings):
1739 1754 integration = IntegrationModel().create(
1740 1755 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1741 1756 name='test repogroup integration',
1742 1757 repo=None, repo_group=test_repo_group, child_repos_only=True)
1743 1758
1744 1759 @request.addfinalizer
1745 1760 def cleanup():
1746 1761 IntegrationModel().delete(integration)
1747 1762
1748 1763 return integration
1749 1764
1750 1765
1751 1766 @pytest.fixture
1752 1767 def repogroup_recursive_integration_stub(request, test_repo_group,
1753 1768 StubIntegrationType, stub_integration_settings):
1754 1769 integration = IntegrationModel().create(
1755 1770 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1756 1771 name='test recursive repogroup integration',
1757 1772 repo=None, repo_group=test_repo_group, child_repos_only=False)
1758 1773
1759 1774 @request.addfinalizer
1760 1775 def cleanup():
1761 1776 IntegrationModel().delete(integration)
1762 1777
1763 1778 return integration
1764 1779
1765 1780
1766 1781 @pytest.fixture
1767 1782 def global_integration_stub(request, StubIntegrationType,
1768 1783 stub_integration_settings):
1769 1784 integration = IntegrationModel().create(
1770 1785 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1771 1786 name='test global integration',
1772 1787 repo=None, repo_group=None, child_repos_only=None)
1773 1788
1774 1789 @request.addfinalizer
1775 1790 def cleanup():
1776 1791 IntegrationModel().delete(integration)
1777 1792
1778 1793 return integration
1779 1794
1780 1795
1781 1796 @pytest.fixture
1782 1797 def root_repos_integration_stub(request, StubIntegrationType,
1783 1798 stub_integration_settings):
1784 1799 integration = IntegrationModel().create(
1785 1800 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1786 1801 name='test root repos integration',
1787 1802 repo=None, repo_group=None, child_repos_only=True)
1788 1803
1789 1804 @request.addfinalizer
1790 1805 def cleanup():
1791 1806 IntegrationModel().delete(integration)
1792 1807
1793 1808 return integration
1794 1809
1795 1810
1796 1811 @pytest.fixture
1797 1812 def local_dt_to_utc():
1798 1813 def _factory(dt):
1799 1814 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1800 1815 dateutil.tz.tzutc()).replace(tzinfo=None)
1801 1816 return _factory
1802 1817
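
# Illustrative sketch (not part of this module): `local_dt_to_utc`
# converts a naive local datetime into a naive UTC datetime; the exact
# offset depends on the timezone of the machine running the tests
# (datetime is already imported at the top of this conftest).
def test_local_dt_to_utc_returns_naive_datetime(local_dt_to_utc):
    utc_dt = local_dt_to_utc(datetime.datetime(2017, 1, 1, 12, 0))
    assert utc_dt.tzinfo is None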
1803 1818
1804 1819 @pytest.fixture
1805 1820 def disable_anonymous_user(request, baseapp):
1806 1821 set_anonymous_access(False)
1807 1822
1808 1823 @request.addfinalizer
1809 1824 def cleanup():
1810 1825 set_anonymous_access(True)
1811 1826
1812 1827
1813 1828 @pytest.fixture(scope='module')
1814 1829 def rc_fixture(request):
1815 1830 return Fixture()
1816 1831
1817 1832
1818 1833 @pytest.fixture
1819 1834 def repo_groups(request):
1820 1835 fixture = Fixture()
1821 1836
1822 1837 session = Session()
1823 1838 zombie_group = fixture.create_repo_group('zombie')
1824 1839 parent_group = fixture.create_repo_group('parent')
1825 1840 child_group = fixture.create_repo_group('parent/child')
1826 1841 groups_in_db = session.query(RepoGroup).all()
1827 1842 assert len(groups_in_db) == 3
1828 1843 assert child_group.group_parent_id == parent_group.group_id
1829 1844
1830 1845 @request.addfinalizer
1831 1846 def cleanup():
1832 1847 fixture.destroy_repo_group(zombie_group)
1833 1848 fixture.destroy_repo_group(child_group)
1834 1849 fixture.destroy_repo_group(parent_group)
1835 1850
1836 1851 return zombie_group, parent_group, child_group
@@ -1,386 +1,372 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import json
23 23 import platform
24 24 import socket
25 25 import subprocess32
26 26 import time
27 27 from urllib2 import urlopen, URLError
28 28
29 29 import configobj
30 30 import pytest
31 31
32 32 import pyramid.paster
33 33 from rhodecode.tests.fixture import TestINI
34 34 import rhodecode
35 35
36 36
37 37 def _parse_json(value):
38 38 return json.loads(value) if value else None
39 39
40 40
41 41 def pytest_addoption(parser):
42 42 parser.addoption(
43 43 '--test-loglevel', dest='test_loglevel',
44 44 help="Set default Logging level for tests, warn (default), info, debug")
45 45 group = parser.getgroup('pylons')
46 46 group.addoption(
47 47 '--with-pylons', dest='pyramid_config',
48 48 help="Set up a Pylons environment with the specified config file.")
49 49 group.addoption(
50 50 '--ini-config-override', action='store', type=_parse_json,
51 51 default=None, dest='pyramid_config_override', help=(
52 52 "Overrides the .ini file settings. Should be specified in JSON"
53 53 " format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
54 54 )
55 55 )
56 56 parser.addini(
57 57 'pyramid_config',
58 58 "Set up a Pyramid environment with the specified config file.")
59 59
60 60 vcsgroup = parser.getgroup('vcs')
61 61 vcsgroup.addoption(
62 62 '--without-vcsserver', dest='with_vcsserver', action='store_false',
63 63 help="Do not start the VCSServer in a background process.")
64 64 vcsgroup.addoption(
65 65 '--with-vcsserver-http', dest='vcsserver_config_http',
66 66 help="Start the HTTP VCSServer with the specified config file.")
67 67 vcsgroup.addoption(
68 68 '--vcsserver-protocol', dest='vcsserver_protocol',
69 69 help="Start the VCSServer with HTTP protocol support.")
70 70 vcsgroup.addoption(
71 71 '--vcsserver-config-override', action='store', type=_parse_json,
72 72 default=None, dest='vcsserver_config_override', help=(
73 73 "Overrides the .ini file settings for the VCSServer. "
74 74 "Should be specified in JSON "
75 75 "format, e.g. '{\"section\": {\"parameter\": \"value\", ...}}'"
76 76 )
77 77 )
78 78 vcsgroup.addoption(
79 79 '--vcsserver-port', action='store', type=int,
80 80 default=None, help=(
81 81 "Allows to set the port of the vcsserver. Useful when testing "
82 82 "against an already running server and random ports cause "
83 83 "trouble."))
84 84 parser.addini(
85 85 'vcsserver_config_http',
86 86 "Start the HTTP VCSServer with the specified config file.")
87 87 parser.addini(
88 88 'vcsserver_protocol',
89 89 "Start the VCSServer with HTTP protocol support.")
90 90
91 91
92 92 @pytest.fixture(scope='session')
93 93 def vcsserver(request, vcsserver_port, vcsserver_factory):
94 94 """
95 95 Session scope VCSServer.
96 96
97 97 Tests which need the VCSServer have to rely on this fixture in order
98 98 to ensure it will be running.
99 99
100 100 For specific needs, the fixture vcsserver_factory can be used. It allows
101 101 adjusting the configuration file for the test run.
102 102
103 103 Command line args:
104 104
105 105 --without-vcsserver: Allows switching this fixture off. You then have to
106 106 start the server manually.
107 107
108 108 --vcsserver-port: Will expect the VCSServer to listen on this port.
109 109 """
110 110
111 111 if not request.config.getoption('with_vcsserver'):
112 112 return None
113 113
114 114 use_http = _use_vcs_http_server(request.config)
115 115 return vcsserver_factory(
116 116 request, use_http=use_http, vcsserver_port=vcsserver_port)
117 117
118 118
119 119 @pytest.fixture(scope='session')
120 120 def vcsserver_factory(tmpdir_factory):
121 121 """
122 122 Use this if you need a running vcsserver with a special configuration.
123 123 """
124 124
125 125 def factory(request, use_http=True, overrides=(), vcsserver_port=None):
126 126
127 127 if vcsserver_port is None:
128 128 vcsserver_port = get_available_port()
129 129
130 130 overrides = list(overrides)
131 131 if use_http:
132 132 overrides.append({'server:main': {'port': vcsserver_port}})
133 133 else:
134 134 overrides.append({'DEFAULT': {'port': vcsserver_port}})
135 135
136 136 if is_cygwin():
137 137 platform_override = {'DEFAULT': {
138 138 'beaker.cache.repo_object.type': 'nocache'}}
139 139 overrides.append(platform_override)
140 140
141 141 option_name = 'vcsserver_config_http' if use_http else ''
142 142 override_option_name = 'vcsserver_config_override'
143 143 config_file = get_config(
144 144 request.config, option_name=option_name,
145 145 override_option_name=override_option_name, overrides=overrides,
146 146 basetemp=tmpdir_factory.getbasetemp().strpath,
147 147 prefix='test_vcs_')
148 148
149 149 print("Using the VCSServer configuration:{}".format(config_file))
150 150 ServerClass = HttpVCSServer if use_http else None
151 151 server = ServerClass(config_file)
152 152 server.start()
153 153
154 154 @request.addfinalizer
155 155 def cleanup():
156 156 server.shutdown()
157 157
158 158 server.wait_until_ready()
159 159 return server
160 160
161 161 return factory
162 162
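
# Illustrative sketch (not part of this module): a session scoped
# fixture starting an extra VCSServer through the factory above. The
# override section/option is a made-up example; it only has to follow
# the same {'section': {'option': value}} shape used inside `factory`.
@pytest.fixture(scope='session')
def custom_vcsserver(request, vcsserver_factory):
    return vcsserver_factory(
        request,
        overrides=[{'server:main': {'max_request_body_size': '1073741824'}}])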
163 163
164 164 def is_cygwin():
165 165 return 'cygwin' in platform.system().lower()
166 166
167 167
168 168 def _use_vcs_http_server(config):
169 169 protocol_option = 'vcsserver_protocol'
170 170 protocol = (
171 171 config.getoption(protocol_option) or
172 172 config.getini(protocol_option) or
173 173 'http')
174 174 return protocol == 'http'
175 175
176 176
177 177 def _use_log_level(config):
178 178 level = config.getoption('test_loglevel') or 'warn'
179 179 return level.upper()
180 180
181 181
182 182 class VCSServer(object):
183 183 """
184 184 Represents a running VCSServer instance.
185 185 """
186 186
187 187 _args = []
188 188
189 189 def start(self):
190 190 print("Starting the VCSServer: {}".format(self._args))
191 191 self.process = subprocess32.Popen(self._args)
192 192
193 193 def wait_until_ready(self, timeout=30):
194 194 raise NotImplementedError()
195 195
196 196 def shutdown(self):
197 197 self.process.kill()
198 198
199 199
200 200 class HttpVCSServer(VCSServer):
201 201 """
202 202 Represents a running VCSServer instance.
203 203 """
204 204 def __init__(self, config_file):
205 205 config_data = configobj.ConfigObj(config_file)
206 206 self._config = config_data['server:main']
207 207
208 208 args = ['pserve', config_file]
209 209 self._args = args
210 210
211 211 @property
212 212 def http_url(self):
213 213 template = 'http://{host}:{port}/'
214 214 return template.format(**self._config)
215 215
216 216 def start(self):
217 217 self.process = subprocess32.Popen(self._args)
218 218
219 219 def wait_until_ready(self, timeout=30):
220 220 host = self._config['host']
221 221 port = self._config['port']
222 222 status_url = 'http://{host}:{port}/status'.format(host=host, port=port)
223 223 start = time.time()
224 224
225 225 while time.time() - start < timeout:
226 226 try:
227 227 urlopen(status_url)
228 228 break
229 229 except URLError:
230 230 time.sleep(0.2)
231 231 else:
232 232 pytest.exit(
233 233 "Starting the VCSServer failed or took more than {} "
234 234 "seconds. cmd: `{}`".format(timeout, ' '.join(self._args)))
235 235
236 236 def shutdown(self):
237 237 self.process.kill()
238 238
239 239
240 240 @pytest.fixture(scope='session')
241 241 def ini_config(request, tmpdir_factory, rcserver_port, vcsserver_port):
242 242 option_name = 'pyramid_config'
243 243 log_level = _use_log_level(request.config)
244 244
245 245 overrides = [
246 246 {'server:main': {'port': rcserver_port}},
247 247 {'app:main': {
248 248 'vcs.server': 'localhost:%s' % vcsserver_port,
249 249 # johbo: We will always start the VCSServer on our own based on the
250 250 # fixtures of the test cases. For the test run it must always be
251 251 # off in the INI file.
252 252 'vcs.start_server': 'false',
253 253 }},
254 254
255 255 {'handler_console': {
256 256 'class ': 'StreamHandler',
257 257 'args ': '(sys.stderr,)',
258 258 'level': log_level,
259 259 }},
260 260
261 261 ]
262 262 if _use_vcs_http_server(request.config):
263 263 overrides.append({
264 264 'app:main': {
265 265 'vcs.server.protocol': 'http',
266 266 'vcs.scm_app_implementation': 'http',
267 267 'vcs.hooks.protocol': 'http',
268 268 }
269 269 })
270 270
271 271 filename = get_config(
272 272 request.config, option_name=option_name,
273 273 override_option_name='{}_override'.format(option_name),
274 274 overrides=overrides,
275 275 basetemp=tmpdir_factory.getbasetemp().strpath,
276 276 prefix='test_rce_')
277 277 return filename
278 278
279 279
280 280 @pytest.fixture(scope='session')
281 281 def rcserver_port(request):
282 282 port = get_available_port()
283 283 print('Using rcserver port {}'.format(port))
284 284 return port
285 285
286 286
287 287 @pytest.fixture(scope='session')
288 288 def vcsserver_port(request):
289 289 port = request.config.getoption('--vcsserver-port')
290 290 if port is None:
291 291 port = get_available_port()
292 292 print('Using vcsserver port {}'.format(port))
293 293 return port
294 294
295 295
296 296 def get_available_port():
297 297 family = socket.AF_INET
298 298 socktype = socket.SOCK_STREAM
299 299 host = '127.0.0.1'
300 300
301 301 mysocket = socket.socket(family, socktype)
302 302 mysocket.bind((host, 0))
303 303 port = mysocket.getsockname()[1]
304 304 mysocket.close()
305 305 del mysocket
306 306 return port
307 307
308 308
309 309 @pytest.fixture(scope='session')
310 310 def available_port_factory():
311 311 """
312 312 Returns a callable which returns free port numbers.
313 313 """
314 314 return get_available_port
315 315
316 316
317 317 @pytest.fixture
318 318 def available_port(available_port_factory):
319 319 """
320 320 Gives you one free port for the current test.
321 321
322 322 Uses "available_port_factory" to retrieve the port.
323 323 """
324 324 return available_port_factory()
325 325
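
# Illustrative sketch (not part of this module): binding a throw-away
# listener on the port handed out by `available_port`. Note that the
# port is only known to be free at the moment it was probed.
def test_can_bind_to_available_port(available_port):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.bind(('127.0.0.1', available_port))
    sock.close()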
326 326
327 327 @pytest.fixture(scope='session')
328 def baseapp(ini_config, vcsserver, http_environ_session):
329 from rhodecode.lib.pyramid_utils import get_app_config
330 from rhodecode.config.middleware import make_pyramid_app
331
332 print("Using the RhodeCode configuration:{}".format(ini_config))
333 pyramid.paster.setup_logging(ini_config)
334
335 settings = get_app_config(ini_config)
336 app = make_pyramid_app({'__file__': ini_config}, **settings)
337
338 return app
339
340
341 @pytest.fixture(scope='session')
342 328 def testini_factory(tmpdir_factory, ini_config):
343 329 """
344 330 Factory to create an INI file based on TestINI.
345 331
346 332 It will make sure to place the INI file in the correct directory.
347 333 """
348 334 basetemp = tmpdir_factory.getbasetemp().strpath
349 335 return TestIniFactory(basetemp, ini_config)
350 336
351 337
352 338 class TestIniFactory(object):
353 339
354 340 def __init__(self, basetemp, template_ini):
355 341 self._basetemp = basetemp
356 342 self._template_ini = template_ini
357 343
358 344 def __call__(self, ini_params, new_file_prefix='test'):
359 345 ini_file = TestINI(
360 346 self._template_ini, ini_params=ini_params,
361 347 new_file_prefix=new_file_prefix, dir=self._basetemp)
362 348 result = ini_file.create()
363 349 return result
364 350
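
# Illustrative sketch (not part of this module): `testini_factory` takes
# the same "list of {section: {option: value}} dicts" that `get_config`
# below feeds into TestINI. The option shown is a made-up example.
def test_with_custom_ini_file(testini_factory):
    custom_ini = testini_factory(
        [{'app:main': {'some.custom.flag': 'true'}}])
    assert custom_ini  # path of the generated INI file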
365 351
366 352 def get_config(
367 353 config, option_name, override_option_name, overrides=None,
368 354 basetemp=None, prefix='test'):
369 355 """
370 356 Find a configuration file and apply overrides for the given `prefix`.
371 357 """
372 358 config_file = (
373 359 config.getoption(option_name) or config.getini(option_name))
374 360 if not config_file:
375 361 pytest.exit(
376 362 "Configuration error, could not extract {}.".format(option_name))
377 363
378 364 overrides = overrides or []
379 365 config_override = config.getoption(override_option_name)
380 366 if config_override:
381 367 overrides.append(config_override)
382 368 temp_ini_file = TestINI(
383 369 config_file, ini_params=overrides, new_file_prefix=prefix,
384 370 dir=basetemp)
385 371
386 372 return temp_ini_file.create()