pytest: reduce *_util fixtures to rely on db_connection only, instead of the whole baseapp.
marcink -
r2373:82ef6695 default
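
The change touches three fixtures: user_util, gist_util and settings_util. A minimal sketch of the pattern it applies (illustrative only; the full fixture bodies appear in the diff below): each of these fixtures previously pulled in the whole session-scoped baseapp fixture and now depends only on the lighter db_connection fixture, since an initialized database is all they need.

    # before: requesting baseapp sets up the whole pyramid application
    @pytest.fixture
    def user_util(request, baseapp):
        utility = UserUtility(test_name=request.node.name)
        request.addfinalizer(utility.cleanup)
        return utility

    # after: an initialized database connection is enough
    @pytest.fixture
    def user_util(request, db_connection):
        utility = UserUtility(test_name=request.node.name)
        request.addfinalizer(utility.cleanup)
        return utility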
@@ -1,1858 +1,1858 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33 import functools
34 34
35 35 import mock
36 36 import pyramid.testing
37 37 import pytest
38 38 import colander
39 39 import requests
40 40 import pyramid.paster
41 41
42 42 import rhodecode
43 43 from rhodecode.lib.utils2 import AttributeDict
44 44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 45 from rhodecode.model.comment import CommentsModel
46 46 from rhodecode.model.db import (
47 47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.pull_request import PullRequestModel
51 51 from rhodecode.model.repo import RepoModel
52 52 from rhodecode.model.repo_group import RepoGroupModel
53 53 from rhodecode.model.user import UserModel
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55 from rhodecode.model.user_group import UserGroupModel
56 56 from rhodecode.model.integration import IntegrationModel
57 57 from rhodecode.integrations import integration_type_registry
58 58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 59 from rhodecode.lib.utils import repo2db_mapper
60 60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 61 from rhodecode.lib.vcs.backends import get_backend
62 62 from rhodecode.lib.vcs.nodes import FileNode
63 63 from rhodecode.tests import (
64 64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 66 TEST_USER_REGULAR_PASS)
67 67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 68 from rhodecode.tests.fixture import Fixture
69 69 from rhodecode.config import utils as config_utils
70 70
71 71 def _split_comma(value):
72 72 return value.split(',')
73 73
74 74
75 75 def pytest_addoption(parser):
76 76 parser.addoption(
77 77 '--keep-tmp-path', action='store_true',
78 78 help="Keep the test temporary directories")
79 79 parser.addoption(
80 80 '--backends', action='store', type=_split_comma,
81 81 default=['git', 'hg', 'svn'],
82 82 help="Select which backends to test for backend specific tests.")
83 83 parser.addoption(
84 84 '--dbs', action='store', type=_split_comma,
85 85 default=['sqlite'],
86 86 help="Select which database to test for database specific tests. "
87 87 "Possible options are sqlite,postgres,mysql")
88 88 parser.addoption(
89 89 '--appenlight', '--ae', action='store_true',
90 90 help="Track statistics in appenlight.")
91 91 parser.addoption(
92 92 '--appenlight-api-key', '--ae-key',
93 93 help="API key for Appenlight.")
94 94 parser.addoption(
95 95 '--appenlight-url', '--ae-url',
96 96 default="https://ae.rhodecode.com",
97 97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
98 98 parser.addoption(
99 99 '--sqlite-connection-string', action='store',
100 100 default='', help="Connection string for the dbs tests with SQLite")
101 101 parser.addoption(
102 102 '--postgres-connection-string', action='store',
103 103 default='', help="Connection string for the dbs tests with Postgres")
104 104 parser.addoption(
105 105 '--mysql-connection-string', action='store',
106 106 default='', help="Connection string for the dbs tests with MySQL")
107 107 parser.addoption(
108 108 '--repeat', type=int, default=100,
109 109 help="Number of repetitions in performance tests.")
110 110
111 111
112 112 def pytest_configure(config):
113 113 from rhodecode.config import patches
114 114
115 115
116 116 def pytest_collection_modifyitems(session, config, items):
117 117 # Filter out items marked __test__ = False (nose's "nottest"); kept for the nose-to-pytest transition
118 118 remaining = [
119 119 i for i in items if getattr(i.obj, '__test__', True)]
120 120 items[:] = remaining
121 121
122 122
123 123 def pytest_generate_tests(metafunc):
124 124 # Support test generation based on the --backends parameter
125 125 if 'backend_alias' in metafunc.fixturenames:
126 126 backends = get_backends_from_metafunc(metafunc)
127 127 scope = None
128 128 if not backends:
129 129 pytest.skip("Not enabled for any of selected backends")
130 130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 131 elif hasattr(metafunc.function, 'backends'):
132 132 backends = get_backends_from_metafunc(metafunc)
133 133 if not backends:
134 134 pytest.skip("Not enabled for any of selected backends")
135 135
136 136
137 137 def get_backends_from_metafunc(metafunc):
138 138 requested_backends = set(metafunc.config.getoption('--backends'))
139 139 if hasattr(metafunc.function, 'backends'):
140 140 # Backends supported by this test function, created from
141 141 # pytest.mark.backends
142 142 backends = metafunc.function.backends.args
143 143 elif hasattr(metafunc.cls, 'backend_alias'):
144 144 # Support class attribute "backend_alias", this is mainly
145 145 # for legacy reasons for tests not yet using pytest.mark.backends
146 146 backends = [metafunc.cls.backend_alias]
147 147 else:
148 148 backends = metafunc.config.getoption('--backends')
149 149 return requested_backends.intersection(backends)
150 150
151 151
152 152 @pytest.fixture(scope='session', autouse=True)
153 153 def activate_example_rcextensions(request):
154 154 """
155 155 Patch in an example rcextensions module which verifies passed in kwargs.
156 156 """
157 157 from rhodecode.tests.other import example_rcextensions
158 158
159 159 old_extensions = rhodecode.EXTENSIONS
160 160 rhodecode.EXTENSIONS = example_rcextensions
161 161
162 162 @request.addfinalizer
163 163 def cleanup():
164 164 rhodecode.EXTENSIONS = old_extensions
165 165
166 166
167 167 @pytest.fixture
168 168 def capture_rcextensions():
169 169 """
170 170 Returns the recorded calls to entry points in rcextensions.
171 171 """
172 172 calls = rhodecode.EXTENSIONS.calls
173 173 calls.clear()
174 174 # Note: at this point this is still the empty dict, but it will be
175 175 # filled during the test run; since it is a reference, returning it
176 176 # here is enough to make it work.
177 177 return calls
178 178
179 179
180 180 @pytest.fixture(scope='session')
181 181 def http_environ_session():
182 182 """
183 183 Allows using "http_environ" in session scope.
184 184 """
185 185 return http_environ(
186 186 http_host_stub=http_host_stub())
187 187
188 188
189 189 @pytest.fixture
190 190 def http_host_stub():
191 191 """
192 192 Value of HTTP_HOST in the test run.
193 193 """
194 194 return 'example.com:80'
195 195
196 196
197 197 @pytest.fixture
198 198 def http_host_only_stub():
199 199 """
200 200 Value of HTTP_HOST in the test run, without the port part.
201 201 """
202 202 return http_host_stub().split(':')[0]
203 203
204 204
205 205 @pytest.fixture
206 206 def http_environ(http_host_stub):
207 207 """
208 208 HTTP extra environ keys.
209 209
210 210 Used by the test application as well as for setting up the pylons
211 211 environment. In the case of the fixture "app" it should be possible
212 212 to override this for a specific test case.
213 213 """
214 214 return {
215 215 'SERVER_NAME': http_host_only_stub(),
216 216 'SERVER_PORT': http_host_stub.split(':')[1],
217 217 'HTTP_HOST': http_host_stub,
218 218 'HTTP_USER_AGENT': 'rc-test-agent',
219 219 'REQUEST_METHOD': 'GET'
220 220 }
221 221
222 222
223 223 @pytest.fixture(scope='session')
224 224 def baseapp(ini_config, vcsserver, http_environ_session):
225 225 from rhodecode.lib.pyramid_utils import get_app_config
226 226 from rhodecode.config.middleware import make_pyramid_app
227 227
228 228 print("Using the RhodeCode configuration:{}".format(ini_config))
229 229 pyramid.paster.setup_logging(ini_config)
230 230
231 231 settings = get_app_config(ini_config)
232 232 app = make_pyramid_app({'__file__': ini_config}, **settings)
233 233
234 234 return app
235 235
236 236
237 237 @pytest.fixture(scope='function')
238 238 def app(request, config_stub, baseapp, http_environ):
239 239 app = CustomTestApp(
240 240 baseapp,
241 241 extra_environ=http_environ)
242 242 if request.cls:
243 243 request.cls.app = app
244 244 return app
245 245
246 246
247 247 @pytest.fixture(scope='session')
248 248 def app_settings(baseapp, ini_config):
249 249 """
250 250 Settings dictionary used to create the app.
251 251
252 252 Parses the ini file and passes the result through the sanitize and apply
253 253 defaults mechanism in `rhodecode.config.middleware`.
254 254 """
255 255 return baseapp.config.get_settings()
256 256
257 257
258 258 @pytest.fixture(scope='session')
259 259 def db_connection(ini_settings):
260 260 # Initialize the database connection.
261 261 config_utils.initialize_database(ini_settings)
262 262
263 263
264 264 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
265 265
266 266
267 267 def _autologin_user(app, *args):
268 268 session = login_user_session(app, *args)
269 269 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
270 270 return LoginData(csrf_token, session['rhodecode_user'])
271 271
272 272
273 273 @pytest.fixture
274 274 def autologin_user(app):
275 275 """
276 276 Utility fixture which makes sure that the admin user is logged in
277 277 """
278 278 return _autologin_user(app)
279 279
280 280
281 281 @pytest.fixture
282 282 def autologin_regular_user(app):
283 283 """
284 284 Utility fixture which makes sure that the regular user is logged in
285 285 """
286 286 return _autologin_user(
287 287 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
288 288
289 289
290 290 @pytest.fixture(scope='function')
291 291 def csrf_token(request, autologin_user):
292 292 return autologin_user.csrf_token
293 293
294 294
295 295 @pytest.fixture(scope='function')
296 296 def xhr_header(request):
297 297 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
298 298
299 299
300 300 @pytest.fixture
301 301 def real_crypto_backend(monkeypatch):
302 302 """
303 303 Switch the production crypto backend on for this test.
304 304
305 305 During the test run the crypto backend is replaced with a faster
306 306 implementation based on the MD5 algorithm.
307 307 """
308 308 monkeypatch.setattr(rhodecode, 'is_test', False)
309 309
310 310
311 311 @pytest.fixture(scope='class')
312 312 def index_location(request, baseapp):
313 313 index_location = baseapp.config.get_settings()['search.location']
314 314 if request.cls:
315 315 request.cls.index_location = index_location
316 316 return index_location
317 317
318 318
319 319 @pytest.fixture(scope='session', autouse=True)
320 320 def tests_tmp_path(request):
321 321 """
322 322 Create temporary directory to be used during the test session.
323 323 """
324 324 if not os.path.exists(TESTS_TMP_PATH):
325 325 os.makedirs(TESTS_TMP_PATH)
326 326
327 327 if not request.config.getoption('--keep-tmp-path'):
328 328 @request.addfinalizer
329 329 def remove_tmp_path():
330 330 shutil.rmtree(TESTS_TMP_PATH)
331 331
332 332 return TESTS_TMP_PATH
333 333
334 334
335 335 @pytest.fixture
336 336 def test_repo_group(request):
337 337 """
338 338 Create a temporary repository group and destroy it automatically
339 339 after use.
340 340 """
341 341 fixture = Fixture()
342 342 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
343 343 repo_group = fixture.create_repo_group(repogroupid)
344 344
345 345 def _cleanup():
346 346 fixture.destroy_repo_group(repogroupid)
347 347
348 348 request.addfinalizer(_cleanup)
349 349 return repo_group
350 350
351 351
352 352 @pytest.fixture
353 353 def test_user_group(request):
354 354 """
355 355 Create a temporary user group and destroy it automatically
356 356 after use.
357 357 """
358 358 fixture = Fixture()
359 359 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
360 360 user_group = fixture.create_user_group(usergroupid)
361 361
362 362 def _cleanup():
363 363 fixture.destroy_user_group(user_group)
364 364
365 365 request.addfinalizer(_cleanup)
366 366 return user_group
367 367
368 368
369 369 @pytest.fixture(scope='session')
370 370 def test_repo(request):
371 371 container = TestRepoContainer()
372 372 request.addfinalizer(container._cleanup)
373 373 return container
374 374
375 375
376 376 class TestRepoContainer(object):
377 377 """
378 378 Container for test repositories which are used read-only.
379 379
380 380 Repositories will be created on demand and re-used during the lifetime
381 381 of this object.
382 382
383 383 Usage to get the svn test repository "minimal"::
384 384
385 385 test_repo = TestRepoContainer()
386 386 repo = test_repo('minimal', 'svn')
387 387
388 388 """
389 389
390 390 dump_extractors = {
391 391 'git': utils.extract_git_repo_from_dump,
392 392 'hg': utils.extract_hg_repo_from_dump,
393 393 'svn': utils.extract_svn_repo_from_dump,
394 394 }
395 395
396 396 def __init__(self):
397 397 self._cleanup_repos = []
398 398 self._fixture = Fixture()
399 399 self._repos = {}
400 400
401 401 def __call__(self, dump_name, backend_alias, config=None):
402 402 key = (dump_name, backend_alias)
403 403 if key not in self._repos:
404 404 repo = self._create_repo(dump_name, backend_alias, config)
405 405 self._repos[key] = repo.repo_id
406 406 return Repository.get(self._repos[key])
407 407
408 408 def _create_repo(self, dump_name, backend_alias, config):
409 409 repo_name = '%s-%s' % (backend_alias, dump_name)
410 410 backend_class = get_backend(backend_alias)
411 411 dump_extractor = self.dump_extractors[backend_alias]
412 412 repo_path = dump_extractor(dump_name, repo_name)
413 413
414 414 vcs_repo = backend_class(repo_path, config=config)
415 415 repo2db_mapper({repo_name: vcs_repo})
416 416
417 417 repo = RepoModel().get_by_repo_name(repo_name)
418 418 self._cleanup_repos.append(repo_name)
419 419 return repo
420 420
421 421 def _cleanup(self):
422 422 for repo_name in reversed(self._cleanup_repos):
423 423 self._fixture.destroy_repo(repo_name)
424 424
425 425
426 426 @pytest.fixture
427 427 def backend(request, backend_alias, baseapp, test_repo):
428 428 """
429 429 Parametrized fixture which represents a single backend implementation.
430 430
431 431 It respects the option `--backends` to focus the test run on specific
432 432 backend implementations.
433 433
434 434 It also supports `pytest.mark.xfail_backends` to mark tests as failing
435 435 for specific backends. This is intended as a utility for incremental
436 436 development of a new backend implementation.
437 437 """
438 438 if backend_alias not in request.config.getoption('--backends'):
439 439 pytest.skip("Backend %s not selected." % (backend_alias, ))
440 440
441 441 utils.check_xfail_backends(request.node, backend_alias)
442 442 utils.check_skip_backends(request.node, backend_alias)
443 443
444 444 repo_name = 'vcs_test_%s' % (backend_alias, )
445 445 backend = Backend(
446 446 alias=backend_alias,
447 447 repo_name=repo_name,
448 448 test_name=request.node.name,
449 449 test_repo_container=test_repo)
450 450 request.addfinalizer(backend.cleanup)
451 451 return backend
452 452
453 453
454 454 @pytest.fixture
455 455 def backend_git(request, baseapp, test_repo):
456 456 return backend(request, 'git', baseapp, test_repo)
457 457
458 458
459 459 @pytest.fixture
460 460 def backend_hg(request, baseapp, test_repo):
461 461 return backend(request, 'hg', baseapp, test_repo)
462 462
463 463
464 464 @pytest.fixture
465 465 def backend_svn(request, baseapp, test_repo):
466 466 return backend(request, 'svn', baseapp, test_repo)
467 467
468 468
469 469 @pytest.fixture
470 470 def backend_random(backend_git):
471 471 """
472 472 Use this to express that your tests need "a backend".
473 473
474 474 A few of our tests need a backend, so that we can run the code. This
475 475 fixture is intended to be used for such cases. It will pick one of the
476 476 backends and run the tests.
477 477
478 478 The fixture `backend` would run the test multiple times for each
479 479 available backend which is a pure waste of time if the test is
480 480 independent of the backend type.
481 481 """
482 482 # TODO: johbo: Change this to pick a random backend
483 483 return backend_git
484 484
485 485
486 486 @pytest.fixture
487 487 def backend_stub(backend_git):
488 488 """
489 489 Use this to express that your tests need a backend stub
490 490
491 491 TODO: mikhail: Implement a real stub logic instead of returning
492 492 a git backend
493 493 """
494 494 return backend_git
495 495
496 496
497 497 @pytest.fixture
498 498 def repo_stub(backend_stub):
499 499 """
500 500 Use this to express that your tests need a repository stub
501 501 """
502 502 return backend_stub.create_repo()
503 503
504 504
505 505 class Backend(object):
506 506 """
507 507 Represents the test configuration for one supported backend
508 508
509 509 Provides easy access to different test repositories based on
510 510 `__getitem__`. Such repositories will only be created once per test
511 511 session.
512 512 """
513 513
514 514 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
515 515 _master_repo = None
516 516 _commit_ids = {}
517 517
518 518 def __init__(self, alias, repo_name, test_name, test_repo_container):
519 519 self.alias = alias
520 520 self.repo_name = repo_name
521 521 self._cleanup_repos = []
522 522 self._test_name = test_name
523 523 self._test_repo_container = test_repo_container
524 524 # TODO: johbo: Used as an interim delegate. Not yet sure if Backend or
525 525 # Fixture will survive in the end.
526 526 self._fixture = Fixture()
527 527
528 528 def __getitem__(self, key):
529 529 return self._test_repo_container(key, self.alias)
530 530
531 531 def create_test_repo(self, key, config=None):
532 532 return self._test_repo_container(key, self.alias, config)
533 533
534 534 @property
535 535 def repo(self):
536 536 """
537 537 Returns the "current" repository. This is the vcs_test repo or the
538 538 last repo which has been created with `create_repo`.
539 539 """
540 540 from rhodecode.model.db import Repository
541 541 return Repository.get_by_repo_name(self.repo_name)
542 542
543 543 @property
544 544 def default_branch_name(self):
545 545 VcsRepository = get_backend(self.alias)
546 546 return VcsRepository.DEFAULT_BRANCH_NAME
547 547
548 548 @property
549 549 def default_head_id(self):
550 550 """
551 551 Returns the default head id of the underlying backend.
552 552
553 553 This will be the default branch name in case the backend does have a
554 554 default branch. In the other cases it will point to a valid head
555 555 which can serve as the base to create a new commit on top of it.
556 556 """
557 557 vcsrepo = self.repo.scm_instance()
558 558 head_id = (
559 559 vcsrepo.DEFAULT_BRANCH_NAME or
560 560 vcsrepo.commit_ids[-1])
561 561 return head_id
562 562
563 563 @property
564 564 def commit_ids(self):
565 565 """
566 566 Returns the commit ids (message -> raw_id) of the last created repository.
567 567 """
568 568 return self._commit_ids
569 569
570 570 def create_master_repo(self, commits):
571 571 """
572 572 Create a repository and remember it as a template.
573 573
574 574 This makes it easy to create derived repositories to construct
575 575 more complex scenarios for diff, compare and pull requests.
576 576
577 577 Returns a commit map which maps from commit message to raw_id.
578 578 """
579 579 self._master_repo = self.create_repo(commits=commits)
580 580 return self._commit_ids
581 581
582 582 def create_repo(
583 583 self, commits=None, number_of_commits=0, heads=None,
584 584 name_suffix=u'', **kwargs):
585 585 """
586 586 Create a repository and record it for later cleanup.
587 587
588 588 :param commits: Optional. A sequence of dict instances.
589 589 Will add a commit per entry to the new repository.
590 590 :param number_of_commits: Optional. If set to a number, this number of
591 591 commits will be added to the new repository.
592 592 :param heads: Optional. Can be set to a sequence of commit
593 593 names which shall be pulled in from the master repository.
594 594
595 595 """
596 596 self.repo_name = self._next_repo_name() + name_suffix
597 597 repo = self._fixture.create_repo(
598 598 self.repo_name, repo_type=self.alias, **kwargs)
599 599 self._cleanup_repos.append(repo.repo_name)
600 600
601 601 commits = commits or [
602 602 {'message': 'Commit %s of %s' % (x, self.repo_name)}
603 603 for x in xrange(number_of_commits)]
604 604 self._add_commits_to_repo(repo.scm_instance(), commits)
605 605 if heads:
606 606 self.pull_heads(repo, heads)
607 607
608 608 return repo
609 609
610 610 def pull_heads(self, repo, heads):
611 611 """
612 612 Make sure that repo contains all commits mentioned in `heads`
613 613 """
614 614 vcsmaster = self._master_repo.scm_instance()
615 615 vcsrepo = repo.scm_instance()
616 616 vcsrepo.config.clear_section('hooks')
617 617 commit_ids = [self._commit_ids[h] for h in heads]
618 618 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
619 619
620 620 def create_fork(self):
621 621 repo_to_fork = self.repo_name
622 622 self.repo_name = self._next_repo_name()
623 623 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
624 624 self._cleanup_repos.append(self.repo_name)
625 625 return repo
626 626
627 627 def new_repo_name(self, suffix=u''):
628 628 self.repo_name = self._next_repo_name() + suffix
629 629 self._cleanup_repos.append(self.repo_name)
630 630 return self.repo_name
631 631
632 632 def _next_repo_name(self):
633 633 return u"%s_%s" % (
634 634 self.invalid_repo_name.sub(u'_', self._test_name),
635 635 len(self._cleanup_repos))
636 636
637 637 def ensure_file(self, filename, content='Test content\n'):
638 638 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
639 639 commits = [
640 640 {'added': [
641 641 FileNode(filename, content=content),
642 642 ]},
643 643 ]
644 644 self._add_commits_to_repo(self.repo.scm_instance(), commits)
645 645
646 646 def enable_downloads(self):
647 647 repo = self.repo
648 648 repo.enable_downloads = True
649 649 Session().add(repo)
650 650 Session().commit()
651 651
652 652 def cleanup(self):
653 653 for repo_name in reversed(self._cleanup_repos):
654 654 self._fixture.destroy_repo(repo_name)
655 655
656 656 def _add_commits_to_repo(self, repo, commits):
657 657 commit_ids = _add_commits_to_repo(repo, commits)
658 658 if not commit_ids:
659 659 return
660 660 self._commit_ids = commit_ids
661 661
662 662 # Creating refs for Git to allow fetching them from remote repository
663 663 if self.alias == 'git':
664 664 refs = {}
665 665 for message in self._commit_ids:
666 666 # TODO: mikhail: do more special chars replacements
667 667 ref_name = 'refs/test-refs/{}'.format(
668 668 message.replace(' ', ''))
669 669 refs[ref_name] = self._commit_ids[message]
670 670 self._create_refs(repo, refs)
671 671
672 672 def _create_refs(self, repo, refs):
673 673 for ref_name in refs:
674 674 repo.set_refs(ref_name, refs[ref_name])
675 675
676 676
677 677 @pytest.fixture
678 678 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
679 679 """
680 680 Parametrized fixture which represents a single vcs backend implementation.
681 681
682 682 See the fixture `backend` for more details. This one implements the same
683 683 concept, but on vcs level. So it does not provide model instances etc.
684 684
685 685 Parameters are generated dynamically, see :func:`pytest_generate_tests`
686 686 for how this works.
687 687 """
688 688 if backend_alias not in request.config.getoption('--backends'):
689 689 pytest.skip("Backend %s not selected." % (backend_alias, ))
690 690
691 691 utils.check_xfail_backends(request.node, backend_alias)
692 692 utils.check_skip_backends(request.node, backend_alias)
693 693
694 694 repo_name = 'vcs_test_%s' % (backend_alias, )
695 695 repo_path = os.path.join(tests_tmp_path, repo_name)
696 696 backend = VcsBackend(
697 697 alias=backend_alias,
698 698 repo_path=repo_path,
699 699 test_name=request.node.name,
700 700 test_repo_container=test_repo)
701 701 request.addfinalizer(backend.cleanup)
702 702 return backend
703 703
704 704
705 705 @pytest.fixture
706 706 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
707 707 return vcsbackend(request, 'git', tests_tmp_path, baseapp, test_repo)
708 708
709 709
710 710 @pytest.fixture
711 711 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
712 712 return vcsbackend(request, 'hg', tests_tmp_path, baseapp, test_repo)
713 713
714 714
715 715 @pytest.fixture
716 716 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
717 717 return vcsbackend(request, 'svn', tests_tmp_path, baseapp, test_repo)
718 718
719 719
720 720 @pytest.fixture
721 721 def vcsbackend_random(vcsbackend_git):
722 722 """
723 723 Use this to express that your tests need "a vcsbackend".
724 724
725 725 The fixture `vcsbackend` would run the test multiple times for each
726 726 available vcs backend which is a pure waste of time if the test is
727 727 independent of the vcs backend type.
728 728 """
729 729 # TODO: johbo: Change this to pick a random backend
730 730 return vcsbackend_git
731 731
732 732
733 733 @pytest.fixture
734 734 def vcsbackend_stub(vcsbackend_git):
735 735 """
736 736 Use this to express that your test just needs a stub of a vcsbackend.
737 737
738 738 Plan is to eventually implement an in-memory stub to speed tests up.
739 739 """
740 740 return vcsbackend_git
741 741
742 742
743 743 class VcsBackend(object):
744 744 """
745 745 Represents the test configuration for one supported vcs backend.
746 746 """
747 747
748 748 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
749 749
750 750 def __init__(self, alias, repo_path, test_name, test_repo_container):
751 751 self.alias = alias
752 752 self._repo_path = repo_path
753 753 self._cleanup_repos = []
754 754 self._test_name = test_name
755 755 self._test_repo_container = test_repo_container
756 756
757 757 def __getitem__(self, key):
758 758 return self._test_repo_container(key, self.alias).scm_instance()
759 759
760 760 @property
761 761 def repo(self):
762 762 """
763 763 Returns the "current" repository. This is the vcs_test repo or the last
764 764 repo which has been created.
765 765 """
766 766 Repository = get_backend(self.alias)
767 767 return Repository(self._repo_path)
768 768
769 769 @property
770 770 def backend(self):
771 771 """
772 772 Returns the backend implementation class.
773 773 """
774 774 return get_backend(self.alias)
775 775
776 776 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
777 777 repo_name = self._next_repo_name()
778 778 self._repo_path = get_new_dir(repo_name)
779 779 repo_class = get_backend(self.alias)
780 780 src_url = None
781 781 if _clone_repo:
782 782 src_url = _clone_repo.path
783 783 repo = repo_class(self._repo_path, create=True, src_url=src_url)
784 784 self._cleanup_repos.append(repo)
785 785
786 786 commits = commits or [
787 787 {'message': 'Commit %s of %s' % (x, repo_name)}
788 788 for x in xrange(number_of_commits)]
789 789 _add_commits_to_repo(repo, commits)
790 790 return repo
791 791
792 792 def clone_repo(self, repo):
793 793 return self.create_repo(_clone_repo=repo)
794 794
795 795 def cleanup(self):
796 796 for repo in self._cleanup_repos:
797 797 shutil.rmtree(repo.path)
798 798
799 799 def new_repo_path(self):
800 800 repo_name = self._next_repo_name()
801 801 self._repo_path = get_new_dir(repo_name)
802 802 return self._repo_path
803 803
804 804 def _next_repo_name(self):
805 805 return "%s_%s" % (
806 806 self.invalid_repo_name.sub('_', self._test_name),
807 807 len(self._cleanup_repos))
808 808
809 809 def add_file(self, repo, filename, content='Test content\n'):
810 810 imc = repo.in_memory_commit
811 811 imc.add(FileNode(filename, content=content))
812 812 imc.commit(
813 813 message=u'Automatic commit from vcsbackend fixture',
814 814 author=u'Automatic')
815 815
816 816 def ensure_file(self, filename, content='Test content\n'):
817 817 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
818 818 self.add_file(self.repo, filename, content)
819 819
820 820
821 821 def _add_commits_to_repo(vcs_repo, commits):
822 822 commit_ids = {}
823 823 if not commits:
824 824 return commit_ids
825 825
826 826 imc = vcs_repo.in_memory_commit
827 827 commit = None
828 828
829 829 for idx, commit in enumerate(commits):
830 830 message = unicode(commit.get('message', 'Commit %s' % idx))
831 831
832 832 for node in commit.get('added', []):
833 833 imc.add(FileNode(node.path, content=node.content))
834 834 for node in commit.get('changed', []):
835 835 imc.change(FileNode(node.path, content=node.content))
836 836 for node in commit.get('removed', []):
837 837 imc.remove(FileNode(node.path))
838 838
839 839 parents = [
840 840 vcs_repo.get_commit(commit_id=commit_ids[p])
841 841 for p in commit.get('parents', [])]
842 842
843 843 operations = ('added', 'changed', 'removed')
844 844 if not any((commit.get(o) for o in operations)):
845 845 imc.add(FileNode('file_%s' % idx, content=message))
846 846
847 847 commit = imc.commit(
848 848 message=message,
849 849 author=unicode(commit.get('author', 'Automatic')),
850 850 date=commit.get('date'),
851 851 branch=commit.get('branch'),
852 852 parents=parents)
853 853
854 854 commit_ids[commit.message] = commit.raw_id
855 855
856 856 return commit_ids
857 857
858 858
859 859 @pytest.fixture
860 860 def reposerver(request):
861 861 """
862 862 Allows serving a backend repository.
863 863 """
864 864
865 865 repo_server = RepoServer()
866 866 request.addfinalizer(repo_server.cleanup)
867 867 return repo_server
868 868
869 869
870 870 class RepoServer(object):
871 871 """
872 872 Utility to serve a local repository for the duration of a test case.
873 873
874 874 Supports only Subversion so far.
875 875 """
876 876
877 877 url = None
878 878
879 879 def __init__(self):
880 880 self._cleanup_servers = []
881 881
882 882 def serve(self, vcsrepo):
883 883 if vcsrepo.alias != 'svn':
884 884 raise TypeError("Backend %s not supported" % vcsrepo.alias)
885 885
886 886 proc = subprocess32.Popen(
887 887 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
888 888 '--root', vcsrepo.path])
889 889 self._cleanup_servers.append(proc)
890 890 self.url = 'svn://localhost'
891 891
892 892 def cleanup(self):
893 893 for proc in self._cleanup_servers:
894 894 proc.terminate()
895 895
896 896
897 897 @pytest.fixture
898 898 def pr_util(backend, request, config_stub):
899 899 """
900 900 Utility for tests of models and for functional tests around pull requests.
901 901
902 902 It gives an instance of :class:`PRTestUtility` which provides various
903 903 utility methods around one pull request.
904 904
905 905 This fixture uses `backend` and inherits its parameterization.
906 906 """
907 907
908 908 util = PRTestUtility(backend)
909 909 request.addfinalizer(util.cleanup)
910 910
911 911 return util
912 912
913 913
914 914 class PRTestUtility(object):
915 915
916 916 pull_request = None
917 917 pull_request_id = None
918 918 mergeable_patcher = None
919 919 mergeable_mock = None
920 920 notification_patcher = None
921 921
922 922 def __init__(self, backend):
923 923 self.backend = backend
924 924
925 925 def create_pull_request(
926 926 self, commits=None, target_head=None, source_head=None,
927 927 revisions=None, approved=False, author=None, mergeable=False,
928 928 enable_notifications=True, name_suffix=u'', reviewers=None,
929 929 title=u"Test", description=u"Description"):
930 930 self.set_mergeable(mergeable)
931 931 if not enable_notifications:
932 932 # mock notification side effect
933 933 self.notification_patcher = mock.patch(
934 934 'rhodecode.model.notification.NotificationModel.create')
935 935 self.notification_patcher.start()
936 936
937 937 if not self.pull_request:
938 938 if not commits:
939 939 commits = [
940 940 {'message': 'c1'},
941 941 {'message': 'c2'},
942 942 {'message': 'c3'},
943 943 ]
944 944 target_head = 'c1'
945 945 source_head = 'c2'
946 946 revisions = ['c2']
947 947
948 948 self.commit_ids = self.backend.create_master_repo(commits)
949 949 self.target_repository = self.backend.create_repo(
950 950 heads=[target_head], name_suffix=name_suffix)
951 951 self.source_repository = self.backend.create_repo(
952 952 heads=[source_head], name_suffix=name_suffix)
953 953 self.author = author or UserModel().get_by_username(
954 954 TEST_USER_ADMIN_LOGIN)
955 955
956 956 model = PullRequestModel()
957 957 self.create_parameters = {
958 958 'created_by': self.author,
959 959 'source_repo': self.source_repository.repo_name,
960 960 'source_ref': self._default_branch_reference(source_head),
961 961 'target_repo': self.target_repository.repo_name,
962 962 'target_ref': self._default_branch_reference(target_head),
963 963 'revisions': [self.commit_ids[r] for r in revisions],
964 964 'reviewers': reviewers or self._get_reviewers(),
965 965 'title': title,
966 966 'description': description,
967 967 }
968 968 self.pull_request = model.create(**self.create_parameters)
969 969 assert model.get_versions(self.pull_request) == []
970 970
971 971 self.pull_request_id = self.pull_request.pull_request_id
972 972
973 973 if approved:
974 974 self.approve()
975 975
976 976 Session().add(self.pull_request)
977 977 Session().commit()
978 978
979 979 return self.pull_request
980 980
981 981 def approve(self):
982 982 self.create_status_votes(
983 983 ChangesetStatus.STATUS_APPROVED,
984 984 *self.pull_request.reviewers)
985 985
986 986 def close(self):
987 987 PullRequestModel().close_pull_request(self.pull_request, self.author)
988 988
989 989 def _default_branch_reference(self, commit_message):
990 990 reference = '%s:%s:%s' % (
991 991 'branch',
992 992 self.backend.default_branch_name,
993 993 self.commit_ids[commit_message])
994 994 return reference
995 995
996 996 def _get_reviewers(self):
997 997 return [
998 998 (TEST_USER_REGULAR_LOGIN, ['default1'], False),
999 999 (TEST_USER_REGULAR2_LOGIN, ['default2'], False),
1000 1000 ]
1001 1001
1002 1002 def update_source_repository(self, head=None):
1003 1003 heads = [head or 'c3']
1004 1004 self.backend.pull_heads(self.source_repository, heads=heads)
1005 1005
1006 1006 def add_one_commit(self, head=None):
1007 1007 self.update_source_repository(head=head)
1008 1008 old_commit_ids = set(self.pull_request.revisions)
1009 1009 PullRequestModel().update_commits(self.pull_request)
1010 1010 commit_ids = set(self.pull_request.revisions)
1011 1011 new_commit_ids = commit_ids - old_commit_ids
1012 1012 assert len(new_commit_ids) == 1
1013 1013 return new_commit_ids.pop()
1014 1014
1015 1015 def remove_one_commit(self):
1016 1016 assert len(self.pull_request.revisions) == 2
1017 1017 source_vcs = self.source_repository.scm_instance()
1018 1018 removed_commit_id = source_vcs.commit_ids[-1]
1019 1019
1020 1020 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1021 1021 # remove the if once that's sorted out.
1022 1022 if self.backend.alias == "git":
1023 1023 kwargs = {'branch_name': self.backend.default_branch_name}
1024 1024 else:
1025 1025 kwargs = {}
1026 1026 source_vcs.strip(removed_commit_id, **kwargs)
1027 1027
1028 1028 PullRequestModel().update_commits(self.pull_request)
1029 1029 assert len(self.pull_request.revisions) == 1
1030 1030 return removed_commit_id
1031 1031
1032 1032 def create_comment(self, linked_to=None):
1033 1033 comment = CommentsModel().create(
1034 1034 text=u"Test comment",
1035 1035 repo=self.target_repository.repo_name,
1036 1036 user=self.author,
1037 1037 pull_request=self.pull_request)
1038 1038 assert comment.pull_request_version_id is None
1039 1039
1040 1040 if linked_to:
1041 1041 PullRequestModel()._link_comments_to_version(linked_to)
1042 1042
1043 1043 return comment
1044 1044
1045 1045 def create_inline_comment(
1046 1046 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1047 1047 comment = CommentsModel().create(
1048 1048 text=u"Test comment",
1049 1049 repo=self.target_repository.repo_name,
1050 1050 user=self.author,
1051 1051 line_no=line_no,
1052 1052 f_path=file_path,
1053 1053 pull_request=self.pull_request)
1054 1054 assert comment.pull_request_version_id is None
1055 1055
1056 1056 if linked_to:
1057 1057 PullRequestModel()._link_comments_to_version(linked_to)
1058 1058
1059 1059 return comment
1060 1060
1061 1061 def create_version_of_pull_request(self):
1062 1062 pull_request = self.create_pull_request()
1063 1063 version = PullRequestModel()._create_version_from_snapshot(
1064 1064 pull_request)
1065 1065 return version
1066 1066
1067 1067 def create_status_votes(self, status, *reviewers):
1068 1068 for reviewer in reviewers:
1069 1069 ChangesetStatusModel().set_status(
1070 1070 repo=self.pull_request.target_repo,
1071 1071 status=status,
1072 1072 user=reviewer.user_id,
1073 1073 pull_request=self.pull_request)
1074 1074
1075 1075 def set_mergeable(self, value):
1076 1076 if not self.mergeable_patcher:
1077 1077 self.mergeable_patcher = mock.patch.object(
1078 1078 VcsSettingsModel, 'get_general_settings')
1079 1079 self.mergeable_mock = self.mergeable_patcher.start()
1080 1080 self.mergeable_mock.return_value = {
1081 1081 'rhodecode_pr_merge_enabled': value}
1082 1082
1083 1083 def cleanup(self):
1084 1084 # In case the source repository is already cleaned up, the pull
1085 1085 # request will already be deleted.
1086 1086 pull_request = PullRequest().get(self.pull_request_id)
1087 1087 if pull_request:
1088 1088 PullRequestModel().delete(pull_request, pull_request.author)
1089 1089 Session().commit()
1090 1090
1091 1091 if self.notification_patcher:
1092 1092 self.notification_patcher.stop()
1093 1093
1094 1094 if self.mergeable_patcher:
1095 1095 self.mergeable_patcher.stop()
1096 1096
1097 1097
1098 1098 @pytest.fixture
1099 1099 def user_admin(baseapp):
1100 1100 """
1101 1101 Provides the default admin test user as an instance of `db.User`.
1102 1102 """
1103 1103 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1104 1104 return user
1105 1105
1106 1106
1107 1107 @pytest.fixture
1108 1108 def user_regular(baseapp):
1109 1109 """
1110 1110 Provides the default regular test user as an instance of `db.User`.
1111 1111 """
1112 1112 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1113 1113 return user
1114 1114
1115 1115
1116 1116 @pytest.fixture
1117 def user_util(request, baseapp):
1117 def user_util(request, db_connection):
1118 1118 """
1119 1119 Provides a wired instance of `UserUtility` with integrated cleanup.
1120 1120 """
1121 1121 utility = UserUtility(test_name=request.node.name)
1122 1122 request.addfinalizer(utility.cleanup)
1123 1123 return utility
1124 1124
1125 1125
1126 1126 # TODO: johbo: Split this up into utilities per domain or something similar
1127 1127 class UserUtility(object):
1128 1128
1129 1129 def __init__(self, test_name="test"):
1130 1130 self._test_name = self._sanitize_name(test_name)
1131 1131 self.fixture = Fixture()
1132 1132 self.repo_group_ids = []
1133 1133 self.repos_ids = []
1134 1134 self.user_ids = []
1135 1135 self.user_group_ids = []
1136 1136 self.user_repo_permission_ids = []
1137 1137 self.user_group_repo_permission_ids = []
1138 1138 self.user_repo_group_permission_ids = []
1139 1139 self.user_group_repo_group_permission_ids = []
1140 1140 self.user_user_group_permission_ids = []
1141 1141 self.user_group_user_group_permission_ids = []
1142 1142 self.user_permissions = []
1143 1143
1144 1144 def _sanitize_name(self, name):
1145 1145 for char in ['[', ']']:
1146 1146 name = name.replace(char, '_')
1147 1147 return name
1148 1148
1149 1149 def create_repo_group(
1150 1150 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1151 1151 group_name = "{prefix}_repogroup_{count}".format(
1152 1152 prefix=self._test_name,
1153 1153 count=len(self.repo_group_ids))
1154 1154 repo_group = self.fixture.create_repo_group(
1155 1155 group_name, cur_user=owner)
1156 1156 if auto_cleanup:
1157 1157 self.repo_group_ids.append(repo_group.group_id)
1158 1158 return repo_group
1159 1159
1160 1160 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1161 1161 auto_cleanup=True, repo_type='hg'):
1162 1162 repo_name = "{prefix}_repository_{count}".format(
1163 1163 prefix=self._test_name,
1164 1164 count=len(self.repos_ids))
1165 1165
1166 1166 repository = self.fixture.create_repo(
1167 1167 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type)
1168 1168 if auto_cleanup:
1169 1169 self.repos_ids.append(repository.repo_id)
1170 1170 return repository
1171 1171
1172 1172 def create_user(self, auto_cleanup=True, **kwargs):
1173 1173 user_name = "{prefix}_user_{count}".format(
1174 1174 prefix=self._test_name,
1175 1175 count=len(self.user_ids))
1176 1176 user = self.fixture.create_user(user_name, **kwargs)
1177 1177 if auto_cleanup:
1178 1178 self.user_ids.append(user.user_id)
1179 1179 return user
1180 1180
1181 1181 def create_user_with_group(self):
1182 1182 user = self.create_user()
1183 1183 user_group = self.create_user_group(members=[user])
1184 1184 return user, user_group
1185 1185
1186 1186 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1187 1187 auto_cleanup=True, **kwargs):
1188 1188 group_name = "{prefix}_usergroup_{count}".format(
1189 1189 prefix=self._test_name,
1190 1190 count=len(self.user_group_ids))
1191 1191 user_group = self.fixture.create_user_group(
1192 1192 group_name, cur_user=owner, **kwargs)
1193 1193
1194 1194 if auto_cleanup:
1195 1195 self.user_group_ids.append(user_group.users_group_id)
1196 1196 if members:
1197 1197 for user in members:
1198 1198 UserGroupModel().add_user_to_group(user_group, user)
1199 1199 return user_group
1200 1200
1201 1201 def grant_user_permission(self, user_name, permission_name):
1202 1202 self._inherit_default_user_permissions(user_name, False)
1203 1203 self.user_permissions.append((user_name, permission_name))
1204 1204
1205 1205 def grant_user_permission_to_repo_group(
1206 1206 self, repo_group, user, permission_name):
1207 1207 permission = RepoGroupModel().grant_user_permission(
1208 1208 repo_group, user, permission_name)
1209 1209 self.user_repo_group_permission_ids.append(
1210 1210 (repo_group.group_id, user.user_id))
1211 1211 return permission
1212 1212
1213 1213 def grant_user_group_permission_to_repo_group(
1214 1214 self, repo_group, user_group, permission_name):
1215 1215 permission = RepoGroupModel().grant_user_group_permission(
1216 1216 repo_group, user_group, permission_name)
1217 1217 self.user_group_repo_group_permission_ids.append(
1218 1218 (repo_group.group_id, user_group.users_group_id))
1219 1219 return permission
1220 1220
1221 1221 def grant_user_permission_to_repo(
1222 1222 self, repo, user, permission_name):
1223 1223 permission = RepoModel().grant_user_permission(
1224 1224 repo, user, permission_name)
1225 1225 self.user_repo_permission_ids.append(
1226 1226 (repo.repo_id, user.user_id))
1227 1227 return permission
1228 1228
1229 1229 def grant_user_group_permission_to_repo(
1230 1230 self, repo, user_group, permission_name):
1231 1231 permission = RepoModel().grant_user_group_permission(
1232 1232 repo, user_group, permission_name)
1233 1233 self.user_group_repo_permission_ids.append(
1234 1234 (repo.repo_id, user_group.users_group_id))
1235 1235 return permission
1236 1236
1237 1237 def grant_user_permission_to_user_group(
1238 1238 self, target_user_group, user, permission_name):
1239 1239 permission = UserGroupModel().grant_user_permission(
1240 1240 target_user_group, user, permission_name)
1241 1241 self.user_user_group_permission_ids.append(
1242 1242 (target_user_group.users_group_id, user.user_id))
1243 1243 return permission
1244 1244
1245 1245 def grant_user_group_permission_to_user_group(
1246 1246 self, target_user_group, user_group, permission_name):
1247 1247 permission = UserGroupModel().grant_user_group_permission(
1248 1248 target_user_group, user_group, permission_name)
1249 1249 self.user_group_user_group_permission_ids.append(
1250 1250 (target_user_group.users_group_id, user_group.users_group_id))
1251 1251 return permission
1252 1252
1253 1253 def revoke_user_permission(self, user_name, permission_name):
1254 1254 self._inherit_default_user_permissions(user_name, True)
1255 1255 UserModel().revoke_perm(user_name, permission_name)
1256 1256
1257 1257 def _inherit_default_user_permissions(self, user_name, value):
1258 1258 user = UserModel().get_by_username(user_name)
1259 1259 user.inherit_default_permissions = value
1260 1260 Session().add(user)
1261 1261 Session().commit()
1262 1262
1263 1263 def cleanup(self):
1264 1264 self._cleanup_permissions()
1265 1265 self._cleanup_repos()
1266 1266 self._cleanup_repo_groups()
1267 1267 self._cleanup_user_groups()
1268 1268 self._cleanup_users()
1269 1269
1270 1270 def _cleanup_permissions(self):
1271 1271 if self.user_permissions:
1272 1272 for user_name, permission_name in self.user_permissions:
1273 1273 self.revoke_user_permission(user_name, permission_name)
1274 1274
1275 1275 for permission in self.user_repo_permission_ids:
1276 1276 RepoModel().revoke_user_permission(*permission)
1277 1277
1278 1278 for permission in self.user_group_repo_permission_ids:
1279 1279 RepoModel().revoke_user_group_permission(*permission)
1280 1280
1281 1281 for permission in self.user_repo_group_permission_ids:
1282 1282 RepoGroupModel().revoke_user_permission(*permission)
1283 1283
1284 1284 for permission in self.user_group_repo_group_permission_ids:
1285 1285 RepoGroupModel().revoke_user_group_permission(*permission)
1286 1286
1287 1287 for permission in self.user_user_group_permission_ids:
1288 1288 UserGroupModel().revoke_user_permission(*permission)
1289 1289
1290 1290 for permission in self.user_group_user_group_permission_ids:
1291 1291 UserGroupModel().revoke_user_group_permission(*permission)
1292 1292
1293 1293 def _cleanup_repo_groups(self):
1294 1294 def _repo_group_compare(first_group_id, second_group_id):
1295 1295 """
1296 1296 Gives higher priority to the groups with the most complex paths
1297 1297 """
1298 1298 first_group = RepoGroup.get(first_group_id)
1299 1299 second_group = RepoGroup.get(second_group_id)
1300 1300 first_group_parts = (
1301 1301 len(first_group.group_name.split('/')) if first_group else 0)
1302 1302 second_group_parts = (
1303 1303 len(second_group.group_name.split('/')) if second_group else 0)
1304 1304 return cmp(second_group_parts, first_group_parts)
1305 1305
1306 1306 sorted_repo_group_ids = sorted(
1307 1307 self.repo_group_ids, cmp=_repo_group_compare)
1308 1308 for repo_group_id in sorted_repo_group_ids:
1309 1309 self.fixture.destroy_repo_group(repo_group_id)
1310 1310
1311 1311 def _cleanup_repos(self):
1312 1312 sorted_repos_ids = sorted(self.repos_ids)
1313 1313 for repo_id in sorted_repos_ids:
1314 1314 self.fixture.destroy_repo(repo_id)
1315 1315
1316 1316 def _cleanup_user_groups(self):
1317 1317 def _user_group_compare(first_group_id, second_group_id):
1318 1318 """
1319 1319 Gives higher priority to the groups with the most complex paths
1320 1320 """
1321 1321 first_group = UserGroup.get(first_group_id)
1322 1322 second_group = UserGroup.get(second_group_id)
1323 1323 first_group_parts = (
1324 1324 len(first_group.users_group_name.split('/'))
1325 1325 if first_group else 0)
1326 1326 second_group_parts = (
1327 1327 len(second_group.users_group_name.split('/'))
1328 1328 if second_group else 0)
1329 1329 return cmp(second_group_parts, first_group_parts)
1330 1330
1331 1331 sorted_user_group_ids = sorted(
1332 1332 self.user_group_ids, cmp=_user_group_compare)
1333 1333 for user_group_id in sorted_user_group_ids:
1334 1334 self.fixture.destroy_user_group(user_group_id)
1335 1335
1336 1336 def _cleanup_users(self):
1337 1337 for user_id in self.user_ids:
1338 1338 self.fixture.destroy_user(user_id)
1339 1339
1340 1340
1341 1341 # TODO: Think about moving this into a pytest-pyro package and make it a
1342 1342 # pytest plugin
1343 1343 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1344 1344 def pytest_runtest_makereport(item, call):
1345 1345 """
1346 1346 Adding the remote traceback if the exception has this information.
1347 1347
1348 1348 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1349 1349 to the exception instance.
1350 1350 """
1351 1351 outcome = yield
1352 1352 report = outcome.get_result()
1353 1353 if call.excinfo:
1354 1354 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1355 1355
1356 1356
1357 1357 def _add_vcsserver_remote_traceback(report, exc):
1358 1358 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1359 1359
1360 1360 if vcsserver_traceback:
1361 1361 section = 'VCSServer remote traceback ' + report.when
1362 1362 report.sections.append((section, vcsserver_traceback))
1363 1363
1364 1364
1365 1365 @pytest.fixture(scope='session')
1366 1366 def testrun():
1367 1367 return {
1368 1368 'uuid': uuid.uuid4(),
1369 1369 'start': datetime.datetime.utcnow().isoformat(),
1370 1370 'timestamp': int(time.time()),
1371 1371 }
1372 1372
1373 1373
1374 1374 @pytest.fixture(autouse=True)
1375 1375 def collect_appenlight_stats(request, testrun):
1376 1376 """
1377 1377 This fixture reports the memory consumption of single tests.
1378 1378
1379 1379 It gathers data based on `psutil` and sends them to Appenlight. The option
1380 1380 ``--ae`` has to be used to enable this fixture and the API key for your
1381 1381 application has to be provided in ``--ae-key``.
1382 1382 """
1383 1383 try:
1384 1384 # cygwin does not yet have psutil support.
1385 1385 import psutil
1386 1386 except ImportError:
1387 1387 return
1388 1388
1389 1389 if not request.config.getoption('--appenlight'):
1390 1390 return
1391 1391 else:
1392 1392 # Only request the baseapp fixture if appenlight tracking is
1393 1393 # enabled. This will speed up a test run of unit tests by 2 to 3
1394 1394 # seconds if appenlight is not enabled.
1395 1395 baseapp = request.getfuncargvalue("baseapp")
1396 1396 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1397 1397 client = AppenlightClient(
1398 1398 url=url,
1399 1399 api_key=request.config.getoption('--appenlight-api-key'),
1400 1400 namespace=request.node.nodeid,
1401 1401 request=str(testrun['uuid']),
1402 1402 testrun=testrun)
1403 1403
1404 1404 client.collect({
1405 1405 'message': "Starting",
1406 1406 })
1407 1407
1408 1408 server_and_port = baseapp.config.get_settings()['vcs.server']
1409 1409 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1410 1410 server = create_vcsserver_proxy(server_and_port, protocol)
1411 1411 with server:
1412 1412 vcs_pid = server.get_pid()
1413 1413 server.run_gc()
1414 1414 vcs_process = psutil.Process(vcs_pid)
1415 1415 mem = vcs_process.memory_info()
1416 1416 client.tag_before('vcsserver.rss', mem.rss)
1417 1417 client.tag_before('vcsserver.vms', mem.vms)
1418 1418
1419 1419 test_process = psutil.Process()
1420 1420 mem = test_process.memory_info()
1421 1421 client.tag_before('test.rss', mem.rss)
1422 1422 client.tag_before('test.vms', mem.vms)
1423 1423
1424 1424 client.tag_before('time', time.time())
1425 1425
1426 1426 @request.addfinalizer
1427 1427 def send_stats():
1428 1428 client.tag_after('time', time.time())
1429 1429 with server:
1430 1430 gc_stats = server.run_gc()
1431 1431 for tag, value in gc_stats.items():
1432 1432 client.tag_after(tag, value)
1433 1433 mem = vcs_process.memory_info()
1434 1434 client.tag_after('vcsserver.rss', mem.rss)
1435 1435 client.tag_after('vcsserver.vms', mem.vms)
1436 1436
1437 1437 mem = test_process.memory_info()
1438 1438 client.tag_after('test.rss', mem.rss)
1439 1439 client.tag_after('test.vms', mem.vms)
1440 1440
1441 1441 client.collect({
1442 1442 'message': "Finished",
1443 1443 })
1444 1444 client.send_stats()
1445 1445
1446 1446 return client
1447 1447
1448 1448
1449 1449 class AppenlightClient():
1450 1450
1451 1451 url_template = '{url}?protocol_version=0.5'
1452 1452
1453 1453 def __init__(
1454 1454 self, url, api_key, add_server=True, add_timestamp=True,
1455 1455 namespace=None, request=None, testrun=None):
1456 1456 self.url = self.url_template.format(url=url)
1457 1457 self.api_key = api_key
1458 1458 self.add_server = add_server
1459 1459 self.add_timestamp = add_timestamp
1460 1460 self.namespace = namespace
1461 1461 self.request = request
1462 1462 self.server = socket.getfqdn(socket.gethostname())
1463 1463 self.tags_before = {}
1464 1464 self.tags_after = {}
1465 1465 self.stats = []
1466 1466 self.testrun = testrun or {}
1467 1467
1468 1468 def tag_before(self, tag, value):
1469 1469 self.tags_before[tag] = value
1470 1470
1471 1471 def tag_after(self, tag, value):
1472 1472 self.tags_after[tag] = value
1473 1473
1474 1474 def collect(self, data):
1475 1475 if self.add_server:
1476 1476 data.setdefault('server', self.server)
1477 1477 if self.add_timestamp:
1478 1478 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1479 1479 if self.namespace:
1480 1480 data.setdefault('namespace', self.namespace)
1481 1481 if self.request:
1482 1482 data.setdefault('request', self.request)
1483 1483 self.stats.append(data)
1484 1484
1485 1485 def send_stats(self):
1486 1486 tags = [
1487 1487 ('testrun', self.request),
1488 1488 ('testrun.start', self.testrun['start']),
1489 1489 ('testrun.timestamp', self.testrun['timestamp']),
1490 1490 ('test', self.namespace),
1491 1491 ]
1492 1492 for key, value in self.tags_before.items():
1493 1493 tags.append((key + '.before', value))
1494 1494 try:
1495 1495 delta = self.tags_after[key] - value
1496 1496 tags.append((key + '.delta', delta))
1497 1497 except Exception:
1498 1498 pass
1499 1499 for key, value in self.tags_after.items():
1500 1500 tags.append((key + '.after', value))
1501 1501 self.collect({
1502 1502 'message': "Collected tags",
1503 1503 'tags': tags,
1504 1504 })
1505 1505
1506 1506 response = requests.post(
1507 1507 self.url,
1508 1508 headers={
1509 1509 'X-appenlight-api-key': self.api_key},
1510 1510 json=self.stats,
1511 1511 )
1512 1512
1513 1513 if not response.status_code == 200:
1514 1514 pprint.pprint(self.stats)
1515 1515 print(response.headers)
1516 1516 print(response.text)
1517 1517 raise Exception('Sending to appenlight failed')
1518 1518
1519 1519
1520 1520 @pytest.fixture
1521 def gist_util(request, baseapp):
1521 def gist_util(request, db_connection):
1522 1522 """
1523 1523 Provides a wired instance of `GistUtility` with integrated cleanup.
1524 1524 """
1525 1525 utility = GistUtility()
1526 1526 request.addfinalizer(utility.cleanup)
1527 1527 return utility
1528 1528
1529 1529
1530 1530 class GistUtility(object):
1531 1531 def __init__(self):
1532 1532 self.fixture = Fixture()
1533 1533 self.gist_ids = []
1534 1534
1535 1535 def create_gist(self, **kwargs):
1536 1536 gist = self.fixture.create_gist(**kwargs)
1537 1537 self.gist_ids.append(gist.gist_id)
1538 1538 return gist
1539 1539
1540 1540 def cleanup(self):
1541 1541 for id_ in self.gist_ids:
1542 1542 self.fixture.destroy_gists(str(id_))
1543 1543
1544 1544
1545 1545 @pytest.fixture
1546 1546 def enabled_backends(request):
1547 1547 backends = request.config.option.backends
1548 1548 return backends[:]
1549 1549
1550 1550
1551 1551 @pytest.fixture
1552 def settings_util(request):
1552 def settings_util(request, db_connection):
1553 1553 """
1554 1554 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1555 1555 """
1556 1556 utility = SettingsUtility()
1557 1557 request.addfinalizer(utility.cleanup)
1558 1558 return utility
1559 1559
1560 1560
1561 1561 class SettingsUtility(object):
1562 1562 def __init__(self):
1563 1563 self.rhodecode_ui_ids = []
1564 1564 self.rhodecode_setting_ids = []
1565 1565 self.repo_rhodecode_ui_ids = []
1566 1566 self.repo_rhodecode_setting_ids = []
1567 1567
1568 1568 def create_repo_rhodecode_ui(
1569 1569 self, repo, section, value, key=None, active=True, cleanup=True):
1570 1570 key = key or hashlib.sha1(
1571 1571 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1572 1572
1573 1573 setting = RepoRhodeCodeUi()
1574 1574 setting.repository_id = repo.repo_id
1575 1575 setting.ui_section = section
1576 1576 setting.ui_value = value
1577 1577 setting.ui_key = key
1578 1578 setting.ui_active = active
1579 1579 Session().add(setting)
1580 1580 Session().commit()
1581 1581
1582 1582 if cleanup:
1583 1583 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1584 1584 return setting
1585 1585
1586 1586 def create_rhodecode_ui(
1587 1587 self, section, value, key=None, active=True, cleanup=True):
1588 1588 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1589 1589
1590 1590 setting = RhodeCodeUi()
1591 1591 setting.ui_section = section
1592 1592 setting.ui_value = value
1593 1593 setting.ui_key = key
1594 1594 setting.ui_active = active
1595 1595 Session().add(setting)
1596 1596 Session().commit()
1597 1597
1598 1598 if cleanup:
1599 1599 self.rhodecode_ui_ids.append(setting.ui_id)
1600 1600 return setting
1601 1601
1602 1602 def create_repo_rhodecode_setting(
1603 1603 self, repo, name, value, type_, cleanup=True):
1604 1604 setting = RepoRhodeCodeSetting(
1605 1605 repo.repo_id, key=name, val=value, type=type_)
1606 1606 Session().add(setting)
1607 1607 Session().commit()
1608 1608
1609 1609 if cleanup:
1610 1610 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1611 1611 return setting
1612 1612
1613 1613 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1614 1614 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1615 1615 Session().add(setting)
1616 1616 Session().commit()
1617 1617
1618 1618 if cleanup:
1619 1619 self.rhodecode_setting_ids.append(setting.app_settings_id)
1620 1620
1621 1621 return setting
1622 1622
1623 1623 def cleanup(self):
1624 1624 for id_ in self.rhodecode_ui_ids:
1625 1625 setting = RhodeCodeUi.get(id_)
1626 1626 Session().delete(setting)
1627 1627
1628 1628 for id_ in self.rhodecode_setting_ids:
1629 1629 setting = RhodeCodeSetting.get(id_)
1630 1630 Session().delete(setting)
1631 1631
1632 1632 for id_ in self.repo_rhodecode_ui_ids:
1633 1633 setting = RepoRhodeCodeUi.get(id_)
1634 1634 Session().delete(setting)
1635 1635
1636 1636 for id_ in self.repo_rhodecode_setting_ids:
1637 1637 setting = RepoRhodeCodeSetting.get(id_)
1638 1638 Session().delete(setting)
1639 1639
1640 1640 Session().commit()
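# Usage sketch (hypothetical; `repo_stub` refers to the repository stub fixture
# used elsewhere in this conftest):
#
#   def test_repo_ui_override(settings_util, repo_stub):
#       setting = settings_util.create_repo_rhodecode_ui(
#           repo_stub, 'hooks', 'python:example.hook', key='example_hook')
#       assert setting.ui_section == 'hooks'
#
# With cleanup=True (the default) the created rows are removed again in
# SettingsUtility.cleanup().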
1641 1641
1642 1642
1643 1643 @pytest.fixture
1644 1644 def no_notifications(request):
1645 1645 notification_patcher = mock.patch(
1646 1646 'rhodecode.model.notification.NotificationModel.create')
1647 1647 notification_patcher.start()
1648 1648 request.addfinalizer(notification_patcher.stop)
1649 1649
1650 1650
1651 1651 @pytest.fixture(scope='session')
1652 1652 def repeat(request):
1653 1653 """
1654 1654 Returns the number of repetitions that repetition-based tests should use.
1655 1655
1656 1656 Slower calls may divide it by 10 or 100. The default is chosen so that
1657 1657 these tests do not slow down our default test suite too much.
1658 1658 """
1659 1659 return request.config.getoption('--repeat')
1660 1660
1661 1661
1662 1662 @pytest.fixture
1663 1663 def rhodecode_fixtures():
1664 1664 return Fixture()
1665 1665
1666 1666
1667 1667 @pytest.fixture
1668 1668 def context_stub():
1669 1669 """
1670 1670 Stub context object.
1671 1671 """
1672 1672 context = pyramid.testing.DummyResource()
1673 1673 return context
1674 1674
1675 1675
1676 1676 @pytest.fixture
1677 1677 def request_stub():
1678 1678 """
1679 1679 Stub request object.
1680 1680 """
1681 1681 from rhodecode.lib.base import bootstrap_request
1682 1682 request = bootstrap_request(scheme='https')
1683 1683 return request
1684 1684
1685 1685
1686 1686 @pytest.fixture
1687 1687 def config_stub(request, request_stub):
1688 1688 """
1689 1689 Set up pyramid.testing and return the Configurator.
1690 1690 """
1691 1691 from rhodecode.lib.base import bootstrap_config
1692 1692 config = bootstrap_config(request=request_stub)
1693 1693
1694 1694 @request.addfinalizer
1695 1695 def cleanup():
1696 1696 pyramid.testing.tearDown()
1697 1697
1698 1698 return config
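# Usage sketch (hypothetical test; add_route/commit are standard Pyramid
# Configurator calls):
#
#   def test_adds_route(config_stub):
#       config_stub.add_route('example_home', '/')
#       config_stub.commit()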
1699 1699
1700 1700
1701 1701 @pytest.fixture
1702 1702 def StubIntegrationType():
1703 1703 class _StubIntegrationType(IntegrationTypeBase):
1704 1704 """ Test integration type class """
1705 1705
1706 1706 key = 'test'
1707 1707 display_name = 'Test integration type'
1708 1708 description = 'A test integration type for testing'
1709 1709 icon = 'test_icon_html_image'
1710 1710
1711 1711 def __init__(self, settings):
1712 1712 super(_StubIntegrationType, self).__init__(settings)
1713 1713 self.sent_events = [] # for testing
1714 1714
1715 1715 def send_event(self, event):
1716 1716 self.sent_events.append(event)
1717 1717
1718 1718 def settings_schema(self):
1719 1719 class SettingsSchema(colander.Schema):
1720 1720 test_string_field = colander.SchemaNode(
1721 1721 colander.String(),
1722 1722 missing=colander.required,
1723 1723 title='test string field',
1724 1724 )
1725 1725 test_int_field = colander.SchemaNode(
1726 1726 colander.Int(),
1727 1727 title='some integer setting',
1728 1728 )
1729 1729 return SettingsSchema()
1730 1730
1731 1731
1732 1732 integration_type_registry.register_integration_type(_StubIntegrationType)
1733 1733 return _StubIntegrationType
1734 1734
1735 1735 @pytest.fixture
1736 1736 def stub_integration_settings():
1737 1737 return {
1738 1738 'test_string_field': 'some data',
1739 1739 'test_int_field': 100,
1740 1740 }
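# Note: the values above deliberately satisfy _StubIntegrationType's
# SettingsSchema (one required string field, one int field). A quick
# illustrative check (sketch only):
#
#   def test_stub_settings_pass_schema(StubIntegrationType, stub_integration_settings):
#       schema = StubIntegrationType(settings={}).settings_schema()
#       schema.deserialize(stub_integration_settings)   # should not raise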
1741 1741
1742 1742
1743 1743 @pytest.fixture
1744 1744 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1745 1745 stub_integration_settings):
1746 1746 integration = IntegrationModel().create(
1747 1747 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1748 1748 name='test repo integration',
1749 1749 repo=repo_stub, repo_group=None, child_repos_only=None)
1750 1750
1751 1751 @request.addfinalizer
1752 1752 def cleanup():
1753 1753 IntegrationModel().delete(integration)
1754 1754
1755 1755 return integration
1756 1756
1757 1757
1758 1758 @pytest.fixture
1759 1759 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1760 1760 stub_integration_settings):
1761 1761 integration = IntegrationModel().create(
1762 1762 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1763 1763 name='test repogroup integration',
1764 1764 repo=None, repo_group=test_repo_group, child_repos_only=True)
1765 1765
1766 1766 @request.addfinalizer
1767 1767 def cleanup():
1768 1768 IntegrationModel().delete(integration)
1769 1769
1770 1770 return integration
1771 1771
1772 1772
1773 1773 @pytest.fixture
1774 1774 def repogroup_recursive_integration_stub(request, test_repo_group,
1775 1775 StubIntegrationType, stub_integration_settings):
1776 1776 integration = IntegrationModel().create(
1777 1777 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1778 1778 name='test recursive repogroup integration',
1779 1779 repo=None, repo_group=test_repo_group, child_repos_only=False)
1780 1780
1781 1781 @request.addfinalizer
1782 1782 def cleanup():
1783 1783 IntegrationModel().delete(integration)
1784 1784
1785 1785 return integration
1786 1786
1787 1787
1788 1788 @pytest.fixture
1789 1789 def global_integration_stub(request, StubIntegrationType,
1790 1790 stub_integration_settings):
1791 1791 integration = IntegrationModel().create(
1792 1792 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1793 1793 name='test global integration',
1794 1794 repo=None, repo_group=None, child_repos_only=None)
1795 1795
1796 1796 @request.addfinalizer
1797 1797 def cleanup():
1798 1798 IntegrationModel().delete(integration)
1799 1799
1800 1800 return integration
1801 1801
1802 1802
1803 1803 @pytest.fixture
1804 1804 def root_repos_integration_stub(request, StubIntegrationType,
1805 1805 stub_integration_settings):
1806 1806 integration = IntegrationModel().create(
1807 1807 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1808 1808 name='test root repos integration',
1809 1809 repo=None, repo_group=None, child_repos_only=True)
1810 1810
1811 1811 @request.addfinalizer
1812 1812 def cleanup():
1813 1813 IntegrationModel().delete(integration)
1814 1814
1815 1815 return integration
1816 1816
1817 1817
1818 1818 @pytest.fixture
1819 1819 def local_dt_to_utc():
1820 1820 def _factory(dt):
1821 1821 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1822 1822 dateutil.tz.tzutc()).replace(tzinfo=None)
1823 1823 return _factory
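# Example (sketch): with a local zone of UTC+02:00 the factory maps a naive
# local datetime to its naive UTC equivalent, e.g.
#
#   local_dt_to_utc(datetime.datetime(2017, 6, 1, 12, 0))
#   # -> datetime.datetime(2017, 6, 1, 10, 0)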
1824 1824
1825 1825
1826 1826 @pytest.fixture
1827 1827 def disable_anonymous_user(request, baseapp):
1828 1828 set_anonymous_access(False)
1829 1829
1830 1830 @request.addfinalizer
1831 1831 def cleanup():
1832 1832 set_anonymous_access(True)
1833 1833
1834 1834
1835 1835 @pytest.fixture(scope='module')
1836 1836 def rc_fixture(request):
1837 1837 return Fixture()
1838 1838
1839 1839
1840 1840 @pytest.fixture
1841 1841 def repo_groups(request):
1842 1842 fixture = Fixture()
1843 1843
1844 1844 session = Session()
1845 1845 zombie_group = fixture.create_repo_group('zombie')
1846 1846 parent_group = fixture.create_repo_group('parent')
1847 1847 child_group = fixture.create_repo_group('parent/child')
1848 1848 groups_in_db = session.query(RepoGroup).all()
1849 1849 assert len(groups_in_db) == 3
1850 1850 assert child_group.group_parent_id == parent_group.group_id
1851 1851
1852 1852 @request.addfinalizer
1853 1853 def cleanup():
1854 1854 fixture.destroy_repo_group(zombie_group)
1855 1855 fixture.destroy_repo_group(child_group)
1856 1856 fixture.destroy_repo_group(parent_group)
1857 1857
1858 1858 return zombie_group, parent_group, child_group
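# Usage sketch (hypothetical test, mirroring the assertion made above):
#
#   def test_group_nesting(repo_groups):
#       zombie, parent, child = repo_groups
#       assert child.group_parent_id == parent.group_id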