tests: Add a fixture to get the settings dict.
Martin Bornhold -
r602:3bdca272 default
@@ -1,1622 +1,1638 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess
30 30 import time
31 31 import uuid
32 32
33 33 import mock
34 34 import pyramid.testing
35 35 import pytest
36 36 import requests
37 37 from webtest.app import TestApp
38 38
39 39 import rhodecode
40 40 from rhodecode.model.changeset_status import ChangesetStatusModel
41 41 from rhodecode.model.comment import ChangesetCommentsModel
42 42 from rhodecode.model.db import (
43 43 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
44 44 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
45 45 from rhodecode.model.meta import Session
46 46 from rhodecode.model.pull_request import PullRequestModel
47 47 from rhodecode.model.repo import RepoModel
48 48 from rhodecode.model.repo_group import RepoGroupModel
49 49 from rhodecode.model.user import UserModel
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51 from rhodecode.model.user_group import UserGroupModel
52 52 from rhodecode.lib.utils import repo2db_mapper
53 53 from rhodecode.lib.vcs import create_vcsserver_proxy
54 54 from rhodecode.lib.vcs.backends import get_backend
55 55 from rhodecode.lib.vcs.nodes import FileNode
56 56 from rhodecode.tests import (
57 57 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
58 58 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
59 59 TEST_USER_REGULAR_PASS)
60 60 from rhodecode.tests.fixture import Fixture
61 61
62 62
63 63 def _split_comma(value):
64 64 return value.split(',')
65 65
66 66
67 67 def pytest_addoption(parser):
68 68 parser.addoption(
69 69 '--keep-tmp-path', action='store_true',
70 70 help="Keep the test temporary directories")
71 71 parser.addoption(
72 72 '--backends', action='store', type=_split_comma,
73 73 default=['git', 'hg', 'svn'],
74 74 help="Select which backends to test for backend specific tests.")
75 75 parser.addoption(
76 76 '--dbs', action='store', type=_split_comma,
77 77 default=['sqlite'],
78 78 help="Select which database to test for database specific tests. "
79 79 "Possible options are sqlite,postgres,mysql")
80 80 parser.addoption(
81 81 '--appenlight', '--ae', action='store_true',
82 82 help="Track statistics in appenlight.")
83 83 parser.addoption(
84 84 '--appenlight-api-key', '--ae-key',
85 85 help="API key for Appenlight.")
86 86 parser.addoption(
87 87 '--appenlight-url', '--ae-url',
88 88 default="https://ae.rhodecode.com",
89 89 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
90 90 parser.addoption(
91 91 '--sqlite-connection-string', action='store',
92 92 default='', help="Connection string for the dbs tests with SQLite")
93 93 parser.addoption(
94 94 '--postgres-connection-string', action='store',
95 95 default='', help="Connection string for the dbs tests with Postgres")
96 96 parser.addoption(
97 97 '--mysql-connection-string', action='store',
98 98 default='', help="Connection string for the dbs tests with MySQL")
99 99 parser.addoption(
100 100 '--repeat', type=int, default=100,
101 101 help="Number of repetitions in performance tests.")
102 102
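# Illustrative sketch (not part of this changeset): the options registered
# above could be combined on the command line roughly like this; the exact
# connection string value is an assumption.
#
#   py.test --backends=git,hg --dbs=postgres \
#       --postgres-connection-string='postgresql://user:pass@localhost/test' \
#       --appenlight --appenlight-api-key=<your-key> --repeat=10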
103 103
104 104 def pytest_configure(config):
105 105 # Apply the kombu patch early on, needed for test discovery on Python 2.7.11
106 106 from rhodecode.config import patches
107 107 patches.kombu_1_5_1_python_2_7_11()
108 108
109 109
110 110 def pytest_collection_modifyitems(session, config, items):
111 111 # Skip objects marked as "not a test" (nose's `__test__ = False` convention), used for the transition from nose to pytest
112 112 remaining = [
113 113 i for i in items if getattr(i.obj, '__test__', True)]
114 114 items[:] = remaining
115 115
116 116
117 117 def pytest_generate_tests(metafunc):
118 118 # Support test generation based on --backend parameter
119 119 if 'backend_alias' in metafunc.fixturenames:
120 120 backends = get_backends_from_metafunc(metafunc)
121 121 scope = None
122 122 if not backends:
123 123 pytest.skip("Not enabled for any of selected backends")
124 124 metafunc.parametrize('backend_alias', backends, scope=scope)
125 125 elif hasattr(metafunc.function, 'backends'):
126 126 backends = get_backends_from_metafunc(metafunc)
127 127 if not backends:
128 128 pytest.skip("Not enabled for any of selected backends")
129 129
130 130
131 131 def get_backends_from_metafunc(metafunc):
132 132 requested_backends = set(metafunc.config.getoption('--backends'))
133 133 if hasattr(metafunc.function, 'backends'):
134 134 # Supported backends by this test function, created from
135 135 # pytest.mark.backends
136 136 backends = metafunc.function.backends.args
137 137 elif hasattr(metafunc.cls, 'backend_alias'):
138 138 # Support class attribute "backend_alias", this is mainly
139 139 # for legacy reasons for tests not yet using pytest.mark.backends
140 140 backends = [metafunc.cls.backend_alias]
141 141 else:
142 142 backends = metafunc.config.getoption('--backends')
143 143 return requested_backends.intersection(backends)
144 144
145 145
146 146 @pytest.fixture(scope='session', autouse=True)
147 147 def activate_example_rcextensions(request):
148 148 """
149 149 Patch in an example rcextensions module which verifies the passed-in kwargs.
150 150 """
151 151 from rhodecode.tests.other import example_rcextensions
152 152
153 153 old_extensions = rhodecode.EXTENSIONS
154 154 rhodecode.EXTENSIONS = example_rcextensions
155 155
156 156 @request.addfinalizer
157 157 def cleanup():
158 158 rhodecode.EXTENSIONS = old_extensions
159 159
160 160
161 161 @pytest.fixture
162 162 def capture_rcextensions():
163 163 """
164 164 Returns the recorded calls to entry points in rcextensions.
165 165 """
166 166 calls = rhodecode.EXTENSIONS.calls
167 167 calls.clear()
168 168 # Note: At this point it is still an empty dict, but it will be filled
169 169 # during the test run. Since it is a reference, that is enough for the
170 170 # fixture to work.
171 171 return calls
172 172
173 173
174 174 @pytest.fixture(scope='session')
175 175 def http_environ_session():
176 176 """
177 177 Allows using "http_environ" in session scope.
178 178 """
179 179 return http_environ(
180 180 http_host_stub=http_host_stub())
181 181
182 182
183 183 @pytest.fixture
184 184 def http_host_stub():
185 185 """
186 186 Value of HTTP_HOST in the test run.
187 187 """
188 188 return 'test.example.com:80'
189 189
190 190
191 191 @pytest.fixture
192 192 def http_environ(http_host_stub):
193 193 """
194 194 HTTP extra environ keys.
195 195
196 196 Used by the test application as well as for setting up the pylons
197 197 environment. In the case of the fixture "app" it should be possible
198 198 to override this for a specific test case.
199 199 """
200 200 return {
201 201 'SERVER_NAME': http_host_stub.split(':')[0],
202 202 'SERVER_PORT': http_host_stub.split(':')[1],
203 203 'HTTP_HOST': http_host_stub,
204 204 }
205 205
206 206
207 207 @pytest.fixture(scope='function')
208 208 def app(request, pylonsapp, http_environ):
209 209 app = TestApp(
210 210 pylonsapp,
211 211 extra_environ=http_environ)
212 212 if request.cls:
213 213 request.cls.app = app
214 214 return app
215 215
216 216
217 @pytest.fixture()
218 def app_settings(pylonsapp, pylons_config):
219 """
220 Settings dictionary used to create the app.
221
222 Parses the ini file and passes the result through the sanitize and apply
223 defaults mechanism in `rhodecode.config.middleware`.
224 """
225 from paste.deploy.loadwsgi import loadcontext, APP
226 from rhodecode.config.middleware import (
227 sanitize_settings_and_apply_defaults)
228 context = loadcontext(APP, 'config:' + pylons_config)
229 settings = sanitize_settings_and_apply_defaults(context.config())
230 return settings
231
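# Illustrative sketch (not part of this changeset): a test consuming the new
# `app_settings` fixture above. The key `vcs.server` is taken from the config
# access further down in this file and is otherwise an assumption.

def test_app_settings_example(app_settings):
    assert isinstance(app_settings, dict)
    assert 'vcs.server' in app_settings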
232
217 233 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
218 234
219 235
220 236 def _autologin_user(app, *args):
221 237 session = login_user_session(app, *args)
222 238 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
223 239 return LoginData(csrf_token, session['rhodecode_user'])
224 240
225 241
226 242 @pytest.fixture
227 243 def autologin_user(app):
228 244 """
229 245 Utility fixture which makes sure that the admin user is logged in
230 246 """
231 247 return _autologin_user(app)
232 248
233 249
234 250 @pytest.fixture
235 251 def autologin_regular_user(app):
236 252 """
237 253 Utility fixture which makes sure that the regular user is logged in
238 254 """
239 255 return _autologin_user(
240 256 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
241 257
242 258
243 259 @pytest.fixture(scope='function')
244 260 def csrf_token(request, autologin_user):
245 261 return autologin_user.csrf_token
246 262
247 263
248 264 @pytest.fixture(scope='function')
249 265 def xhr_header(request):
250 266 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
251 267
252 268
253 269 @pytest.fixture
254 270 def real_crypto_backend(monkeypatch):
255 271 """
256 272 Switch the production crypto backend on for this test.
257 273
258 274 During the test run the crypto backend is replaced with a faster
259 275 implementation based on the MD5 algorithm.
260 276 """
261 277 monkeypatch.setattr(rhodecode, 'is_test', False)
262 278
263 279
264 280 @pytest.fixture(scope='class')
265 281 def index_location(request, pylonsapp):
266 282 index_location = pylonsapp.config['app_conf']['search.location']
267 283 if request.cls:
268 284 request.cls.index_location = index_location
269 285 return index_location
270 286
271 287
272 288 @pytest.fixture(scope='session', autouse=True)
273 289 def tests_tmp_path(request):
274 290 """
275 291 Create temporary directory to be used during the test session.
276 292 """
277 293 if not os.path.exists(TESTS_TMP_PATH):
278 294 os.makedirs(TESTS_TMP_PATH)
279 295
280 296 if not request.config.getoption('--keep-tmp-path'):
281 297 @request.addfinalizer
282 298 def remove_tmp_path():
283 299 shutil.rmtree(TESTS_TMP_PATH)
284 300
285 301 return TESTS_TMP_PATH
286 302
287 303
288 304 @pytest.fixture(scope='session', autouse=True)
289 305 def patch_pyro_request_scope_proxy_factory(request):
290 306 """
291 307 Patch the pyro proxy factory to always use the same dummy request object
292 308 when under test. This will return the same pyro proxy on every call.
293 309 """
294 310 dummy_request = pyramid.testing.DummyRequest()
295 311
296 312 def mocked_call(self, request=None):
297 313 return self.getProxy(request=dummy_request)
298 314
299 315 patcher = mock.patch(
300 316 'rhodecode.lib.vcs.client.RequestScopeProxyFactory.__call__',
301 317 new=mocked_call)
302 318 patcher.start()
303 319
304 320 @request.addfinalizer
305 321 def undo_patching():
306 322 patcher.stop()
307 323
308 324
309 325 @pytest.fixture
310 326 def test_repo_group(request):
311 327 """
312 328 Create a temporary repository group, and destroy it automatically
313 329 after use.
314 330 """
315 331 fixture = Fixture()
316 332 repogroupid = 'test_repo_group_%s' % int(time.time())
317 333 repo_group = fixture.create_repo_group(repogroupid)
318 334
319 335 def _cleanup():
320 336 fixture.destroy_repo_group(repogroupid)
321 337
322 338 request.addfinalizer(_cleanup)
323 339 return repo_group
324 340
325 341
326 342 @pytest.fixture
327 343 def test_user_group(request):
328 344 """
329 345 Create a temporary user group, and destroy it automatically
330 346 after use.
331 347 """
332 348 fixture = Fixture()
333 349 usergroupid = 'test_user_group_%s' % int(time.time())
334 350 user_group = fixture.create_user_group(usergroupid)
335 351
336 352 def _cleanup():
337 353 fixture.destroy_user_group(user_group)
338 354
339 355 request.addfinalizer(_cleanup)
340 356 return user_group
341 357
342 358
343 359 @pytest.fixture(scope='session')
344 360 def test_repo(request):
345 361 container = TestRepoContainer()
346 362 request.addfinalizer(container._cleanup)
347 363 return container
348 364
349 365
350 366 class TestRepoContainer(object):
351 367 """
352 368 Container for test repositories which are used read only.
353 369
354 370 Repositories will be created on demand and re-used during the lifetime
355 371 of this object.
356 372
357 373 Usage to get the svn test repository "minimal"::
358 374
359 375 test_repo = TestRepoContainer()
360 376 repo = test_repo('minimal', 'svn')
361 377
362 378 """
363 379
364 380 dump_extractors = {
365 381 'git': utils.extract_git_repo_from_dump,
366 382 'hg': utils.extract_hg_repo_from_dump,
367 383 'svn': utils.extract_svn_repo_from_dump,
368 384 }
369 385
370 386 def __init__(self):
371 387 self._cleanup_repos = []
372 388 self._fixture = Fixture()
373 389 self._repos = {}
374 390
375 391 def __call__(self, dump_name, backend_alias):
376 392 key = (dump_name, backend_alias)
377 393 if key not in self._repos:
378 394 repo = self._create_repo(dump_name, backend_alias)
379 395 self._repos[key] = repo.repo_id
380 396 return Repository.get(self._repos[key])
381 397
382 398 def _create_repo(self, dump_name, backend_alias):
383 399 repo_name = '%s-%s' % (backend_alias, dump_name)
384 400 backend_class = get_backend(backend_alias)
385 401 dump_extractor = self.dump_extractors[backend_alias]
386 402 repo_path = dump_extractor(dump_name, repo_name)
387 403 vcs_repo = backend_class(repo_path)
388 404 repo2db_mapper({repo_name: vcs_repo})
389 405 repo = RepoModel().get_by_repo_name(repo_name)
390 406 self._cleanup_repos.append(repo_name)
391 407 return repo
392 408
393 409 def _cleanup(self):
394 410 for repo_name in reversed(self._cleanup_repos):
395 411 self._fixture.destroy_repo(repo_name)
396 412
397 413
398 414 @pytest.fixture
399 415 def backend(request, backend_alias, pylonsapp, test_repo):
400 416 """
401 417 Parametrized fixture which represents a single backend implementation.
402 418
403 419 It respects the option `--backends` to focus the test run on specific
404 420 backend implementations.
405 421
406 422 It also supports `pytest.mark.xfail_backends` to mark tests as failing
407 423 for specific backends. This is intended as a utility for incremental
408 424 development of a new backend implementation.
409 425 """
410 426 if backend_alias not in request.config.getoption('--backends'):
411 427 pytest.skip("Backend %s not selected." % (backend_alias, ))
412 428
413 429 utils.check_xfail_backends(request.node, backend_alias)
414 430 utils.check_skip_backends(request.node, backend_alias)
415 431
416 432 repo_name = 'vcs_test_%s' % (backend_alias, )
417 433 backend = Backend(
418 434 alias=backend_alias,
419 435 repo_name=repo_name,
420 436 test_name=request.node.name,
421 437 test_repo_container=test_repo)
422 438 request.addfinalizer(backend.cleanup)
423 439 return backend
424 440
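# Illustrative sketch (not part of this changeset): a test limited to specific
# backends via the `pytest.mark.backends` marker consumed by
# `get_backends_from_metafunc` above, using the parametrized `backend` fixture.

@pytest.mark.backends("git", "hg")
def test_backend_example(backend):
    repo = backend.create_repo(number_of_commits=1)
    assert repo.scm_instance().commit_ids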
425 441
426 442 @pytest.fixture
427 443 def backend_git(request, pylonsapp, test_repo):
428 444 return backend(request, 'git', pylonsapp, test_repo)
429 445
430 446
431 447 @pytest.fixture
432 448 def backend_hg(request, pylonsapp, test_repo):
433 449 return backend(request, 'hg', pylonsapp, test_repo)
434 450
435 451
436 452 @pytest.fixture
437 453 def backend_svn(request, pylonsapp, test_repo):
438 454 return backend(request, 'svn', pylonsapp, test_repo)
439 455
440 456
441 457 @pytest.fixture
442 458 def backend_random(backend_git):
443 459 """
444 460 Use this to express that your tests need "a backend".
445 461
446 462 A few of our tests need a backend, so that we can run the code. This
447 463 fixture is intended to be used for such cases. It will pick one of the
448 464 backends and run the tests.
449 465
450 466 The fixture `backend` would run the test multiple times for each
451 467 available backend, which is a pure waste of time if the test is
452 468 independent of the backend type.
453 469 """
454 470 # TODO: johbo: Change this to pick a random backend
455 471 return backend_git
456 472
457 473
458 474 @pytest.fixture
459 475 def backend_stub(backend_git):
460 476 """
461 477 Use this to express that your tests need a backend stub
462 478
463 479 TODO: mikhail: Implement a real stub logic instead of returning
464 480 a git backend
465 481 """
466 482 return backend_git
467 483
468 484
469 485 @pytest.fixture
470 486 def repo_stub(backend_stub):
471 487 """
472 488 Use this to express that your tests need a repository stub
473 489 """
474 490 return backend_stub.create_repo()
475 491
476 492
477 493 class Backend(object):
478 494 """
479 495 Represents the test configuration for one supported backend
480 496
481 497 Provides easy access to different test repositories based on
482 498 `__getitem__`. Such repositories will only be created once per test
483 499 session.
484 500 """
485 501
486 502 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
487 503 _master_repo = None
488 504 _commit_ids = {}
489 505
490 506 def __init__(self, alias, repo_name, test_name, test_repo_container):
491 507 self.alias = alias
492 508 self.repo_name = repo_name
493 509 self._cleanup_repos = []
494 510 self._test_name = test_name
495 511 self._test_repo_container = test_repo_container
496 512 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
497 513 # Fixture will survive in the end.
498 514 self._fixture = Fixture()
499 515
500 516 def __getitem__(self, key):
501 517 return self._test_repo_container(key, self.alias)
502 518
503 519 @property
504 520 def repo(self):
505 521 """
506 522 Returns the "current" repository. This is the vcs_test repo or the
507 523 last repo which has been created with `create_repo`.
508 524 """
509 525 from rhodecode.model.db import Repository
510 526 return Repository.get_by_repo_name(self.repo_name)
511 527
512 528 @property
513 529 def default_branch_name(self):
514 530 VcsRepository = get_backend(self.alias)
515 531 return VcsRepository.DEFAULT_BRANCH_NAME
516 532
517 533 @property
518 534 def default_head_id(self):
519 535 """
520 536 Returns the default head id of the underlying backend.
521 537
522 538 This will be the default branch name in case the backend has a
523 539 default branch. Otherwise it will point to a valid head which can
524 540 serve as the base to create a new commit on top of it.
525 541 """
526 542 vcsrepo = self.repo.scm_instance()
527 543 head_id = (
528 544 vcsrepo.DEFAULT_BRANCH_NAME or
529 545 vcsrepo.commit_ids[-1])
530 546 return head_id
531 547
532 548 @property
533 549 def commit_ids(self):
534 550 """
535 551 Returns the commit ids of the last created repository, mapped from commit message to raw_id
536 552 """
537 553 return self._commit_ids
538 554
539 555 def create_master_repo(self, commits):
540 556 """
541 557 Create a repository and remember it as a template.
542 558
543 559 This makes it easy to create derived repositories to construct
544 560 more complex scenarios for diff, compare and pull requests.
545 561
546 562 Returns a commit map which maps from commit message to raw_id.
547 563 """
548 564 self._master_repo = self.create_repo(commits=commits)
549 565 return self._commit_ids
550 566
551 567 def create_repo(
552 568 self, commits=None, number_of_commits=0, heads=None,
553 569 name_suffix=u'', **kwargs):
554 570 """
555 571 Create a repository and record it for later cleanup.
556 572
557 573 :param commits: Optional. A sequence of dict instances.
558 574 Will add a commit per entry to the new repository.
559 575 :param number_of_commits: Optional. If set to a number, this number of
560 576 commits will be added to the new repository.
561 577 :param heads: Optional. Can be set to a sequence of commit
562 578 names which shall be pulled in from the master repository.
563 579
564 580 """
565 581 self.repo_name = self._next_repo_name() + name_suffix
566 582 repo = self._fixture.create_repo(
567 583 self.repo_name, repo_type=self.alias, **kwargs)
568 584 self._cleanup_repos.append(repo.repo_name)
569 585
570 586 commits = commits or [
571 587 {'message': 'Commit %s of %s' % (x, self.repo_name)}
572 588 for x in xrange(number_of_commits)]
573 589 self._add_commits_to_repo(repo.scm_instance(), commits)
574 590 if heads:
575 591 self.pull_heads(repo, heads)
576 592
577 593 return repo
578 594
579 595 def pull_heads(self, repo, heads):
580 596 """
581 597 Make sure that repo contains all commits mentioned in `heads`
582 598 """
583 599 vcsmaster = self._master_repo.scm_instance()
584 600 vcsrepo = repo.scm_instance()
585 601 vcsrepo.config.clear_section('hooks')
586 602 commit_ids = [self._commit_ids[h] for h in heads]
587 603 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
588 604
589 605 def create_fork(self):
590 606 repo_to_fork = self.repo_name
591 607 self.repo_name = self._next_repo_name()
592 608 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
593 609 self._cleanup_repos.append(self.repo_name)
594 610 return repo
595 611
596 612 def new_repo_name(self, suffix=u''):
597 613 self.repo_name = self._next_repo_name() + suffix
598 614 self._cleanup_repos.append(self.repo_name)
599 615 return self.repo_name
600 616
601 617 def _next_repo_name(self):
602 618 return u"%s_%s" % (
603 619 self.invalid_repo_name.sub(u'_', self._test_name),
604 620 len(self._cleanup_repos))
605 621
606 622 def ensure_file(self, filename, content='Test content\n'):
607 623 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
608 624 commits = [
609 625 {'added': [
610 626 FileNode(filename, content=content),
611 627 ]},
612 628 ]
613 629 self._add_commits_to_repo(self.repo.scm_instance(), commits)
614 630
615 631 def enable_downloads(self):
616 632 repo = self.repo
617 633 repo.enable_downloads = True
618 634 Session().add(repo)
619 635 Session().commit()
620 636
621 637 def cleanup(self):
622 638 for repo_name in reversed(self._cleanup_repos):
623 639 self._fixture.destroy_repo(repo_name)
624 640
625 641 def _add_commits_to_repo(self, repo, commits):
626 642 if not commits:
627 643 return
628 644
629 645 imc = repo.in_memory_commit
630 646 commit = None
631 647 self._commit_ids = {}
632 648
633 649 for idx, commit in enumerate(commits):
634 650 message = unicode(commit.get('message', 'Commit %s' % idx))
635 651
636 652 for node in commit.get('added', []):
637 653 imc.add(FileNode(node.path, content=node.content))
638 654 for node in commit.get('changed', []):
639 655 imc.change(FileNode(node.path, content=node.content))
640 656 for node in commit.get('removed', []):
641 657 imc.remove(FileNode(node.path))
642 658
643 659 parents = [
644 660 repo.get_commit(commit_id=self._commit_ids[p])
645 661 for p in commit.get('parents', [])]
646 662
647 663 operations = ('added', 'changed', 'removed')
648 664 if not any((commit.get(o) for o in operations)):
649 665 imc.add(FileNode('file_%s' % idx, content=message))
650 666
651 667 commit = imc.commit(
652 668 message=message,
653 669 author=unicode(commit.get('author', 'Automatic')),
654 670 date=commit.get('date'),
655 671 branch=commit.get('branch'),
656 672 parents=parents)
657 673
658 674 self._commit_ids[commit.message] = commit.raw_id
659 675
660 676 # Creating refs for Git to allow fetching them from remote repository
661 677 if self.alias == 'git':
662 678 refs = {}
663 679 for message in self._commit_ids:
664 680 # TODO: mikhail: do more special chars replacements
665 681 ref_name = 'refs/test-refs/{}'.format(
666 682 message.replace(' ', ''))
667 683 refs[ref_name] = self._commit_ids[message]
668 684 self._create_refs(repo, refs)
669 685
670 686 return commit
671 687
672 688 def _create_refs(self, repo, refs):
673 689 for ref_name in refs:
674 690 repo.set_refs(ref_name, refs[ref_name])
675 691
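# Illustrative sketch (not part of this changeset): the commit dicts accepted
# by `Backend.create_repo` / `_add_commits_to_repo` above. The file names and
# messages below are made up for demonstration.

def test_backend_commit_spec_example(backend):
    commits = [
        {'message': 'Add readme',
         'added': [FileNode('README', content='docs\n')]},
        {'message': 'Update readme',
         'changed': [FileNode('README', content='docs v2\n')]},
    ]
    backend.create_repo(commits=commits)
    assert len(backend.commit_ids) == 2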
676 692
677 693 @pytest.fixture
678 694 def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
679 695 """
680 696 Parametrized fixture which represents a single vcs backend implementation.
681 697
682 698 See the fixture `backend` for more details. This one implements the same
683 699 concept, but on vcs level. So it does not provide model instances etc.
684 700
685 701 Parameters are generated dynamically, see :func:`pytest_generate_tests`
686 702 for how this works.
687 703 """
688 704 if backend_alias not in request.config.getoption('--backends'):
689 705 pytest.skip("Backend %s not selected." % (backend_alias, ))
690 706
691 707 utils.check_xfail_backends(request.node, backend_alias)
692 708 utils.check_skip_backends(request.node, backend_alias)
693 709
694 710 repo_name = 'vcs_test_%s' % (backend_alias, )
695 711 repo_path = os.path.join(tests_tmp_path, repo_name)
696 712 backend = VcsBackend(
697 713 alias=backend_alias,
698 714 repo_path=repo_path,
699 715 test_name=request.node.name,
700 716 test_repo_container=test_repo)
701 717 request.addfinalizer(backend.cleanup)
702 718 return backend
703 719
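# Illustrative sketch (not part of this changeset): using the vcs-level
# fixture defined above; the file name and content are made up.

def test_vcsbackend_example(vcsbackend):
    repo = vcsbackend.create_repo(number_of_commits=1)
    vcsbackend.add_file(repo, 'example.txt', content='example\n')
    assert len(vcsbackend.repo.commit_ids) == 2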
704 720
705 721 @pytest.fixture
706 722 def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
707 723 return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)
708 724
709 725
710 726 @pytest.fixture
711 727 def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
712 728 return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)
713 729
714 730
715 731 @pytest.fixture
716 732 def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
717 733 return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)
718 734
719 735
720 736 @pytest.fixture
721 737 def vcsbackend_random(vcsbackend_git):
722 738 """
723 739 Use this to express that your tests need "a vcsbackend".
724 740
725 741 The fixture `vcsbackend` would run the test multiple times for each
726 742 available vcs backend, which is a pure waste of time if the test is
727 743 independent of the vcs backend type.
728 744 """
729 745 # TODO: johbo: Change this to pick a random backend
730 746 return vcsbackend_git
731 747
732 748
733 749 class VcsBackend(object):
734 750 """
735 751 Represents the test configuration for one supported vcs backend.
736 752 """
737 753
738 754 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
739 755
740 756 def __init__(self, alias, repo_path, test_name, test_repo_container):
741 757 self.alias = alias
742 758 self._repo_path = repo_path
743 759 self._cleanup_repos = []
744 760 self._test_name = test_name
745 761 self._test_repo_container = test_repo_container
746 762
747 763 def __getitem__(self, key):
748 764 return self._test_repo_container(key, self.alias).scm_instance()
749 765
750 766 @property
751 767 def repo(self):
752 768 """
753 769 Returns the "current" repository. This is the vcs_test repo or the
754 770 last repo which has been created.
755 771 """
756 772 Repository = get_backend(self.alias)
757 773 return Repository(self._repo_path)
758 774
759 775 @property
760 776 def backend(self):
761 777 """
762 778 Returns the backend implementation class.
763 779 """
764 780 return get_backend(self.alias)
765 781
766 782 def create_repo(self, number_of_commits=0, _clone_repo=None):
767 783 repo_name = self._next_repo_name()
768 784 self._repo_path = get_new_dir(repo_name)
769 785 Repository = get_backend(self.alias)
770 786 src_url = None
771 787 if _clone_repo:
772 788 src_url = _clone_repo.path
773 789 repo = Repository(self._repo_path, create=True, src_url=src_url)
774 790 self._cleanup_repos.append(repo)
775 791 for idx in xrange(number_of_commits):
776 792 self.ensure_file(filename='file_%s' % idx, content=repo.name)
777 793 return repo
778 794
779 795 def clone_repo(self, repo):
780 796 return self.create_repo(_clone_repo=repo)
781 797
782 798 def cleanup(self):
783 799 for repo in self._cleanup_repos:
784 800 shutil.rmtree(repo.path)
785 801
786 802 def new_repo_path(self):
787 803 repo_name = self._next_repo_name()
788 804 self._repo_path = get_new_dir(repo_name)
789 805 return self._repo_path
790 806
791 807 def _next_repo_name(self):
792 808 return "%s_%s" % (
793 809 self.invalid_repo_name.sub('_', self._test_name),
794 810 len(self._cleanup_repos))
795 811
796 812 def add_file(self, repo, filename, content='Test content\n'):
797 813 imc = repo.in_memory_commit
798 814 imc.add(FileNode(filename, content=content))
799 815 imc.commit(
800 816 message=u'Automatic commit from vcsbackend fixture',
801 817 author=u'Automatic')
802 818
803 819 def ensure_file(self, filename, content='Test content\n'):
804 820 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
805 821 self.add_file(self.repo, filename, content)
806 822
807 823
808 824 @pytest.fixture
809 825 def reposerver(request):
810 826 """
811 827 Allows serving a backend repository
812 828 """
813 829
814 830 repo_server = RepoServer()
815 831 request.addfinalizer(repo_server.cleanup)
816 832 return repo_server
817 833
818 834
819 835 class RepoServer(object):
820 836 """
821 837 Utility to serve a local repository for the duration of a test case.
822 838
823 839 Supports only Subversion so far.
824 840 """
825 841
826 842 url = None
827 843
828 844 def __init__(self):
829 845 self._cleanup_servers = []
830 846
831 847 def serve(self, vcsrepo):
832 848 if vcsrepo.alias != 'svn':
833 849 raise TypeError("Backend %s not supported" % vcsrepo.alias)
834 850
835 851 proc = subprocess.Popen(
836 852 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
837 853 '--root', vcsrepo.path])
838 854 self._cleanup_servers.append(proc)
839 855 self.url = 'svn://localhost'
840 856
841 857 def cleanup(self):
842 858 for proc in self._cleanup_servers:
843 859 proc.terminate()
844 860
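# Illustrative sketch (not part of this changeset): serving a freshly created
# Subversion repository through the `reposerver` fixture above.

def test_reposerver_example(reposerver, vcsbackend_svn):
    repo = vcsbackend_svn.create_repo()
    reposerver.serve(repo)
    assert reposerver.url == 'svn://localhost'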
845 861
846 862 @pytest.fixture
847 863 def pr_util(backend, request):
848 864 """
849 865 Utility for tests of models and for functional tests around pull requests.
850 866
851 867 It gives an instance of :class:`PRTestUtility` which provides various
852 868 utility methods around one pull request.
853 869
854 870 This fixture uses `backend` and inherits its parameterization.
855 871 """
856 872
857 873 util = PRTestUtility(backend)
858 874
859 875 @request.addfinalizer
860 876 def cleanup():
861 877 util.cleanup()
862 878
863 879 return util
864 880
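# Illustrative sketch (not part of this changeset): typical use of `pr_util`,
# relying on the defaults of `PRTestUtility.create_pull_request` below.

def test_pr_util_example(pr_util):
    pull_request = pr_util.create_pull_request(mergeable=True)
    assert pull_request.revisions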
865 881
866 882 class PRTestUtility(object):
867 883
868 884 pull_request = None
869 885 pull_request_id = None
870 886 mergeable_patcher = None
871 887 mergeable_mock = None
872 888 notification_patcher = None
873 889
874 890 def __init__(self, backend):
875 891 self.backend = backend
876 892
877 893 def create_pull_request(
878 894 self, commits=None, target_head=None, source_head=None,
879 895 revisions=None, approved=False, author=None, mergeable=False,
880 896 enable_notifications=True, name_suffix=u'', reviewers=None,
881 897 title=u"Test", description=u"Description"):
882 898 self.set_mergeable(mergeable)
883 899 if not enable_notifications:
884 900 # mock notification side effect
885 901 self.notification_patcher = mock.patch(
886 902 'rhodecode.model.notification.NotificationModel.create')
887 903 self.notification_patcher.start()
888 904
889 905 if not self.pull_request:
890 906 if not commits:
891 907 commits = [
892 908 {'message': 'c1'},
893 909 {'message': 'c2'},
894 910 {'message': 'c3'},
895 911 ]
896 912 target_head = 'c1'
897 913 source_head = 'c2'
898 914 revisions = ['c2']
899 915
900 916 self.commit_ids = self.backend.create_master_repo(commits)
901 917 self.target_repository = self.backend.create_repo(
902 918 heads=[target_head], name_suffix=name_suffix)
903 919 self.source_repository = self.backend.create_repo(
904 920 heads=[source_head], name_suffix=name_suffix)
905 921 self.author = author or UserModel().get_by_username(
906 922 TEST_USER_ADMIN_LOGIN)
907 923
908 924 model = PullRequestModel()
909 925 self.create_parameters = {
910 926 'created_by': self.author,
911 927 'source_repo': self.source_repository.repo_name,
912 928 'source_ref': self._default_branch_reference(source_head),
913 929 'target_repo': self.target_repository.repo_name,
914 930 'target_ref': self._default_branch_reference(target_head),
915 931 'revisions': [self.commit_ids[r] for r in revisions],
916 932 'reviewers': reviewers or self._get_reviewers(),
917 933 'title': title,
918 934 'description': description,
919 935 }
920 936 self.pull_request = model.create(**self.create_parameters)
921 937 assert model.get_versions(self.pull_request) == []
922 938
923 939 self.pull_request_id = self.pull_request.pull_request_id
924 940
925 941 if approved:
926 942 self.approve()
927 943
928 944 Session().add(self.pull_request)
929 945 Session().commit()
930 946
931 947 return self.pull_request
932 948
933 949 def approve(self):
934 950 self.create_status_votes(
935 951 ChangesetStatus.STATUS_APPROVED,
936 952 *self.pull_request.reviewers)
937 953
938 954 def close(self):
939 955 PullRequestModel().close_pull_request(self.pull_request, self.author)
940 956
941 957 def _default_branch_reference(self, commit_message):
942 958 reference = '%s:%s:%s' % (
943 959 'branch',
944 960 self.backend.default_branch_name,
945 961 self.commit_ids[commit_message])
946 962 return reference
947 963
948 964 def _get_reviewers(self):
949 965 model = UserModel()
950 966 return [
951 967 model.get_by_username(TEST_USER_REGULAR_LOGIN),
952 968 model.get_by_username(TEST_USER_REGULAR2_LOGIN),
953 969 ]
954 970
955 971 def update_source_repository(self, head=None):
956 972 heads = [head or 'c3']
957 973 self.backend.pull_heads(self.source_repository, heads=heads)
958 974
959 975 def add_one_commit(self, head=None):
960 976 self.update_source_repository(head=head)
961 977 old_commit_ids = set(self.pull_request.revisions)
962 978 PullRequestModel().update_commits(self.pull_request)
963 979 commit_ids = set(self.pull_request.revisions)
964 980 new_commit_ids = commit_ids - old_commit_ids
965 981 assert len(new_commit_ids) == 1
966 982 return new_commit_ids.pop()
967 983
968 984 def remove_one_commit(self):
969 985 assert len(self.pull_request.revisions) == 2
970 986 source_vcs = self.source_repository.scm_instance()
971 987 removed_commit_id = source_vcs.commit_ids[-1]
972 988
973 989 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
974 990 # remove the if once that's sorted out.
975 991 if self.backend.alias == "git":
976 992 kwargs = {'branch_name': self.backend.default_branch_name}
977 993 else:
978 994 kwargs = {}
979 995 source_vcs.strip(removed_commit_id, **kwargs)
980 996
981 997 PullRequestModel().update_commits(self.pull_request)
982 998 assert len(self.pull_request.revisions) == 1
983 999 return removed_commit_id
984 1000
985 1001 def create_comment(self, linked_to=None):
986 1002 comment = ChangesetCommentsModel().create(
987 1003 text=u"Test comment",
988 1004 repo=self.target_repository.repo_name,
989 1005 user=self.author,
990 1006 pull_request=self.pull_request)
991 1007 assert comment.pull_request_version_id is None
992 1008
993 1009 if linked_to:
994 1010 PullRequestModel()._link_comments_to_version(linked_to)
995 1011
996 1012 return comment
997 1013
998 1014 def create_inline_comment(
999 1015 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1000 1016 comment = ChangesetCommentsModel().create(
1001 1017 text=u"Test comment",
1002 1018 repo=self.target_repository.repo_name,
1003 1019 user=self.author,
1004 1020 line_no=line_no,
1005 1021 f_path=file_path,
1006 1022 pull_request=self.pull_request)
1007 1023 assert comment.pull_request_version_id is None
1008 1024
1009 1025 if linked_to:
1010 1026 PullRequestModel()._link_comments_to_version(linked_to)
1011 1027
1012 1028 return comment
1013 1029
1014 1030 def create_version_of_pull_request(self):
1015 1031 pull_request = self.create_pull_request()
1016 1032 version = PullRequestModel()._create_version_from_snapshot(
1017 1033 pull_request)
1018 1034 return version
1019 1035
1020 1036 def create_status_votes(self, status, *reviewers):
1021 1037 for reviewer in reviewers:
1022 1038 ChangesetStatusModel().set_status(
1023 1039 repo=self.pull_request.target_repo,
1024 1040 status=status,
1025 1041 user=reviewer.user_id,
1026 1042 pull_request=self.pull_request)
1027 1043
1028 1044 def set_mergeable(self, value):
1029 1045 if not self.mergeable_patcher:
1030 1046 self.mergeable_patcher = mock.patch.object(
1031 1047 VcsSettingsModel, 'get_general_settings')
1032 1048 self.mergeable_mock = self.mergeable_patcher.start()
1033 1049 self.mergeable_mock.return_value = {
1034 1050 'rhodecode_pr_merge_enabled': value}
1035 1051
1036 1052 def cleanup(self):
1037 1053 # If the source repository has already been cleaned up, the pull
1038 1054 # request will already be deleted.
1039 1055 pull_request = PullRequest().get(self.pull_request_id)
1040 1056 if pull_request:
1041 1057 PullRequestModel().delete(pull_request)
1042 1058 Session().commit()
1043 1059
1044 1060 if self.notification_patcher:
1045 1061 self.notification_patcher.stop()
1046 1062
1047 1063 if self.mergeable_patcher:
1048 1064 self.mergeable_patcher.stop()
1049 1065
1050 1066
1051 1067 @pytest.fixture
1052 1068 def user_admin(pylonsapp):
1053 1069 """
1054 1070 Provides the default admin test user as an instance of `db.User`.
1055 1071 """
1056 1072 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1057 1073 return user
1058 1074
1059 1075
1060 1076 @pytest.fixture
1061 1077 def user_regular(pylonsapp):
1062 1078 """
1063 1079 Provides the default regular test user as an instance of `db.User`.
1064 1080 """
1065 1081 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1066 1082 return user
1067 1083
1068 1084
1069 1085 @pytest.fixture
1070 1086 def user_util(request, pylonsapp):
1071 1087 """
1072 1088 Provides a wired instance of `UserUtility` with integrated cleanup.
1073 1089 """
1074 1090 utility = UserUtility(test_name=request.node.name)
1075 1091 request.addfinalizer(utility.cleanup)
1076 1092 return utility
1077 1093
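# Illustrative sketch (not part of this changeset): wiring temporary users and
# groups through `UserUtility` below; the permission name 'group.read' is an
# assumption based on RhodeCode's standard repository group permissions.

def test_user_util_example(user_util):
    user = user_util.create_user()
    repo_group = user_util.create_repo_group()
    user_util.grant_user_permission_to_repo_group(
        repo_group, user, 'group.read')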
1078 1094
1079 1095 # TODO: johbo: Split this up into utilities per domain or something similar
1080 1096 class UserUtility(object):
1081 1097
1082 1098 def __init__(self, test_name="test"):
1083 1099 self._test_name = test_name
1084 1100 self.fixture = Fixture()
1085 1101 self.repo_group_ids = []
1086 1102 self.user_ids = []
1087 1103 self.user_group_ids = []
1088 1104 self.user_repo_permission_ids = []
1089 1105 self.user_group_repo_permission_ids = []
1090 1106 self.user_repo_group_permission_ids = []
1091 1107 self.user_group_repo_group_permission_ids = []
1092 1108 self.user_user_group_permission_ids = []
1093 1109 self.user_group_user_group_permission_ids = []
1094 1110 self.user_permissions = []
1095 1111
1096 1112 def create_repo_group(
1097 1113 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1098 1114 group_name = "{prefix}_repogroup_{count}".format(
1099 1115 prefix=self._test_name,
1100 1116 count=len(self.repo_group_ids))
1101 1117 repo_group = self.fixture.create_repo_group(
1102 1118 group_name, cur_user=owner)
1103 1119 if auto_cleanup:
1104 1120 self.repo_group_ids.append(repo_group.group_id)
1105 1121 return repo_group
1106 1122
1107 1123 def create_user(self, auto_cleanup=True, **kwargs):
1108 1124 user_name = "{prefix}_user_{count}".format(
1109 1125 prefix=self._test_name,
1110 1126 count=len(self.user_ids))
1111 1127 user = self.fixture.create_user(user_name, **kwargs)
1112 1128 if auto_cleanup:
1113 1129 self.user_ids.append(user.user_id)
1114 1130 return user
1115 1131
1116 1132 def create_user_with_group(self):
1117 1133 user = self.create_user()
1118 1134 user_group = self.create_user_group(members=[user])
1119 1135 return user, user_group
1120 1136
1121 1137 def create_user_group(self, members=None, auto_cleanup=True, **kwargs):
1122 1138 group_name = "{prefix}_usergroup_{count}".format(
1123 1139 prefix=self._test_name,
1124 1140 count=len(self.user_group_ids))
1125 1141 user_group = self.fixture.create_user_group(group_name, **kwargs)
1126 1142 if auto_cleanup:
1127 1143 self.user_group_ids.append(user_group.users_group_id)
1128 1144 if members:
1129 1145 for user in members:
1130 1146 UserGroupModel().add_user_to_group(user_group, user)
1131 1147 return user_group
1132 1148
1133 1149 def grant_user_permission(self, user_name, permission_name):
1134 1150 self._inherit_default_user_permissions(user_name, False)
1135 1151 self.user_permissions.append((user_name, permission_name))
1136 1152
1137 1153 def grant_user_permission_to_repo_group(
1138 1154 self, repo_group, user, permission_name):
1139 1155 permission = RepoGroupModel().grant_user_permission(
1140 1156 repo_group, user, permission_name)
1141 1157 self.user_repo_group_permission_ids.append(
1142 1158 (repo_group.group_id, user.user_id))
1143 1159 return permission
1144 1160
1145 1161 def grant_user_group_permission_to_repo_group(
1146 1162 self, repo_group, user_group, permission_name):
1147 1163 permission = RepoGroupModel().grant_user_group_permission(
1148 1164 repo_group, user_group, permission_name)
1149 1165 self.user_group_repo_group_permission_ids.append(
1150 1166 (repo_group.group_id, user_group.users_group_id))
1151 1167 return permission
1152 1168
1153 1169 def grant_user_permission_to_repo(
1154 1170 self, repo, user, permission_name):
1155 1171 permission = RepoModel().grant_user_permission(
1156 1172 repo, user, permission_name)
1157 1173 self.user_repo_permission_ids.append(
1158 1174 (repo.repo_id, user.user_id))
1159 1175 return permission
1160 1176
1161 1177 def grant_user_group_permission_to_repo(
1162 1178 self, repo, user_group, permission_name):
1163 1179 permission = RepoModel().grant_user_group_permission(
1164 1180 repo, user_group, permission_name)
1165 1181 self.user_group_repo_permission_ids.append(
1166 1182 (repo.repo_id, user_group.users_group_id))
1167 1183 return permission
1168 1184
1169 1185 def grant_user_permission_to_user_group(
1170 1186 self, target_user_group, user, permission_name):
1171 1187 permission = UserGroupModel().grant_user_permission(
1172 1188 target_user_group, user, permission_name)
1173 1189 self.user_user_group_permission_ids.append(
1174 1190 (target_user_group.users_group_id, user.user_id))
1175 1191 return permission
1176 1192
1177 1193 def grant_user_group_permission_to_user_group(
1178 1194 self, target_user_group, user_group, permission_name):
1179 1195 permission = UserGroupModel().grant_user_group_permission(
1180 1196 target_user_group, user_group, permission_name)
1181 1197 self.user_group_user_group_permission_ids.append(
1182 1198 (target_user_group.users_group_id, user_group.users_group_id))
1183 1199 return permission
1184 1200
1185 1201 def revoke_user_permission(self, user_name, permission_name):
1186 1202 self._inherit_default_user_permissions(user_name, True)
1187 1203 UserModel().revoke_perm(user_name, permission_name)
1188 1204
1189 1205 def _inherit_default_user_permissions(self, user_name, value):
1190 1206 user = UserModel().get_by_username(user_name)
1191 1207 user.inherit_default_permissions = value
1192 1208 Session().add(user)
1193 1209 Session().commit()
1194 1210
1195 1211 def cleanup(self):
1196 1212 self._cleanup_permissions()
1197 1213 self._cleanup_repo_groups()
1198 1214 self._cleanup_user_groups()
1199 1215 self._cleanup_users()
1200 1216
1201 1217 def _cleanup_permissions(self):
1202 1218 if self.user_permissions:
1203 1219 for user_name, permission_name in self.user_permissions:
1204 1220 self.revoke_user_permission(user_name, permission_name)
1205 1221
1206 1222 for permission in self.user_repo_permission_ids:
1207 1223 RepoModel().revoke_user_permission(*permission)
1208 1224
1209 1225 for permission in self.user_group_repo_permission_ids:
1210 1226 RepoModel().revoke_user_group_permission(*permission)
1211 1227
1212 1228 for permission in self.user_repo_group_permission_ids:
1213 1229 RepoGroupModel().revoke_user_permission(*permission)
1214 1230
1215 1231 for permission in self.user_group_repo_group_permission_ids:
1216 1232 RepoGroupModel().revoke_user_group_permission(*permission)
1217 1233
1218 1234 for permission in self.user_user_group_permission_ids:
1219 1235 UserGroupModel().revoke_user_permission(*permission)
1220 1236
1221 1237 for permission in self.user_group_user_group_permission_ids:
1222 1238 UserGroupModel().revoke_user_group_permission(*permission)
1223 1239
1224 1240 def _cleanup_repo_groups(self):
1225 1241 def _repo_group_compare(first_group_id, second_group_id):
1226 1242 """
1227 1243 Gives higher priority to the groups with the most complex paths
1228 1244 """
1229 1245 first_group = RepoGroup.get(first_group_id)
1230 1246 second_group = RepoGroup.get(second_group_id)
1231 1247 first_group_parts = (
1232 1248 len(first_group.group_name.split('/')) if first_group else 0)
1233 1249 second_group_parts = (
1234 1250 len(second_group.group_name.split('/')) if second_group else 0)
1235 1251 return cmp(second_group_parts, first_group_parts)
1236 1252
1237 1253 sorted_repo_group_ids = sorted(
1238 1254 self.repo_group_ids, cmp=_repo_group_compare)
1239 1255 for repo_group_id in sorted_repo_group_ids:
1240 1256 self.fixture.destroy_repo_group(repo_group_id)
1241 1257
1242 1258 def _cleanup_user_groups(self):
1243 1259 def _user_group_compare(first_group_id, second_group_id):
1244 1260 """
1245 1261 Gives higher priority to the groups with the most complex paths
1246 1262 """
1247 1263 first_group = UserGroup.get(first_group_id)
1248 1264 second_group = UserGroup.get(second_group_id)
1249 1265 first_group_parts = (
1250 1266 len(first_group.users_group_name.split('/'))
1251 1267 if first_group else 0)
1252 1268 second_group_parts = (
1253 1269 len(second_group.users_group_name.split('/'))
1254 1270 if second_group else 0)
1255 1271 return cmp(second_group_parts, first_group_parts)
1256 1272
1257 1273 sorted_user_group_ids = sorted(
1258 1274 self.user_group_ids, cmp=_user_group_compare)
1259 1275 for user_group_id in sorted_user_group_ids:
1260 1276 self.fixture.destroy_user_group(user_group_id)
1261 1277
1262 1278 def _cleanup_users(self):
1263 1279 for user_id in self.user_ids:
1264 1280 self.fixture.destroy_user(user_id)
1265 1281
1266 1282
1267 1283 # TODO: Think about moving this into a pytest-pyro package and make it a
1268 1284 # pytest plugin
1269 1285 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1270 1286 def pytest_runtest_makereport(item, call):
1271 1287 """
1272 1288 Adds the remote traceback if the exception has this information.
1273 1289
1274 1290 Pyro4 attaches this information as the attribute `_pyroTraceback`
1275 1291 to the exception instance.
1276 1292 """
1277 1293 outcome = yield
1278 1294 report = outcome.get_result()
1279 1295 if call.excinfo:
1280 1296 _add_pyro_remote_traceback(report, call.excinfo.value)
1281 1297
1282 1298
1283 1299 def _add_pyro_remote_traceback(report, exc):
1284 1300 pyro_traceback = getattr(exc, '_pyroTraceback', None)
1285 1301
1286 1302 if pyro_traceback:
1287 1303 traceback = ''.join(pyro_traceback)
1288 1304 section = 'Pyro4 remote traceback ' + report.when
1289 1305 report.sections.append((section, traceback))
1290 1306
1291 1307
1292 1308 @pytest.fixture(scope='session')
1293 1309 def testrun():
1294 1310 return {
1295 1311 'uuid': uuid.uuid4(),
1296 1312 'start': datetime.datetime.utcnow().isoformat(),
1297 1313 'timestamp': int(time.time()),
1298 1314 }
1299 1315
1300 1316
1301 1317 @pytest.fixture(autouse=True)
1302 1318 def collect_appenlight_stats(request, testrun):
1303 1319 """
1304 1320 This fixture reports the memory consumption of single tests.
1305 1321
1306 1322 It gathers data based on `psutil` and sends it to Appenlight. The option
1307 1323 ``--ae`` has to be used to enable this fixture, and the API key for your
1308 1324 application has to be provided in ``--ae-key``.
1309 1325 """
1310 1326 try:
1311 1327 # cygwin does not have psutil support yet.
1312 1328 import psutil
1313 1329 except ImportError:
1314 1330 return
1315 1331
1316 1332 if not request.config.getoption('--appenlight'):
1317 1333 return
1318 1334 else:
1319 1335 # Only request the pylonsapp fixture if appenlight tracking is
1320 1336 # enabled. This will speed up a test run of unit tests by 2 to 3
1321 1337 # seconds if appenlight is not enabled.
1322 1338 pylonsapp = request.getfuncargvalue("pylonsapp")
1323 1339 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1324 1340 client = AppenlightClient(
1325 1341 url=url,
1326 1342 api_key=request.config.getoption('--appenlight-api-key'),
1327 1343 namespace=request.node.nodeid,
1328 1344 request=str(testrun['uuid']),
1329 1345 testrun=testrun)
1330 1346
1331 1347 client.collect({
1332 1348 'message': "Starting",
1333 1349 })
1334 1350
1335 1351 server_and_port = pylonsapp.config['vcs.server']
1336 1352 server = create_vcsserver_proxy(server_and_port)
1337 1353 with server:
1338 1354 vcs_pid = server.get_pid()
1339 1355 server.run_gc()
1340 1356 vcs_process = psutil.Process(vcs_pid)
1341 1357 mem = vcs_process.memory_info()
1342 1358 client.tag_before('vcsserver.rss', mem.rss)
1343 1359 client.tag_before('vcsserver.vms', mem.vms)
1344 1360
1345 1361 test_process = psutil.Process()
1346 1362 mem = test_process.memory_info()
1347 1363 client.tag_before('test.rss', mem.rss)
1348 1364 client.tag_before('test.vms', mem.vms)
1349 1365
1350 1366 client.tag_before('time', time.time())
1351 1367
1352 1368 @request.addfinalizer
1353 1369 def send_stats():
1354 1370 client.tag_after('time', time.time())
1355 1371 with server:
1356 1372 gc_stats = server.run_gc()
1357 1373 for tag, value in gc_stats.items():
1358 1374 client.tag_after(tag, value)
1359 1375 mem = vcs_process.memory_info()
1360 1376 client.tag_after('vcsserver.rss', mem.rss)
1361 1377 client.tag_after('vcsserver.vms', mem.vms)
1362 1378
1363 1379 mem = test_process.memory_info()
1364 1380 client.tag_after('test.rss', mem.rss)
1365 1381 client.tag_after('test.vms', mem.vms)
1366 1382
1367 1383 client.collect({
1368 1384 'message': "Finished",
1369 1385 })
1370 1386 client.send_stats()
1371 1387
1372 1388 return client
1373 1389
1374 1390
1375 1391 class AppenlightClient():
1376 1392
1377 1393 url_template = '{url}?protocol_version=0.5'
1378 1394
1379 1395 def __init__(
1380 1396 self, url, api_key, add_server=True, add_timestamp=True,
1381 1397 namespace=None, request=None, testrun=None):
1382 1398 self.url = self.url_template.format(url=url)
1383 1399 self.api_key = api_key
1384 1400 self.add_server = add_server
1385 1401 self.add_timestamp = add_timestamp
1386 1402 self.namespace = namespace
1387 1403 self.request = request
1388 1404 self.server = socket.getfqdn(socket.gethostname())
1389 1405 self.tags_before = {}
1390 1406 self.tags_after = {}
1391 1407 self.stats = []
1392 1408 self.testrun = testrun or {}
1393 1409
1394 1410 def tag_before(self, tag, value):
1395 1411 self.tags_before[tag] = value
1396 1412
1397 1413 def tag_after(self, tag, value):
1398 1414 self.tags_after[tag] = value
1399 1415
1400 1416 def collect(self, data):
1401 1417 if self.add_server:
1402 1418 data.setdefault('server', self.server)
1403 1419 if self.add_timestamp:
1404 1420 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1405 1421 if self.namespace:
1406 1422 data.setdefault('namespace', self.namespace)
1407 1423 if self.request:
1408 1424 data.setdefault('request', self.request)
1409 1425 self.stats.append(data)
1410 1426
1411 1427 def send_stats(self):
1412 1428 tags = [
1413 1429 ('testrun', self.request),
1414 1430 ('testrun.start', self.testrun['start']),
1415 1431 ('testrun.timestamp', self.testrun['timestamp']),
1416 1432 ('test', self.namespace),
1417 1433 ]
1418 1434 for key, value in self.tags_before.items():
1419 1435 tags.append((key + '.before', value))
1420 1436 try:
1421 1437 delta = self.tags_after[key] - value
1422 1438 tags.append((key + '.delta', delta))
1423 1439 except Exception:
1424 1440 pass
1425 1441 for key, value in self.tags_after.items():
1426 1442 tags.append((key + '.after', value))
1427 1443 self.collect({
1428 1444 'message': "Collected tags",
1429 1445 'tags': tags,
1430 1446 })
1431 1447
1432 1448 response = requests.post(
1433 1449 self.url,
1434 1450 headers={
1435 1451 'X-appenlight-api-key': self.api_key},
1436 1452 json=self.stats,
1437 1453 )
1438 1454
1439 1455 if not response.status_code == 200:
1440 1456 pprint.pprint(self.stats)
1441 1457 print response.headers
1442 1458 print response.text
1443 1459 raise Exception('Sending to appenlight failed')
1444 1460
1445 1461
1446 1462 @pytest.fixture
1447 1463 def gist_util(request, pylonsapp):
1448 1464 """
1449 1465 Provides a wired instance of `GistUtility` with integrated cleanup.
1450 1466 """
1451 1467 utility = GistUtility()
1452 1468 request.addfinalizer(utility.cleanup)
1453 1469 return utility
1454 1470
1455 1471
1456 1472 class GistUtility(object):
1457 1473 def __init__(self):
1458 1474 self.fixture = Fixture()
1459 1475 self.gist_ids = []
1460 1476
1461 1477 def create_gist(self, **kwargs):
1462 1478 gist = self.fixture.create_gist(**kwargs)
1463 1479 self.gist_ids.append(gist.gist_id)
1464 1480 return gist
1465 1481
1466 1482 def cleanup(self):
1467 1483 for id_ in self.gist_ids:
1468 1484 self.fixture.destroy_gists(str(id_))
1469 1485
1470 1486
1471 1487 @pytest.fixture
1472 1488 def enabled_backends(request):
1473 1489 backends = request.config.option.backends
1474 1490 return backends[:]
1475 1491
1476 1492
1477 1493 @pytest.fixture
1478 1494 def settings_util(request):
1479 1495 """
1480 1496 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1481 1497 """
1482 1498 utility = SettingsUtility()
1483 1499 request.addfinalizer(utility.cleanup)
1484 1500 return utility
1485 1501
1486 1502
1487 1503 class SettingsUtility(object):
1488 1504 def __init__(self):
1489 1505 self.rhodecode_ui_ids = []
1490 1506 self.rhodecode_setting_ids = []
1491 1507 self.repo_rhodecode_ui_ids = []
1492 1508 self.repo_rhodecode_setting_ids = []
1493 1509
1494 1510 def create_repo_rhodecode_ui(
1495 1511 self, repo, section, value, key=None, active=True, cleanup=True):
1496 1512 key = key or hashlib.sha1(
1497 1513 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1498 1514
1499 1515 setting = RepoRhodeCodeUi()
1500 1516 setting.repository_id = repo.repo_id
1501 1517 setting.ui_section = section
1502 1518 setting.ui_value = value
1503 1519 setting.ui_key = key
1504 1520 setting.ui_active = active
1505 1521 Session().add(setting)
1506 1522 Session().commit()
1507 1523
1508 1524 if cleanup:
1509 1525 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1510 1526 return setting
1511 1527
1512 1528 def create_rhodecode_ui(
1513 1529 self, section, value, key=None, active=True, cleanup=True):
1514 1530 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1515 1531
1516 1532 setting = RhodeCodeUi()
1517 1533 setting.ui_section = section
1518 1534 setting.ui_value = value
1519 1535 setting.ui_key = key
1520 1536 setting.ui_active = active
1521 1537 Session().add(setting)
1522 1538 Session().commit()
1523 1539
1524 1540 if cleanup:
1525 1541 self.rhodecode_ui_ids.append(setting.ui_id)
1526 1542 return setting
1527 1543
1528 1544 def create_repo_rhodecode_setting(
1529 1545 self, repo, name, value, type_, cleanup=True):
1530 1546 setting = RepoRhodeCodeSetting(
1531 1547 repo.repo_id, key=name, val=value, type=type_)
1532 1548 Session().add(setting)
1533 1549 Session().commit()
1534 1550
1535 1551 if cleanup:
1536 1552 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1537 1553 return setting
1538 1554
1539 1555 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1540 1556 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1541 1557 Session().add(setting)
1542 1558 Session().commit()
1543 1559
1544 1560 if cleanup:
1545 1561 self.rhodecode_setting_ids.append(setting.app_settings_id)
1546 1562
1547 1563 return setting
1548 1564
1549 1565 def cleanup(self):
1550 1566 for id_ in self.rhodecode_ui_ids:
1551 1567 setting = RhodeCodeUi.get(id_)
1552 1568 Session().delete(setting)
1553 1569
1554 1570 for id_ in self.rhodecode_setting_ids:
1555 1571 setting = RhodeCodeSetting.get(id_)
1556 1572 Session().delete(setting)
1557 1573
1558 1574 for id_ in self.repo_rhodecode_ui_ids:
1559 1575 setting = RepoRhodeCodeUi.get(id_)
1560 1576 Session().delete(setting)
1561 1577
1562 1578 for id_ in self.repo_rhodecode_setting_ids:
1563 1579 setting = RepoRhodeCodeSetting.get(id_)
1564 1580 Session().delete(setting)
1565 1581
1566 1582 Session().commit()
1567 1583
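# Illustrative sketch (not part of this changeset): creating a throw-away ui
# setting through `settings_util`; the section and value are made up.

def test_settings_util_example(settings_util):
    setting = settings_util.create_rhodecode_ui(
        'example_section', 'example_value')
    assert setting.ui_id in settings_util.rhodecode_ui_ids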
1568 1584
1569 1585 @pytest.fixture
1570 1586 def no_notifications(request):
1571 1587 notification_patcher = mock.patch(
1572 1588 'rhodecode.model.notification.NotificationModel.create')
1573 1589 notification_patcher.start()
1574 1590 request.addfinalizer(notification_patcher.stop)
1575 1591
1576 1592
1577 1593 @pytest.fixture
1578 1594 def silence_action_logger(request):
1579 1595 notification_patcher = mock.patch(
1580 1596 'rhodecode.lib.utils.action_logger')
1581 1597 notification_patcher.start()
1582 1598 request.addfinalizer(notification_patcher.stop)
1583 1599
1584 1600
1585 1601 @pytest.fixture(scope='session')
1586 1602 def repeat(request):
1587 1603 """
1588 1604 The number of repetitions is based on this fixture.
1589 1605
1590 1606 Slower calls may divide it by 10 or 100. It is chosen so that the
1591 1607 tests are not too slow in our default test suite.
1592 1608 """
1593 1609 return request.config.getoption('--repeat')
1594 1610
1595 1611
1596 1612 @pytest.fixture
1597 1613 def rhodecode_fixtures():
1598 1614 return Fixture()
1599 1615
1600 1616
1601 1617 @pytest.fixture
1602 1618 def request_stub():
1603 1619 """
1604 1620 Stub request object.
1605 1621 """
1606 1622 request = pyramid.testing.DummyRequest()
1607 1623 request.scheme = 'https'
1608 1624 return request
1609 1625
1610 1626
1611 1627 @pytest.fixture
1612 1628 def config_stub(request, request_stub):
1613 1629 """
1614 1630 Set up pyramid.testing and return the Configurator.
1615 1631 """
1616 1632 config = pyramid.testing.setUp(request=request_stub)
1617 1633
1618 1634 @request.addfinalizer
1619 1635 def cleanup():
1620 1636 pyramid.testing.tearDown()
1621 1637
1622 1638 return config