tests: allow specifying repo_type in user_util create_repo
marcink - r1569:037619e0 default
@@ -1,1808 +1,1809 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33
34 34 import mock
35 35 import pyramid.testing
36 36 import pytest
37 37 import colander
38 38 import requests
39 39
40 40 import rhodecode
41 41 from rhodecode.lib.utils2 import AttributeDict
42 42 from rhodecode.model.changeset_status import ChangesetStatusModel
43 43 from rhodecode.model.comment import CommentsModel
44 44 from rhodecode.model.db import (
45 45 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
46 46 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
47 47 from rhodecode.model.meta import Session
48 48 from rhodecode.model.pull_request import PullRequestModel
49 49 from rhodecode.model.repo import RepoModel
50 50 from rhodecode.model.repo_group import RepoGroupModel
51 51 from rhodecode.model.user import UserModel
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53 from rhodecode.model.user_group import UserGroupModel
54 54 from rhodecode.model.integration import IntegrationModel
55 55 from rhodecode.integrations import integration_type_registry
56 56 from rhodecode.integrations.types.base import IntegrationTypeBase
57 57 from rhodecode.lib.utils import repo2db_mapper
58 58 from rhodecode.lib.vcs import create_vcsserver_proxy
59 59 from rhodecode.lib.vcs.backends import get_backend
60 60 from rhodecode.lib.vcs.nodes import FileNode
61 61 from rhodecode.tests import (
62 62 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 63 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 64 TEST_USER_REGULAR_PASS)
65 65 from rhodecode.tests.utils import CustomTestApp
66 66 from rhodecode.tests.fixture import Fixture
67 67
68 68
69 69 def _split_comma(value):
70 70 return value.split(',')
71 71
72 72
73 73 def pytest_addoption(parser):
74 74 parser.addoption(
75 75 '--keep-tmp-path', action='store_true',
76 76 help="Keep the test temporary directories")
77 77 parser.addoption(
78 78 '--backends', action='store', type=_split_comma,
79 79 default=['git', 'hg', 'svn'],
80 80 help="Select which backends to test for backend specific tests.")
81 81 parser.addoption(
82 82 '--dbs', action='store', type=_split_comma,
83 83 default=['sqlite'],
84 84 help="Select which database to test for database specific tests. "
85 85 "Possible options are sqlite,postgres,mysql")
86 86 parser.addoption(
87 87 '--appenlight', '--ae', action='store_true',
88 88 help="Track statistics in appenlight.")
89 89 parser.addoption(
90 90 '--appenlight-api-key', '--ae-key',
91 91 help="API key for Appenlight.")
92 92 parser.addoption(
93 93 '--appenlight-url', '--ae-url',
94 94 default="https://ae.rhodecode.com",
95 95 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
96 96 parser.addoption(
97 97 '--sqlite-connection-string', action='store',
98 98 default='', help="Connection string for the dbs tests with SQLite")
99 99 parser.addoption(
100 100 '--postgres-connection-string', action='store',
101 101 default='', help="Connection string for the dbs tests with Postgres")
102 102 parser.addoption(
103 103 '--mysql-connection-string', action='store',
104 104 default='', help="Connection string for the dbs tests with MySQL")
105 105 parser.addoption(
106 106 '--repeat', type=int, default=100,
107 107 help="Number of repetitions in performance tests.")
108 108
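# Illustrative invocation combining the options registered above; the
# connection string is a placeholder, not a real credential:
#
#   py.test --backends=git,hg --dbs=postgres \
#       --postgres-connection-string='postgresql://user:secret@localhost/rc_test'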
109 109
110 110 def pytest_configure(config):
111 111 # Apply the kombu patch early on, needed for test discovery on Python 2.7.11
112 112 from rhodecode.config import patches
113 113 patches.kombu_1_5_1_python_2_7_11()
114 114
115 115
116 116 def pytest_collection_modifyitems(session, config, items):
117 117 # Filter out items marked with nose's @nottest (__test__ = False); used during the transition from nose to pytest
118 118 remaining = [
119 119 i for i in items if getattr(i.obj, '__test__', True)]
120 120 items[:] = remaining
121 121
122 122
123 123 def pytest_generate_tests(metafunc):
124 124 # Support test generation based on --backend parameter
125 125 if 'backend_alias' in metafunc.fixturenames:
126 126 backends = get_backends_from_metafunc(metafunc)
127 127 scope = None
128 128 if not backends:
129 129 pytest.skip("Not enabled for any of selected backends")
130 130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 131 elif hasattr(metafunc.function, 'backends'):
132 132 backends = get_backends_from_metafunc(metafunc)
133 133 if not backends:
134 134 pytest.skip("Not enabled for any of selected backends")
135 135
136 136
137 137 def get_backends_from_metafunc(metafunc):
138 138 requested_backends = set(metafunc.config.getoption('--backends'))
139 139 if hasattr(metafunc.function, 'backends'):
140 140 # Supported backends by this test function, created from
141 141 # pytest.mark.backends
142 142 backends = metafunc.function.backends.args
143 143 elif hasattr(metafunc.cls, 'backend_alias'):
144 144 # Support class attribute "backend_alias", this is mainly
145 145 # for legacy reasons for tests not yet using pytest.mark.backends
146 146 backends = [metafunc.cls.backend_alias]
147 147 else:
148 148 backends = metafunc.config.getoption('--backends')
149 149 return requested_backends.intersection(backends)
150 150
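# Illustrative sketch: restricting a test to specific backends with the
# ``pytest.mark.backends`` marker read above; the test is parametrized only
# for the intersection of these aliases with ``--backends``:
#
#   @pytest.mark.backends('git', 'hg')
#   def test_example(backend_alias):
#       assert backend_alias in ('git', 'hg')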
151 151
152 152 @pytest.fixture(scope='session', autouse=True)
153 153 def activate_example_rcextensions(request):
154 154 """
155 155 Patch in an example rcextensions module which verifies passed in kwargs.
156 156 """
157 157 from rhodecode.tests.other import example_rcextensions
158 158
159 159 old_extensions = rhodecode.EXTENSIONS
160 160 rhodecode.EXTENSIONS = example_rcextensions
161 161
162 162 @request.addfinalizer
163 163 def cleanup():
164 164 rhodecode.EXTENSIONS = old_extensions
165 165
166 166
167 167 @pytest.fixture
168 168 def capture_rcextensions():
169 169 """
170 170 Returns the recorded calls to entry points in rcextensions.
171 171 """
172 172 calls = rhodecode.EXTENSIONS.calls
173 173 calls.clear()
174 174 # Note: At this moment, it is still the empty dict, but that will
175 175 # be filled during the test run and since it is a reference this
176 176 # is enough to make it work.
177 177 return calls
178 178
179 179
180 180 @pytest.fixture(scope='session')
181 181 def http_environ_session():
182 182 """
183 183 Allows using "http_environ" in session scope.
184 184 """
185 185 return http_environ(
186 186 http_host_stub=http_host_stub())
187 187
188 188
189 189 @pytest.fixture
190 190 def http_host_stub():
191 191 """
192 192 Value of HTTP_HOST in the test run.
193 193 """
194 194 return 'test.example.com:80'
195 195
196 196
197 197 @pytest.fixture
198 198 def http_environ(http_host_stub):
199 199 """
200 200 HTTP extra environ keys.
201 201
202 202 Used by the test application as well as for setting up the pylons
203 203 environment. In the case of the fixture "app" it should be possible
204 204 to override this for a specific test case.
205 205 """
206 206 return {
207 207 'SERVER_NAME': http_host_stub.split(':')[0],
208 208 'SERVER_PORT': http_host_stub.split(':')[1],
209 209 'HTTP_HOST': http_host_stub,
210 210 'HTTP_USER_AGENT': 'rc-test-agent',
211 211 'REQUEST_METHOD': 'GET'
212 212 }
213 213
214 214
215 215 @pytest.fixture(scope='function')
216 216 def app(request, pylonsapp, http_environ):
217 217 app = CustomTestApp(
218 218 pylonsapp,
219 219 extra_environ=http_environ)
220 220 if request.cls:
221 221 request.cls.app = app
222 222 return app
223 223
224 224
225 225 @pytest.fixture(scope='session')
226 226 def app_settings(pylonsapp, pylons_config):
227 227 """
228 228 Settings dictionary used to create the app.
229 229
230 230 Parses the ini file and passes the result through the sanitize and apply
231 231 defaults mechanism in `rhodecode.config.middleware`.
232 232 """
233 233 from paste.deploy.loadwsgi import loadcontext, APP
234 234 from rhodecode.config.middleware import (
235 235 sanitize_settings_and_apply_defaults)
236 236 context = loadcontext(APP, 'config:' + pylons_config)
237 237 settings = sanitize_settings_and_apply_defaults(context.config())
238 238 return settings
239 239
240 240
241 241 @pytest.fixture(scope='session')
242 242 def db(app_settings):
243 243 """
244 244 Initializes the database connection.
245 245
246 246 It uses the same settings which are used to create the ``pylonsapp`` or
247 247 ``app`` fixtures.
248 248 """
249 249 from rhodecode.config.utils import initialize_database
250 250 initialize_database(app_settings)
251 251
252 252
253 253 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
254 254
255 255
256 256 def _autologin_user(app, *args):
257 257 session = login_user_session(app, *args)
258 258 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
259 259 return LoginData(csrf_token, session['rhodecode_user'])
260 260
261 261
262 262 @pytest.fixture
263 263 def autologin_user(app):
264 264 """
265 265 Utility fixture which makes sure that the admin user is logged in
266 266 """
267 267 return _autologin_user(app)
268 268
269 269
270 270 @pytest.fixture
271 271 def autologin_regular_user(app):
272 272 """
273 273 Utility fixture which makes sure that the regular user is logged in
274 274 """
275 275 return _autologin_user(
276 276 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
277 277
278 278
279 279 @pytest.fixture(scope='function')
280 280 def csrf_token(request, autologin_user):
281 281 return autologin_user.csrf_token
282 282
283 283
284 284 @pytest.fixture(scope='function')
285 285 def xhr_header(request):
286 286 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
287 287
288 288
289 289 @pytest.fixture
290 290 def real_crypto_backend(monkeypatch):
291 291 """
292 292 Switch the production crypto backend on for this test.
293 293
294 294 During the test run the crypto backend is replaced with a faster
295 295 implementation based on the MD5 algorithm.
296 296 """
297 297 monkeypatch.setattr(rhodecode, 'is_test', False)
298 298
299 299
300 300 @pytest.fixture(scope='class')
301 301 def index_location(request, pylonsapp):
302 302 index_location = pylonsapp.config['app_conf']['search.location']
303 303 if request.cls:
304 304 request.cls.index_location = index_location
305 305 return index_location
306 306
307 307
308 308 @pytest.fixture(scope='session', autouse=True)
309 309 def tests_tmp_path(request):
310 310 """
311 311 Create temporary directory to be used during the test session.
312 312 """
313 313 if not os.path.exists(TESTS_TMP_PATH):
314 314 os.makedirs(TESTS_TMP_PATH)
315 315
316 316 if not request.config.getoption('--keep-tmp-path'):
317 317 @request.addfinalizer
318 318 def remove_tmp_path():
319 319 shutil.rmtree(TESTS_TMP_PATH)
320 320
321 321 return TESTS_TMP_PATH
322 322
323 323
324 324 @pytest.fixture
325 325 def test_repo_group(request):
326 326 """
327 327 Create a temporary repository group, and destroy it automatically
328 328 after usage
329 329 """
330 330 fixture = Fixture()
331 331 repogroupid = 'test_repo_group_%s' % int(time.time())
332 332 repo_group = fixture.create_repo_group(repogroupid)
333 333
334 334 def _cleanup():
335 335 fixture.destroy_repo_group(repogroupid)
336 336
337 337 request.addfinalizer(_cleanup)
338 338 return repo_group
339 339
340 340
341 341 @pytest.fixture
342 342 def test_user_group(request):
343 343 """
344 344 Create a temporary user group, and destroy it after
345 345 usage automatically
346 346 """
347 347 fixture = Fixture()
348 348 usergroupid = 'test_user_group_%s' % int(time.time())
349 349 user_group = fixture.create_user_group(usergroupid)
350 350
351 351 def _cleanup():
352 352 fixture.destroy_user_group(user_group)
353 353
354 354 request.addfinalizer(_cleanup)
355 355 return user_group
356 356
357 357
358 358 @pytest.fixture(scope='session')
359 359 def test_repo(request):
360 360 container = TestRepoContainer()
361 361 request.addfinalizer(container._cleanup)
362 362 return container
363 363
364 364
365 365 class TestRepoContainer(object):
366 366 """
367 367 Container for test repositories which are used read only.
368 368
369 369 Repositories will be created on demand and re-used during the lifetime
370 370 of this object.
371 371
372 372 Usage to get the svn test repository "minimal"::
373 373
374 374 test_repo = TestContainer()
375 375 repo = test_repo('minimal', 'svn')
376 376
377 377 """
378 378
379 379 dump_extractors = {
380 380 'git': utils.extract_git_repo_from_dump,
381 381 'hg': utils.extract_hg_repo_from_dump,
382 382 'svn': utils.extract_svn_repo_from_dump,
383 383 }
384 384
385 385 def __init__(self):
386 386 self._cleanup_repos = []
387 387 self._fixture = Fixture()
388 388 self._repos = {}
389 389
390 390 def __call__(self, dump_name, backend_alias):
391 391 key = (dump_name, backend_alias)
392 392 if key not in self._repos:
393 393 repo = self._create_repo(dump_name, backend_alias)
394 394 self._repos[key] = repo.repo_id
395 395 return Repository.get(self._repos[key])
396 396
397 397 def _create_repo(self, dump_name, backend_alias):
398 398 repo_name = '%s-%s' % (backend_alias, dump_name)
399 399 backend_class = get_backend(backend_alias)
400 400 dump_extractor = self.dump_extractors[backend_alias]
401 401 repo_path = dump_extractor(dump_name, repo_name)
402 402 vcs_repo = backend_class(repo_path)
403 403 repo2db_mapper({repo_name: vcs_repo})
404 404 repo = RepoModel().get_by_repo_name(repo_name)
405 405 self._cleanup_repos.append(repo_name)
406 406 return repo
407 407
408 408 def _cleanup(self):
409 409 for repo_name in reversed(self._cleanup_repos):
410 410 self._fixture.destroy_repo(repo_name)
411 411
412 412
413 413 @pytest.fixture
414 414 def backend(request, backend_alias, pylonsapp, test_repo):
415 415 """
416 416 Parametrized fixture which represents a single backend implementation.
417 417
418 418 It respects the option `--backends` to focus the test run on specific
419 419 backend implementations.
420 420
421 421 It also supports `pytest.mark.xfail_backends` to mark tests as failing
422 422 for specific backends. This is intended as a utility for incremental
423 423 development of a new backend implementation.
424 424 """
425 425 if backend_alias not in request.config.getoption('--backends'):
426 426 pytest.skip("Backend %s not selected." % (backend_alias, ))
427 427
428 428 utils.check_xfail_backends(request.node, backend_alias)
429 429 utils.check_skip_backends(request.node, backend_alias)
430 430
431 431 repo_name = 'vcs_test_%s' % (backend_alias, )
432 432 backend = Backend(
433 433 alias=backend_alias,
434 434 repo_name=repo_name,
435 435 test_name=request.node.name,
436 436 test_repo_container=test_repo)
437 437 request.addfinalizer(backend.cleanup)
438 438 return backend
439 439
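# Illustrative sketch of a backend-parametrized test; the marker values and
# assertion are hypothetical:
#
#   @pytest.mark.backends('git', 'hg', 'svn')
#   @pytest.mark.xfail_backends('svn')
#   def test_example(backend):
#       repo = backend.create_repo(number_of_commits=1)
#       assert repo.repo_name == backend.repo_name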
440 440
441 441 @pytest.fixture
442 442 def backend_git(request, pylonsapp, test_repo):
443 443 return backend(request, 'git', pylonsapp, test_repo)
444 444
445 445
446 446 @pytest.fixture
447 447 def backend_hg(request, pylonsapp, test_repo):
448 448 return backend(request, 'hg', pylonsapp, test_repo)
449 449
450 450
451 451 @pytest.fixture
452 452 def backend_svn(request, pylonsapp, test_repo):
453 453 return backend(request, 'svn', pylonsapp, test_repo)
454 454
455 455
456 456 @pytest.fixture
457 457 def backend_random(backend_git):
458 458 """
459 459 Use this to express that your tests need "a backend".
460 460
461 461 A few of our tests need a backend, so that we can run the code. This
462 462 fixture is intended to be used for such cases. It will pick one of the
463 463 backends and run the tests.
464 464
465 465 The fixture `backend` would run the test multiple times for each
466 466 available backend which is a pure waste of time if the test is
467 467 independent of the backend type.
468 468 """
469 469 # TODO: johbo: Change this to pick a random backend
470 470 return backend_git
471 471
472 472
473 473 @pytest.fixture
474 474 def backend_stub(backend_git):
475 475 """
476 476 Use this to express that your tests need a backend stub
477 477
478 478 TODO: mikhail: Implement real stub logic instead of returning
479 479 a git backend
480 480 """
481 481 return backend_git
482 482
483 483
484 484 @pytest.fixture
485 485 def repo_stub(backend_stub):
486 486 """
487 487 Use this to express that your tests need a repository stub
488 488 """
489 489 return backend_stub.create_repo()
490 490
491 491
492 492 class Backend(object):
493 493 """
494 494 Represents the test configuration for one supported backend
495 495
496 496 Provides easy access to different test repositories based on
497 497 `__getitem__`. Such repositories will only be created once per test
498 498 session.
499 499 """
500 500
501 501 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
502 502 _master_repo = None
503 503 _commit_ids = {}
504 504
505 505 def __init__(self, alias, repo_name, test_name, test_repo_container):
506 506 self.alias = alias
507 507 self.repo_name = repo_name
508 508 self._cleanup_repos = []
509 509 self._test_name = test_name
510 510 self._test_repo_container = test_repo_container
511 511 # TODO: johbo: Used as an interim delegate. Not yet sure if Backend or
512 512 # Fixture will survive in the end.
513 513 self._fixture = Fixture()
514 514
515 515 def __getitem__(self, key):
516 516 return self._test_repo_container(key, self.alias)
517 517
518 518 @property
519 519 def repo(self):
520 520 """
521 521 Returns the "current" repository. This is the vcs_test repo or the
522 522 last repo which has been created with `create_repo`.
523 523 """
524 524 from rhodecode.model.db import Repository
525 525 return Repository.get_by_repo_name(self.repo_name)
526 526
527 527 @property
528 528 def default_branch_name(self):
529 529 VcsRepository = get_backend(self.alias)
530 530 return VcsRepository.DEFAULT_BRANCH_NAME
531 531
532 532 @property
533 533 def default_head_id(self):
534 534 """
535 535 Returns the default head id of the underlying backend.
536 536
537 537 This will be the default branch name in case the backend does have a
538 538 default branch. In the other cases it will point to a valid head
539 539 which can serve as the base to create a new commit on top of it.
540 540 """
541 541 vcsrepo = self.repo.scm_instance()
542 542 head_id = (
543 543 vcsrepo.DEFAULT_BRANCH_NAME or
544 544 vcsrepo.commit_ids[-1])
545 545 return head_id
546 546
547 547 @property
548 548 def commit_ids(self):
549 549 """
550 550 Returns the list of commits for the last created repository
551 551 """
552 552 return self._commit_ids
553 553
554 554 def create_master_repo(self, commits):
555 555 """
556 556 Create a repository and remember it as a template.
557 557
558 558 This makes it easy to create derived repositories to construct
559 559 more complex scenarios for diff, compare and pull requests.
560 560
561 561 Returns a commit map which maps from commit message to raw_id.
562 562 """
563 563 self._master_repo = self.create_repo(commits=commits)
564 564 return self._commit_ids
565 565
566 566 def create_repo(
567 567 self, commits=None, number_of_commits=0, heads=None,
568 568 name_suffix=u'', **kwargs):
569 569 """
570 570 Create a repository and record it for later cleanup.
571 571
572 572 :param commits: Optional. A sequence of dict instances.
573 573 Will add a commit per entry to the new repository.
574 574 :param number_of_commits: Optional. If set to a number, this number of
575 575 commits will be added to the new repository.
576 576 :param heads: Optional. Can be set to a sequence of commit
577 577 names which shall be pulled in from the master repository.
578 578
579 579 """
580 580 self.repo_name = self._next_repo_name() + name_suffix
581 581 repo = self._fixture.create_repo(
582 582 self.repo_name, repo_type=self.alias, **kwargs)
583 583 self._cleanup_repos.append(repo.repo_name)
584 584
585 585 commits = commits or [
586 586 {'message': 'Commit %s of %s' % (x, self.repo_name)}
587 587 for x in xrange(number_of_commits)]
588 588 self._add_commits_to_repo(repo.scm_instance(), commits)
589 589 if heads:
590 590 self.pull_heads(repo, heads)
591 591
592 592 return repo
593 593
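# Illustrative usage sketch, mirroring how pr_util below builds repositories;
# commit messages and suffixes are hypothetical:
#
#   commit_map = backend.create_master_repo([
#       {'message': 'c1'},
#       {'message': 'c2'},
#   ])
#   target = backend.create_repo(heads=['c1'], name_suffix=u'-target')
#   source = backend.create_repo(heads=['c2'], name_suffix=u'-source')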
594 594 def pull_heads(self, repo, heads):
595 595 """
596 596 Make sure that repo contains all commits mentioned in `heads`
597 597 """
598 598 vcsmaster = self._master_repo.scm_instance()
599 599 vcsrepo = repo.scm_instance()
600 600 vcsrepo.config.clear_section('hooks')
601 601 commit_ids = [self._commit_ids[h] for h in heads]
602 602 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
603 603
604 604 def create_fork(self):
605 605 repo_to_fork = self.repo_name
606 606 self.repo_name = self._next_repo_name()
607 607 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
608 608 self._cleanup_repos.append(self.repo_name)
609 609 return repo
610 610
611 611 def new_repo_name(self, suffix=u''):
612 612 self.repo_name = self._next_repo_name() + suffix
613 613 self._cleanup_repos.append(self.repo_name)
614 614 return self.repo_name
615 615
616 616 def _next_repo_name(self):
617 617 return u"%s_%s" % (
618 618 self.invalid_repo_name.sub(u'_', self._test_name),
619 619 len(self._cleanup_repos))
620 620
621 621 def ensure_file(self, filename, content='Test content\n'):
622 622 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
623 623 commits = [
624 624 {'added': [
625 625 FileNode(filename, content=content),
626 626 ]},
627 627 ]
628 628 self._add_commits_to_repo(self.repo.scm_instance(), commits)
629 629
630 630 def enable_downloads(self):
631 631 repo = self.repo
632 632 repo.enable_downloads = True
633 633 Session().add(repo)
634 634 Session().commit()
635 635
636 636 def cleanup(self):
637 637 for repo_name in reversed(self._cleanup_repos):
638 638 self._fixture.destroy_repo(repo_name)
639 639
640 640 def _add_commits_to_repo(self, repo, commits):
641 641 commit_ids = _add_commits_to_repo(repo, commits)
642 642 if not commit_ids:
643 643 return
644 644 self._commit_ids = commit_ids
645 645
646 646 # Creating refs for Git to allow fetching them from remote repository
647 647 if self.alias == 'git':
648 648 refs = {}
649 649 for message in self._commit_ids:
650 650 # TODO: mikhail: do more special chars replacements
651 651 ref_name = 'refs/test-refs/{}'.format(
652 652 message.replace(' ', ''))
653 653 refs[ref_name] = self._commit_ids[message]
654 654 self._create_refs(repo, refs)
655 655
656 656 def _create_refs(self, repo, refs):
657 657 for ref_name in refs:
658 658 repo.set_refs(ref_name, refs[ref_name])
659 659
660 660
661 661 @pytest.fixture
662 662 def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
663 663 """
664 664 Parametrized fixture which represents a single vcs backend implementation.
665 665
666 666 See the fixture `backend` for more details. This one implements the same
667 667 concept, but on vcs level. So it does not provide model instances etc.
668 668
669 669 Parameters are generated dynamically, see :func:`pytest_generate_tests`
670 670 for how this works.
671 671 """
672 672 if backend_alias not in request.config.getoption('--backends'):
673 673 pytest.skip("Backend %s not selected." % (backend_alias, ))
674 674
675 675 utils.check_xfail_backends(request.node, backend_alias)
676 676 utils.check_skip_backends(request.node, backend_alias)
677 677
678 678 repo_name = 'vcs_test_%s' % (backend_alias, )
679 679 repo_path = os.path.join(tests_tmp_path, repo_name)
680 680 backend = VcsBackend(
681 681 alias=backend_alias,
682 682 repo_path=repo_path,
683 683 test_name=request.node.name,
684 684 test_repo_container=test_repo)
685 685 request.addfinalizer(backend.cleanup)
686 686 return backend
687 687
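# Illustrative sketch of a vcs-level test; the file name is hypothetical:
#
#   def test_example_vcs(vcsbackend):
#       vcsrepo = vcsbackend.create_repo(number_of_commits=1)
#       vcsbackend.add_file(vcsrepo, 'README.rst', content='hello\n')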
688 688
689 689 @pytest.fixture
690 690 def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
691 691 return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)
692 692
693 693
694 694 @pytest.fixture
695 695 def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
696 696 return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)
697 697
698 698
699 699 @pytest.fixture
700 700 def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
701 701 return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)
702 702
703 703
704 704 @pytest.fixture
705 705 def vcsbackend_random(vcsbackend_git):
706 706 """
707 707 Use this to express that your tests need "a vcsbackend".
708 708
709 709 The fixture `vcsbackend` would run the test multiple times for each
710 710 available vcs backend which is a pure waste of time if the test is
711 711 independent of the vcs backend type.
712 712 """
713 713 # TODO: johbo: Change this to pick a random backend
714 714 return vcsbackend_git
715 715
716 716
717 717 @pytest.fixture
718 718 def vcsbackend_stub(vcsbackend_git):
719 719 """
720 720 Use this to express that your test just needs a stub of a vcsbackend.
721 721
722 722 Plan is to eventually implement an in-memory stub to speed tests up.
723 723 """
724 724 return vcsbackend_git
725 725
726 726
727 727 class VcsBackend(object):
728 728 """
729 729 Represents the test configuration for one supported vcs backend.
730 730 """
731 731
732 732 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
733 733
734 734 def __init__(self, alias, repo_path, test_name, test_repo_container):
735 735 self.alias = alias
736 736 self._repo_path = repo_path
737 737 self._cleanup_repos = []
738 738 self._test_name = test_name
739 739 self._test_repo_container = test_repo_container
740 740
741 741 def __getitem__(self, key):
742 742 return self._test_repo_container(key, self.alias).scm_instance()
743 743
744 744 @property
745 745 def repo(self):
746 746 """
747 747 Returns the "current" repository. This is the vcs_test repo or the last
748 748 repo which has been created.
749 749 """
750 750 Repository = get_backend(self.alias)
751 751 return Repository(self._repo_path)
752 752
753 753 @property
754 754 def backend(self):
755 755 """
756 756 Returns the backend implementation class.
757 757 """
758 758 return get_backend(self.alias)
759 759
760 760 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
761 761 repo_name = self._next_repo_name()
762 762 self._repo_path = get_new_dir(repo_name)
763 763 repo_class = get_backend(self.alias)
764 764 src_url = None
765 765 if _clone_repo:
766 766 src_url = _clone_repo.path
767 767 repo = repo_class(self._repo_path, create=True, src_url=src_url)
768 768 self._cleanup_repos.append(repo)
769 769
770 770 commits = commits or [
771 771 {'message': 'Commit %s of %s' % (x, repo_name)}
772 772 for x in xrange(number_of_commits)]
773 773 _add_commits_to_repo(repo, commits)
774 774 return repo
775 775
776 776 def clone_repo(self, repo):
777 777 return self.create_repo(_clone_repo=repo)
778 778
779 779 def cleanup(self):
780 780 for repo in self._cleanup_repos:
781 781 shutil.rmtree(repo.path)
782 782
783 783 def new_repo_path(self):
784 784 repo_name = self._next_repo_name()
785 785 self._repo_path = get_new_dir(repo_name)
786 786 return self._repo_path
787 787
788 788 def _next_repo_name(self):
789 789 return "%s_%s" % (
790 790 self.invalid_repo_name.sub('_', self._test_name),
791 791 len(self._cleanup_repos))
792 792
793 793 def add_file(self, repo, filename, content='Test content\n'):
794 794 imc = repo.in_memory_commit
795 795 imc.add(FileNode(filename, content=content))
796 796 imc.commit(
797 797 message=u'Automatic commit from vcsbackend fixture',
798 798 author=u'Automatic')
799 799
800 800 def ensure_file(self, filename, content='Test content\n'):
801 801 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
802 802 self.add_file(self.repo, filename, content)
803 803
804 804
805 805 def _add_commits_to_repo(vcs_repo, commits):
806 806 commit_ids = {}
807 807 if not commits:
808 808 return commit_ids
809 809
810 810 imc = vcs_repo.in_memory_commit
811 811 commit = None
812 812
813 813 for idx, commit in enumerate(commits):
814 814 message = unicode(commit.get('message', 'Commit %s' % idx))
815 815
816 816 for node in commit.get('added', []):
817 817 imc.add(FileNode(node.path, content=node.content))
818 818 for node in commit.get('changed', []):
819 819 imc.change(FileNode(node.path, content=node.content))
820 820 for node in commit.get('removed', []):
821 821 imc.remove(FileNode(node.path))
822 822
823 823 parents = [
824 824 vcs_repo.get_commit(commit_id=commit_ids[p])
825 825 for p in commit.get('parents', [])]
826 826
827 827 operations = ('added', 'changed', 'removed')
828 828 if not any((commit.get(o) for o in operations)):
829 829 imc.add(FileNode('file_%s' % idx, content=message))
830 830
831 831 commit = imc.commit(
832 832 message=message,
833 833 author=unicode(commit.get('author', 'Automatic')),
834 834 date=commit.get('date'),
835 835 branch=commit.get('branch'),
836 836 parents=parents)
837 837
838 838 commit_ids[commit.message] = commit.raw_id
839 839
840 840 return commit_ids
841 841
842 842
843 843 @pytest.fixture
844 844 def reposerver(request):
845 845 """
846 846 Allows serving a backend repository
847 847 """
848 848
849 849 repo_server = RepoServer()
850 850 request.addfinalizer(repo_server.cleanup)
851 851 return repo_server
852 852
853 853
854 854 class RepoServer(object):
855 855 """
856 856 Utility to serve a local repository for the duration of a test case.
857 857
858 858 Supports only Subversion so far.
859 859 """
860 860
861 861 url = None
862 862
863 863 def __init__(self):
864 864 self._cleanup_servers = []
865 865
866 866 def serve(self, vcsrepo):
867 867 if vcsrepo.alias != 'svn':
868 868 raise TypeError("Backend %s not supported" % vcsrepo.alias)
869 869
870 870 proc = subprocess32.Popen(
871 871 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
872 872 '--root', vcsrepo.path])
873 873 self._cleanup_servers.append(proc)
874 874 self.url = 'svn://localhost'
875 875
876 876 def cleanup(self):
877 877 for proc in self._cleanup_servers:
878 878 proc.terminate()
879 879
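# Illustrative sketch: serving a freshly created Subversion repository, the
# only backend RepoServer supports so far:
#
#   def test_example_served(reposerver, vcsbackend_svn):
#       vcsrepo = vcsbackend_svn.create_repo(number_of_commits=1)
#       reposerver.serve(vcsrepo)
#       assert reposerver.url == 'svn://localhost'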
880 880
881 881 @pytest.fixture
882 882 def pr_util(backend, request):
883 883 """
884 884 Utility for tests of models and for functional tests around pull requests.
885 885
886 886 It gives an instance of :class:`PRTestUtility` which provides various
887 887 utility methods around one pull request.
888 888
889 889 This fixture uses `backend` and inherits its parameterization.
890 890 """
891 891
892 892 util = PRTestUtility(backend)
893 893
894 894 @request.addfinalizer
895 895 def cleanup():
896 896 util.cleanup()
897 897
898 898 return util
899 899
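# Illustrative sketch of a pull request test using pr_util; keyword values
# are hypothetical:
#
#   def test_example_pr(pr_util):
#       pull_request = pr_util.create_pull_request(mergeable=True)
#       pr_util.approve()
#       assert pull_request.reviewers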
900 900
901 901 class PRTestUtility(object):
902 902
903 903 pull_request = None
904 904 pull_request_id = None
905 905 mergeable_patcher = None
906 906 mergeable_mock = None
907 907 notification_patcher = None
908 908
909 909 def __init__(self, backend):
910 910 self.backend = backend
911 911
912 912 def create_pull_request(
913 913 self, commits=None, target_head=None, source_head=None,
914 914 revisions=None, approved=False, author=None, mergeable=False,
915 915 enable_notifications=True, name_suffix=u'', reviewers=None,
916 916 title=u"Test", description=u"Description"):
917 917 self.set_mergeable(mergeable)
918 918 if not enable_notifications:
919 919 # mock notification side effect
920 920 self.notification_patcher = mock.patch(
921 921 'rhodecode.model.notification.NotificationModel.create')
922 922 self.notification_patcher.start()
923 923
924 924 if not self.pull_request:
925 925 if not commits:
926 926 commits = [
927 927 {'message': 'c1'},
928 928 {'message': 'c2'},
929 929 {'message': 'c3'},
930 930 ]
931 931 target_head = 'c1'
932 932 source_head = 'c2'
933 933 revisions = ['c2']
934 934
935 935 self.commit_ids = self.backend.create_master_repo(commits)
936 936 self.target_repository = self.backend.create_repo(
937 937 heads=[target_head], name_suffix=name_suffix)
938 938 self.source_repository = self.backend.create_repo(
939 939 heads=[source_head], name_suffix=name_suffix)
940 940 self.author = author or UserModel().get_by_username(
941 941 TEST_USER_ADMIN_LOGIN)
942 942
943 943 model = PullRequestModel()
944 944 self.create_parameters = {
945 945 'created_by': self.author,
946 946 'source_repo': self.source_repository.repo_name,
947 947 'source_ref': self._default_branch_reference(source_head),
948 948 'target_repo': self.target_repository.repo_name,
949 949 'target_ref': self._default_branch_reference(target_head),
950 950 'revisions': [self.commit_ids[r] for r in revisions],
951 951 'reviewers': reviewers or self._get_reviewers(),
952 952 'title': title,
953 953 'description': description,
954 954 }
955 955 self.pull_request = model.create(**self.create_parameters)
956 956 assert model.get_versions(self.pull_request) == []
957 957
958 958 self.pull_request_id = self.pull_request.pull_request_id
959 959
960 960 if approved:
961 961 self.approve()
962 962
963 963 Session().add(self.pull_request)
964 964 Session().commit()
965 965
966 966 return self.pull_request
967 967
968 968 def approve(self):
969 969 self.create_status_votes(
970 970 ChangesetStatus.STATUS_APPROVED,
971 971 *self.pull_request.reviewers)
972 972
973 973 def close(self):
974 974 PullRequestModel().close_pull_request(self.pull_request, self.author)
975 975
976 976 def _default_branch_reference(self, commit_message):
977 977 reference = '%s:%s:%s' % (
978 978 'branch',
979 979 self.backend.default_branch_name,
980 980 self.commit_ids[commit_message])
981 981 return reference
982 982
983 983 def _get_reviewers(self):
984 984 model = UserModel()
985 985 return [
986 986 model.get_by_username(TEST_USER_REGULAR_LOGIN),
987 987 model.get_by_username(TEST_USER_REGULAR2_LOGIN),
988 988 ]
989 989
990 990 def update_source_repository(self, head=None):
991 991 heads = [head or 'c3']
992 992 self.backend.pull_heads(self.source_repository, heads=heads)
993 993
994 994 def add_one_commit(self, head=None):
995 995 self.update_source_repository(head=head)
996 996 old_commit_ids = set(self.pull_request.revisions)
997 997 PullRequestModel().update_commits(self.pull_request)
998 998 commit_ids = set(self.pull_request.revisions)
999 999 new_commit_ids = commit_ids - old_commit_ids
1000 1000 assert len(new_commit_ids) == 1
1001 1001 return new_commit_ids.pop()
1002 1002
1003 1003 def remove_one_commit(self):
1004 1004 assert len(self.pull_request.revisions) == 2
1005 1005 source_vcs = self.source_repository.scm_instance()
1006 1006 removed_commit_id = source_vcs.commit_ids[-1]
1007 1007
1008 1008 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1009 1009 # remove the if once that's sorted out.
1010 1010 if self.backend.alias == "git":
1011 1011 kwargs = {'branch_name': self.backend.default_branch_name}
1012 1012 else:
1013 1013 kwargs = {}
1014 1014 source_vcs.strip(removed_commit_id, **kwargs)
1015 1015
1016 1016 PullRequestModel().update_commits(self.pull_request)
1017 1017 assert len(self.pull_request.revisions) == 1
1018 1018 return removed_commit_id
1019 1019
1020 1020 def create_comment(self, linked_to=None):
1021 1021 comment = CommentsModel().create(
1022 1022 text=u"Test comment",
1023 1023 repo=self.target_repository.repo_name,
1024 1024 user=self.author,
1025 1025 pull_request=self.pull_request)
1026 1026 assert comment.pull_request_version_id is None
1027 1027
1028 1028 if linked_to:
1029 1029 PullRequestModel()._link_comments_to_version(linked_to)
1030 1030
1031 1031 return comment
1032 1032
1033 1033 def create_inline_comment(
1034 1034 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1035 1035 comment = CommentsModel().create(
1036 1036 text=u"Test comment",
1037 1037 repo=self.target_repository.repo_name,
1038 1038 user=self.author,
1039 1039 line_no=line_no,
1040 1040 f_path=file_path,
1041 1041 pull_request=self.pull_request)
1042 1042 assert comment.pull_request_version_id is None
1043 1043
1044 1044 if linked_to:
1045 1045 PullRequestModel()._link_comments_to_version(linked_to)
1046 1046
1047 1047 return comment
1048 1048
1049 1049 def create_version_of_pull_request(self):
1050 1050 pull_request = self.create_pull_request()
1051 1051 version = PullRequestModel()._create_version_from_snapshot(
1052 1052 pull_request)
1053 1053 return version
1054 1054
1055 1055 def create_status_votes(self, status, *reviewers):
1056 1056 for reviewer in reviewers:
1057 1057 ChangesetStatusModel().set_status(
1058 1058 repo=self.pull_request.target_repo,
1059 1059 status=status,
1060 1060 user=reviewer.user_id,
1061 1061 pull_request=self.pull_request)
1062 1062
1063 1063 def set_mergeable(self, value):
1064 1064 if not self.mergeable_patcher:
1065 1065 self.mergeable_patcher = mock.patch.object(
1066 1066 VcsSettingsModel, 'get_general_settings')
1067 1067 self.mergeable_mock = self.mergeable_patcher.start()
1068 1068 self.mergeable_mock.return_value = {
1069 1069 'rhodecode_pr_merge_enabled': value}
1070 1070
1071 1071 def cleanup(self):
1072 1072 # In case the source repository is already cleaned up, the pull
1073 1073 # request will already be deleted.
1074 1074 pull_request = PullRequest().get(self.pull_request_id)
1075 1075 if pull_request:
1076 1076 PullRequestModel().delete(pull_request)
1077 1077 Session().commit()
1078 1078
1079 1079 if self.notification_patcher:
1080 1080 self.notification_patcher.stop()
1081 1081
1082 1082 if self.mergeable_patcher:
1083 1083 self.mergeable_patcher.stop()
1084 1084
1085 1085
1086 1086 @pytest.fixture
1087 1087 def user_admin(pylonsapp):
1088 1088 """
1089 1089 Provides the default admin test user as an instance of `db.User`.
1090 1090 """
1091 1091 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1092 1092 return user
1093 1093
1094 1094
1095 1095 @pytest.fixture
1096 1096 def user_regular(pylonsapp):
1097 1097 """
1098 1098 Provides the default regular test user as an instance of `db.User`.
1099 1099 """
1100 1100 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1101 1101 return user
1102 1102
1103 1103
1104 1104 @pytest.fixture
1105 1105 def user_util(request, pylonsapp):
1106 1106 """
1107 1107 Provides a wired instance of `UserUtility` with integrated cleanup.
1108 1108 """
1109 1109 utility = UserUtility(test_name=request.node.name)
1110 1110 request.addfinalizer(utility.cleanup)
1111 1111 return utility
1112 1112
1113 1113
1114 1114 # TODO: johbo: Split this up into utilities per domain or something similar
1115 1115 class UserUtility(object):
1116 1116
1117 1117 def __init__(self, test_name="test"):
1118 1118 self._test_name = self._sanitize_name(test_name)
1119 1119 self.fixture = Fixture()
1120 1120 self.repo_group_ids = []
1121 1121 self.repos_ids = []
1122 1122 self.user_ids = []
1123 1123 self.user_group_ids = []
1124 1124 self.user_repo_permission_ids = []
1125 1125 self.user_group_repo_permission_ids = []
1126 1126 self.user_repo_group_permission_ids = []
1127 1127 self.user_group_repo_group_permission_ids = []
1128 1128 self.user_user_group_permission_ids = []
1129 1129 self.user_group_user_group_permission_ids = []
1130 1130 self.user_permissions = []
1131 1131
1132 1132 def _sanitize_name(self, name):
1133 1133 for char in ['[', ']']:
1134 1134 name = name.replace(char, '_')
1135 1135 return name
1136 1136
1137 1137 def create_repo_group(
1138 1138 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1139 1139 group_name = "{prefix}_repogroup_{count}".format(
1140 1140 prefix=self._test_name,
1141 1141 count=len(self.repo_group_ids))
1142 1142 repo_group = self.fixture.create_repo_group(
1143 1143 group_name, cur_user=owner)
1144 1144 if auto_cleanup:
1145 1145 self.repo_group_ids.append(repo_group.group_id)
1146 1146 return repo_group
1147 1147
1148 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, auto_cleanup=True):
1148 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1149 auto_cleanup=True, repo_type='hg'):
1149 1150 repo_name = "{prefix}_repository_{count}".format(
1150 1151 prefix=self._test_name,
1151 1152 count=len(self.repos_ids))
1152 1153
1153 1154 repository = self.fixture.create_repo(
1154 repo_name, cur_user=owner, repo_group=parent)
1155 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type)
1155 1156 if auto_cleanup:
1156 1157 self.repos_ids.append(repository.repo_id)
1157 1158 return repository
1158 1159
1159 1160 def create_user(self, auto_cleanup=True, **kwargs):
1160 1161 user_name = "{prefix}_user_{count}".format(
1161 1162 prefix=self._test_name,
1162 1163 count=len(self.user_ids))
1163 1164 user = self.fixture.create_user(user_name, **kwargs)
1164 1165 if auto_cleanup:
1165 1166 self.user_ids.append(user.user_id)
1166 1167 return user
1167 1168
1168 1169 def create_user_with_group(self):
1169 1170 user = self.create_user()
1170 1171 user_group = self.create_user_group(members=[user])
1171 1172 return user, user_group
1172 1173
1173 1174 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1174 1175 auto_cleanup=True, **kwargs):
1175 1176 group_name = "{prefix}_usergroup_{count}".format(
1176 1177 prefix=self._test_name,
1177 1178 count=len(self.user_group_ids))
1178 1179 user_group = self.fixture.create_user_group(
1179 1180 group_name, cur_user=owner, **kwargs)
1180 1181
1181 1182 if auto_cleanup:
1182 1183 self.user_group_ids.append(user_group.users_group_id)
1183 1184 if members:
1184 1185 for user in members:
1185 1186 UserGroupModel().add_user_to_group(user_group, user)
1186 1187 return user_group
1187 1188
1188 1189 def grant_user_permission(self, user_name, permission_name):
1189 1190 self._inherit_default_user_permissions(user_name, False)
1190 1191 self.user_permissions.append((user_name, permission_name))
1191 1192
1192 1193 def grant_user_permission_to_repo_group(
1193 1194 self, repo_group, user, permission_name):
1194 1195 permission = RepoGroupModel().grant_user_permission(
1195 1196 repo_group, user, permission_name)
1196 1197 self.user_repo_group_permission_ids.append(
1197 1198 (repo_group.group_id, user.user_id))
1198 1199 return permission
1199 1200
1200 1201 def grant_user_group_permission_to_repo_group(
1201 1202 self, repo_group, user_group, permission_name):
1202 1203 permission = RepoGroupModel().grant_user_group_permission(
1203 1204 repo_group, user_group, permission_name)
1204 1205 self.user_group_repo_group_permission_ids.append(
1205 1206 (repo_group.group_id, user_group.users_group_id))
1206 1207 return permission
1207 1208
1208 1209 def grant_user_permission_to_repo(
1209 1210 self, repo, user, permission_name):
1210 1211 permission = RepoModel().grant_user_permission(
1211 1212 repo, user, permission_name)
1212 1213 self.user_repo_permission_ids.append(
1213 1214 (repo.repo_id, user.user_id))
1214 1215 return permission
1215 1216
1216 1217 def grant_user_group_permission_to_repo(
1217 1218 self, repo, user_group, permission_name):
1218 1219 permission = RepoModel().grant_user_group_permission(
1219 1220 repo, user_group, permission_name)
1220 1221 self.user_group_repo_permission_ids.append(
1221 1222 (repo.repo_id, user_group.users_group_id))
1222 1223 return permission
1223 1224
1224 1225 def grant_user_permission_to_user_group(
1225 1226 self, target_user_group, user, permission_name):
1226 1227 permission = UserGroupModel().grant_user_permission(
1227 1228 target_user_group, user, permission_name)
1228 1229 self.user_user_group_permission_ids.append(
1229 1230 (target_user_group.users_group_id, user.user_id))
1230 1231 return permission
1231 1232
1232 1233 def grant_user_group_permission_to_user_group(
1233 1234 self, target_user_group, user_group, permission_name):
1234 1235 permission = UserGroupModel().grant_user_group_permission(
1235 1236 target_user_group, user_group, permission_name)
1236 1237 self.user_group_user_group_permission_ids.append(
1237 1238 (target_user_group.users_group_id, user_group.users_group_id))
1238 1239 return permission
1239 1240
1240 1241 def revoke_user_permission(self, user_name, permission_name):
1241 1242 self._inherit_default_user_permissions(user_name, True)
1242 1243 UserModel().revoke_perm(user_name, permission_name)
1243 1244
1244 1245 def _inherit_default_user_permissions(self, user_name, value):
1245 1246 user = UserModel().get_by_username(user_name)
1246 1247 user.inherit_default_permissions = value
1247 1248 Session().add(user)
1248 1249 Session().commit()
1249 1250
1250 1251 def cleanup(self):
1251 1252 self._cleanup_permissions()
1252 1253 self._cleanup_repos()
1253 1254 self._cleanup_repo_groups()
1254 1255 self._cleanup_user_groups()
1255 1256 self._cleanup_users()
1256 1257
1257 1258 def _cleanup_permissions(self):
1258 1259 if self.user_permissions:
1259 1260 for user_name, permission_name in self.user_permissions:
1260 1261 self.revoke_user_permission(user_name, permission_name)
1261 1262
1262 1263 for permission in self.user_repo_permission_ids:
1263 1264 RepoModel().revoke_user_permission(*permission)
1264 1265
1265 1266 for permission in self.user_group_repo_permission_ids:
1266 1267 RepoModel().revoke_user_group_permission(*permission)
1267 1268
1268 1269 for permission in self.user_repo_group_permission_ids:
1269 1270 RepoGroupModel().revoke_user_permission(*permission)
1270 1271
1271 1272 for permission in self.user_group_repo_group_permission_ids:
1272 1273 RepoGroupModel().revoke_user_group_permission(*permission)
1273 1274
1274 1275 for permission in self.user_user_group_permission_ids:
1275 1276 UserGroupModel().revoke_user_permission(*permission)
1276 1277
1277 1278 for permission in self.user_group_user_group_permission_ids:
1278 1279 UserGroupModel().revoke_user_group_permission(*permission)
1279 1280
1280 1281 def _cleanup_repo_groups(self):
1281 1282 def _repo_group_compare(first_group_id, second_group_id):
1282 1283 """
1283 1284 Gives higher priority to the groups with the most complex paths
1284 1285 """
1285 1286 first_group = RepoGroup.get(first_group_id)
1286 1287 second_group = RepoGroup.get(second_group_id)
1287 1288 first_group_parts = (
1288 1289 len(first_group.group_name.split('/')) if first_group else 0)
1289 1290 second_group_parts = (
1290 1291 len(second_group.group_name.split('/')) if second_group else 0)
1291 1292 return cmp(second_group_parts, first_group_parts)
1292 1293
1293 1294 sorted_repo_group_ids = sorted(
1294 1295 self.repo_group_ids, cmp=_repo_group_compare)
1295 1296 for repo_group_id in sorted_repo_group_ids:
1296 1297 self.fixture.destroy_repo_group(repo_group_id)
1297 1298
1298 1299 def _cleanup_repos(self):
1299 1300 sorted_repos_ids = sorted(self.repos_ids)
1300 1301 for repo_id in sorted_repos_ids:
1301 1302 self.fixture.destroy_repo(repo_id)
1302 1303
1303 1304 def _cleanup_user_groups(self):
1304 1305 def _user_group_compare(first_group_id, second_group_id):
1305 1306 """
1306 1307 Gives higher priority to the groups with the most complex paths
1307 1308 """
1308 1309 first_group = UserGroup.get(first_group_id)
1309 1310 second_group = UserGroup.get(second_group_id)
1310 1311 first_group_parts = (
1311 1312 len(first_group.users_group_name.split('/'))
1312 1313 if first_group else 0)
1313 1314 second_group_parts = (
1314 1315 len(second_group.users_group_name.split('/'))
1315 1316 if second_group else 0)
1316 1317 return cmp(second_group_parts, first_group_parts)
1317 1318
1318 1319 sorted_user_group_ids = sorted(
1319 1320 self.user_group_ids, cmp=_user_group_compare)
1320 1321 for user_group_id in sorted_user_group_ids:
1321 1322 self.fixture.destroy_user_group(user_group_id)
1322 1323
1323 1324 def _cleanup_users(self):
1324 1325 for user_id in self.user_ids:
1325 1326 self.fixture.destroy_user(user_id)
1326 1327
1327 1328
1328 1329 # TODO: Think about moving this into a pytest-pyro package and make it a
1329 1330 # pytest plugin
1330 1331 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1331 1332 def pytest_runtest_makereport(item, call):
1332 1333 """
1333 1334 Adding the remote traceback if the exception has this information.
1334 1335
1335 1336 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1336 1337 to the exception instance.
1337 1338 """
1338 1339 outcome = yield
1339 1340 report = outcome.get_result()
1340 1341 if call.excinfo:
1341 1342 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1342 1343
1343 1344
1344 1345 def _add_vcsserver_remote_traceback(report, exc):
1345 1346 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1346 1347
1347 1348 if vcsserver_traceback:
1348 1349 section = 'VCSServer remote traceback ' + report.when
1349 1350 report.sections.append((section, vcsserver_traceback))
1350 1351
1351 1352
1352 1353 @pytest.fixture(scope='session')
1353 1354 def testrun():
1354 1355 return {
1355 1356 'uuid': uuid.uuid4(),
1356 1357 'start': datetime.datetime.utcnow().isoformat(),
1357 1358 'timestamp': int(time.time()),
1358 1359 }
1359 1360
1360 1361
1361 1362 @pytest.fixture(autouse=True)
1362 1363 def collect_appenlight_stats(request, testrun):
1363 1364 """
1364 1365 This fixture reports memory consumption of single tests.
1365 1366
1366 1367 It gathers data based on `psutil` and sends them to Appenlight. The option
1367 1368 ``--ae`` has to be used to enable this fixture and the API key for your
1368 1369 application has to be provided in ``--ae-key``.
1369 1370 """
1370 1371 try:
1371 1372 # cygwin does not have psutil support yet.
1372 1373 import psutil
1373 1374 except ImportError:
1374 1375 return
1375 1376
1376 1377 if not request.config.getoption('--appenlight'):
1377 1378 return
1378 1379 else:
1379 1380 # Only request the pylonsapp fixture if appenlight tracking is
1380 1381 # enabled. This will speed up a test run of unit tests by 2 to 3
1381 1382 # seconds if appenlight is not enabled.
1382 1383 pylonsapp = request.getfuncargvalue("pylonsapp")
1383 1384 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1384 1385 client = AppenlightClient(
1385 1386 url=url,
1386 1387 api_key=request.config.getoption('--appenlight-api-key'),
1387 1388 namespace=request.node.nodeid,
1388 1389 request=str(testrun['uuid']),
1389 1390 testrun=testrun)
1390 1391
1391 1392 client.collect({
1392 1393 'message': "Starting",
1393 1394 })
1394 1395
1395 1396 server_and_port = pylonsapp.config['vcs.server']
1396 1397 protocol = pylonsapp.config['vcs.server.protocol']
1397 1398 server = create_vcsserver_proxy(server_and_port, protocol)
1398 1399 with server:
1399 1400 vcs_pid = server.get_pid()
1400 1401 server.run_gc()
1401 1402 vcs_process = psutil.Process(vcs_pid)
1402 1403 mem = vcs_process.memory_info()
1403 1404 client.tag_before('vcsserver.rss', mem.rss)
1404 1405 client.tag_before('vcsserver.vms', mem.vms)
1405 1406
1406 1407 test_process = psutil.Process()
1407 1408 mem = test_process.memory_info()
1408 1409 client.tag_before('test.rss', mem.rss)
1409 1410 client.tag_before('test.vms', mem.vms)
1410 1411
1411 1412 client.tag_before('time', time.time())
1412 1413
1413 1414 @request.addfinalizer
1414 1415 def send_stats():
1415 1416 client.tag_after('time', time.time())
1416 1417 with server:
1417 1418 gc_stats = server.run_gc()
1418 1419 for tag, value in gc_stats.items():
1419 1420 client.tag_after(tag, value)
1420 1421 mem = vcs_process.memory_info()
1421 1422 client.tag_after('vcsserver.rss', mem.rss)
1422 1423 client.tag_after('vcsserver.vms', mem.vms)
1423 1424
1424 1425 mem = test_process.memory_info()
1425 1426 client.tag_after('test.rss', mem.rss)
1426 1427 client.tag_after('test.vms', mem.vms)
1427 1428
1428 1429 client.collect({
1429 1430 'message': "Finished",
1430 1431 })
1431 1432 client.send_stats()
1432 1433
1433 1434 return client
1434 1435
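# Illustrative invocation enabling the appenlight tracking described above;
# the API key is a placeholder:
#
#   py.test --ae --ae-key=<your-api-key>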
1435 1436
1436 1437 class AppenlightClient():
1437 1438
1438 1439 url_template = '{url}?protocol_version=0.5'
1439 1440
1440 1441 def __init__(
1441 1442 self, url, api_key, add_server=True, add_timestamp=True,
1442 1443 namespace=None, request=None, testrun=None):
1443 1444 self.url = self.url_template.format(url=url)
1444 1445 self.api_key = api_key
1445 1446 self.add_server = add_server
1446 1447 self.add_timestamp = add_timestamp
1447 1448 self.namespace = namespace
1448 1449 self.request = request
1449 1450 self.server = socket.getfqdn(socket.gethostname())
1450 1451 self.tags_before = {}
1451 1452 self.tags_after = {}
1452 1453 self.stats = []
1453 1454 self.testrun = testrun or {}
1454 1455
1455 1456 def tag_before(self, tag, value):
1456 1457 self.tags_before[tag] = value
1457 1458
1458 1459 def tag_after(self, tag, value):
1459 1460 self.tags_after[tag] = value
1460 1461
1461 1462 def collect(self, data):
1462 1463 if self.add_server:
1463 1464 data.setdefault('server', self.server)
1464 1465 if self.add_timestamp:
1465 1466 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1466 1467 if self.namespace:
1467 1468 data.setdefault('namespace', self.namespace)
1468 1469 if self.request:
1469 1470 data.setdefault('request', self.request)
1470 1471 self.stats.append(data)
1471 1472
1472 1473 def send_stats(self):
1473 1474 tags = [
1474 1475 ('testrun', self.request),
1475 1476 ('testrun.start', self.testrun['start']),
1476 1477 ('testrun.timestamp', self.testrun['timestamp']),
1477 1478 ('test', self.namespace),
1478 1479 ]
1479 1480 for key, value in self.tags_before.items():
1480 1481 tags.append((key + '.before', value))
1481 1482 try:
1482 1483 delta = self.tags_after[key] - value
1483 1484 tags.append((key + '.delta', delta))
1484 1485 except Exception:
1485 1486 pass
1486 1487 for key, value in self.tags_after.items():
1487 1488 tags.append((key + '.after', value))
1488 1489 self.collect({
1489 1490 'message': "Collected tags",
1490 1491 'tags': tags,
1491 1492 })
1492 1493
1493 1494 response = requests.post(
1494 1495 self.url,
1495 1496 headers={
1496 1497 'X-appenlight-api-key': self.api_key},
1497 1498 json=self.stats,
1498 1499 )
1499 1500
1500 1501 if response.status_code != 200:
1501 1502 pprint.pprint(self.stats)
1502 1503 print response.headers
1503 1504 print response.text
1504 1505 raise Exception('Sending to appenlight failed')
1505 1506
1506 1507
1507 1508 @pytest.fixture
1508 1509 def gist_util(request, pylonsapp):
1509 1510 """
1510 1511 Provides a wired instance of `GistUtility` with integrated cleanup.
1511 1512 """
1512 1513 utility = GistUtility()
1513 1514 request.addfinalizer(utility.cleanup)
1514 1515 return utility
1515 1516
1516 1517
1517 1518 class GistUtility(object):
1518 1519 def __init__(self):
1519 1520 self.fixture = Fixture()
1520 1521 self.gist_ids = []
1521 1522
1522 1523 def create_gist(self, **kwargs):
1523 1524 gist = self.fixture.create_gist(**kwargs)
1524 1525 self.gist_ids.append(gist.gist_id)
1525 1526 return gist
1526 1527
1527 1528 def cleanup(self):
1528 1529 for id_ in self.gist_ids:
1529 1530 self.fixture.destroy_gists(str(id_))
1530 1531
1531 1532
1532 1533 @pytest.fixture
1533 1534 def enabled_backends(request):
1534 1535 backends = request.config.option.backends
1535 1536 return backends[:]
1536 1537
1537 1538
1538 1539 @pytest.fixture
1539 1540 def settings_util(request):
1540 1541 """
1541 1542 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1542 1543 """
1543 1544 utility = SettingsUtility()
1544 1545 request.addfinalizer(utility.cleanup)
1545 1546 return utility
1546 1547
1547 1548
1548 1549 class SettingsUtility(object):
1549 1550 def __init__(self):
1550 1551 self.rhodecode_ui_ids = []
1551 1552 self.rhodecode_setting_ids = []
1552 1553 self.repo_rhodecode_ui_ids = []
1553 1554 self.repo_rhodecode_setting_ids = []
1554 1555
1555 1556 def create_repo_rhodecode_ui(
1556 1557 self, repo, section, value, key=None, active=True, cleanup=True):
1557 1558 key = key or hashlib.sha1(
1558 1559 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1559 1560
1560 1561 setting = RepoRhodeCodeUi()
1561 1562 setting.repository_id = repo.repo_id
1562 1563 setting.ui_section = section
1563 1564 setting.ui_value = value
1564 1565 setting.ui_key = key
1565 1566 setting.ui_active = active
1566 1567 Session().add(setting)
1567 1568 Session().commit()
1568 1569
1569 1570 if cleanup:
1570 1571 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1571 1572 return setting
1572 1573
1573 1574 def create_rhodecode_ui(
1574 1575 self, section, value, key=None, active=True, cleanup=True):
1575 1576 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1576 1577
1577 1578 setting = RhodeCodeUi()
1578 1579 setting.ui_section = section
1579 1580 setting.ui_value = value
1580 1581 setting.ui_key = key
1581 1582 setting.ui_active = active
1582 1583 Session().add(setting)
1583 1584 Session().commit()
1584 1585
1585 1586 if cleanup:
1586 1587 self.rhodecode_ui_ids.append(setting.ui_id)
1587 1588 return setting
1588 1589
1589 1590 def create_repo_rhodecode_setting(
1590 1591 self, repo, name, value, type_, cleanup=True):
1591 1592 setting = RepoRhodeCodeSetting(
1592 1593 repo.repo_id, key=name, val=value, type=type_)
1593 1594 Session().add(setting)
1594 1595 Session().commit()
1595 1596
1596 1597 if cleanup:
1597 1598 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1598 1599 return setting
1599 1600
1600 1601 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1601 1602 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1602 1603 Session().add(setting)
1603 1604 Session().commit()
1604 1605
1605 1606 if cleanup:
1606 1607 self.rhodecode_setting_ids.append(setting.app_settings_id)
1607 1608
1608 1609 return setting
1609 1610
1610 1611 def cleanup(self):
1611 1612 for id_ in self.rhodecode_ui_ids:
1612 1613 setting = RhodeCodeUi.get(id_)
1613 1614 Session().delete(setting)
1614 1615
1615 1616 for id_ in self.rhodecode_setting_ids:
1616 1617 setting = RhodeCodeSetting.get(id_)
1617 1618 Session().delete(setting)
1618 1619
1619 1620 for id_ in self.repo_rhodecode_ui_ids:
1620 1621 setting = RepoRhodeCodeUi.get(id_)
1621 1622 Session().delete(setting)
1622 1623
1623 1624 for id_ in self.repo_rhodecode_setting_ids:
1624 1625 setting = RepoRhodeCodeSetting.get(id_)
1625 1626 Session().delete(setting)
1626 1627
1627 1628 Session().commit()
1628 1629
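# A minimal usage sketch (illustrative only, not part of this plugin): tests
# can create throw-away ui rows that the finalizer removes again; the section
# and value below are made-up example data.
def test_custom_ui_setting_is_tracked(settings_util):
    setting = settings_util.create_rhodecode_ui(
        'example_section', 'example_value')
    assert setting.ui_section == 'example_section'
    assert setting.ui_id in settings_util.rhodecode_ui_ids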
1629 1630
1630 1631 @pytest.fixture
1631 1632 def no_notifications(request):
1632 1633 notification_patcher = mock.patch(
1633 1634 'rhodecode.model.notification.NotificationModel.create')
1634 1635 notification_patcher.start()
1635 1636 request.addfinalizer(notification_patcher.stop)
1636 1637
1637 1638
1638 1639 @pytest.fixture
1639 1640 def silence_action_logger(request):
1640 1641 action_logger_patcher = mock.patch(
1641 1642 'rhodecode.lib.utils.action_logger')
1642 1643 action_logger_patcher.start()
1643 1644 request.addfinalizer(action_logger_patcher.stop)
1644 1645
1645 1646
1646 1647 @pytest.fixture(scope='session')
1647 1648 def repeat(request):
1648 1649 """
1649 1650 The number of repetitions is based on this fixture.
1650 1651
1651 1652 Slower calls may divide it by 10 or 100. The value is chosen so that the
1652 1653 tests are not too slow in our default test suite.
1653 1654 """
1654 1655 return request.config.getoption('--repeat')
1655 1656
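# A minimal usage sketch (illustrative only, not part of this plugin): a
# performance-style test scales its iteration count from the session-wide
# `repeat` fixture; the int() call only hedges against the option being
# registered as a string elsewhere in the suite.
def test_cheap_operation_repeatedly(repeat):
    for _ in range(int(repeat)):
        assert 1 + 1 == 2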
1656 1657
1657 1658 @pytest.fixture
1658 1659 def rhodecode_fixtures():
1659 1660 return Fixture()
1660 1661
1661 1662
1662 1663 @pytest.fixture
1663 1664 def request_stub():
1664 1665 """
1665 1666 Stub request object.
1666 1667 """
1667 1668 request = pyramid.testing.DummyRequest()
1668 1669 request.scheme = 'https'
1669 1670 return request
1670 1671
1671 1672
1672 1673 @pytest.fixture
1673 1674 def config_stub(request, request_stub):
1674 1675 """
1675 1676 Set up pyramid.testing and return the Configurator.
1676 1677 """
1677 1678 config = pyramid.testing.setUp(request=request_stub)
1678 1679
1679 1680 @request.addfinalizer
1680 1681 def cleanup():
1681 1682 pyramid.testing.tearDown()
1682 1683
1683 1684 return config
1684 1685
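# A minimal usage sketch (illustrative only, not part of this plugin): the
# Configurator returned by `config_stub` can be customised per test, e.g. by
# injecting settings; `example_key` is a made-up name.
def test_settings_can_be_injected(config_stub):
    config_stub.add_settings({'example_key': 'example_value'})
    assert config_stub.registry.settings['example_key'] == 'example_value'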
1685 1686
1686 1687 @pytest.fixture
1687 1688 def StubIntegrationType():
1688 1689 class _StubIntegrationType(IntegrationTypeBase):
1689 1690 """ Test integration type class """
1690 1691
1691 1692 key = 'test'
1692 1693 display_name = 'Test integration type'
1693 1694 description = 'A test integration type for testing'
1694 1695 icon = 'test_icon_html_image'
1695 1696
1696 1697 def __init__(self, settings):
1697 1698 super(_StubIntegrationType, self).__init__(settings)
1698 1699 self.sent_events = [] # for testing
1699 1700
1700 1701 def send_event(self, event):
1701 1702 self.sent_events.append(event)
1702 1703
1703 1704 def settings_schema(self):
1704 1705 class SettingsSchema(colander.Schema):
1705 1706 test_string_field = colander.SchemaNode(
1706 1707 colander.String(),
1707 1708 missing=colander.required,
1708 1709 title='test string field',
1709 1710 )
1710 1711 test_int_field = colander.SchemaNode(
1711 1712 colander.Int(),
1712 1713 title='some integer setting',
1713 1714 )
1714 1715 return SettingsSchema()
1715 1716
1716 1717
1717 1718 integration_type_registry.register_integration_type(_StubIntegrationType)
1718 1719 return _StubIntegrationType
1719 1720
1720 1721 @pytest.fixture
1721 1722 def stub_integration_settings():
1722 1723 return {
1723 1724 'test_string_field': 'some data',
1724 1725 'test_int_field': 100,
1725 1726 }
1726 1727
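# A minimal usage sketch (illustrative only, not part of this plugin): the stub
# settings above are meant to validate against the schema declared by the stub
# integration type, so schema round-trips can be exercised without a real
# integration.
def test_stub_settings_pass_schema_validation(
        StubIntegrationType, stub_integration_settings):
    schema = StubIntegrationType(stub_integration_settings).settings_schema()
    deserialized = schema.deserialize(stub_integration_settings)
    assert deserialized['test_int_field'] == 100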
1727 1728
1728 1729 @pytest.fixture
1729 1730 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1730 1731 stub_integration_settings):
1731 1732 integration = IntegrationModel().create(
1732 1733 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1733 1734 name='test repo integration',
1734 1735 repo=repo_stub, repo_group=None, child_repos_only=None)
1735 1736
1736 1737 @request.addfinalizer
1737 1738 def cleanup():
1738 1739 IntegrationModel().delete(integration)
1739 1740
1740 1741 return integration
1741 1742
1742 1743
1743 1744 @pytest.fixture
1744 1745 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1745 1746 stub_integration_settings):
1746 1747 integration = IntegrationModel().create(
1747 1748 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1748 1749 name='test repogroup integration',
1749 1750 repo=None, repo_group=test_repo_group, child_repos_only=True)
1750 1751
1751 1752 @request.addfinalizer
1752 1753 def cleanup():
1753 1754 IntegrationModel().delete(integration)
1754 1755
1755 1756 return integration
1756 1757
1757 1758
1758 1759 @pytest.fixture
1759 1760 def repogroup_recursive_integration_stub(request, test_repo_group,
1760 1761 StubIntegrationType, stub_integration_settings):
1761 1762 integration = IntegrationModel().create(
1762 1763 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1763 1764 name='test recursive repogroup integration',
1764 1765 repo=None, repo_group=test_repo_group, child_repos_only=False)
1765 1766
1766 1767 @request.addfinalizer
1767 1768 def cleanup():
1768 1769 IntegrationModel().delete(integration)
1769 1770
1770 1771 return integration
1771 1772
1772 1773
1773 1774 @pytest.fixture
1774 1775 def global_integration_stub(request, StubIntegrationType,
1775 1776 stub_integration_settings):
1776 1777 integration = IntegrationModel().create(
1777 1778 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1778 1779 name='test global integration',
1779 1780 repo=None, repo_group=None, child_repos_only=None)
1780 1781
1781 1782 @request.addfinalizer
1782 1783 def cleanup():
1783 1784 IntegrationModel().delete(integration)
1784 1785
1785 1786 return integration
1786 1787
1787 1788
1788 1789 @pytest.fixture
1789 1790 def root_repos_integration_stub(request, StubIntegrationType,
1790 1791 stub_integration_settings):
1791 1792 integration = IntegrationModel().create(
1792 1793 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1793 1794 name='test root repos integration',
1794 1795 repo=None, repo_group=None, child_repos_only=True)
1795 1796
1796 1797 @request.addfinalizer
1797 1798 def cleanup():
1798 1799 IntegrationModel().delete(integration)
1799 1800
1800 1801 return integration
1801 1802
1802 1803
1803 1804 @pytest.fixture
1804 1805 def local_dt_to_utc():
1805 1806 def _factory(dt):
1806 1807 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1807 1808 dateutil.tz.tzutc()).replace(tzinfo=None)
1808 1809 return _factory
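# A minimal usage sketch (illustrative only, not part of this plugin): the
# factory turns a naive local datetime into its naive UTC equivalent, which is
# how timestamps are typically stored and compared in the tests.
def test_local_dt_to_utc_returns_naive_utc(local_dt_to_utc):
    local_dt = datetime.datetime(2017, 1, 1, 12, 30, 0)
    utc_dt = local_dt_to_utc(local_dt)
    assert utc_dt.tzinfo is None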