##// END OF EJS Templates
tests: Extend vcsbackend - create repo with commits...
johbo -
r770:d9d969e2 default
parent child Browse files
Show More
@@ -1,1740 +1,1750 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess
30 30 import time
31 31 import uuid
32 32
33 33 import mock
34 34 import pyramid.testing
35 35 import pytest
36 36 import colander
37 37 import requests
38 38 from webtest.app import TestApp
39 39
40 40 import rhodecode
41 41 from rhodecode.model.changeset_status import ChangesetStatusModel
42 42 from rhodecode.model.comment import ChangesetCommentsModel
43 43 from rhodecode.model.db import (
44 44 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
45 45 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, Integration)
46 46 from rhodecode.model.meta import Session
47 47 from rhodecode.model.pull_request import PullRequestModel
48 48 from rhodecode.model.repo import RepoModel
49 49 from rhodecode.model.repo_group import RepoGroupModel
50 50 from rhodecode.model.user import UserModel
51 51 from rhodecode.model.settings import VcsSettingsModel
52 52 from rhodecode.model.user_group import UserGroupModel
53 53 from rhodecode.model.integration import IntegrationModel
54 54 from rhodecode.integrations import integration_type_registry
55 55 from rhodecode.integrations.types.base import IntegrationTypeBase
56 56 from rhodecode.lib.utils import repo2db_mapper
57 57 from rhodecode.lib.vcs import create_vcsserver_proxy
58 58 from rhodecode.lib.vcs.backends import get_backend
59 59 from rhodecode.lib.vcs.nodes import FileNode
60 60 from rhodecode.tests import (
61 61 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
62 62 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
63 63 TEST_USER_REGULAR_PASS)
64 64 from rhodecode.tests.fixture import Fixture
65 65
66 66
67 67 def _split_comma(value):
68 68 return value.split(',')
69 69
70 70
def pytest_addoption(parser):
    """Register the RhodeCode specific command line options with pytest."""
    # General test run behaviour.
    parser.addoption(
        '--keep-tmp-path', action='store_true',
        help="Keep the test temporary directories")
    parser.addoption(
        '--backends', action='store', type=_split_comma,
        default=['git', 'hg', 'svn'],
        help="Select which backends to test for backend specific tests.")
    parser.addoption(
        '--dbs', action='store', type=_split_comma,
        default=['sqlite'],
        help="Select which database to test for database specific tests. "
             "Possible options are sqlite,postgres,mysql")
    # Appenlight based statistics tracking.
    parser.addoption(
        '--appenlight', '--ae', action='store_true',
        help="Track statistics in appenlight.")
    parser.addoption(
        '--appenlight-api-key', '--ae-key',
        help="API key for Appenlight.")
    parser.addoption(
        '--appenlight-url', '--ae-url',
        default="https://ae.rhodecode.com",
        help="Appenlight service URL, defaults to https://ae.rhodecode.com")
    # Connection strings for the database specific test runs.
    parser.addoption(
        '--sqlite-connection-string', action='store',
        default='', help="Connection string for the dbs tests with SQLite")
    parser.addoption(
        '--postgres-connection-string', action='store',
        default='', help="Connection string for the dbs tests with Postgres")
    parser.addoption(
        '--mysql-connection-string', action='store',
        default='', help="Connection string for the dbs tests with MySQL")
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")
106 106
107 107
def pytest_configure(config):
    """Apply global patches before the test collection starts."""
    # The kombu patch must be applied early on; it is needed for test
    # discovery on Python 2.7.11.
    from rhodecode.config import patches
    patches.kombu_1_5_1_python_2_7_11()
112 112
113 113
def pytest_collection_modifyitems(session, config, items):
    """
    Drop collected items whose test object carries ``__test__ = False``.

    Mirrors nose's ``nottest`` marker; used during the transition from
    nose to pytest.
    """
    items[:] = [item for item in items if getattr(item.obj, '__test__', True)]
119 119
120 120
def pytest_generate_tests(metafunc):
    """Generate test parameters based on the ``--backends`` option."""
    if 'backend_alias' in metafunc.fixturenames:
        enabled_backends = get_backends_from_metafunc(metafunc)
        scope = None
        if not enabled_backends:
            pytest.skip("Not enabled for any of selected backends")
        metafunc.parametrize('backend_alias', enabled_backends, scope=scope)
    elif hasattr(metafunc.function, 'backends'):
        enabled_backends = get_backends_from_metafunc(metafunc)
        if not enabled_backends:
            pytest.skip("Not enabled for any of selected backends")
133 133
134 134
def get_backends_from_metafunc(metafunc):
    """
    Return the set of backends to run for `metafunc`.

    This is the intersection of the backends requested on the command line
    via ``--backends`` and the backends which the test function supports.
    """
    requested_backends = set(metafunc.config.getoption('--backends'))
    if hasattr(metafunc.function, 'backends'):
        # Supported backends by this test function, created from
        # pytest.mark.backends
        supported_backends = metafunc.function.backends.args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # Support class attribute "backend_alias", this is mainly
        # for legacy reasons for tests not yet using pytest.mark.backends
        supported_backends = [metafunc.cls.backend_alias]
    else:
        supported_backends = metafunc.config.getoption('--backends')
    return requested_backends.intersection(supported_backends)
148 148
149 149
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in an example rcextensions module which verifies passed in kwargs.
    """
    from rhodecode.tests.other import example_rcextensions

    previous_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = example_rcextensions

    def restore_extensions():
        rhodecode.EXTENSIONS = previous_extensions
    request.addfinalizer(restore_extensions)
163 163
164 164
@pytest.fixture
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.
    """
    recorded_calls = rhodecode.EXTENSIONS.calls
    recorded_calls.clear()
    # Note: At this moment, it is still the empty dict, but that will
    # be filled during the test run and since it is a reference this
    # is enough to make it work.
    return recorded_calls
176 176
177 177
@pytest.fixture(scope='session')
def http_environ_session():
    """
    Allow to use "http_environ" in session scope.
    """
    # Delegate to the function scoped fixtures by calling them directly.
    return http_environ(http_host_stub=http_host_stub())
185 185
186 186
@pytest.fixture
def http_host_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    return 'test.example.com:80'
193 193
194 194
@pytest.fixture
def http_environ(http_host_stub):
    """
    HTTP extra environ keys.

    Used by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    environ = {
        'SERVER_NAME': http_host_stub.split(':')[0],
        'SERVER_PORT': http_host_stub.split(':')[1],
        'HTTP_HOST': http_host_stub,
    }
    return environ
209 209
210 210
@pytest.fixture(scope='function')
def app(request, pylonsapp, http_environ):
    """Wrap the application under test into a WebTest ``TestApp``."""
    test_app = TestApp(pylonsapp, extra_environ=http_environ)
    # Expose the app on the class for old-style (unittest based) tests.
    if request.cls:
        request.cls.app = test_app
    return test_app
219 219
220 220
@pytest.fixture()
def app_settings(pylonsapp, pylons_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    from paste.deploy.loadwsgi import loadcontext, APP
    from rhodecode.config.middleware import (
        sanitize_settings_and_apply_defaults)
    app_context = loadcontext(APP, 'config:' + pylons_config)
    return sanitize_settings_and_apply_defaults(app_context.config())
235 235
236 236
# Result of a login: the csrf token of the session plus the logged in user.
LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))


def _autologin_user(app, *args):
    """Log in through `app` and return the resulting :class:`LoginData`."""
    session = login_user_session(app, *args)
    token = rhodecode.lib.auth.get_csrf_token(session)
    return LoginData(token, session['rhodecode_user'])
244 244
245 245
@pytest.fixture
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in
    """
    return _autologin_user(app)
252 252
253 253
@pytest.fixture
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
261 261
262 262
@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    """Shortcut to the csrf token of the logged in admin user."""
    return autologin_user.csrf_token
266 266
267 267
@pytest.fixture(scope='function')
def xhr_header(request):
    """Extra header which marks a request as an XMLHttpRequest."""
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
271 271
272 272
@pytest.fixture
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)
282 282
283 283
@pytest.fixture(scope='class')
def index_location(request, pylonsapp):
    """Search index location as configured for the test application."""
    location = pylonsapp.config['app_conf']['search.location']
    # Expose it on the class for old-style (unittest based) tests.
    if request.cls:
        request.cls.index_location = location
    return location
290 290
291 291
@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    # Unless explicitly kept via --keep-tmp-path, remove it afterwards.
    if not request.config.getoption('--keep-tmp-path'):
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)
        request.addfinalizer(remove_tmp_path)

    return TESTS_TMP_PATH
306 306
307 307
@pytest.fixture(scope='session', autouse=True)
def patch_pyro_request_scope_proxy_factory(request):
    """
    Patch the pyro proxy factory to always use the same dummy request object
    when under test. This will return the same pyro proxy on every call.
    """
    dummy_request = pyramid.testing.DummyRequest()

    def mocked_call(self, request=None):
        # The incoming request is ignored on purpose; the dummy request is
        # reused so the same proxy is handed out every time.
        return self.getProxy(request=dummy_request)

    patcher = mock.patch(
        'rhodecode.lib.vcs.client.RequestScopeProxyFactory.__call__',
        new=mocked_call)
    patcher.start()
    request.addfinalizer(patcher.stop)
327 327
328 328
@pytest.fixture
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    group_id = 'test_repo_group_%s' % int(time.time())
    repo_group = fixture.create_repo_group(group_id)
    request.addfinalizer(lambda: fixture.destroy_repo_group(group_id))
    return repo_group
344 344
345 345
@pytest.fixture
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    group_id = 'test_user_group_%s' % int(time.time())
    user_group = fixture.create_user_group(group_id)
    request.addfinalizer(lambda: fixture.destroy_user_group(user_group))
    return user_group
361 361
362 362
@pytest.fixture(scope='session')
def test_repo(request):
    """Session wide container of read only test repositories."""
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container
368 368
369 369
class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestContainer()
        repo = test_repo('minimal', 'svn')

    """

    # Maps a backend alias to the function which extracts the corresponding
    # dump from the test fixtures.
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        self._repos = {}

    def __call__(self, dump_name, backend_alias):
        # Cache by (dump, backend) so every dump is only extracted once.
        key = (dump_name, backend_alias)
        if key not in self._repos:
            self._repos[key] = self._create_repo(dump_name, backend_alias).repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias):
        # Extract the dump into a fresh repository and register it in the
        # database, so that model level code can find it.
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend_class = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)
        repo2db_mapper({repo_name: backend_class(repo_path)})
        self._cleanup_repos.append(repo_name)
        return RepoModel().get_by_repo_name(repo_name)

    def _cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)
416 416
417 417
@pytest.fixture
def backend(request, backend_alias, pylonsapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    backend_instance = Backend(
        alias=backend_alias,
        repo_name='vcs_test_%s' % (backend_alias, ),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend_instance.cleanup)
    return backend_instance
444 444
445 445
@pytest.fixture
def backend_git(request, pylonsapp, test_repo):
    """Git flavoured variant of the `backend` fixture."""
    return backend(request, 'git', pylonsapp, test_repo)


@pytest.fixture
def backend_hg(request, pylonsapp, test_repo):
    """Mercurial flavoured variant of the `backend` fixture."""
    return backend(request, 'hg', pylonsapp, test_repo)


@pytest.fixture
def backend_svn(request, pylonsapp, test_repo):
    """Subversion flavoured variant of the `backend` fixture."""
    return backend(request, 'svn', pylonsapp, test_repo)
459 459
460 460
@pytest.fixture
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend.

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git
476 476
477 477
@pytest.fixture
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git
487 487
488 488
@pytest.fixture
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    return backend_stub.create_repo()
495 495
496 496
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    _master_repo = None
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # Fix: give every instance its own commit map. Relying only on the
        # class level ``_commit_ids`` dict would share commit ids between
        # Backend instances and leak state across tests.
        self._commit_ids = {}
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        """Return the read only test repository `key` for this backend."""
        return self._test_repo_container(key, self.alias)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        """Default branch name of the underlying vcs backend class."""
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
            names which shall be pulled in from the master repository.

        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in xrange(number_of_commits)]
        self._add_commits_to_repo(repo.scm_instance(), commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        """Fork the current repository and track the fork for cleanup."""
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        """Reserve and return a fresh repository name for this test."""
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Derived from the test name so repositories of a test run are easy
        # to identify; the counter keeps names unique within one test.
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name),
            len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        """Enable the downloads flag on the current repository."""
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        # Delegate the commit creation to the module level helper and keep
        # the resulting commit map for `commit_ids` / `pull_heads`.
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])
695 664
696 665
@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, pylonsapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    vcs_backend = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, repo_name),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(vcs_backend.cleanup)
    return vcs_backend
723 692
724 693
@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, pylonsapp, test_repo):
    """Git flavoured variant of the `vcsbackend` fixture."""
    return vcsbackend(request, 'git', tests_tmp_path, pylonsapp, test_repo)


@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, pylonsapp, test_repo):
    """Mercurial flavoured variant of the `vcsbackend` fixture."""
    return vcsbackend(request, 'hg', tests_tmp_path, pylonsapp, test_repo)


@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, pylonsapp, test_repo):
    """Subversion flavoured variant of the `vcsbackend` fixture."""
    return vcsbackend(request, 'svn', tests_tmp_path, pylonsapp, test_repo)
738 707
739 708
@pytest.fixture
def vcsbackend_random(vcsbackend_git):
    """
    Use this to express that your tests need "a vcsbackend".

    The fixture `vcsbackend` would run the test multiple times for each
    available vcs backend which is a pure waste of time if the test is
    independent of the vcs backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return vcsbackend_git
751 720
752 721
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        """Return the scm instance of the read only test repository `key`."""
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None):
        """
        Create a new vcs level repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances; one commit is
            created per entry (see :func:`_add_commits_to_repo`).
        :param number_of_commits: Optional. Number of generated commits to
            add when `commits` is not given.
        :param _clone_repo: Optional. Clone from this repository instead of
            creating an empty one.
        """
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        """Create a new repository as a clone of `repo`."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        """Reserve and return a fresh repository path for this test."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Derived from the test name plus a per-test counter.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        """Commit a single file to `repo` via its in memory commit."""
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
826 798
827 799
800 def _add_commits_to_repo(vcs_repo, commits):
801 commit_ids = {}
802 if not commits:
803 return commit_ids
804
805 imc = vcs_repo.in_memory_commit
806 commit = None
807
808 for idx, commit in enumerate(commits):
809 message = unicode(commit.get('message', 'Commit %s' % idx))
810
811 for node in commit.get('added', []):
812 imc.add(FileNode(node.path, content=node.content))
813 for node in commit.get('changed', []):
814 imc.change(FileNode(node.path, content=node.content))
815 for node in commit.get('removed', []):
816 imc.remove(FileNode(node.path))
817
818 parents = [
819 vcs_repo.get_commit(commit_id=commit_ids[p])
820 for p in commit.get('parents', [])]
821
822 operations = ('added', 'changed', 'removed')
823 if not any((commit.get(o) for o in operations)):
824 imc.add(FileNode('file_%s' % idx, content=message))
825
826 commit = imc.commit(
827 message=message,
828 author=unicode(commit.get('author', 'Automatic')),
829 date=commit.get('date'),
830 branch=commit.get('branch'),
831 parents=parents)
832
833 commit_ids[commit.message] = commit.raw_id
834
835 return commit_ids
836
837
@pytest.fixture
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
837 847
838 848
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL under which the served repository is reachable; set by `serve`.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """Start ``svnserve`` for `vcsrepo` and remember it for cleanup."""
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        server_process = subprocess.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(server_process)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate every server process started by this instance."""
        for server_process in self._cleanup_servers:
            server_process.terminate()
864 874
865 875
@pytest.fixture
def pr_util(backend, request):
    """
    Pull request helper for model and functional tests.

    Returns a :class:`PRTestUtility` bound to the parameterized `backend`
    fixture; everything it created is removed again when the test ends.
    """
    utility = PRTestUtility(backend)
    request.addfinalizer(utility.cleanup)
    return utility
884 894
885 895
class PRTestUtility(object):
    """
    Helper object around one pull request, used by the `pr_util` fixture.

    Creates a master repository from commit specs, derives source and
    target repositories from it and opens a pull request between them.
    Everything created here is removed again in :meth:`cleanup`.
    """

    # Set up lazily by create_pull_request().
    pull_request = None
    pull_request_id = None
    # mock.patch handles; stopped again in cleanup().
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        # `backend` is the parameterized test backend fixture.
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """
        Create the pull request on first call and return it.

        :param commits: commit spec dicts for the master repository;
            defaults to three commits ``c1``..``c3`` with ``c1`` as target
            head, ``c2`` as source head and ``c2`` as the only revision.
        :param approved: if True, all reviewers vote "approved".
        :param mergeable: value for the mocked merge-enabled setting.
        :param enable_notifications: if False, notification creation is
            mocked away.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not have versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Let every reviewer vote "approved" on the pull request."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        # Builds a "branch:<name>:<commit_id>" reference string, resolving
        # the commit id from the commit message.
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        """Default reviewers: the two regular test users."""
        model = UserModel()
        return [
            model.get_by_username(TEST_USER_REGULAR_LOGIN),
            model.get_by_username(TEST_USER_REGULAR2_LOGIN),
        ]

    def update_source_repository(self, head=None):
        # Pull the given head (default 'c3') into the source repository.
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        """
        Add one commit to the source repo and update the pull request.

        :return: the commit id of the newly added commit.
        """
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """
        Strip the source repository's tip and update the pull request.

        Expects exactly two revisions before and one after the strip.

        :return: the commit id that was removed.
        """
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """
        Create a general comment on the pull request.

        :param linked_to: optional pull request version to link the
            comment to.
        """
        comment = ChangesetCommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """
        Create an inline comment on `file_path` at `line_no`.

        :param linked_to: optional pull request version to link the
            comment to.
        """
        comment = ChangesetCommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create the pull request and snapshot it into a version."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set `status` for each given reviewer on the pull request."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """
        Mock the "pr merge enabled" setting to `value`.

        The patcher is started once and only its return value is updated
        on subsequent calls; it is stopped in cleanup().
        """
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the pull request (if still present) and stop patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request)
        Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1069 1079
1070 1080
@pytest.fixture
def user_admin(pylonsapp):
    """
    The default admin test user as a `db.User` instance.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1078 1088
1079 1089
@pytest.fixture
def user_regular(pylonsapp):
    """
    The default regular test user as a `db.User` instance.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1087 1097
1088 1098
@pytest.fixture
def user_util(request, pylonsapp):
    """
    A `UserUtility` instance whose created objects are cleaned up
    automatically when the test ends.
    """
    user_utility = UserUtility(test_name=request.node.name)
    request.addfinalizer(user_utility.cleanup)
    return user_utility
1097 1107
1098 1108
1099 1109 # TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """
    Creates users, user groups, repo groups and permissions for tests and
    keeps track of their ids, so that :meth:`cleanup` can remove everything
    again in a safe order.
    """

    def __init__(self, test_name="test"):
        # `test_name` is used as a prefix for all generated object names.
        self._test_name = test_name
        self.fixture = Fixture()
        # Ids of objects created with auto_cleanup, removed in cleanup().
        self.repo_group_ids = []
        self.user_ids = []
        self.user_group_ids = []
        # (target_id, grantee_id) tuples of granted permissions, revoked
        # again in _cleanup_permissions().
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a repo group with a unique, test-prefixed name."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a user with a unique, test-prefixed name."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_user_with_group(self):
        """Create a user and a user group containing only that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, members=None, auto_cleanup=True, **kwargs):
        """Create a user group, optionally adding `members` to it."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(group_name, **kwargs)
        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        # Disables inheritance of default permissions for the user; the
        # actual grant is recorded and applied via self.user_permissions.
        self._inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        # Re-enables inheritance of default permissions before revoking.
        self._inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def _inherit_default_user_permissions(self, user_name, value):
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        # Order matters: permissions must go before the objects they
        # reference, users last.
        self._cleanup_permissions()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            # NOTE: `cmp` and the `cmp=` sort argument are Python 2 only.
            return cmp(second_group_parts, first_group_parts)

        # Destroy deeply nested groups first, so parents are removed last.
        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            # NOTE: `cmp` and the `cmp=` sort argument are Python 2 only.
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1285 1295
1286 1296
1287 1297 # TODO: Think about moving this into a pytest-pyro package and make it a
1288 1298 # pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Attach the remote traceback to the report when the exception has one.

    Pyro4 stores this information on the exception instance in the
    attribute `_pyroTraceback`.
    """
    outcome = yield
    test_report = outcome.get_result()
    if call.excinfo:
        _add_pyro_remote_traceback(test_report, call.excinfo.value)
1301 1311
1302 1312
def _add_pyro_remote_traceback(report, exc):
    """
    Append the Pyro4 remote traceback of `exc` to `report`, if present.
    """
    remote_tb = getattr(exc, '_pyroTraceback', None)
    if not remote_tb:
        return

    section_title = 'Pyro4 remote traceback ' + report.when
    report.sections.append((section_title, ''.join(remote_tb)))
1310 1320
1311 1321
@pytest.fixture(scope='session')
def testrun():
    """
    Session-wide identification data of the current test run.
    """
    run_info = {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
    return run_info
1319 1329
1320 1330
@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends them to Appenlight. The
    option ``--appenlight`` has to be used to enable this fixture and the
    API key for your application has to be provided in
    ``--appenlight-api-key``.
    """
    try:
        # cygwin cannot have yet psutil support.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the pylonsapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        pylonsapp = request.getfuncargvalue("pylonsapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    # Sample vcsserver memory after a garbage collection run, so the
    # before/after delta reflects the test itself.
    server_and_port = pylonsapp.config['vcs.server']
    server = create_vcsserver_proxy(server_and_port)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

    # Memory of the test process itself.
    test_process = psutil.Process()
    mem = test_process.memory_info()
    client.tag_before('test.rss', mem.rss)
    client.tag_before('test.vms', mem.vms)

    client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        # Take the "after" samples and ship everything to Appenlight.
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client
1393 1403
1394 1404
class AppenlightClient():
    """
    Minimal client to push collected test statistics to Appenlight.

    Tags recorded via tag_before()/tag_after() are turned into
    ``.before``/``.after``/``.delta`` entries by send_stats().
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        # NOTE(review): send_stats() indexes testrun['start'] and
        # ['timestamp']; the empty-dict default would raise KeyError there
        # — callers are expected to pass the `testrun` fixture dict.
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        # Record a measurement taken before the test.
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        # Record a measurement taken after the test.
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue a log entry, filling in common default fields."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """Build the tag list and POST all queued stats to Appenlight."""
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                # Delta only exists when the same tag was sampled after the
                # test and supports subtraction.
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if not response.status_code == 200:
            pprint.pprint(self.stats)
            print response.headers
            print response.text
            raise Exception('Sending to appenlight failed')
1464 1474
1465 1475
@pytest.fixture
def gist_util(request, pylonsapp):
    """
    A `GistUtility` instance with automatic cleanup of created gists.
    """
    gist_utility = GistUtility()
    request.addfinalizer(gist_utility.cleanup)
    return gist_utility
1474 1484
1475 1485
class GistUtility(object):
    """
    Creates gists through the `Fixture` helper and destroys them again
    in cleanup().
    """

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        new_gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(new_gist.gist_id)
        return new_gist

    def cleanup(self):
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1489 1499
1490 1500
@pytest.fixture
def enabled_backends(request):
    """
    Copy of the backend names enabled for this test run.
    """
    return list(request.config.option.backends)
1495 1505
1496 1506
@pytest.fixture
def settings_util(request):
    """
    A `SettingsUtility` instance with automatic cleanup of created
    settings.
    """
    settings_utility = SettingsUtility()
    request.addfinalizer(settings_utility.cleanup)
    return settings_utility
1505 1515
1506 1516
class SettingsUtility(object):
    """
    Creates global and per-repository settings/ui entries for tests and
    deletes them again in :meth:`cleanup`.
    """

    def __init__(self):
        # Ids of entries created with cleanup=True, removed in cleanup().
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Create a per-repository ui entry; the key defaults to a hash
        of section, value and repo id."""
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Create a global ui entry; the key defaults to a hash of
        section and value."""
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Create a per-repository setting entry."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Create a global setting entry."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete every tracked entry and commit once at the end."""
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()
1587 1597
1588 1598
@pytest.fixture
def no_notifications(request):
    """Disable notification creation for the duration of a test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1595 1605
1596 1606
@pytest.fixture
def silence_action_logger(request):
    """Silence the action logger for the duration of a test."""
    patcher = mock.patch(
        'rhodecode.lib.utils.action_logger')
    patcher.start()
    request.addfinalizer(patcher.stop)
1603 1613
1604 1614
@pytest.fixture(scope='session')
def repeat(request):
    """
    Base number of repetitions, taken from the ``--repeat`` option.

    Slower calls may divide it by 10 or 100 so that the default test
    suite stays reasonably fast.
    """
    repetitions = request.config.getoption('--repeat')
    return repetitions
1614 1624
1615 1625
@pytest.fixture
def rhodecode_fixtures():
    """A fresh `Fixture` helper instance."""
    return Fixture()
1619 1629
1620 1630
@pytest.fixture
def request_stub():
    """
    Stub request object, pre-configured for the https scheme.
    """
    stub = pyramid.testing.DummyRequest()
    stub.scheme = 'https'
    return stub
1629 1639
1630 1640
@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up `pyramid.testing` and return the Configurator; tears down the
    testing registry again when the test ends.
    """
    config = pyramid.testing.setUp(request=request_stub)
    request.addfinalizer(pyramid.testing.tearDown)
    return config
1643 1653
1644 1654
@pytest.fixture
def StubIntegrationType():
    """
    Provides a stub integration type class and registers it in the
    integration type registry.
    """
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'
        icon = 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            # Events are only recorded so tests can inspect them.
            self.sent_events.append(event)

        def settings_schema(self):
            # Schema matching the `stub_integration_settings` fixture.
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()


    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1678 1688
@pytest.fixture
def stub_integration_settings():
    """Settings matching the stub integration type's schema."""
    return dict(test_string_field='some data', test_int_field=100)
1685 1695
1686 1696
@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Repository-scoped stub integration, deleted again after the test."""
    stub_integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration', scope=repo_stub)
    request.addfinalizer(
        lambda: IntegrationModel().delete(stub_integration))
    return stub_integration
1699 1709
1700 1710
@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Repo-group-scoped stub integration, deleted again after the test."""
    stub_integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration', scope=test_repo_group)
    request.addfinalizer(
        lambda: IntegrationModel().delete(stub_integration))
    return stub_integration
1713 1723
1714 1724
@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Globally-scoped stub integration, deleted again after the test."""
    stub_integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration', scope='global')
    request.addfinalizer(
        lambda: IntegrationModel().delete(stub_integration))
    return stub_integration
1727 1737
1728 1738
@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """Root-repos-scoped stub integration, deleted again after the test."""
    stub_integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration', scope='root_repos')
    request.addfinalizer(
        lambda: IntegrationModel().delete(stub_integration))
    return stub_integration
General Comments 0
You need to be logged in to leave comments. Login now