fix(tests): fixed tests for PR celery hooks daemon
super-admin
r5589:750c46dc default
@@ -1,89 +1,94 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import time
21 21 import logging
22 22
23 from rhodecode.lib.config_utils import get_config
23 from rhodecode.lib.config_utils import get_app_config_lightweight
24 24
25 25 from rhodecode.lib.svn_txn_utils import get_txn_id_from_store
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29
30 30 class BaseHooksCallbackDaemon:
31 31 """
32 32 Basic context manager for actions that don't require any extra setup.
33 33 """
34 34 def __init__(self):
35 35 pass
36 36
37 37 def __enter__(self):
38 38 log.debug('Running `%s` callback daemon', self.__class__.__name__)
39 39 return self
40 40
41 41 def __exit__(self, exc_type, exc_val, exc_tb):
42 42 log.debug('Exiting `%s` callback daemon', self.__class__.__name__)
43 43
44 44
45 45 class HooksModuleCallbackDaemon(BaseHooksCallbackDaemon):
46 46
47 47 def __init__(self, module):
48 48 super().__init__()
49 49 self.hooks_module = module
50 50
51 51 def __repr__(self):
52 52 return f'HooksModuleCallbackDaemon(hooks_module={self.hooks_module})'
53 53
54 54
55 55 def prepare_callback_daemon(extras, protocol, host, txn_id=None):
56 56
57 57 match protocol:
58 58 case 'http':
59 59 from rhodecode.lib.hook_daemon.http_hooks_deamon import HttpHooksCallbackDaemon
60 60 port = 0
61 61 if txn_id:
62 62 # read txn-id to re-use the PORT for callback daemon
63 63 repo_path = os.path.join(extras['repo_store'], extras['repository'])
64 64 txn_details = get_txn_id_from_store(repo_path, txn_id)
65 65 port = txn_details.get('port', 0)
66 66
67 67 callback_daemon = HttpHooksCallbackDaemon(
68 68 txn_id=txn_id, host=host, port=port)
69 69 case 'celery':
70 70 from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon
71 callback_daemon = CeleryHooksCallbackDaemon(get_config(extras['config']))
71
72 config = get_app_config_lightweight(extras['config'])
73 task_queue = config.get('celery.broker_url')
74 task_backend = config.get('celery.result_backend')
75
76 callback_daemon = CeleryHooksCallbackDaemon(task_queue, task_backend)
72 77 case 'local':
73 78 from rhodecode.lib.hook_daemon.hook_module import Hooks
74 79 callback_daemon = HooksModuleCallbackDaemon(Hooks.__module__)
75 80 case _:
76 81 log.error('Unsupported callback daemon protocol "%s"', protocol)
77 82 raise Exception('Unsupported callback daemon protocol.')
78 83
79 84 extras['hooks_uri'] = getattr(callback_daemon, 'hooks_uri', '')
80 85 extras['task_queue'] = getattr(callback_daemon, 'task_queue', '')
81 86 extras['task_backend'] = getattr(callback_daemon, 'task_backend', '')
82 87 extras['hooks_protocol'] = protocol
83 88 extras['time'] = time.time()
84 89
85 90 # register txn_id
86 91 extras['txn_id'] = txn_id
87 92 log.debug('Prepared a callback daemon: %s',
88 93 callback_daemon.__class__.__name__)
89 94 return callback_daemon, extras
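For context on the celery branch above: prepare_callback_daemon now resolves the broker settings itself via get_app_config_lightweight instead of handing a parsed config object to the daemon. A minimal usage sketch, assuming a hypothetical ini path and illustrative extras values:

    from rhodecode.lib.hook_daemon.base import prepare_callback_daemon

    # extras as assembled by a vcs operation; the paths are illustrative
    extras = {
        'config': '/etc/rhodecode/rhodecode.ini',
        'repo_store': '/srv/repos',
        'repository': 'some-repo',
    }
    callback_daemon, extras = prepare_callback_daemon(extras, protocol='celery', host='')
    # extras['task_queue'] now carries the ini value of celery.broker_url,
    # extras['task_backend'] that of celery.result_backend, and
    # extras['hooks_protocol'] == 'celery'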
@@ -1,33 +1,32 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 from rhodecode.lib.hook_daemon.base import BaseHooksCallbackDaemon
20 20
21 21
22 22 class CeleryHooksCallbackDaemon(BaseHooksCallbackDaemon):
23 23 """
24 24 Context manager for achieving compatibility with the Celery backend
25 25 """
26 26
27 def __init__(self, config):
28 # TODO: replace this with settings bootstrapped...
29 self.task_queue = config.get('app:main', 'celery.broker_url')
30 self.task_backend = config.get('app:main', 'celery.result_backend')
27 def __init__(self, task_queue, task_backend):
28 self.task_queue = task_queue
29 self.task_backend = task_backend
31 30
32 31 def __repr__(self):
33 32 return f'CeleryHooksCallbackDaemon(task_queue={self.task_queue}, task_backend={self.task_backend})'
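The constructor change means callers now pass the two broker settings as plain strings instead of a ConfigParser-style object. A short sketch, assuming illustrative Redis URLs:

    from rhodecode.lib.hook_daemon.celery_hooks_deamon import CeleryHooksCallbackDaemon

    daemon = CeleryHooksCallbackDaemon(
        task_queue='redis://localhost:6379/0',    # celery.broker_url
        task_backend='redis://localhost:6379/1',  # celery.result_backend
    )
    with daemon:  # enter/exit logging comes from BaseHooksCallbackDaemon
        pass      # run the hook-triggering vcs operation here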
@@ -1,1750 +1,1750 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import collections
21 21 import datetime
22 22 import os
23 23 import re
24 24 import pprint
25 25 import shutil
26 26 import socket
27 27 import subprocess
28 28 import time
29 29 import uuid
30 30 import dateutil.tz
31 31 import logging
32 32 import functools
33 33
34 34 import mock
35 35 import pyramid.testing
36 36 import pytest
37 37 import colander
38 38 import requests
39 39 import pyramid.paster
40 40
41 41 import rhodecode
42 42 import rhodecode.lib
43 43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 44 from rhodecode.model.comment import CommentsModel
45 45 from rhodecode.model.db import (
46 46 PullRequest, PullRequestReviewers, Repository, RhodeCodeSetting, ChangesetStatus,
47 47 RepoGroup, UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 48 from rhodecode.model.meta import Session
49 49 from rhodecode.model.pull_request import PullRequestModel
50 50 from rhodecode.model.repo import RepoModel
51 51 from rhodecode.model.repo_group import RepoGroupModel
52 52 from rhodecode.model.user import UserModel
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54 from rhodecode.model.user_group import UserGroupModel
55 55 from rhodecode.model.integration import IntegrationModel
56 56 from rhodecode.integrations import integration_type_registry
57 57 from rhodecode.integrations.types.base import IntegrationTypeBase
58 58 from rhodecode.lib.utils import repo2db_mapper
59 59 from rhodecode.lib.str_utils import safe_bytes
60 60 from rhodecode.lib.hash_utils import sha1_safe
61 61 from rhodecode.lib.vcs.backends import get_backend
62 62 from rhodecode.lib.vcs.nodes import FileNode
63 63 from rhodecode.tests import (
64 64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 66 TEST_USER_REGULAR_PASS)
67 67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 68 from rhodecode.tests.fixture import Fixture
69 69 from rhodecode.config import utils as config_utils
70 70
71 71 log = logging.getLogger(__name__)
72 72
73 73
74 74 def cmp(a, b):
75 75 # backport cmp from python2 so we can still use it in the custom code in this module
76 76 return (a > b) - (a < b)
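The backport keeps the sign contract of the removed Python 2 builtin, for example:

    assert cmp(1, 2) == -1
    assert cmp(2, 2) == 0
    assert cmp(3, 2) == 1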
77 77
78 78
79 79 @pytest.fixture(scope='session', autouse=True)
80 80 def activate_example_rcextensions(request):
81 81 """
82 82 Patch in an example rcextensions module which verifies passed in kwargs.
83 83 """
84 84 from rhodecode.config import rcextensions
85 85
86 86 old_extensions = rhodecode.EXTENSIONS
87 87 rhodecode.EXTENSIONS = rcextensions
88 88 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
89 89
90 90 @request.addfinalizer
91 91 def cleanup():
92 92 rhodecode.EXTENSIONS = old_extensions
93 93
94 94
95 95 @pytest.fixture()
96 96 def capture_rcextensions():
97 97 """
98 98 Returns the recorded calls to entry points in rcextensions.
99 99 """
100 100 calls = rhodecode.EXTENSIONS.calls
101 101 calls.clear()
102 102 # Note: at this moment it is still the empty dict, but it will be
103 103 # filled during the test run; since it is a reference, this is
104 104 # enough to make it work.
105 105 return calls
106 106
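A usage sketch; the '_push_hook' key is illustrative, since the actual keys depend on which rcextensions entry points fire during the test:

    def test_extension_calls_are_recorded(capture_rcextensions):
        calls = capture_rcextensions
        assert not calls  # cleared at fixture setup
        # ... trigger a push here; afterwards calls['_push_hook'] would
        # hold the kwargs of each recorded invocation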
107 107
108 108 @pytest.fixture(scope='session')
109 109 def http_environ_session():
110 110 """
111 111 Allows "http_environ" to be used in session scope.
112 112 """
113 113 return plain_http_environ()
114 114
115 115
116 116 def plain_http_host_stub():
117 117 """
118 118 Value of HTTP_HOST in the test run.
119 119 """
120 120 return 'example.com:80'
121 121
122 122
123 123 @pytest.fixture()
124 124 def http_host_stub():
125 125 """
126 126 Value of HTTP_HOST in the test run.
127 127 """
128 128 return plain_http_host_stub()
129 129
130 130
131 131 def plain_http_host_only_stub():
132 132 """
133 133 Value of HTTP_HOST in the test run.
134 134 """
135 135 return plain_http_host_stub().split(':')[0]
136 136
137 137
138 138 @pytest.fixture()
139 139 def http_host_only_stub():
140 140 """
141 141 Value of HTTP_HOST in the test run.
142 142 """
143 143 return plain_http_host_only_stub()
144 144
145 145
146 146 def plain_http_environ():
147 147 """
148 148 HTTP extra environ keys.
149 149
150 150 Used by the test application as well as for setting up the pylons
151 151 environment. In the case of the fixture "app" it should be possible
152 152 to override this for a specific test case.
153 153 """
154 154 return {
155 155 'SERVER_NAME': plain_http_host_only_stub(),
156 156 'SERVER_PORT': plain_http_host_stub().split(':')[1],
157 157 'HTTP_HOST': plain_http_host_stub(),
158 158 'HTTP_USER_AGENT': 'rc-test-agent',
159 159 'REQUEST_METHOD': 'GET'
160 160 }
161 161
162 162
163 163 @pytest.fixture()
164 164 def http_environ():
165 165 """
166 166 HTTP extra environ keys.
167 167
168 168 Used by the test application as well as for setting up the pylons
169 169 environment. In the case of the fixture "app" it should be possible
170 170 to override this for a specific test case.
171 171 """
172 172 return plain_http_environ()
173 173
174 174
175 175 @pytest.fixture(scope='session')
176 176 def baseapp(ini_config, vcsserver, http_environ_session):
177 177 from rhodecode.lib.config_utils import get_app_config
178 178 from rhodecode.config.middleware import make_pyramid_app
179 179
180 log.info("Using the RhodeCode configuration:{}".format(ini_config))
180 log.info("Using the RhodeCode configuration:%s", ini_config)
181 181 pyramid.paster.setup_logging(ini_config)
182 182
183 183 settings = get_app_config(ini_config)
184 184 app = make_pyramid_app({'__file__': ini_config}, **settings)
185 185
186 186 return app
187 187
188 188
189 189 @pytest.fixture(scope='function')
190 190 def app(request, config_stub, baseapp, http_environ):
191 191 app = CustomTestApp(
192 192 baseapp,
193 193 extra_environ=http_environ)
194 194 if request.cls:
195 195 request.cls.app = app
196 196 return app
197 197
198 198
199 199 @pytest.fixture(scope='session')
200 200 def app_settings(baseapp, ini_config):
201 201 """
202 202 Settings dictionary used to create the app.
203 203
204 204 Parses the ini file and passes the result through the sanitize and apply
205 205 defaults mechanism in `rhodecode.config.middleware`.
206 206 """
207 207 return baseapp.config.get_settings()
208 208
209 209
210 210 @pytest.fixture(scope='session')
211 211 def db_connection(ini_settings):
212 212 # Initialize the database connection.
213 213 config_utils.initialize_database(ini_settings)
214 214
215 215
216 216 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
217 217
218 218
219 219 def _autologin_user(app, *args):
220 220 session = login_user_session(app, *args)
221 221 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
222 222 return LoginData(csrf_token, session['rhodecode_user'])
223 223
224 224
225 225 @pytest.fixture()
226 226 def autologin_user(app):
227 227 """
228 228 Utility fixture which makes sure that the admin user is logged in
229 229 """
230 230 return _autologin_user(app)
231 231
232 232
233 233 @pytest.fixture()
234 234 def autologin_regular_user(app):
235 235 """
236 236 Utility fixture which makes sure that the regular user is logged in
237 237 """
238 238 return _autologin_user(
239 239 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
240 240
241 241
242 242 @pytest.fixture(scope='function')
243 243 def csrf_token(request, autologin_user):
244 244 return autologin_user.csrf_token
245 245
246 246
247 247 @pytest.fixture(scope='function')
248 248 def xhr_header(request):
249 249 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
250 250
251 251
252 252 @pytest.fixture()
253 253 def real_crypto_backend(monkeypatch):
254 254 """
255 255 Switch the production crypto backend on for this test.
256 256
257 257 During the test run the crypto backend is replaced with a faster
258 258 implementation based on the MD5 algorithm.
259 259 """
260 260 monkeypatch.setattr(rhodecode, 'is_test', False)
261 261
262 262
263 263 @pytest.fixture(scope='class')
264 264 def index_location(request, baseapp):
265 265 index_location = baseapp.config.get_settings()['search.location']
266 266 if request.cls:
267 267 request.cls.index_location = index_location
268 268 return index_location
269 269
270 270
271 271 @pytest.fixture(scope='session', autouse=True)
272 272 def tests_tmp_path(request):
273 273 """
274 274 Create temporary directory to be used during the test session.
275 275 """
276 276 if not os.path.exists(TESTS_TMP_PATH):
277 277 os.makedirs(TESTS_TMP_PATH)
278 278
279 279 if not request.config.getoption('--keep-tmp-path'):
280 280 @request.addfinalizer
281 281 def remove_tmp_path():
282 282 shutil.rmtree(TESTS_TMP_PATH)
283 283
284 284 return TESTS_TMP_PATH
285 285
286 286
287 287 @pytest.fixture()
288 288 def test_repo_group(request):
289 289 """
290 290 Create a temporary repository group, and destroy it automatically
291 291 after usage
292 292 """
293 293 fixture = Fixture()
294 294 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
295 295 repo_group = fixture.create_repo_group(repogroupid)
296 296
297 297 def _cleanup():
298 298 fixture.destroy_repo_group(repogroupid)
299 299
300 300 request.addfinalizer(_cleanup)
301 301 return repo_group
302 302
303 303
304 304 @pytest.fixture()
305 305 def test_user_group(request):
306 306 """
307 307 Create a temporary user group, and destroy it automatically
308 308 after usage
309 309 """
310 310 fixture = Fixture()
311 311 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
312 312 user_group = fixture.create_user_group(usergroupid)
313 313
314 314 def _cleanup():
315 315 fixture.destroy_user_group(user_group)
316 316
317 317 request.addfinalizer(_cleanup)
318 318 return user_group
319 319
320 320
321 321 @pytest.fixture(scope='session')
322 322 def test_repo(request):
323 323 container = TestRepoContainer()
324 324 request.addfinalizer(container._cleanup)
325 325 return container
326 326
327 327
328 328 class TestRepoContainer(object):
329 329 """
330 330 Container for test repositories which are used read only.
331 331
332 332 Repositories will be created on demand and re-used during the lifetime
333 333 of this object.
334 334
335 335 Usage to get the svn test repository "minimal"::
336 336
337 337 test_repo = TestContainer()
338 338 repo = test_repo('minimal', 'svn')
339 339
340 340 """
341 341
342 342 dump_extractors = {
343 343 'git': utils.extract_git_repo_from_dump,
344 344 'hg': utils.extract_hg_repo_from_dump,
345 345 'svn': utils.extract_svn_repo_from_dump,
346 346 }
347 347
348 348 def __init__(self):
349 349 self._cleanup_repos = []
350 350 self._fixture = Fixture()
351 351 self._repos = {}
352 352
353 353 def __call__(self, dump_name, backend_alias, config=None):
354 354 key = (dump_name, backend_alias)
355 355 if key not in self._repos:
356 356 repo = self._create_repo(dump_name, backend_alias, config)
357 357 self._repos[key] = repo.repo_id
358 358 return Repository.get(self._repos[key])
359 359
360 360 def _create_repo(self, dump_name, backend_alias, config):
361 361 repo_name = f'{backend_alias}-{dump_name}'
362 362 backend = get_backend(backend_alias)
363 363 dump_extractor = self.dump_extractors[backend_alias]
364 364 repo_path = dump_extractor(dump_name, repo_name)
365 365
366 366 vcs_repo = backend(repo_path, config=config)
367 367 repo2db_mapper({repo_name: vcs_repo})
368 368
369 369 repo = RepoModel().get_by_repo_name(repo_name)
370 370 self._cleanup_repos.append(repo_name)
371 371 return repo
372 372
373 373 def _cleanup(self):
374 374 for repo_name in reversed(self._cleanup_repos):
375 375 self._fixture.destroy_repo(repo_name)
376 376
377 377
378 378 def backend_base(request, backend_alias, baseapp, test_repo):
379 379 if backend_alias not in request.config.getoption('--backends'):
380 380 pytest.skip("Backend %s not selected." % (backend_alias, ))
381 381
382 382 utils.check_xfail_backends(request.node, backend_alias)
383 383 utils.check_skip_backends(request.node, backend_alias)
384 384
385 385 repo_name = 'vcs_test_%s' % (backend_alias, )
386 386 backend = Backend(
387 387 alias=backend_alias,
388 388 repo_name=repo_name,
389 389 test_name=request.node.name,
390 390 test_repo_container=test_repo)
391 391 request.addfinalizer(backend.cleanup)
392 392 return backend
393 393
394 394
395 395 @pytest.fixture()
396 396 def backend(request, backend_alias, baseapp, test_repo):
397 397 """
398 398 Parametrized fixture which represents a single backend implementation.
399 399
400 400 It respects the option `--backends` to focus the test run on specific
401 401 backend implementations.
402 402
403 403 It also supports `pytest.mark.xfail_backends` to mark tests as failing
404 404 for specific backends. This is intended as a utility for incremental
405 405 development of a new backend implementation.
406 406 """
407 407 return backend_base(request, backend_alias, baseapp, test_repo)
408 408
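For illustration, a test consuming this fixture could look as follows; the xfail_backends marker arguments are assumed from how check_xfail_backends is used above:

    import pytest

    @pytest.mark.xfail_backends('svn')
    def test_repo_has_two_commits(backend):
        repo = backend.create_repo(number_of_commits=2)
        assert len(repo.scm_instance().commit_ids) == 2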
409 409
410 410 @pytest.fixture()
411 411 def backend_git(request, baseapp, test_repo):
412 412 return backend_base(request, 'git', baseapp, test_repo)
413 413
414 414
415 415 @pytest.fixture()
416 416 def backend_hg(request, baseapp, test_repo):
417 417 return backend_base(request, 'hg', baseapp, test_repo)
418 418
419 419
420 420 @pytest.fixture()
421 421 def backend_svn(request, baseapp, test_repo):
422 422 return backend_base(request, 'svn', baseapp, test_repo)
423 423
424 424
425 425 @pytest.fixture()
426 426 def backend_random(backend_git):
427 427 """
428 428 Use this to express that your tests need "a backend".
429 429
430 430 A few of our tests need a backend, so that we can run the code. This
431 431 fixture is intended to be used for such cases. It will pick one of the
432 432 backends and run the tests.
433 433
434 434 The fixture `backend` would run the test multiple times for each
435 435 available backend which is a pure waste of time if the test is
436 436 independent of the backend type.
437 437 """
438 438 # TODO: johbo: Change this to pick a random backend
439 439 return backend_git
440 440
441 441
442 442 @pytest.fixture()
443 443 def backend_stub(backend_git):
444 444 """
445 445 Use this to express that your tests need a backend stub
446 446
447 447 TODO: mikhail: Implement a real stub logic instead of returning
448 448 a git backend
449 449 """
450 450 return backend_git
451 451
452 452
453 453 @pytest.fixture()
454 454 def repo_stub(backend_stub):
455 455 """
456 456 Use this to express that your tests need a repository stub
457 457 """
458 458 return backend_stub.create_repo()
459 459
460 460
461 461 class Backend(object):
462 462 """
463 463 Represents the test configuration for one supported backend
464 464
465 465 Provides easy access to different test repositories based on
466 466 `__getitem__`. Such repositories will only be created once per test
467 467 session.
468 468 """
469 469
470 470 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
471 471 _master_repo = None
472 472 _master_repo_path = ''
473 473 _commit_ids = {}
474 474
475 475 def __init__(self, alias, repo_name, test_name, test_repo_container):
476 476 self.alias = alias
477 477 self.repo_name = repo_name
478 478 self._cleanup_repos = []
479 479 self._test_name = test_name
480 480 self._test_repo_container = test_repo_container
481 481 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
482 482 # Fixture will survive in the end.
483 483 self._fixture = Fixture()
484 484
485 485 def __getitem__(self, key):
486 486 return self._test_repo_container(key, self.alias)
487 487
488 488 def create_test_repo(self, key, config=None):
489 489 return self._test_repo_container(key, self.alias, config)
490 490
491 491 @property
492 492 def repo_id(self):
493 493 # just fake some repo_id
494 494 return self.repo.repo_id
495 495
496 496 @property
497 497 def repo(self):
498 498 """
499 499 Returns the "current" repository. This is the vcs_test repo or the
500 500 last repo which has been created with `create_repo`.
501 501 """
502 502 from rhodecode.model.db import Repository
503 503 return Repository.get_by_repo_name(self.repo_name)
504 504
505 505 @property
506 506 def default_branch_name(self):
507 507 VcsRepository = get_backend(self.alias)
508 508 return VcsRepository.DEFAULT_BRANCH_NAME
509 509
510 510 @property
511 511 def default_head_id(self):
512 512 """
513 513 Returns the default head id of the underlying backend.
514 514
515 515 This will be the default branch name in case the backend does have a
516 516 default branch. In the other cases it will point to a valid head
517 517 which can serve as the base to create a new commit on top of it.
518 518 """
519 519 vcsrepo = self.repo.scm_instance()
520 520 head_id = (
521 521 vcsrepo.DEFAULT_BRANCH_NAME or
522 522 vcsrepo.commit_ids[-1])
523 523 return head_id
524 524
525 525 @property
526 526 def commit_ids(self):
527 527 """
528 528 Returns the list of commits for the last created repository
529 529 """
530 530 return self._commit_ids
531 531
532 532 def create_master_repo(self, commits):
533 533 """
534 534 Create a repository and remember it as a template.
535 535
536 536 This allows to easily create derived repositories to construct
537 537 more complex scenarios for diff, compare and pull requests.
538 538
539 539 Returns a commit map which maps from commit message to raw_id.
540 540 """
541 541 self._master_repo = self.create_repo(commits=commits)
542 542 self._master_repo_path = self._master_repo.repo_full_path
543 543
544 544 return self._commit_ids
545 545
546 546 def create_repo(
547 547 self, commits=None, number_of_commits=0, heads=None,
548 548 name_suffix='', bare=False, **kwargs):
549 549 """
550 550 Create a repository and record it for later cleanup.
551 551
552 552 :param commits: Optional. A sequence of dict instances.
553 553 Will add a commit per entry to the new repository.
554 554 :param number_of_commits: Optional. If set to a number, this number of
555 555 commits will be added to the new repository.
556 556 :param heads: Optional. Can be set to a sequence of commit
557 557 names which shall be pulled in from the master repository.
558 558 :param name_suffix: adds special suffix to generated repo name
559 559 :param bare: set a repo as bare (no checkout)
560 560 """
561 561 self.repo_name = self._next_repo_name() + name_suffix
562 562 repo = self._fixture.create_repo(
563 563 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
564 564 self._cleanup_repos.append(repo.repo_name)
565 565
566 566 commits = commits or [
567 567 {'message': f'Commit {x} of {self.repo_name}'}
568 568 for x in range(number_of_commits)]
569 569 vcs_repo = repo.scm_instance()
570 570 vcs_repo.count()
571 571 self._add_commits_to_repo(vcs_repo, commits)
572 572 if heads:
573 573 self.pull_heads(repo, heads)
574 574
575 575 return repo
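To make the parameters concrete, a hedged sketch of passing an explicit commit sequence (file names and contents are arbitrary, and the backend fixture is assumed in scope):

    from rhodecode.lib.vcs.nodes import FileNode

    commits = [
        {'message': 'init', 'added': [FileNode(b'README.md', content=b'# demo\n')]},
        {'message': 'tweak', 'changed': [FileNode(b'README.md', content=b'# demo v2\n')]},
    ]
    repo = backend.create_repo(commits=commits, name_suffix='_demo')
    # backend.commit_ids now maps 'init' and 'tweak' to their raw ids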
576 576
577 577 def pull_heads(self, repo, heads, do_fetch=False):
578 578 """
579 579 Make sure that repo contains all commits mentioned in `heads`
580 580 """
581 581 vcsrepo = repo.scm_instance()
582 582 vcsrepo.config.clear_section('hooks')
583 583 commit_ids = [self._commit_ids[h] for h in heads]
584 584 if do_fetch:
585 585 vcsrepo.fetch(self._master_repo_path, commit_ids=commit_ids)
586 586 vcsrepo.pull(self._master_repo_path, commit_ids=commit_ids)
587 587
588 588 def create_fork(self):
589 589 repo_to_fork = self.repo_name
590 590 self.repo_name = self._next_repo_name()
591 591 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
592 592 self._cleanup_repos.append(self.repo_name)
593 593 return repo
594 594
595 595 def new_repo_name(self, suffix=''):
596 596 self.repo_name = self._next_repo_name() + suffix
597 597 self._cleanup_repos.append(self.repo_name)
598 598 return self.repo_name
599 599
600 600 def _next_repo_name(self):
601 601 return "%s_%s" % (
602 602 self.invalid_repo_name.sub('_', self._test_name), len(self._cleanup_repos))
603 603
604 604 def ensure_file(self, filename, content=b'Test content\n'):
605 605 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
606 606 commits = [
607 607 {'added': [
608 608 FileNode(filename, content=content),
609 609 ]},
610 610 ]
611 611 self._add_commits_to_repo(self.repo.scm_instance(), commits)
612 612
613 613 def enable_downloads(self):
614 614 repo = self.repo
615 615 repo.enable_downloads = True
616 616 Session().add(repo)
617 617 Session().commit()
618 618
619 619 def cleanup(self):
620 620 for repo_name in reversed(self._cleanup_repos):
621 621 self._fixture.destroy_repo(repo_name)
622 622
623 623 def _add_commits_to_repo(self, repo, commits):
624 624 commit_ids = _add_commits_to_repo(repo, commits)
625 625 if not commit_ids:
626 626 return
627 627 self._commit_ids = commit_ids
628 628
629 629 # Creating refs for Git to allow fetching them from remote repository
630 630 if self.alias == 'git':
631 631 refs = {}
632 632 for message in self._commit_ids:
633 633 cleanup_message = message.replace(' ', '')
634 634 ref_name = f'refs/test-refs/{cleanup_message}'
635 635 refs[ref_name] = self._commit_ids[message]
636 636 self._create_refs(repo, refs)
637 637
638 638 def _create_refs(self, repo, refs):
639 639 for ref_name, ref_val in refs.items():
640 640 repo.set_refs(ref_name, ref_val)
641 641
642 642
643 643 class VcsBackend(object):
644 644 """
645 645 Represents the test configuration for one supported vcs backend.
646 646 """
647 647
648 648 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
649 649
650 650 def __init__(self, alias, repo_path, test_name, test_repo_container):
651 651 self.alias = alias
652 652 self._repo_path = repo_path
653 653 self._cleanup_repos = []
654 654 self._test_name = test_name
655 655 self._test_repo_container = test_repo_container
656 656
657 657 def __getitem__(self, key):
658 658 return self._test_repo_container(key, self.alias).scm_instance()
659 659
660 660 def __repr__(self):
661 661 return f'{self.__class__.__name__}(alias={self.alias}, repo={self._repo_path})'
662 662
663 663 @property
664 664 def repo(self):
665 665 """
666 666 Returns the "current" repository. This is the vcs_test repo or the last
667 667 repo which has been created.
668 668 """
669 669 Repository = get_backend(self.alias)
670 670 return Repository(self._repo_path)
671 671
672 672 @property
673 673 def backend(self):
674 674 """
675 675 Returns the backend implementation class.
676 676 """
677 677 return get_backend(self.alias)
678 678
679 679 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
680 680 bare=False):
681 681 repo_name = self._next_repo_name()
682 682 self._repo_path = get_new_dir(repo_name)
683 683 repo_class = get_backend(self.alias)
684 684 src_url = None
685 685 if _clone_repo:
686 686 src_url = _clone_repo.path
687 687 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
688 688 self._cleanup_repos.append(repo)
689 689
690 690 commits = commits or [
691 691 {'message': 'Commit %s of %s' % (x, repo_name)}
692 692 for x in range(number_of_commits)]
693 693 _add_commits_to_repo(repo, commits)
694 694 return repo
695 695
696 696 def clone_repo(self, repo):
697 697 return self.create_repo(_clone_repo=repo)
698 698
699 699 def cleanup(self):
700 700 for repo in self._cleanup_repos:
701 701 shutil.rmtree(repo.path)
702 702
703 703 def new_repo_path(self):
704 704 repo_name = self._next_repo_name()
705 705 self._repo_path = get_new_dir(repo_name)
706 706 return self._repo_path
707 707
708 708 def _next_repo_name(self):
709 709
710 710 return "{}_{}".format(
711 711 self.invalid_repo_name.sub('_', self._test_name),
712 712 len(self._cleanup_repos)
713 713 )
714 714
715 715 def add_file(self, repo, filename, content='Test content\n'):
716 716 imc = repo.in_memory_commit
717 717 imc.add(FileNode(safe_bytes(filename), content=safe_bytes(content)))
718 718 imc.commit(
719 719 message='Automatic commit from vcsbackend fixture',
720 720 author='Automatic <automatic@rhodecode.com>')
721 721
722 722 def ensure_file(self, filename, content='Test content\n'):
723 723 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
724 724 self.add_file(self.repo, filename, content)
725 725
726 726
727 727 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) -> VcsBackend:
728 728 if backend_alias not in request.config.getoption('--backends'):
729 729 pytest.skip("Backend %s not selected." % (backend_alias, ))
730 730
731 731 utils.check_xfail_backends(request.node, backend_alias)
732 732 utils.check_skip_backends(request.node, backend_alias)
733 733
734 734 repo_name = f'vcs_test_{backend_alias}'
735 735 repo_path = os.path.join(tests_tmp_path, repo_name)
736 736 backend = VcsBackend(
737 737 alias=backend_alias,
738 738 repo_path=repo_path,
739 739 test_name=request.node.name,
740 740 test_repo_container=test_repo)
741 741 request.addfinalizer(backend.cleanup)
742 742 return backend
743 743
744 744
745 745 @pytest.fixture()
746 746 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
747 747 """
748 748 Parametrized fixture which represents a single vcs backend implementation.
749 749
750 750 See the fixture `backend` for more details. This one implements the same
751 751 concept, but on vcs level. So it does not provide model instances etc.
752 752
753 753 Parameters are generated dynamically, see :func:`pytest_generate_tests`
754 754 for how this works.
755 755 """
756 756 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
757 757
758 758
759 759 @pytest.fixture()
760 760 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
761 761 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
762 762
763 763
764 764 @pytest.fixture()
765 765 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
766 766 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
767 767
768 768
769 769 @pytest.fixture()
770 770 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
771 771 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
772 772
773 773
774 774 @pytest.fixture()
775 775 def vcsbackend_stub(vcsbackend_git):
776 776 """
777 777 Use this to express that your test just needs a stub of a vcsbackend.
778 778
779 779 Plan is to eventually implement an in-memory stub to speed tests up.
780 780 """
781 781 return vcsbackend_git
782 782
783 783
784 784 def _add_commits_to_repo(vcs_repo, commits):
785 785 commit_ids = {}
786 786 if not commits:
787 787 return commit_ids
788 788
789 789 imc = vcs_repo.in_memory_commit
790 790
791 791 for idx, commit in enumerate(commits):
792 792 message = str(commit.get('message', f'Commit {idx}'))
793 793
794 794 for node in commit.get('added', []):
795 795 imc.add(FileNode(safe_bytes(node.path), content=node.content))
796 796 for node in commit.get('changed', []):
797 797 imc.change(FileNode(safe_bytes(node.path), content=node.content))
798 798 for node in commit.get('removed', []):
799 799 imc.remove(FileNode(safe_bytes(node.path)))
800 800
801 801 parents = [
802 802 vcs_repo.get_commit(commit_id=commit_ids[p])
803 803 for p in commit.get('parents', [])]
804 804
805 805 operations = ('added', 'changed', 'removed')
806 806 if not any((commit.get(o) for o in operations)):
807 807 imc.add(FileNode(b'file_%b' % safe_bytes(str(idx)), content=safe_bytes(message)))
808 808
809 809 commit = imc.commit(
810 810 message=message,
811 811 author=str(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
812 812 date=commit.get('date'),
813 813 branch=commit.get('branch'),
814 814 parents=parents)
815 815
816 816 commit_ids[commit.message] = commit.raw_id
817 817
818 818 return commit_ids
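As a worked example of the spec this helper consumes: each entry may carry 'added'/'changed'/'removed' node lists plus 'parents' given as earlier commit messages, which are resolved through the accumulating commit_ids map; an entry with no file operations gets an auto-generated file_<idx> node. Names below are illustrative:

    commits = [
        {'message': 'root'},                        # no ops -> auto file_0 node
        {'message': 'child', 'parents': ['root']},  # resolved via commit_ids['root']
    ]
    ids = _add_commits_to_repo(vcs_repo, commits)   # vcs_repo: any scm_instance()
    assert set(ids) == {'root', 'child'}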
819 819
820 820
821 821 @pytest.fixture()
822 822 def reposerver(request):
823 823 """
824 824 Allows serving a backend repository
825 825 """
826 826
827 827 repo_server = RepoServer()
828 828 request.addfinalizer(repo_server.cleanup)
829 829 return repo_server
830 830
831 831
832 832 class RepoServer(object):
833 833 """
834 834 Utility to serve a local repository for the duration of a test case.
835 835
836 836 Supports only Subversion so far.
837 837 """
838 838
839 839 url = None
840 840
841 841 def __init__(self):
842 842 self._cleanup_servers = []
843 843
844 844 def serve(self, vcsrepo):
845 845 if vcsrepo.alias != 'svn':
846 846 raise TypeError("Backend %s not supported" % vcsrepo.alias)
847 847
848 848 proc = subprocess.Popen(
849 849 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
850 850 '--root', vcsrepo.path])
851 851 self._cleanup_servers.append(proc)
852 852 self.url = 'svn://localhost'
853 853
854 854 def cleanup(self):
855 855 for proc in self._cleanup_servers:
856 856 proc.terminate()
857 857
858 858
859 859 @pytest.fixture()
860 860 def pr_util(backend, request, config_stub):
861 861 """
862 862 Utility for tests of models and for functional tests around pull requests.
863 863
864 864 It gives an instance of :class:`PRTestUtility` which provides various
865 865 utility methods around one pull request.
866 866
867 867 This fixture uses `backend` and inherits its parameterization.
868 868 """
869 869
870 870 util = PRTestUtility(backend)
871 871 request.addfinalizer(util.cleanup)
872 872
873 873 return util
874 874
875 875
876 876 class PRTestUtility(object):
877 877
878 878 pull_request = None
879 879 pull_request_id = None
880 880 mergeable_patcher = None
881 881 mergeable_mock = None
882 882 notification_patcher = None
883 883 commit_ids: dict
884 884
885 885 def __init__(self, backend):
886 886 self.backend = backend
887 887
888 888 def create_pull_request(
889 889 self, commits=None, target_head=None, source_head=None,
890 890 revisions=None, approved=False, author=None, mergeable=False,
891 891 enable_notifications=True, name_suffix='', reviewers=None, observers=None,
892 892 title="Test", description="Description"):
893 893 self.set_mergeable(mergeable)
894 894 if not enable_notifications:
895 895 # mock notification side effect
896 896 self.notification_patcher = mock.patch(
897 897 'rhodecode.model.notification.NotificationModel.create')
898 898 self.notification_patcher.start()
899 899
900 900 if not self.pull_request:
901 901 if not commits:
902 902 commits = [
903 903 {'message': 'c1'},
904 904 {'message': 'c2'},
905 905 {'message': 'c3'},
906 906 ]
907 907 target_head = 'c1'
908 908 source_head = 'c2'
909 909 revisions = ['c2']
910 910
911 911 self.commit_ids = self.backend.create_master_repo(commits)
912 912 self.target_repository = self.backend.create_repo(
913 913 heads=[target_head], name_suffix=name_suffix)
914 914 self.source_repository = self.backend.create_repo(
915 915 heads=[source_head], name_suffix=name_suffix)
916 916 self.author = author or UserModel().get_by_username(
917 917 TEST_USER_ADMIN_LOGIN)
918 918
919 919 model = PullRequestModel()
920 920 self.create_parameters = {
921 921 'created_by': self.author,
922 922 'source_repo': self.source_repository.repo_name,
923 923 'source_ref': self._default_branch_reference(source_head),
924 924 'target_repo': self.target_repository.repo_name,
925 925 'target_ref': self._default_branch_reference(target_head),
926 926 'revisions': [self.commit_ids[r] for r in revisions],
927 927 'reviewers': reviewers or self._get_reviewers(),
928 928 'observers': observers or self._get_observers(),
929 929 'title': title,
930 930 'description': description,
931 931 }
932 932 self.pull_request = model.create(**self.create_parameters)
933 933 assert model.get_versions(self.pull_request) == []
934 934
935 935 self.pull_request_id = self.pull_request.pull_request_id
936 936
937 937 if approved:
938 938 self.approve()
939 939
940 940 Session().add(self.pull_request)
941 941 Session().commit()
942 942
943 943 return self.pull_request
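A usage sketch for tests; the expected revisions mirror the fallback commit graph above (c1..c3, target head 'c1', source head 'c2', revisions ['c2']):

    def test_pr_defaults(pr_util):
        pull_request = pr_util.create_pull_request(mergeable=True)
        assert pull_request.revisions == [pr_util.commit_ids['c2']]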
944 944
945 945 def approve(self):
946 946 self.create_status_votes(
947 947 ChangesetStatus.STATUS_APPROVED,
948 948 *self.pull_request.reviewers)
949 949
950 950 def close(self):
951 951 PullRequestModel().close_pull_request(self.pull_request, self.author)
952 952
953 953 def _default_branch_reference(self, commit_message, branch: str = None) -> str:
954 954 default_branch = branch or self.backend.default_branch_name
955 955 message = self.commit_ids[commit_message]
956 956 reference = f'branch:{default_branch}:{message}'
957 957
958 958 return reference
959 959
960 960 def _get_reviewers(self):
961 961 role = PullRequestReviewers.ROLE_REVIEWER
962 962 return [
963 963 (TEST_USER_REGULAR_LOGIN, ['default1'], False, role, []),
964 964 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, role, []),
965 965 ]
966 966
967 967 def _get_observers(self):
968 968 return [
969 969
970 970 ]
971 971
972 972 def update_source_repository(self, head=None, do_fetch=False):
973 973 heads = [head or 'c3']
974 974 self.backend.pull_heads(self.source_repository, heads=heads, do_fetch=do_fetch)
975 975
976 976 def update_target_repository(self, head=None, do_fetch=False):
977 977 heads = [head or 'c3']
978 978 self.backend.pull_heads(self.target_repository, heads=heads, do_fetch=do_fetch)
979 979
980 980 def set_pr_target_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
981 981 full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
982 982 self.pull_request.target_ref = full_ref
983 983 return full_ref
984 984
985 985 def set_pr_source_ref(self, ref_type: str = "branch", ref_name: str = "branch", ref_commit_id: str = "") -> str:
986 986 full_ref = f"{ref_type}:{ref_name}:{ref_commit_id}"
987 987 self.pull_request.source_ref = full_ref
988 988 return full_ref
989 989
990 990 def add_one_commit(self, head=None):
991 991 self.update_source_repository(head=head)
992 992 old_commit_ids = set(self.pull_request.revisions)
993 993 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
994 994 commit_ids = set(self.pull_request.revisions)
995 995 new_commit_ids = commit_ids - old_commit_ids
996 996 assert len(new_commit_ids) == 1
997 997 return new_commit_ids.pop()
998 998
999 999 def remove_one_commit(self):
1000 1000 assert len(self.pull_request.revisions) == 2
1001 1001 source_vcs = self.source_repository.scm_instance()
1002 1002 removed_commit_id = source_vcs.commit_ids[-1]
1003 1003
1004 1004 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1005 1005 # remove the if once that's sorted out.
1006 1006 if self.backend.alias == "git":
1007 1007 kwargs = {'branch_name': self.backend.default_branch_name}
1008 1008 else:
1009 1009 kwargs = {}
1010 1010 source_vcs.strip(removed_commit_id, **kwargs)
1011 1011
1012 1012 PullRequestModel().update_commits(self.pull_request, self.pull_request.author)
1013 1013 assert len(self.pull_request.revisions) == 1
1014 1014 return removed_commit_id
1015 1015
1016 1016 def create_comment(self, linked_to=None):
1017 1017 comment = CommentsModel().create(
1018 1018 text="Test comment",
1019 1019 repo=self.target_repository.repo_name,
1020 1020 user=self.author,
1021 1021 pull_request=self.pull_request)
1022 1022 assert comment.pull_request_version_id is None
1023 1023
1024 1024 if linked_to:
1025 1025 PullRequestModel()._link_comments_to_version(linked_to)
1026 1026
1027 1027 return comment
1028 1028
1029 1029 def create_inline_comment(
1030 1030 self, linked_to=None, line_no='n1', file_path='file_1'):
1031 1031 comment = CommentsModel().create(
1032 1032 text="Test comment",
1033 1033 repo=self.target_repository.repo_name,
1034 1034 user=self.author,
1035 1035 line_no=line_no,
1036 1036 f_path=file_path,
1037 1037 pull_request=self.pull_request)
1038 1038 assert comment.pull_request_version_id is None
1039 1039
1040 1040 if linked_to:
1041 1041 PullRequestModel()._link_comments_to_version(linked_to)
1042 1042
1043 1043 return comment
1044 1044
1045 1045 def create_version_of_pull_request(self):
1046 1046 pull_request = self.create_pull_request()
1047 1047 version = PullRequestModel()._create_version_from_snapshot(
1048 1048 pull_request)
1049 1049 return version
1050 1050
1051 1051 def create_status_votes(self, status, *reviewers):
1052 1052 for reviewer in reviewers:
1053 1053 ChangesetStatusModel().set_status(
1054 1054 repo=self.pull_request.target_repo,
1055 1055 status=status,
1056 1056 user=reviewer.user_id,
1057 1057 pull_request=self.pull_request)
1058 1058
1059 1059 def set_mergeable(self, value):
1060 1060 if not self.mergeable_patcher:
1061 1061 self.mergeable_patcher = mock.patch.object(
1062 1062 VcsSettingsModel, 'get_general_settings')
1063 1063 self.mergeable_mock = self.mergeable_patcher.start()
1064 1064 self.mergeable_mock.return_value = {
1065 1065 'rhodecode_pr_merge_enabled': value}
1066 1066
1067 1067 def cleanup(self):
1068 1068 # In case the source repository is already cleaned up, the pull
1069 1069 # request will already be deleted.
1070 1070 pull_request = PullRequest().get(self.pull_request_id)
1071 1071 if pull_request:
1072 1072 PullRequestModel().delete(pull_request, pull_request.author)
1073 1073 Session().commit()
1074 1074
1075 1075 if self.notification_patcher:
1076 1076 self.notification_patcher.stop()
1077 1077
1078 1078 if self.mergeable_patcher:
1079 1079 self.mergeable_patcher.stop()
1080 1080
1081 1081
1082 1082 @pytest.fixture()
1083 1083 def user_admin(baseapp):
1084 1084 """
1085 1085 Provides the default admin test user as an instance of `db.User`.
1086 1086 """
1087 1087 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1088 1088 return user
1089 1089
1090 1090
1091 1091 @pytest.fixture()
1092 1092 def user_regular(baseapp):
1093 1093 """
1094 1094 Provides the default regular test user as an instance of `db.User`.
1095 1095 """
1096 1096 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1097 1097 return user
1098 1098
1099 1099
1100 1100 @pytest.fixture()
1101 1101 def user_util(request, db_connection):
1102 1102 """
1103 1103 Provides a wired instance of `UserUtility` with integrated cleanup.
1104 1104 """
1105 1105 utility = UserUtility(test_name=request.node.name)
1106 1106 request.addfinalizer(utility.cleanup)
1107 1107 return utility
1108 1108
1109 1109
1110 1110 # TODO: johbo: Split this up into utilities per domain or something similar
1111 1111 class UserUtility(object):
1112 1112
1113 1113 def __init__(self, test_name="test"):
1114 1114 self._test_name = self._sanitize_name(test_name)
1115 1115 self.fixture = Fixture()
1116 1116 self.repo_group_ids = []
1117 1117 self.repos_ids = []
1118 1118 self.user_ids = []
1119 1119 self.user_group_ids = []
1120 1120 self.user_repo_permission_ids = []
1121 1121 self.user_group_repo_permission_ids = []
1122 1122 self.user_repo_group_permission_ids = []
1123 1123 self.user_group_repo_group_permission_ids = []
1124 1124 self.user_user_group_permission_ids = []
1125 1125 self.user_group_user_group_permission_ids = []
1126 1126 self.user_permissions = []
1127 1127
1128 1128 def _sanitize_name(self, name):
1129 1129 for char in ['[', ']']:
1130 1130 name = name.replace(char, '_')
1131 1131 return name
1132 1132
1133 1133 def create_repo_group(
1134 1134 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1135 1135 group_name = "{prefix}_repogroup_{count}".format(
1136 1136 prefix=self._test_name,
1137 1137 count=len(self.repo_group_ids))
1138 1138 repo_group = self.fixture.create_repo_group(
1139 1139 group_name, cur_user=owner)
1140 1140 if auto_cleanup:
1141 1141 self.repo_group_ids.append(repo_group.group_id)
1142 1142 return repo_group
1143 1143
1144 1144 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1145 1145 auto_cleanup=True, repo_type='hg', bare=False):
1146 1146 repo_name = "{prefix}_repository_{count}".format(
1147 1147 prefix=self._test_name,
1148 1148 count=len(self.repos_ids))
1149 1149
1150 1150 repository = self.fixture.create_repo(
1151 1151 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1152 1152 if auto_cleanup:
1153 1153 self.repos_ids.append(repository.repo_id)
1154 1154 return repository
1155 1155
1156 1156 def create_user(self, auto_cleanup=True, **kwargs):
1157 1157 user_name = "{prefix}_user_{count}".format(
1158 1158 prefix=self._test_name,
1159 1159 count=len(self.user_ids))
1160 1160 user = self.fixture.create_user(user_name, **kwargs)
1161 1161 if auto_cleanup:
1162 1162 self.user_ids.append(user.user_id)
1163 1163 return user
1164 1164
1165 1165 def create_additional_user_email(self, user, email):
1166 1166 uem = self.fixture.create_additional_user_email(user=user, email=email)
1167 1167 return uem
1168 1168
1169 1169 def create_user_with_group(self):
1170 1170 user = self.create_user()
1171 1171 user_group = self.create_user_group(members=[user])
1172 1172 return user, user_group
1173 1173
1174 1174 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1175 1175 auto_cleanup=True, **kwargs):
1176 1176 group_name = "{prefix}_usergroup_{count}".format(
1177 1177 prefix=self._test_name,
1178 1178 count=len(self.user_group_ids))
1179 1179 user_group = self.fixture.create_user_group(
1180 1180 group_name, cur_user=owner, **kwargs)
1181 1181
1182 1182 if auto_cleanup:
1183 1183 self.user_group_ids.append(user_group.users_group_id)
1184 1184 if members:
1185 1185 for user in members:
1186 1186 UserGroupModel().add_user_to_group(user_group, user)
1187 1187 return user_group
1188 1188
1189 1189 def grant_user_permission(self, user_name, permission_name):
1190 1190 self.inherit_default_user_permissions(user_name, False)
1191 1191 self.user_permissions.append((user_name, permission_name))
1192 1192
1193 1193 def grant_user_permission_to_repo_group(
1194 1194 self, repo_group, user, permission_name):
1195 1195 permission = RepoGroupModel().grant_user_permission(
1196 1196 repo_group, user, permission_name)
1197 1197 self.user_repo_group_permission_ids.append(
1198 1198 (repo_group.group_id, user.user_id))
1199 1199 return permission
1200 1200
1201 1201 def grant_user_group_permission_to_repo_group(
1202 1202 self, repo_group, user_group, permission_name):
1203 1203 permission = RepoGroupModel().grant_user_group_permission(
1204 1204 repo_group, user_group, permission_name)
1205 1205 self.user_group_repo_group_permission_ids.append(
1206 1206 (repo_group.group_id, user_group.users_group_id))
1207 1207 return permission
1208 1208
1209 1209 def grant_user_permission_to_repo(
1210 1210 self, repo, user, permission_name):
1211 1211 permission = RepoModel().grant_user_permission(
1212 1212 repo, user, permission_name)
1213 1213 self.user_repo_permission_ids.append(
1214 1214 (repo.repo_id, user.user_id))
1215 1215 return permission
1216 1216
1217 1217 def grant_user_group_permission_to_repo(
1218 1218 self, repo, user_group, permission_name):
1219 1219 permission = RepoModel().grant_user_group_permission(
1220 1220 repo, user_group, permission_name)
1221 1221 self.user_group_repo_permission_ids.append(
1222 1222 (repo.repo_id, user_group.users_group_id))
1223 1223 return permission
1224 1224
1225 1225 def grant_user_permission_to_user_group(
1226 1226 self, target_user_group, user, permission_name):
1227 1227 permission = UserGroupModel().grant_user_permission(
1228 1228 target_user_group, user, permission_name)
1229 1229 self.user_user_group_permission_ids.append(
1230 1230 (target_user_group.users_group_id, user.user_id))
1231 1231 return permission
1232 1232
1233 1233 def grant_user_group_permission_to_user_group(
1234 1234 self, target_user_group, user_group, permission_name):
1235 1235 permission = UserGroupModel().grant_user_group_permission(
1236 1236 target_user_group, user_group, permission_name)
1237 1237 self.user_group_user_group_permission_ids.append(
1238 1238 (target_user_group.users_group_id, user_group.users_group_id))
1239 1239 return permission
1240 1240
1241 1241 def revoke_user_permission(self, user_name, permission_name):
1242 1242 self.inherit_default_user_permissions(user_name, True)
1243 1243 UserModel().revoke_perm(user_name, permission_name)
1244 1244
1245 1245 def inherit_default_user_permissions(self, user_name, value):
1246 1246 user = UserModel().get_by_username(user_name)
1247 1247 user.inherit_default_permissions = value
1248 1248 Session().add(user)
1249 1249 Session().commit()
1250 1250
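A short sketch of the grant/cleanup pattern; 'repository.read' is a standard RhodeCode permission name, and everything created here is destroyed by the utility's finalizer:

    def test_user_can_be_granted_read(user_util):
        user = user_util.create_user()
        repo = user_util.create_repo()
        user_util.grant_user_permission_to_repo(repo, user, 'repository.read')
        # user_util.cleanup() later revokes the grant and destroys user and repo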
1251 1251 def cleanup(self):
1252 1252 self._cleanup_permissions()
1253 1253 self._cleanup_repos()
1254 1254 self._cleanup_repo_groups()
1255 1255 self._cleanup_user_groups()
1256 1256 self._cleanup_users()
1257 1257
1258 1258 def _cleanup_permissions(self):
1259 1259 if self.user_permissions:
1260 1260 for user_name, permission_name in self.user_permissions:
1261 1261 self.revoke_user_permission(user_name, permission_name)
1262 1262
1263 1263 for permission in self.user_repo_permission_ids:
1264 1264 RepoModel().revoke_user_permission(*permission)
1265 1265
1266 1266 for permission in self.user_group_repo_permission_ids:
1267 1267 RepoModel().revoke_user_group_permission(*permission)
1268 1268
1269 1269 for permission in self.user_repo_group_permission_ids:
1270 1270 RepoGroupModel().revoke_user_permission(*permission)
1271 1271
1272 1272 for permission in self.user_group_repo_group_permission_ids:
1273 1273 RepoGroupModel().revoke_user_group_permission(*permission)
1274 1274
1275 1275 for permission in self.user_user_group_permission_ids:
1276 1276 UserGroupModel().revoke_user_permission(*permission)
1277 1277
1278 1278 for permission in self.user_group_user_group_permission_ids:
1279 1279 UserGroupModel().revoke_user_group_permission(*permission)
1280 1280
1281 1281 def _cleanup_repo_groups(self):
1282 1282 def _repo_group_compare(first_group_id, second_group_id):
1283 1283 """
1284 1284 Gives higher priority to the groups with the most complex paths
1285 1285 """
1286 1286 first_group = RepoGroup.get(first_group_id)
1287 1287 second_group = RepoGroup.get(second_group_id)
1288 1288 first_group_parts = (
1289 1289 len(first_group.group_name.split('/')) if first_group else 0)
1290 1290 second_group_parts = (
1291 1291 len(second_group.group_name.split('/')) if second_group else 0)
1292 1292 return cmp(second_group_parts, first_group_parts)
1293 1293
1294 1294 sorted_repo_group_ids = sorted(
1295 1295 self.repo_group_ids, key=functools.cmp_to_key(_repo_group_compare))
1296 1296 for repo_group_id in sorted_repo_group_ids:
1297 1297 self.fixture.destroy_repo_group(repo_group_id)
1298 1298
1299 1299 def _cleanup_repos(self):
1300 1300 sorted_repos_ids = sorted(self.repos_ids)
1301 1301 for repo_id in sorted_repos_ids:
1302 1302 self.fixture.destroy_repo(repo_id)
1303 1303
1304 1304 def _cleanup_user_groups(self):
1305 1305 def _user_group_compare(first_group_id, second_group_id):
1306 1306 """
1307 1307 Gives higher priority to the groups with the most complex paths
1308 1308 """
1309 1309 first_group = UserGroup.get(first_group_id)
1310 1310 second_group = UserGroup.get(second_group_id)
1311 1311 first_group_parts = (
1312 1312 len(first_group.users_group_name.split('/'))
1313 1313 if first_group else 0)
1314 1314 second_group_parts = (
1315 1315 len(second_group.users_group_name.split('/'))
1316 1316 if second_group else 0)
1317 1317 return cmp(second_group_parts, first_group_parts)
1318 1318
1319 1319 sorted_user_group_ids = sorted(
1320 1320 self.user_group_ids, key=functools.cmp_to_key(_user_group_compare))
1321 1321 for user_group_id in sorted_user_group_ids:
1322 1322 self.fixture.destroy_user_group(user_group_id)
1323 1323
1324 1324 def _cleanup_users(self):
1325 1325 for user_id in self.user_ids:
1326 1326 self.fixture.destroy_user(user_id)
1327 1327
1328 1328
1329 1329 @pytest.fixture(scope='session')
1330 1330 def testrun():
1331 1331 return {
1332 1332 'uuid': uuid.uuid4(),
1333 1333 'start': datetime.datetime.utcnow().isoformat(),
1334 1334 'timestamp': int(time.time()),
1335 1335 }
1336 1336
1337 1337
1338 1338 class AppenlightClient(object):
1339 1339
1340 1340 url_template = '{url}?protocol_version=0.5'
1341 1341
1342 1342 def __init__(
1343 1343 self, url, api_key, add_server=True, add_timestamp=True,
1344 1344 namespace=None, request=None, testrun=None):
1345 1345 self.url = self.url_template.format(url=url)
1346 1346 self.api_key = api_key
1347 1347 self.add_server = add_server
1348 1348 self.add_timestamp = add_timestamp
1349 1349 self.namespace = namespace
1350 1350 self.request = request
1351 1351 self.server = socket.getfqdn(socket.gethostname())
1352 1352 self.tags_before = {}
1353 1353 self.tags_after = {}
1354 1354 self.stats = []
1355 1355 self.testrun = testrun or {}
1356 1356
1357 1357 def tag_before(self, tag, value):
1358 1358 self.tags_before[tag] = value
1359 1359
1360 1360 def tag_after(self, tag, value):
1361 1361 self.tags_after[tag] = value
1362 1362
1363 1363 def collect(self, data):
1364 1364 if self.add_server:
1365 1365 data.setdefault('server', self.server)
1366 1366 if self.add_timestamp:
1367 1367 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1368 1368 if self.namespace:
1369 1369 data.setdefault('namespace', self.namespace)
1370 1370 if self.request:
1371 1371 data.setdefault('request', self.request)
1372 1372 self.stats.append(data)
1373 1373
1374 1374 def send_stats(self):
1375 1375 tags = [
1376 1376 ('testrun', self.request),
1377 1377 ('testrun.start', self.testrun['start']),
1378 1378 ('testrun.timestamp', self.testrun['timestamp']),
1379 1379 ('test', self.namespace),
1380 1380 ]
1381 1381 for key, value in self.tags_before.items():
1382 1382 tags.append((key + '.before', value))
1383 1383 try:
1384 1384 delta = self.tags_after[key] - value
1385 1385 tags.append((key + '.delta', delta))
1386 1386 except Exception:
1387 1387 pass
1388 1388 for key, value in self.tags_after.items():
1389 1389 tags.append((key + '.after', value))
1390 1390 self.collect({
1391 1391 'message': "Collected tags",
1392 1392 'tags': tags,
1393 1393 })
1394 1394
1395 1395 response = requests.post(
1396 1396 self.url,
1397 1397 headers={
1398 1398 'X-appenlight-api-key': self.api_key},
1399 1399 json=self.stats,
1400 1400 )
1401 1401
1402 1402 if response.status_code != 200:
1403 1403 pprint.pprint(self.stats)
1404 1404 print(response.headers)
1405 1405 print(response.text)
1406 1406 raise Exception('Sending to appenlight failed')
1407 1407
1408 1408
1409 1409 @pytest.fixture()
1410 1410 def gist_util(request, db_connection):
1411 1411 """
1412 1412 Provides a wired instance of `GistUtility` with integrated cleanup.
1413 1413 """
1414 1414 utility = GistUtility()
1415 1415 request.addfinalizer(utility.cleanup)
1416 1416 return utility
1417 1417
1418 1418
1419 1419 class GistUtility(object):
1420 1420 def __init__(self):
1421 1421 self.fixture = Fixture()
1422 1422 self.gist_ids = []
1423 1423
1424 1424 def create_gist(self, **kwargs):
1425 1425 gist = self.fixture.create_gist(**kwargs)
1426 1426 self.gist_ids.append(gist.gist_id)
1427 1427 return gist
1428 1428
1429 1429 def cleanup(self):
1430 1430 for id_ in self.gist_ids:
1431 1431 self.fixture.destroy_gists(str(id_))
1432 1432
1433 1433
1434 1434 @pytest.fixture()
1435 1435 def enabled_backends(request):
1436 1436 backends = request.config.option.backends
1437 1437 return backends[:]
1438 1438
1439 1439
1440 1440 @pytest.fixture()
1441 1441 def settings_util(request, db_connection):
1442 1442 """
1443 1443 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1444 1444 """
1445 1445 utility = SettingsUtility()
1446 1446 request.addfinalizer(utility.cleanup)
1447 1447 return utility
1448 1448
1449 1449
1450 1450 class SettingsUtility(object):
1451 1451 def __init__(self):
1452 1452 self.rhodecode_ui_ids = []
1453 1453 self.rhodecode_setting_ids = []
1454 1454 self.repo_rhodecode_ui_ids = []
1455 1455 self.repo_rhodecode_setting_ids = []
1456 1456
1457 1457 def create_repo_rhodecode_ui(
1458 1458 self, repo, section, value, key=None, active=True, cleanup=True):
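# derive a deterministic ui_key from the inputs when the caller does not supply one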
1459 1459 key = key or sha1_safe(f'{section}{value}{repo.repo_id}')
1460 1460
1461 1461 setting = RepoRhodeCodeUi()
1462 1462 setting.repository_id = repo.repo_id
1463 1463 setting.ui_section = section
1464 1464 setting.ui_value = value
1465 1465 setting.ui_key = key
1466 1466 setting.ui_active = active
1467 1467 Session().add(setting)
1468 1468 Session().commit()
1469 1469
1470 1470 if cleanup:
1471 1471 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1472 1472 return setting
1473 1473
1474 1474 def create_rhodecode_ui(
1475 1475 self, section, value, key=None, active=True, cleanup=True):
1476 1476 key = key or sha1_safe(f'{section}{value}')
1477 1477
1478 1478 setting = RhodeCodeUi()
1479 1479 setting.ui_section = section
1480 1480 setting.ui_value = value
1481 1481 setting.ui_key = key
1482 1482 setting.ui_active = active
1483 1483 Session().add(setting)
1484 1484 Session().commit()
1485 1485
1486 1486 if cleanup:
1487 1487 self.rhodecode_ui_ids.append(setting.ui_id)
1488 1488 return setting
1489 1489
1490 1490 def create_repo_rhodecode_setting(
1491 1491 self, repo, name, value, type_, cleanup=True):
1492 1492 setting = RepoRhodeCodeSetting(
1493 1493 repo.repo_id, key=name, val=value, type=type_)
1494 1494 Session().add(setting)
1495 1495 Session().commit()
1496 1496
1497 1497 if cleanup:
1498 1498 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1499 1499 return setting
1500 1500
1501 1501 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1502 1502 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1503 1503 Session().add(setting)
1504 1504 Session().commit()
1505 1505
1506 1506 if cleanup:
1507 1507 self.rhodecode_setting_ids.append(setting.app_settings_id)
1508 1508
1509 1509 return setting
1510 1510
1511 1511 def cleanup(self):
1512 1512 for id_ in self.rhodecode_ui_ids:
1513 1513 setting = RhodeCodeUi.get(id_)
1514 1514 Session().delete(setting)
1515 1515
1516 1516 for id_ in self.rhodecode_setting_ids:
1517 1517 setting = RhodeCodeSetting.get(id_)
1518 1518 Session().delete(setting)
1519 1519
1520 1520 for id_ in self.repo_rhodecode_ui_ids:
1521 1521 setting = RepoRhodeCodeUi.get(id_)
1522 1522 Session().delete(setting)
1523 1523
1524 1524 for id_ in self.repo_rhodecode_setting_ids:
1525 1525 setting = RepoRhodeCodeSetting.get(id_)
1526 1526 Session().delete(setting)
1527 1527
1528 1528 Session().commit()
1529 1529
1530 1530
1531 1531 @pytest.fixture()
1532 1532 def no_notifications(request):
1533 1533 notification_patcher = mock.patch(
1534 1534 'rhodecode.model.notification.NotificationModel.create')
1535 1535 notification_patcher.start()
1536 1536 request.addfinalizer(notification_patcher.stop)
1537 1537
1538 1538
1539 1539 @pytest.fixture(scope='session')
1540 1540 def repeat(request):
1541 1541 """
1542 1542 Tests derive their number of repetitions from this fixture.
1543 1543
1544 1544 Slower tests may divide it by 10 or 100. The value is chosen so that the
1545 1545 default test suite does not become too slow.
1546 1546 """
1547 1547 return request.config.getoption('--repeat')
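# hypothetical usage sketch (not part of this module): a perf-style test would
# take `repeat` as a fixture argument and loop `for _ in range(repeat // 10):`
# around a slower operation under test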
1548 1548
1549 1549
1550 1550 @pytest.fixture()
1551 1551 def rhodecode_fixtures():
1552 1552 return Fixture()
1553 1553
1554 1554
1555 1555 @pytest.fixture()
1556 1556 def context_stub():
1557 1557 """
1558 1558 Stub context object.
1559 1559 """
1560 1560 context = pyramid.testing.DummyResource()
1561 1561 return context
1562 1562
1563 1563
1564 1564 @pytest.fixture()
1565 1565 def request_stub():
1566 1566 """
1567 1567 Stub request object.
1568 1568 """
1569 1569 from rhodecode.lib.base import bootstrap_request
1570 1570 request = bootstrap_request(scheme='https')
1571 1571 return request
1572 1572
1573 1573
1574 1574 @pytest.fixture()
1575 1575 def config_stub(request, request_stub):
1576 1576 """
1577 1577 Set up pyramid.testing and return the Configurator.
1578 1578 """
1579 1579 from rhodecode.lib.base import bootstrap_config
1580 1580 config = bootstrap_config(request=request_stub)
1581 1581
1582 1582 @request.addfinalizer
1583 1583 def cleanup():
1584 1584 pyramid.testing.tearDown()
1585 1585
1586 1586 return config
1587 1587
1588 1588
1589 1589 @pytest.fixture()
1590 1590 def StubIntegrationType():
1591 1591 class _StubIntegrationType(IntegrationTypeBase):
1592 1592 """ Test integration type class """
1593 1593
1594 1594 key = 'test'
1595 1595 display_name = 'Test integration type'
1596 1596 description = 'A test integration type for testing'
1597 1597
1598 1598 @classmethod
1599 1599 def icon(cls):
1600 1600 return 'test_icon_html_image'
1601 1601
1602 1602 def __init__(self, settings):
1603 1603 super().__init__(settings)
1604 1604 self.sent_events = [] # for testing
1605 1605
1606 1606 def send_event(self, event):
1607 1607 self.sent_events.append(event)
1608 1608
1609 1609 def settings_schema(self):
1610 1610 class SettingsSchema(colander.Schema):
1611 1611 test_string_field = colander.SchemaNode(
1612 1612 colander.String(),
1613 1613 missing=colander.required,
1614 1614 title='test string field',
1615 1615 )
1616 1616 test_int_field = colander.SchemaNode(
1617 1617 colander.Int(),
1618 1618 title='some integer setting',
1619 1619 )
1620 1620 return SettingsSchema()
1621 1621
1622 1622
1623 1623 integration_type_registry.register_integration_type(_StubIntegrationType)
1624 1624 return _StubIntegrationType
1625 1625
1626 1626
1627 1627 @pytest.fixture()
1628 1628 def stub_integration_settings():
1629 1629 return {
1630 1630 'test_string_field': 'some data',
1631 1631 'test_int_field': 100,
1632 1632 }
1633 1633
1634 1634
1635 1635 @pytest.fixture()
1636 1636 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1637 1637 stub_integration_settings):
1638 1638 integration = IntegrationModel().create(
1639 1639 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1640 1640 name='test repo integration',
1641 1641 repo=repo_stub, repo_group=None, child_repos_only=None)
1642 1642
1643 1643 @request.addfinalizer
1644 1644 def cleanup():
1645 1645 IntegrationModel().delete(integration)
1646 1646
1647 1647 return integration
1648 1648
1649 1649
1650 1650 @pytest.fixture()
1651 1651 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1652 1652 stub_integration_settings):
1653 1653 integration = IntegrationModel().create(
1654 1654 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1655 1655 name='test repogroup integration',
1656 1656 repo=None, repo_group=test_repo_group, child_repos_only=True)
1657 1657
1658 1658 @request.addfinalizer
1659 1659 def cleanup():
1660 1660 IntegrationModel().delete(integration)
1661 1661
1662 1662 return integration
1663 1663
1664 1664
1665 1665 @pytest.fixture()
1666 1666 def repogroup_recursive_integration_stub(request, test_repo_group,
1667 1667 StubIntegrationType, stub_integration_settings):
1668 1668 integration = IntegrationModel().create(
1669 1669 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1670 1670 name='test recursive repogroup integration',
1671 1671 repo=None, repo_group=test_repo_group, child_repos_only=False)
1672 1672
1673 1673 @request.addfinalizer
1674 1674 def cleanup():
1675 1675 IntegrationModel().delete(integration)
1676 1676
1677 1677 return integration
1678 1678
1679 1679
1680 1680 @pytest.fixture()
1681 1681 def global_integration_stub(request, StubIntegrationType,
1682 1682 stub_integration_settings):
1683 1683 integration = IntegrationModel().create(
1684 1684 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1685 1685 name='test global integration',
1686 1686 repo=None, repo_group=None, child_repos_only=None)
1687 1687
1688 1688 @request.addfinalizer
1689 1689 def cleanup():
1690 1690 IntegrationModel().delete(integration)
1691 1691
1692 1692 return integration
1693 1693
1694 1694
1695 1695 @pytest.fixture()
1696 1696 def root_repos_integration_stub(request, StubIntegrationType,
1697 1697 stub_integration_settings):
1698 1698 integration = IntegrationModel().create(
1699 1699 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1700 1700 name='test global integration',
1701 1701 repo=None, repo_group=None, child_repos_only=True)
1702 1702
1703 1703 @request.addfinalizer
1704 1704 def cleanup():
1705 1705 IntegrationModel().delete(integration)
1706 1706
1707 1707 return integration
1708 1708
1709 1709
1710 1710 @pytest.fixture()
1711 1711 def local_dt_to_utc():
1712 1712 def _factory(dt):
1713 1713 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1714 1714 dateutil.tz.tzutc()).replace(tzinfo=None)
1715 1715 return _factory
1716 1716
1717 1717
1718 1718 @pytest.fixture()
1719 1719 def disable_anonymous_user(request, baseapp):
1720 1720 set_anonymous_access(False)
1721 1721
1722 1722 @request.addfinalizer
1723 1723 def cleanup():
1724 1724 set_anonymous_access(True)
1725 1725
1726 1726
1727 1727 @pytest.fixture(scope='module')
1728 1728 def rc_fixture(request):
1729 1729 return Fixture()
1730 1730
1731 1731
1732 1732 @pytest.fixture()
1733 1733 def repo_groups(request):
1734 1734 fixture = Fixture()
1735 1735
1736 1736 session = Session()
1737 1737 zombie_group = fixture.create_repo_group('zombie')
1738 1738 parent_group = fixture.create_repo_group('parent')
1739 1739 child_group = fixture.create_repo_group('parent/child')
1740 1740 groups_in_db = session.query(RepoGroup).all()
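# sanity check: exactly the three groups created above exist, with the
# parent/child relationship wired up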
1741 1741 assert len(groups_in_db) == 3
1742 1742 assert child_group.group_parent_id == parent_group.group_id
1743 1743
1744 1744 @request.addfinalizer
1745 1745 def cleanup():
1746 1746 fixture.destroy_repo_group(zombie_group)
1747 1747 fixture.destroy_repo_group(child_group)
1748 1748 fixture.destroy_repo_group(parent_group)
1749 1749
1750 1750 return zombie_group, parent_group, child_group
@@ -1,363 +1,364 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import logging
21 21 import io
22 22
23 23 import mock
24 24 import msgpack
25 25 import pytest
26 26 import tempfile
27 27
28 28 from rhodecode.lib.hook_daemon import http_hooks_deamon
29 29 from rhodecode.lib.hook_daemon import celery_hooks_deamon
30 30 from rhodecode.lib.hook_daemon import hook_module
31 31 from rhodecode.lib.hook_daemon import base as hook_base
32 32 from rhodecode.lib.str_utils import safe_bytes
33 33 from rhodecode.tests.utils import assert_message_in_log
34 34 from rhodecode.lib.ext_json import json
35 35
36 36 test_proto = http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO
37 37
38 38
39 39 class TestHooks(object):
40 40 def test_hooks_can_be_used_as_a_context_manager(self):
41 41 hooks = hook_module.Hooks()
42 42 with hooks as return_value:
43 43 pass
44 44 assert hooks == return_value
45 45
46 46
47 47 class TestHooksHttpHandler(object):
48 48 def test_read_request_parses_method_name_and_arguments(self):
49 49 data = {
50 50 'method': 'test',
51 51 'extras': {
52 52 'param1': 1,
53 53 'param2': 'a'
54 54 }
55 55 }
56 56 request = self._generate_post_request(data)
57 57 hooks_patcher = mock.patch.object(
58 58 hook_module.Hooks, data['method'], create=True, return_value=1)
59 59
60 60 with hooks_patcher as hooks_mock:
61 61 handler = http_hooks_deamon.HooksHttpHandler
62 62 handler.DEFAULT_HOOKS_PROTO = test_proto
63 63 handler.wbufsize = 10240
64 64 MockServer(handler, request)
65 65
66 66 hooks_mock.assert_called_once_with(data['extras'])
67 67
68 68 def test_hooks_serialized_result_is_returned(self):
69 69 request = self._generate_post_request({})
70 70 rpc_method = 'test'
71 71 hook_result = {
72 72 'first': 'one',
73 73 'second': 2
74 74 }
75 75 extras = {}
76 76
77 77 # patch our _read_request to return the test method and the proto in use
78 78 read_patcher = mock.patch.object(
79 79 http_hooks_deamon.HooksHttpHandler, '_read_request',
80 80 return_value=(test_proto, rpc_method, extras))
81 81
82 82 # patch Hooks instance to return hook_result data on 'test' call
83 83 hooks_patcher = mock.patch.object(
84 84 hook_module.Hooks, rpc_method, create=True,
85 85 return_value=hook_result)
86 86
87 87 with read_patcher, hooks_patcher:
88 88 handler = http_hooks_deamon.HooksHttpHandler
89 89 handler.DEFAULT_HOOKS_PROTO = test_proto
90 90 handler.wbufsize = 10240
91 91 server = MockServer(handler, request)
92 92
93 93 expected_result = http_hooks_deamon.HooksHttpHandler.serialize_data(hook_result)
94 94
95 95 server.request.output_stream.seek(0)
96 96 assert server.request.output_stream.readlines()[-1] == expected_result
97 97
98 98 def test_exception_is_returned_in_response(self):
99 99 request = self._generate_post_request({})
100 100 rpc_method = 'test'
101 101
102 102 read_patcher = mock.patch.object(
103 103 http_hooks_deamon.HooksHttpHandler, '_read_request',
104 104 return_value=(test_proto, rpc_method, {}))
105 105
106 106 hooks_patcher = mock.patch.object(
107 107 hook_module.Hooks, rpc_method, create=True,
108 108 side_effect=Exception('Test exception'))
109 109
110 110 with read_patcher, hooks_patcher:
111 111 handler = http_hooks_deamon.HooksHttpHandler
112 112 handler.DEFAULT_HOOKS_PROTO = test_proto
113 113 handler.wbufsize = 10240
114 114 server = MockServer(handler, request)
115 115
116 116 server.request.output_stream.seek(0)
117 117 data = server.request.output_stream.readlines()
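# the first lines are the HTTP status line and headers; the serialized
# exception payload starts after them (index 5 in this fixed response)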
118 118 msgpack_data = b''.join(data[5:])
119 119 org_exc = http_hooks_deamon.HooksHttpHandler.deserialize_data(msgpack_data)
120 120 expected_result = {
121 121 'exception': 'Exception',
122 122 'exception_traceback': org_exc['exception_traceback'],
123 123 'exception_args': ['Test exception']
124 124 }
125 125 assert org_exc == expected_result
126 126
127 127 def test_log_message_writes_to_debug_log(self, caplog):
128 128 ip_port = ('0.0.0.0', 8888)
129 129 handler = http_hooks_deamon.HooksHttpHandler(MockRequest('POST /'), ip_port, mock.Mock())
130 130 fake_date = '1/Nov/2015 00:00:00'
131 131 date_patcher = mock.patch.object(
132 132 handler, 'log_date_time_string', return_value=fake_date)
133 133
134 134 with date_patcher, caplog.at_level(logging.DEBUG):
135 135 handler.log_message('Some message %d, %s', 123, 'string')
136 136
137 137 expected_message = f"HOOKS: client={ip_port} - - [{fake_date}] Some message 123, string"
138 138
139 139 assert_message_in_log(
140 140 caplog.records, expected_message,
141 141 levelno=logging.DEBUG, module='http_hooks_deamon')
142 142
143 143 def _generate_post_request(self, data, proto=test_proto):
144 144 if proto == http_hooks_deamon.HooksHttpHandler.MSGPACK_HOOKS_PROTO:
145 145 payload = msgpack.packb(data)
146 146 else:
147 147 payload = json.dumps(data)
148 148
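# build raw HTTP/1.0 POST bytes with an explicit Content-Length header so the
# handler under test can read the msgpack/json body straight off the socket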
149 149 return b'POST / HTTP/1.0\nContent-Length: %d\n\n%b' % (
150 150 len(payload), payload)
151 151
152 152
153 153 class TestThreadedHookCallbackDaemon(object):  # Test* prefix so pytest collects these cases
154 154 def test_constructor_calls_prepare(self):
155 155 prepare_daemon_patcher = mock.patch.object(
156 156 http_hooks_deamon.ThreadedHookCallbackDaemon, '_prepare')
157 157 with prepare_daemon_patcher as prepare_daemon_mock:
158 158 http_hooks_deamon.ThreadedHookCallbackDaemon()
159 159 prepare_daemon_mock.assert_called_once_with()
160 160
161 161 def test_run_is_called_on_context_start(self):
162 162 patchers = mock.patch.multiple(
163 163 http_hooks_deamon.ThreadedHookCallbackDaemon,
164 164 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
165 165
166 166 with patchers as mocks:
167 167 daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
168 168 with daemon as daemon_context:
169 169 pass
170 170 mocks['_run'].assert_called_once_with()
171 171 assert daemon_context == daemon
172 172
173 173 def test_stop_is_called_on_context_exit(self):
174 174 patchers = mock.patch.multiple(
175 175 http_hooks_deamon.ThreadedHookCallbackDaemon,
176 176 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
177 177
178 178 with patchers as mocks:
179 179 daemon = http_hooks_deamon.ThreadedHookCallbackDaemon()
180 180 with daemon as daemon_context:
181 181 assert mocks['_stop'].call_count == 0
182 182
183 183 mocks['_stop'].assert_called_once_with()
184 184 assert daemon_context == daemon
185 185
186 186
187 187 class TestHttpHooksCallbackDaemon(object):
188 188 def test_hooks_callback_generates_new_port(self, caplog):
189 189 with caplog.at_level(logging.DEBUG):
190 190 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
191 191 assert daemon._daemon.server_address == ('127.0.0.1', 8881)
192 192
193 193 with caplog.at_level(logging.DEBUG):
194 194 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host=None, port=None)
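# without a pinned host/port the daemon binds an OS-assigned ephemeral port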
195 195 assert daemon._daemon.server_address[1] in range(0, 65536)  # valid TCP port range
196 196 assert daemon._daemon.server_address[0] != '127.0.0.1'
197 197
198 198 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
199 199 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
200 200 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
201 201 assert daemon._daemon == tcp_server
202 202
203 203 _, port = tcp_server.server_address
204 204
205 205 msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
206 206 f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
207 207 assert_message_in_log(
208 208 caplog.records, msg, levelno=logging.DEBUG, module='http_hooks_deamon')
209 209
210 210 def test_prepare_inits_hooks_uri_and_logs_it(
211 211 self, tcp_server, caplog):
212 212 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
213 213 daemon = http_hooks_deamon.HttpHooksCallbackDaemon(host='127.0.0.1', port=8881)
214 214
215 215 _, port = tcp_server.server_address
216 216 expected_uri = '{}:{}'.format('127.0.0.1', port)
217 217 assert daemon.hooks_uri == expected_uri
218 218
219 219 msg = f"HOOKS: 127.0.0.1:{port} Preparing HTTP callback daemon registering " \
220 220 f"hook object: <class 'rhodecode.lib.hook_daemon.http_hooks_deamon.HooksHttpHandler'>"
221 221
222 222 assert_message_in_log(
223 223 caplog.records, msg,
224 224 levelno=logging.DEBUG, module='http_hooks_deamon')
225 225
226 226 def test_run_creates_a_thread(self, tcp_server):
227 227 thread = mock.Mock()
228 228
229 229 with self._tcp_patcher(tcp_server):
230 230 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
231 231
232 232 with self._thread_patcher(thread) as thread_mock:
233 233 daemon._run()
234 234
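# the callback daemon is expected to serve requests on a daemonized background
# thread; poll_interval bounds how long a shutdown() request can go unnoticed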
235 235 thread_mock.assert_called_once_with(
236 236 target=tcp_server.serve_forever,
237 237 kwargs={'poll_interval': daemon.POLL_INTERVAL})
238 238 assert thread.daemon is True
239 239 thread.start.assert_called_once_with()
240 240
241 241 def test_run_logs(self, tcp_server, caplog):
242 242
243 243 with self._tcp_patcher(tcp_server):
244 244 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
245 245
246 246 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
247 247 daemon._run()
248 248
249 249 assert_message_in_log(
250 250 caplog.records,
251 251 'Running thread-based loop of callback daemon in background',
252 252 levelno=logging.DEBUG, module='http_hooks_deamon')
253 253
254 254 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
255 255 thread = mock.Mock()
256 256
257 257 with self._tcp_patcher(tcp_server):
258 258 daemon = http_hooks_deamon.HttpHooksCallbackDaemon()
259 259
260 260 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
261 261 with daemon:
262 262 assert daemon._daemon == tcp_server
263 263 assert daemon._callback_thread == thread
264 264
265 265 assert daemon._daemon is None
266 266 assert daemon._callback_thread is None
267 267 tcp_server.shutdown.assert_called_with()
268 268 thread.join.assert_called_once_with()
269 269
270 270 assert_message_in_log(
271 271 caplog.records, 'Waiting for background thread to finish.',
272 272 levelno=logging.DEBUG, module='http_hooks_deamon')
273 273
274 274 def _tcp_patcher(self, tcp_server):
275 275 return mock.patch.object(
276 276 http_hooks_deamon, 'TCPServer', return_value=tcp_server)
277 277
278 278 def _thread_patcher(self, thread):
279 279 return mock.patch.object(
280 280 http_hooks_deamon.threading, 'Thread', return_value=thread)
281 281
282 282
283 283 class TestPrepareHooksDaemon(object):
284 284
285 285 @pytest.mark.parametrize('protocol', ('celery',))
286 286 def test_returns_celery_hooks_callback_daemon_when_celery_protocol_specified(
287 287 self, protocol):
288 288 with tempfile.NamedTemporaryFile(mode='w') as temp_file:
289 289 temp_file.write("[app:main]\ncelery.broker_url = redis://redis/0\n"
290 290 "celery.result_backend = redis://redis/0")
291 291 temp_file.flush()
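# the celery daemon factory is expected to read celery.broker_url and
# celery.result_backend from the ini file referenced by extras['config']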
292 292 expected_extras = {'config': temp_file.name}
293 293 callback, extras = hook_base.prepare_callback_daemon(
294 294 expected_extras, protocol=protocol, host='')
295 295 assert isinstance(callback, celery_hooks_deamon.CeleryHooksCallbackDaemon)
296 296
297 297 @pytest.mark.parametrize('protocol, expected_class', (
298 298 ('http', http_hooks_deamon.HttpHooksCallbackDaemon),
299 299 ))
300 300 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
301 301 self, protocol, expected_class):
302 302 expected_extras = {
303 303 'extra1': 'value1',
304 304 'txn_id': 'txnid2',
305 305 'hooks_protocol': protocol.lower(),
306 306 'task_backend': '',
307 307 'task_queue': '',
308 308 'repo_store': '/var/opt/rhodecode_repo_store',
309 309 'repository': 'rhodecode',
310 310 }
311 311 from rhodecode import CONFIG
312 312 CONFIG['vcs.svn.redis_conn'] = 'redis://redis:6379/0'
313 313 callback, extras = hook_base.prepare_callback_daemon(
314 314 expected_extras.copy(), protocol=protocol, host='127.0.0.1',
315 315 txn_id='txnid2')
316 316 assert isinstance(callback, expected_class)
317 317 extras.pop('hooks_uri')
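# hooks_uri and time are generated at runtime; normalize them before the
# whole-dict comparison below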
318 318 expected_extras['time'] = extras['time']
319 319 assert extras == expected_extras
320 320
321 321 @pytest.mark.parametrize('protocol', (
322 322 'invalid',
323 323 'Http',
324 324 'HTTP',
325 'celerY'
325 326 ))
326 327 def test_raises_on_invalid_protocol(self, protocol):
327 328 expected_extras = {
328 329 'extra1': 'value1',
329 330 'hooks_protocol': protocol.lower()
330 331 }
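# protocol matching is exact, so mixed-case spellings such as 'Http' or
# 'celerY' must be rejected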
331 332 with pytest.raises(Exception):
332 333 callback, extras = hook_base.prepare_callback_daemon(
333 334 expected_extras.copy(),
334 335 protocol=protocol, host='127.0.0.1')
335 336
336 337
337 338 class MockRequest(object):
338 339
339 340 def __init__(self, request):
340 341 self.request = request
341 342 self.input_stream = io.BytesIO(safe_bytes(self.request))
342 343 self.output_stream = io.BytesIO()  # make it un-closable so tests can inspect the output
343 344 self.output_stream.close = lambda: None
344 345
345 346 def makefile(self, mode, *args, **kwargs):
346 347 return self.output_stream if mode == 'wb' else self.input_stream
347 348
348 349
349 350 class MockServer(object):
350 351
351 352 def __init__(self, handler_cls, request):
352 353 ip_port = ('0.0.0.0', 8888)
353 354 self.request = MockRequest(request)
354 355 self.server_address = ip_port
355 356 self.handler = handler_cls(self.request, ip_port, self)
356 357
357 358
358 359 @pytest.fixture()
359 360 def tcp_server():
360 361 server = mock.Mock()
361 362 server.server_address = ('127.0.0.1', 8881)
362 363 server.wbufsize = 1024
363 364 return server
@@ -1,978 +1,979 b''
1 1
2 2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import mock
21 21 import pytest
22 22 import textwrap
23 23
24 24 import rhodecode
25 25 from rhodecode.lib.vcs.backends import get_backend
26 26 from rhodecode.lib.vcs.backends.base import (
27 27 MergeResponse, MergeFailureReason, Reference)
28 28 from rhodecode.lib.vcs.exceptions import RepositoryError
29 29 from rhodecode.lib.vcs.nodes import FileNode
30 30 from rhodecode.model.comment import CommentsModel
31 31 from rhodecode.model.db import PullRequest, Session
32 32 from rhodecode.model.pull_request import PullRequestModel
33 33 from rhodecode.model.user import UserModel
34 34 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
35 35 from rhodecode.lib.str_utils import safe_str
36 36
37 37 pytestmark = [
38 38 pytest.mark.backends("git", "hg"),
39 39 ]
40 40
41 41
42 42 @pytest.mark.usefixtures('config_stub')
43 43 class TestPullRequestModel(object):
44 44
45 45 @pytest.fixture()
46 46 def pull_request(self, request, backend, pr_util):
47 47 """
48 48 A pull request combined with multiples patches.
49 49 """
50 50 BackendClass = get_backend(backend.alias)
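# by default the mocked backend merge fails with an unknown error; individual
# tests override merge_mock.return_value as needed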
51 51 merge_resp = MergeResponse(
52 52 False, False, None, MergeFailureReason.UNKNOWN,
53 53 metadata={'exception': 'MockError'})
54 54 self.merge_patcher = mock.patch.object(
55 55 BackendClass, 'merge', return_value=merge_resp)
56 56 self.workspace_remove_patcher = mock.patch.object(
57 57 BackendClass, 'cleanup_merge_workspace')
58 58
59 59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 60 self.merge_mock = self.merge_patcher.start()
61 61 self.comment_patcher = mock.patch(
62 62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 63 self.comment_patcher.start()
64 64 self.notification_patcher = mock.patch(
65 65 'rhodecode.model.notification.NotificationModel.create')
66 66 self.notification_patcher.start()
67 67 self.helper_patcher = mock.patch(
68 68 'rhodecode.lib.helpers.route_path')
69 69 self.helper_patcher.start()
70 70
71 71 self.hook_patcher = mock.patch.object(PullRequestModel,
72 72 'trigger_pull_request_hook')
73 73 self.hook_mock = self.hook_patcher.start()
74 74
75 75 self.invalidation_patcher = mock.patch(
76 76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 77 self.invalidation_mock = self.invalidation_patcher.start()
78 78
79 79 self.pull_request = pr_util.create_pull_request(
80 80 mergeable=True, name_suffix=u'ąć')
81 81 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 82 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84 84 self.repo_id = self.pull_request.target_repo.repo_id
85 85
86 86 @request.addfinalizer
87 87 def cleanup_pull_request():
88 88 calls = [mock.call(
89 89 self.pull_request, self.pull_request.author, 'create')]
90 90 self.hook_mock.assert_has_calls(calls)
91 91
92 92 self.workspace_remove_patcher.stop()
93 93 self.merge_patcher.stop()
94 94 self.comment_patcher.stop()
95 95 self.notification_patcher.stop()
96 96 self.helper_patcher.stop()
97 97 self.hook_patcher.stop()
98 98 self.invalidation_patcher.stop()
99 99
100 100 return self.pull_request
101 101
102 102 def test_get_all(self, pull_request):
103 103 prs = PullRequestModel().get_all(pull_request.target_repo)
104 104 assert isinstance(prs, list)
105 105 assert len(prs) == 1
106 106
107 107 def test_count_all(self, pull_request):
108 108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
109 109 assert pr_count == 1
110 110
111 111 def test_get_awaiting_review(self, pull_request):
112 112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
113 113 assert isinstance(prs, list)
114 114 assert len(prs) == 1
115 115
116 116 def test_count_awaiting_review(self, pull_request):
117 117 pr_count = PullRequestModel().count_awaiting_review(
118 118 pull_request.target_repo)
119 119 assert pr_count == 1
120 120
121 121 def test_get_awaiting_my_review(self, pull_request):
122 122 PullRequestModel().update_reviewers(
123 123 pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
124 124 pull_request.author)
125 125 Session().commit()
126 126
127 127 prs = PullRequestModel().get_awaiting_my_review(
128 128 pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
129 129 assert isinstance(prs, list)
130 130 assert len(prs) == 1
131 131
132 132 def test_count_awaiting_my_review(self, pull_request):
133 133 PullRequestModel().update_reviewers(
134 134 pull_request, [(pull_request.author, ['author'], False, 'reviewer', [])],
135 135 pull_request.author)
136 136 Session().commit()
137 137
138 138 pr_count = PullRequestModel().count_awaiting_my_review(
139 139 pull_request.target_repo.repo_name, user_id=pull_request.author.user_id)
140 140 assert pr_count == 1
141 141
142 142 def test_delete_calls_cleanup_merge(self, pull_request):
143 143 repo_id = pull_request.target_repo.repo_id
144 144 PullRequestModel().delete(pull_request, pull_request.author)
145 145 Session().commit()
146 146
147 147 self.workspace_remove_mock.assert_called_once_with(
148 148 repo_id, self.workspace_id)
149 149
150 150 def test_close_calls_cleanup_and_hook(self, pull_request):
151 151 PullRequestModel().close_pull_request(
152 152 pull_request, pull_request.author)
153 153 Session().commit()
154 154
155 155 repo_id = pull_request.target_repo.repo_id
156 156
157 157 self.workspace_remove_mock.assert_called_once_with(
158 158 repo_id, self.workspace_id)
159 159 self.hook_mock.assert_called_with(
160 160 self.pull_request, self.pull_request.author, 'close')
161 161
162 162 def test_merge_status(self, pull_request):
163 163 self.merge_mock.return_value = MergeResponse(
164 164 True, False, None, MergeFailureReason.NONE)
165 165
166 166 assert pull_request._last_merge_source_rev is None
167 167 assert pull_request._last_merge_target_rev is None
168 168 assert pull_request.last_merge_status is None
169 169
170 170 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
171 171 assert status is True
172 172 assert msg == 'This pull request can be automatically merged.'
173 173 self.merge_mock.assert_called_with(
174 174 self.repo_id, self.workspace_id,
175 175 pull_request.target_ref_parts,
176 176 pull_request.source_repo.scm_instance(),
177 177 pull_request.source_ref_parts, dry_run=True,
178 178 use_rebase=False, close_branch=False)
179 179
180 180 assert pull_request._last_merge_source_rev == self.source_commit
181 181 assert pull_request._last_merge_target_rev == self.target_commit
182 182 assert pull_request.last_merge_status is MergeFailureReason.NONE
183 183
184 184 self.merge_mock.reset_mock()
185 185 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
186 186 assert status is True
187 187 assert msg == 'This pull request can be automatically merged.'
188 188 assert self.merge_mock.called is False
189 189
190 190 def test_merge_status_known_failure(self, pull_request):
191 191 self.merge_mock.return_value = MergeResponse(
192 192 False, False, None, MergeFailureReason.MERGE_FAILED,
193 193 metadata={'unresolved_files': 'file1'})
194 194
195 195 assert pull_request._last_merge_source_rev is None
196 196 assert pull_request._last_merge_target_rev is None
197 197 assert pull_request.last_merge_status is None
198 198
199 199 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
200 200 assert status is False
201 201 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
202 202 self.merge_mock.assert_called_with(
203 203 self.repo_id, self.workspace_id,
204 204 pull_request.target_ref_parts,
205 205 pull_request.source_repo.scm_instance(),
206 206 pull_request.source_ref_parts, dry_run=True,
207 207 use_rebase=False, close_branch=False)
208 208
209 209 assert pull_request._last_merge_source_rev == self.source_commit
210 210 assert pull_request._last_merge_target_rev == self.target_commit
211 211 assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED
212 212
213 213 self.merge_mock.reset_mock()
214 214 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
215 215 assert status is False
216 216 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
217 217 assert self.merge_mock.called is False
218 218
219 219 def test_merge_status_unknown_failure(self, pull_request):
220 220 self.merge_mock.return_value = MergeResponse(
221 221 False, False, None, MergeFailureReason.UNKNOWN,
222 222 metadata={'exception': 'MockError'})
223 223
224 224 assert pull_request._last_merge_source_rev is None
225 225 assert pull_request._last_merge_target_rev is None
226 226 assert pull_request.last_merge_status is None
227 227
228 228 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
229 229 assert status is False
230 230 assert msg == (
231 231 'This pull request cannot be merged because of an unhandled exception. '
232 232 'MockError')
233 233 self.merge_mock.assert_called_with(
234 234 self.repo_id, self.workspace_id,
235 235 pull_request.target_ref_parts,
236 236 pull_request.source_repo.scm_instance(),
237 237 pull_request.source_ref_parts, dry_run=True,
238 238 use_rebase=False, close_branch=False)
239 239
240 240 assert pull_request._last_merge_source_rev is None
241 241 assert pull_request._last_merge_target_rev is None
242 242 assert pull_request.last_merge_status is None
243 243
244 244 self.merge_mock.reset_mock()
245 245 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
246 246 assert status is False
247 247 assert msg == (
248 248 'This pull request cannot be merged because of an unhandled exception. '
249 249 'MockError')
250 250 assert self.merge_mock.called is True
251 251
252 252 def test_merge_status_when_target_is_locked(self, pull_request):
253 253 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
254 254 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
255 255 assert status is False
256 256 assert msg == (
257 257 'This pull request cannot be merged because the target repository '
258 258 'is locked by user:1.')
259 259
260 260 def test_merge_status_requirements_check_target(self, pull_request):
261 261
262 262 def has_largefiles(self, repo):
263 263 return repo == pull_request.source_repo
264 264
265 265 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
266 266 with patcher:
267 267 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
268 268
269 269 assert status is False
270 270 assert msg == 'Target repository large files support is disabled.'
271 271
272 272 def test_merge_status_requirements_check_source(self, pull_request):
273 273
274 274 def has_largefiles(self, repo):
275 275 return repo == pull_request.target_repo
276 276
277 277 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
278 278 with patcher:
279 279 merge_response, status, msg = PullRequestModel().merge_status(pull_request)
280 280
281 281 assert status is False
282 282 assert msg == 'Source repository large files support is disabled.'
283 283
284 284 def test_merge(self, pull_request, merge_extras):
285 285 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
286 286 merge_ref = Reference(
287 287 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
288 288 self.merge_mock.return_value = MergeResponse(
289 289 True, True, merge_ref, MergeFailureReason.NONE)
290 290
291 291 merge_extras['repository'] = pull_request.target_repo.repo_name
292 292 PullRequestModel().merge_repo(
293 293 pull_request, pull_request.author, extras=merge_extras)
294 294 Session().commit()
295 295
296 296 message = (
297 297 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
298 298 u'\n\n {pr_title}'.format(
299 299 pr_id=pull_request.pull_request_id,
300 300 source_repo=safe_str(
301 301 pull_request.source_repo.scm_instance().name),
302 302 source_ref_name=pull_request.source_ref_parts.name,
303 303 pr_title=safe_str(pull_request.title)
304 304 )
305 305 )
306 306 self.merge_mock.assert_called_with(
307 307 self.repo_id, self.workspace_id,
308 308 pull_request.target_ref_parts,
309 309 pull_request.source_repo.scm_instance(),
310 310 pull_request.source_ref_parts,
311 311 user_name=user.short_contact, user_email=user.email, message=message,
312 312 use_rebase=False, close_branch=False
313 313 )
314 314 self.invalidation_mock.assert_called_once_with(
315 315 pull_request.target_repo.repo_name)
316 316
317 317 self.hook_mock.assert_called_with(
318 318 self.pull_request, self.pull_request.author, 'merge')
319 319
320 320 pull_request = PullRequest.get(pull_request.pull_request_id)
321 321 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
322 322
323 323 def test_merge_with_status_lock(self, pull_request, merge_extras):
324 324 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
325 325 merge_ref = Reference(
326 326 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
327 327 self.merge_mock.return_value = MergeResponse(
328 328 True, True, merge_ref, MergeFailureReason.NONE)
329 329
330 330 merge_extras['repository'] = pull_request.target_repo.repo_name
331 331
332 332 with pull_request.set_state(PullRequest.STATE_UPDATING):
333 333 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
334 334 PullRequestModel().merge_repo(
335 335 pull_request, pull_request.author, extras=merge_extras)
336 336 Session().commit()
337 337
338 338 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
339 339
340 340 message = (
341 341 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
342 342 u'\n\n {pr_title}'.format(
343 343 pr_id=pull_request.pull_request_id,
344 344 source_repo=safe_str(
345 345 pull_request.source_repo.scm_instance().name),
346 346 source_ref_name=pull_request.source_ref_parts.name,
347 347 pr_title=safe_str(pull_request.title)
348 348 )
349 349 )
350 350 self.merge_mock.assert_called_with(
351 351 self.repo_id, self.workspace_id,
352 352 pull_request.target_ref_parts,
353 353 pull_request.source_repo.scm_instance(),
354 354 pull_request.source_ref_parts,
355 355 user_name=user.short_contact, user_email=user.email, message=message,
356 356 use_rebase=False, close_branch=False
357 357 )
358 358 self.invalidation_mock.assert_called_once_with(
359 359 pull_request.target_repo.repo_name)
360 360
361 361 self.hook_mock.assert_called_with(
362 362 self.pull_request, self.pull_request.author, 'merge')
363 363
364 364 pull_request = PullRequest.get(pull_request.pull_request_id)
365 365 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
366 366
367 367 def test_merge_failed(self, pull_request, merge_extras):
368 368 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
369 369 merge_ref = Reference(
370 370 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
371 371 self.merge_mock.return_value = MergeResponse(
372 372 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
373 373
374 374 merge_extras['repository'] = pull_request.target_repo.repo_name
375 375 PullRequestModel().merge_repo(
376 376 pull_request, pull_request.author, extras=merge_extras)
377 377 Session().commit()
378 378
379 379 message = (
380 380 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
381 381 u'\n\n {pr_title}'.format(
382 382 pr_id=pull_request.pull_request_id,
383 383 source_repo=safe_str(
384 384 pull_request.source_repo.scm_instance().name),
385 385 source_ref_name=pull_request.source_ref_parts.name,
386 386 pr_title=safe_str(pull_request.title)
387 387 )
388 388 )
389 389 self.merge_mock.assert_called_with(
390 390 self.repo_id, self.workspace_id,
391 391 pull_request.target_ref_parts,
392 392 pull_request.source_repo.scm_instance(),
393 393 pull_request.source_ref_parts,
394 394 user_name=user.short_contact, user_email=user.email, message=message,
395 395 use_rebase=False, close_branch=False
396 396 )
397 397
398 398 pull_request = PullRequest.get(pull_request.pull_request_id)
399 399 assert self.invalidation_mock.called is False
400 400 assert pull_request.merge_rev is None
401 401
402 402 def test_get_commit_ids(self, pull_request):
403 403 # The PR has been not merged yet, so expect an exception
404 404 with pytest.raises(ValueError):
405 405 PullRequestModel()._get_commit_ids(pull_request)
406 406
407 407 # Merge revision is in the revisions list
408 408 pull_request.merge_rev = pull_request.revisions[0]
409 409 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
410 410 assert commit_ids == pull_request.revisions
411 411
412 412 # Merge revision is not in the revisions list
413 413 pull_request.merge_rev = 'f000' * 10
414 414 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
415 415 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
416 416
417 417 def test_get_diff_from_pr_version(self, pull_request):
418 418 source_repo = pull_request.source_repo
419 419 source_ref_id = pull_request.source_ref_parts.commit_id
420 420 target_ref_id = pull_request.target_ref_parts.commit_id
421 421 diff = PullRequestModel()._get_diff_from_pr_or_version(
422 422 source_repo, source_ref_id, target_ref_id,
423 423 hide_whitespace_changes=False, diff_context=6)
424 424 assert b'file_1' in diff.raw.tobytes()
425 425
426 426 def test_generate_title_returns_unicode(self):
427 427 title = PullRequestModel().generate_pullrequest_title(
428 428 source='source-dummy',
429 429 source_ref='source-ref-dummy',
430 430 target='target-dummy',
431 431 )
432 432 assert type(title) == str
433 433
434 434 @pytest.mark.parametrize('title, has_wip', [
435 435 ('hello', False),
436 436 ('hello wip', False),
437 437 ('hello wip: xxx', False),
438 438 ('[wip] hello', True),
440 440 ('wip: hello', True),
441 441 ('wip hello', True),
442 442
443 443 ])
444 444 def test_wip_title_marker(self, pull_request, title, has_wip):
445 445 pull_request.title = title
446 446 assert pull_request.work_in_progress == has_wip
447 447
448 448
449 449 @pytest.mark.usefixtures('config_stub')
450 450 class TestIntegrationMerge(object):
451 451 @pytest.mark.parametrize('extra_config', (
452 452 {'vcs.hooks.protocol.v2': 'celery', 'vcs.hooks.direct_calls': False},
453 453 ))
454 454 def test_merge_triggers_push_hooks(
455 455 self, pr_util, user_admin, capture_rcextensions, merge_extras,
456 456 extra_config):
457 457
458 458 pull_request = pr_util.create_pull_request(
459 459 approved=True, mergeable=True)
460 460 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
461 461 merge_extras['repository'] = pull_request.target_repo.repo_name
462 462 Session().commit()
463 463
464 464 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
465 465 merge_state = PullRequestModel().merge_repo(
466 466 pull_request, user_admin, extras=merge_extras)
467 467 Session().commit()
468 468
469 469 assert merge_state.executed
470 470 assert '_pre_push_hook' in capture_rcextensions
471 471 assert '_push_hook' in capture_rcextensions
472 472
473 473 def test_merge_can_be_rejected_by_pre_push_hook(
474 474 self, pr_util, user_admin, capture_rcextensions, merge_extras):
475 475 pull_request = pr_util.create_pull_request(
476 476 approved=True, mergeable=True)
477 477 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
478 478 merge_extras['repository'] = pull_request.target_repo.repo_name
479 479 Session().commit()
480 480
481 481 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
482 482 pre_pull.side_effect = RepositoryError("Disallow push!")
483 483 merge_status = PullRequestModel().merge_repo(
484 484 pull_request, user_admin, extras=merge_extras)
485 485 Session().commit()
486 486
487 487 assert not merge_status.executed
488 488 assert 'pre_push' not in capture_rcextensions
489 489 assert 'post_push' not in capture_rcextensions
490 490
491 491 def test_merge_fails_if_target_is_locked(
492 492 self, pr_util, user_regular, merge_extras):
493 493 pull_request = pr_util.create_pull_request(
494 494 approved=True, mergeable=True)
495 495 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
496 496 pull_request.target_repo.locked = locked_by
497 497 # TODO: johbo: Check if this can work based on the database, currently
498 498 # all data is pre-computed, that's why just updating the DB is not
499 499 # enough.
500 500 merge_extras['locked_by'] = locked_by
501 501 merge_extras['repository'] = pull_request.target_repo.repo_name
502 502 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
503 503 Session().commit()
504 504 merge_status = PullRequestModel().merge_repo(
505 505 pull_request, user_regular, extras=merge_extras)
506 506 Session().commit()
507 507
508 508 assert not merge_status.executed
509 509
510 510
511 511 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
512 512 (False, 1, 0),
513 513 (True, 0, 1),
514 514 ])
515 515 def test_outdated_comments(
516 516 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
517 517 pull_request = pr_util.create_pull_request()
518 518 pr_util.create_inline_comment(file_path='not_in_updated_diff')
519 519
520 520 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
521 521 pr_util.add_one_commit()
522 522 assert_inline_comments(
523 523 pull_request, visible=inlines_count, outdated=outdated_count)
524 524 outdated_comment_mock.assert_called_with(pull_request)
525 525
526 526
527 527 @pytest.mark.parametrize('mr_type, expected_msg', [
528 528 (MergeFailureReason.NONE,
529 529 'This pull request can be automatically merged.'),
530 530 (MergeFailureReason.UNKNOWN,
531 531 'This pull request cannot be merged because of an unhandled exception. CRASH'),
532 532 (MergeFailureReason.MERGE_FAILED,
533 533 'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
534 534 (MergeFailureReason.PUSH_FAILED,
535 535 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
536 536 (MergeFailureReason.TARGET_IS_NOT_HEAD,
537 537 'This pull request cannot be merged because the target `ref_name` is not a head.'),
538 538 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
539 539 'This pull request cannot be merged because the source contains more branches than the target.'),
540 540 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
541 541 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
542 542 (MergeFailureReason.TARGET_IS_LOCKED,
543 543 'This pull request cannot be merged because the target repository is locked by user:123.'),
544 544 (MergeFailureReason.MISSING_TARGET_REF,
545 545 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
546 546 (MergeFailureReason.MISSING_SOURCE_REF,
547 547 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
548 548 (MergeFailureReason.SUBREPO_MERGE_FAILED,
549 549 'This pull request cannot be merged because of conflicts related to sub repositories.'),
550 550
551 551 ])
552 552 def test_merge_response_message(mr_type, expected_msg):
553 553 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
554 554 metadata = {
555 555 'unresolved_files': 'CONFLICT_FILE',
556 556 'exception': "CRASH",
557 557 'target': 'some-repo',
558 558 'merge_commit': 'merge_commit',
559 559 'target_ref': merge_ref,
560 560 'source_ref': merge_ref,
561 561 'heads': ','.join(['a', 'b', 'c']),
562 562 'locked_by': 'user:123'
563 563 }
564 564
565 565 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
566 566 assert merge_response.merge_status_message == expected_msg
567 567
568 568
569 569 @pytest.fixture()
570 def merge_extras(user_regular):
570 def merge_extras(request, user_regular):
571 571 """
572 572 Context for the vcs operation when running a merge.
573 573 """
574
574 575 extras = {
575 576 'ip': '127.0.0.1',
576 577 'username': user_regular.username,
577 578 'user_id': user_regular.user_id,
578 579 'action': 'push',
579 580 'repository': 'fake_target_repo_name',
580 581 'scm': 'git',
581 'config': 'fake_config_ini_path',
582 'config': request.config.getini('pyramid_config'),
582 583 'repo_store': '',
583 584 'make_lock': None,
584 585 'locked_by': [None, None, None],
585 586 'server_url': 'http://test.example.com:5000',
586 587 'hooks': ['push', 'pull'],
587 588 'is_shadow_repo': False,
588 589 }
589 590 return extras
590 591
591 592
592 593 @pytest.mark.usefixtures('config_stub')
593 594 class TestUpdateCommentHandling(object):
594 595
595 596 @pytest.fixture(autouse=True, scope='class')
596 597 def enable_outdated_comments(self, request, baseapp):
597 598 config_patch = mock.patch.dict(
598 599 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
599 600 config_patch.start()
600 601
601 602 @request.addfinalizer
602 603 def cleanup():
603 604 config_patch.stop()
604 605
605 606 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
606 607 commits = [
607 608 {'message': 'a'},
608 609 {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]},
609 610 {'message': 'c', 'added': [FileNode(b'file_c', b'test_content\n')]},
610 611 ]
611 612 pull_request = pr_util.create_pull_request(
612 613 commits=commits, target_head='a', source_head='b', revisions=['b'])
613 614 pr_util.create_inline_comment(file_path='file_b')
614 615 pr_util.add_one_commit(head='c')
615 616
616 617 assert_inline_comments(pull_request, visible=1, outdated=0)
617 618
618 619 def test_comment_stays_unflagged_on_change_above(self, pr_util):
619 620 original_content = b''.join((b'line %d\n' % x for x in range(1, 11)))
620 621 updated_content = b'new_line_at_top\n' + original_content
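# inserting a line at the top shifts every later line down by one, so an
# inline comment anchored at new-line n8 should be re-anchored to n9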
621 622 commits = [
622 623 {'message': 'a'},
623 624 {'message': 'b', 'added': [FileNode(b'file_b', original_content)]},
624 625 {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]},
625 626 ]
626 627 pull_request = pr_util.create_pull_request(
627 628 commits=commits, target_head='a', source_head='b', revisions=['b'])
628 629
629 630 with outdated_comments_patcher():
630 631 comment = pr_util.create_inline_comment(
631 632 line_no=u'n8', file_path='file_b')
632 633 pr_util.add_one_commit(head='c')
633 634
634 635 assert_inline_comments(pull_request, visible=1, outdated=0)
635 636 assert comment.line_no == u'n9'
636 637
637 638 def test_comment_stays_unflagged_on_change_below(self, pr_util):
638 639 original_content = b''.join([b'line %d\n' % x for x in range(10)])
639 640 updated_content = original_content + b'new_line_at_end\n'
640 641 commits = [
641 642 {'message': 'a'},
642 643 {'message': 'b', 'added': [FileNode(b'file_b', original_content)]},
643 644 {'message': 'c', 'changed': [FileNode(b'file_b', updated_content)]},
644 645 ]
645 646 pull_request = pr_util.create_pull_request(
646 647 commits=commits, target_head='a', source_head='b', revisions=['b'])
647 648 pr_util.create_inline_comment(file_path='file_b')
648 649 pr_util.add_one_commit(head='c')
649 650
650 651 assert_inline_comments(pull_request, visible=1, outdated=0)
651 652
652 653 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
653 654 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
654 655 base_lines = [b'line %d\n' % x for x in range(1, 13)]
655 656 change_lines = list(base_lines)
656 657 change_lines.insert(6, b'line 6a added\n')
657 658
658 659 # Changes on the last line of sight
659 660 update_lines = list(change_lines)
660 661 update_lines[0] = b'line 1 changed\n'
661 662 update_lines[-1] = b'line 12 changed\n'
662 663
663 664 def file_b(lines):
664 665 return FileNode(b'file_b', b''.join(lines))
665 666
666 667 commits = [
667 668 {'message': 'a', 'added': [file_b(base_lines)]},
668 669 {'message': 'b', 'changed': [file_b(change_lines)]},
669 670 {'message': 'c', 'changed': [file_b(update_lines)]},
670 671 ]
671 672
672 673 pull_request = pr_util.create_pull_request(
673 674 commits=commits, target_head='a', source_head='b', revisions=['b'])
674 675 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
675 676
676 677 with outdated_comments_patcher():
677 678 pr_util.add_one_commit(head='c')
678 679 assert_inline_comments(pull_request, visible=0, outdated=1)
679 680
680 681 @pytest.mark.parametrize("change, content", [
681 682 ('changed', b'changed\n'),
682 683 ('removed', b''),
683 684 ], ids=['changed', 'removed'])
684 685 def test_comment_flagged_on_change(self, pr_util, change, content):
685 686 commits = [
686 687 {'message': 'a'},
687 688 {'message': 'b', 'added': [FileNode(b'file_b', b'test_content\n')]},
688 689 {'message': 'c', change: [FileNode(b'file_b', content)]},
689 690 ]
690 691 pull_request = pr_util.create_pull_request(
691 692 commits=commits, target_head='a', source_head='b', revisions=['b'])
692 693 pr_util.create_inline_comment(file_path='file_b')
693 694
694 695 with outdated_comments_patcher():
695 696 pr_util.add_one_commit(head='c')
696 697 assert_inline_comments(pull_request, visible=0, outdated=1)
697 698
698 699
699 700 @pytest.mark.usefixtures('config_stub')
700 701 class TestUpdateChangedFiles(object):

    def test_no_changes_on_unchanged_diff(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode(b'file_b', b'test_content b\n')]},
            {'message': 'c',
             'added': [FileNode(b'file_c', b'test_content c\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # update the PR, adding new file file_c
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=['file_c'],
            modified=[],
            removed=[])

    def test_modify_and_undo_modification_diff(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode(b'file_b', b'test_content b\n')]},
            {'message': 'c',
             'changed': [FileNode(b'file_b', b'test_content b modified\n')]},
            {'message': 'd',
             'changed': [FileNode(b'file_b', b'test_content b\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # update the PR, modifying file_b
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_b'],
            removed=[])

        # move the head again to d, which rolls back the modification,
        # so no changes should be reported
        pr_util.add_one_commit(head='d')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=[])

    def test_updated_all_files_in_pr(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode(b'file_a', b'test_content a\n'),
                FileNode(b'file_b', b'test_content b\n'),
                FileNode(b'file_c', b'test_content c\n')]},
            {'message': 'c', 'changed': [
                FileNode(b'file_a', b'test_content a changed\n'),
                FileNode(b'file_b', b'test_content b changed\n'),
                FileNode(b'file_c', b'test_content c changed\n')]},
        ]
        # open a PR from a to b, adding 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # update the PR to c, which changes all 3 files
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_a', 'file_b', 'file_c'],
            removed=[])

    def test_updated_and_removed_all_files_in_pr(self, pr_util):
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode(b'file_a', b'test_content a\n'),
                FileNode(b'file_b', b'test_content b\n'),
                FileNode(b'file_c', b'test_content c\n')]},
            {'message': 'c', 'removed': [
                FileNode(b'file_a', b'test_content a changed\n'),
                FileNode(b'file_b', b'test_content b changed\n'),
                FileNode(b'file_c', b'test_content c changed\n')]},
        ]
        # open a PR from a to b, adding 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # update the PR to c, which removes all 3 files
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=['file_a', 'file_b', 'file_c'])


def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()

    model.update_commits(pull_request, pull_request.author)

    # Expect that it has a version entry now
    assert len(model.get_versions(pull_request)) == 1


def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    pull_request = pr_util.create_pull_request()
    model = PullRequestModel()
    model.update_commits(pull_request, pull_request.author)

    # Expect that it still has no versions
    assert len(model.get_versions(pull_request)) == 0


def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    comment = pr_util.create_comment()
    pr_util.update_source_repository()

    model.update_commits(pull_request, pull_request.author)

    # Expect that the comment is linked to the pr version now
    assert comment.pull_request_version == model.get_versions(pull_request)[0]


def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_util.update_source_repository()

    update_response = model.update_commits(pull_request, pull_request.author)

    commit_id = update_response.common_ancestor_id
    # Expect to find a new comment about the change
    expected_message = textwrap.dedent(
        """\
        Pull request updated. Auto status change to |under_review|

        .. role:: added
        .. role:: removed
        .. parsed-literal::

          Changed commits:
            * :added:`1 added`
            * :removed:`0 removed`

          Changed files:
            * `A file_2 <#a_c-{}-92ed3b5f07b4>`_

        .. |under_review| replace:: *"Under Review"*"""
    ).format(commit_id[:12])
    pull_request_comments = sorted(
        pull_request.comments, key=lambda c: c.modified_at)
    update_comment = pull_request_comments[-1]
    assert update_comment.text == expected_message


def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    pull_request = pr_util.create_pull_request()

    # Avoid the default values
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request._last_merge_source_rev = "0" * 40
    pull_request._last_merge_target_rev = "1" * 40
    pull_request.last_merge_status = 1
    pull_request.merge_rev = "2" * 40

    # Remember the automatic values
    created_on = pull_request.created_on
    updated_on = pull_request.updated_on

    # Create a new version of the pull request
    version = PullRequestModel()._create_version_from_snapshot(pull_request)

    # Check attributes
    assert version.title == pr_util.create_parameters['title']
    assert version.description == pr_util.create_parameters['description']
    assert version.status == PullRequest.STATUS_CLOSED

    # versions get an updated created_on timestamp
    assert version.created_on != created_on

    assert version.updated_on == updated_on
    assert version.user_id == pull_request.user_id
    assert version.revisions == pr_util.create_parameters['revisions']
    assert version.source_repo == pr_util.source_repository
    assert version.source_ref == pr_util.create_parameters['source_ref']
    assert version.target_repo == pr_util.target_repository
    assert version.target_ref == pr_util.create_parameters['target_ref']
    assert version._last_merge_source_rev == pull_request._last_merge_source_rev
    assert version._last_merge_target_rev == pull_request._last_merge_target_rev
    assert version.last_merge_status == pull_request.last_merge_status
    assert version.merge_rev == pull_request.merge_rev
    assert version.pull_request == pull_request


def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    version1 = pr_util.create_version_of_pull_request()
    comment_linked = pr_util.create_comment(linked_to=version1)
    comment_unlinked = pr_util.create_comment()
    version2 = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(version2)
    Session().commit()

    # Expect that only the new comment is linked to version2
    assert (
        comment_unlinked.pull_request_version_id ==
        version2.pull_request_version_id)
    assert (
        comment_linked.pull_request_version_id ==
        version1.pull_request_version_id)
    assert (
        comment_unlinked.pull_request_version_id !=
        comment_linked.pull_request_version_id)


def test_calculate_commits():
    old_ids = [1, 2, 3]
    new_ids = [1, 3, 4, 5]
    change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
    assert change.added == [4, 5]
    assert change.common == [1, 3]
    assert change.removed == [2]
    assert change.total == [1, 3, 4, 5]

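# For reference, a minimal sketch of the order-preserving set arithmetic the
# test above pins down. The shipped logic lives in
# PullRequestModel._calculate_commit_id_changes; the version below is
# illustrative only (the function name and tuple shape are assumptions, kept
# as a comment so the module's behaviour is unchanged):
#
#     def _calculate_commit_id_changes_sketch(old_ids, new_ids):
#         added = [c for c in new_ids if c not in old_ids]
#         common = [c for c in old_ids if c in new_ids]
#         removed = [c for c in old_ids if c not in new_ids]
#         total = new_ids  # the updated revision list, wholesale
#         return ChangeTuple(added, common, removed, total)
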

def assert_inline_comments(pull_request, visible=None, outdated=None):
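    """
    Assert the counts of visible and/or outdated inline comments on the
    given pull request; passing None for a count skips that check.
    """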
    if visible is not None:
        inline_comments = CommentsModel().get_inline_comments(
            pull_request.target_repo.repo_id, pull_request=pull_request)
        inline_cnt = len(CommentsModel().get_inline_comments_as_list(
            inline_comments))
        assert inline_cnt == visible
    if outdated is not None:
        outdated_comments = CommentsModel().get_outdated_comments(
            pull_request.target_repo.repo_id, pull_request)
        assert len(outdated_comments) == outdated


def assert_pr_file_changes(
        pull_request, added=None, modified=None, removed=None):
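    """
    Diff the first (original) version of the pull request against its
    current state and assert the expected added/modified/removed files.
    """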
    pr_versions = PullRequestModel().get_versions(pull_request)
    # always use the first version, i.e. the original PR, to calculate changes
    pull_request_version = pr_versions[0]
    old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
        pull_request, pull_request_version)
    file_changes = PullRequestModel()._calculate_file_changes(
        old_diff_data, new_diff_data)

    assert added == file_changes.added, \
        f'expected added:{added} vs value:{file_changes.added}'
    assert modified == file_changes.modified, \
        f'expected modified:{modified} vs value:{file_changes.modified}'
    assert removed == file_changes.removed, \
        f'expected removed:{removed} vs value:{file_changes.removed}'


def outdated_comments_patcher(use_outdated=True):
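    """
    Patch CommentsModel.use_outdated_comments so a test can force the
    outdated-comments behaviour on (or off) regardless of configuration.
    """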
    return mock.patch.object(
        CommentsModel, 'use_outdated_comments',
        return_value=use_outdated)
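

# Typical usage, as in the tests above:
#
#   with outdated_comments_patcher():
#       pr_util.add_one_commit(head='c')
#   assert_inline_comments(pull_request, visible=0, outdated=1)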