##// END OF EJS Templates
tests: fixed author for commit messages to be in a proper format.
marcink -
r3840:eb39c224 default
parent child Browse files
Show More
@@ -1,1218 +1,1221 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.tests import (
34 34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 35
36 36
def route_path(name, params=None, **kwargs):
    """Resolve a named test route to its URL path.

    :param name: key into the static route table below.
    :param params: optional mapping appended as a query string.
    :param kwargs: substituted into the route pattern placeholders.
    :return: the formatted URL, with ``?key=value`` suffix when *params* given.
    """
    import urllib

    url_map = {
        'repo_changelog': '/{repo_name}/changelog',
        'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
        'repo_commits': '/{repo_name}/commits',
        'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
        'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
        'pullrequest_show_all': '/{repo_name}/pull-request',
        'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
        'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
        'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
        'pullrequest_new': '/{repo_name}/pull-request/new',
        'pullrequest_create': '/{repo_name}/pull-request/create',
        'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
        'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
        'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
        'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
        'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
    }

    url = url_map[name].format(**kwargs)
    if params:
        url = '{}?{}'.format(url, urllib.urlencode(params))
    return url
62 62
63 63
64 64 @pytest.mark.usefixtures('app', 'autologin_user')
65 65 @pytest.mark.backends("git", "hg")
66 66 class TestPullrequestsView(object):
67 67
68 68 def test_index(self, backend):
69 69 self.app.get(route_path(
70 70 'pullrequest_new',
71 71 repo_name=backend.repo_name))
72 72
73 73 def test_option_menu_create_pull_request_exists(self, backend):
74 74 repo_name = backend.repo_name
75 75 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
76 76
77 77 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
78 78 'pullrequest_new', repo_name=repo_name)
79 79 response.mustcontain(create_pr_link)
80 80
81 81 def test_create_pr_form_with_raw_commit_id(self, backend):
82 82 repo = backend.repo
83 83
84 84 self.app.get(
85 85 route_path('pullrequest_new', repo_name=repo.repo_name,
86 86 commit=repo.get_commit().raw_id),
87 87 status=200)
88 88
89 89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 90 @pytest.mark.parametrize('range_diff', ["0", "1"])
91 91 def test_show(self, pr_util, pr_merge_enabled, range_diff):
92 92 pull_request = pr_util.create_pull_request(
93 93 mergeable=pr_merge_enabled, enable_notifications=False)
94 94
95 95 response = self.app.get(route_path(
96 96 'pullrequest_show',
97 97 repo_name=pull_request.target_repo.scm_instance().name,
98 98 pull_request_id=pull_request.pull_request_id,
99 99 params={'range-diff': range_diff}))
100 100
101 101 for commit_id in pull_request.revisions:
102 102 response.mustcontain(commit_id)
103 103
104 104 assert pull_request.target_ref_parts.type in response
105 105 assert pull_request.target_ref_parts.name in response
106 106 target_clone_url = pull_request.target_repo.clone_url()
107 107 assert target_clone_url in response
108 108
109 109 assert 'class="pull-request-merge"' in response
110 110 if pr_merge_enabled:
111 111 response.mustcontain('Pull request reviewer approval is pending')
112 112 else:
113 113 response.mustcontain('Server-side pull request merging is disabled.')
114 114
115 115 if range_diff == "1":
116 116 response.mustcontain('Turn off: Show the diff as commit range')
117 117
118 118 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
119 119 # Logout
120 120 response = self.app.post(
121 121 h.route_path('logout'),
122 122 params={'csrf_token': csrf_token})
123 123 # Login as regular user
124 124 response = self.app.post(h.route_path('login'),
125 125 {'username': TEST_USER_REGULAR_LOGIN,
126 126 'password': 'test12'})
127 127
128 128 pull_request = pr_util.create_pull_request(
129 129 author=TEST_USER_REGULAR_LOGIN)
130 130
131 131 response = self.app.get(route_path(
132 132 'pullrequest_show',
133 133 repo_name=pull_request.target_repo.scm_instance().name,
134 134 pull_request_id=pull_request.pull_request_id))
135 135
136 136 response.mustcontain('Server-side pull request merging is disabled.')
137 137
138 138 assert_response = response.assert_response()
139 139 # for regular user without a merge permissions, we don't see it
140 140 assert_response.no_element_exists('#close-pull-request-action')
141 141
142 142 user_util.grant_user_permission_to_repo(
143 143 pull_request.target_repo,
144 144 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
145 145 'repository.write')
146 146 response = self.app.get(route_path(
147 147 'pullrequest_show',
148 148 repo_name=pull_request.target_repo.scm_instance().name,
149 149 pull_request_id=pull_request.pull_request_id))
150 150
151 151 response.mustcontain('Server-side pull request merging is disabled.')
152 152
153 153 assert_response = response.assert_response()
154 154 # now regular user has a merge permissions, we have CLOSE button
155 155 assert_response.one_element_exists('#close-pull-request-action')
156 156
157 157 def test_show_invalid_commit_id(self, pr_util):
158 158 # Simulating invalid revisions which will cause a lookup error
159 159 pull_request = pr_util.create_pull_request()
160 160 pull_request.revisions = ['invalid']
161 161 Session().add(pull_request)
162 162 Session().commit()
163 163
164 164 response = self.app.get(route_path(
165 165 'pullrequest_show',
166 166 repo_name=pull_request.target_repo.scm_instance().name,
167 167 pull_request_id=pull_request.pull_request_id))
168 168
169 169 for commit_id in pull_request.revisions:
170 170 response.mustcontain(commit_id)
171 171
172 172 def test_show_invalid_source_reference(self, pr_util):
173 173 pull_request = pr_util.create_pull_request()
174 174 pull_request.source_ref = 'branch:b:invalid'
175 175 Session().add(pull_request)
176 176 Session().commit()
177 177
178 178 self.app.get(route_path(
179 179 'pullrequest_show',
180 180 repo_name=pull_request.target_repo.scm_instance().name,
181 181 pull_request_id=pull_request.pull_request_id))
182 182
183 183 def test_edit_title_description(self, pr_util, csrf_token):
184 184 pull_request = pr_util.create_pull_request()
185 185 pull_request_id = pull_request.pull_request_id
186 186
187 187 response = self.app.post(
188 188 route_path('pullrequest_update',
189 189 repo_name=pull_request.target_repo.repo_name,
190 190 pull_request_id=pull_request_id),
191 191 params={
192 192 'edit_pull_request': 'true',
193 193 'title': 'New title',
194 194 'description': 'New description',
195 195 'csrf_token': csrf_token})
196 196
197 197 assert_session_flash(
198 198 response, u'Pull request title & description updated.',
199 199 category='success')
200 200
201 201 pull_request = PullRequest.get(pull_request_id)
202 202 assert pull_request.title == 'New title'
203 203 assert pull_request.description == 'New description'
204 204
205 205 def test_edit_title_description_closed(self, pr_util, csrf_token):
206 206 pull_request = pr_util.create_pull_request()
207 207 pull_request_id = pull_request.pull_request_id
208 208 repo_name = pull_request.target_repo.repo_name
209 209 pr_util.close()
210 210
211 211 response = self.app.post(
212 212 route_path('pullrequest_update',
213 213 repo_name=repo_name, pull_request_id=pull_request_id),
214 214 params={
215 215 'edit_pull_request': 'true',
216 216 'title': 'New title',
217 217 'description': 'New description',
218 218 'csrf_token': csrf_token}, status=200)
219 219 assert_session_flash(
220 220 response, u'Cannot update closed pull requests.',
221 221 category='error')
222 222
223 223 def test_update_invalid_source_reference(self, pr_util, csrf_token):
224 224 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
225 225
226 226 pull_request = pr_util.create_pull_request()
227 227 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
228 228 Session().add(pull_request)
229 229 Session().commit()
230 230
231 231 pull_request_id = pull_request.pull_request_id
232 232
233 233 response = self.app.post(
234 234 route_path('pullrequest_update',
235 235 repo_name=pull_request.target_repo.repo_name,
236 236 pull_request_id=pull_request_id),
237 237 params={'update_commits': 'true', 'csrf_token': csrf_token})
238 238
239 239 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
240 240 UpdateFailureReason.MISSING_SOURCE_REF])
241 241 assert_session_flash(response, expected_msg, category='error')
242 242
243 243 def test_missing_target_reference(self, pr_util, csrf_token):
244 244 from rhodecode.lib.vcs.backends.base import MergeFailureReason
245 245 pull_request = pr_util.create_pull_request(
246 246 approved=True, mergeable=True)
247 247 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
248 248 pull_request.target_ref = unicode_reference
249 249 Session().add(pull_request)
250 250 Session().commit()
251 251
252 252 pull_request_id = pull_request.pull_request_id
253 253 pull_request_url = route_path(
254 254 'pullrequest_show',
255 255 repo_name=pull_request.target_repo.repo_name,
256 256 pull_request_id=pull_request_id)
257 257
258 258 response = self.app.get(pull_request_url)
259 259 target_ref_id = 'invalid-branch'
260 260 merge_resp = MergeResponse(
261 261 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
262 262 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
263 263 response.assert_response().element_contains(
264 264 'span[data-role="merge-message"]', merge_resp.merge_status_message)
265 265
266 266 def test_comment_and_close_pull_request_custom_message_approved(
267 267 self, pr_util, csrf_token, xhr_header):
268 268
269 269 pull_request = pr_util.create_pull_request(approved=True)
270 270 pull_request_id = pull_request.pull_request_id
271 271 author = pull_request.user_id
272 272 repo = pull_request.target_repo.repo_id
273 273
274 274 self.app.post(
275 275 route_path('pullrequest_comment_create',
276 276 repo_name=pull_request.target_repo.scm_instance().name,
277 277 pull_request_id=pull_request_id),
278 278 params={
279 279 'close_pull_request': '1',
280 280 'text': 'Closing a PR',
281 281 'csrf_token': csrf_token},
282 282 extra_environ=xhr_header,)
283 283
284 284 journal = UserLog.query()\
285 285 .filter(UserLog.user_id == author)\
286 286 .filter(UserLog.repository_id == repo) \
287 287 .order_by('user_log_id') \
288 288 .all()
289 289 assert journal[-1].action == 'repo.pull_request.close'
290 290
291 291 pull_request = PullRequest.get(pull_request_id)
292 292 assert pull_request.is_closed()
293 293
294 294 status = ChangesetStatusModel().get_status(
295 295 pull_request.source_repo, pull_request=pull_request)
296 296 assert status == ChangesetStatus.STATUS_APPROVED
297 297 comments = ChangesetComment().query() \
298 298 .filter(ChangesetComment.pull_request == pull_request) \
299 299 .order_by(ChangesetComment.comment_id.asc())\
300 300 .all()
301 301 assert comments[-1].text == 'Closing a PR'
302 302
303 303 def test_comment_force_close_pull_request_rejected(
304 304 self, pr_util, csrf_token, xhr_header):
305 305 pull_request = pr_util.create_pull_request()
306 306 pull_request_id = pull_request.pull_request_id
307 307 PullRequestModel().update_reviewers(
308 308 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
309 309 pull_request.author)
310 310 author = pull_request.user_id
311 311 repo = pull_request.target_repo.repo_id
312 312
313 313 self.app.post(
314 314 route_path('pullrequest_comment_create',
315 315 repo_name=pull_request.target_repo.scm_instance().name,
316 316 pull_request_id=pull_request_id),
317 317 params={
318 318 'close_pull_request': '1',
319 319 'csrf_token': csrf_token},
320 320 extra_environ=xhr_header)
321 321
322 322 pull_request = PullRequest.get(pull_request_id)
323 323
324 324 journal = UserLog.query()\
325 325 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
326 326 .order_by('user_log_id') \
327 327 .all()
328 328 assert journal[-1].action == 'repo.pull_request.close'
329 329
330 330 # check only the latest status, not the review status
331 331 status = ChangesetStatusModel().get_status(
332 332 pull_request.source_repo, pull_request=pull_request)
333 333 assert status == ChangesetStatus.STATUS_REJECTED
334 334
335 335 def test_comment_and_close_pull_request(
336 336 self, pr_util, csrf_token, xhr_header):
337 337 pull_request = pr_util.create_pull_request()
338 338 pull_request_id = pull_request.pull_request_id
339 339
340 340 response = self.app.post(
341 341 route_path('pullrequest_comment_create',
342 342 repo_name=pull_request.target_repo.scm_instance().name,
343 343 pull_request_id=pull_request.pull_request_id),
344 344 params={
345 345 'close_pull_request': 'true',
346 346 'csrf_token': csrf_token},
347 347 extra_environ=xhr_header)
348 348
349 349 assert response.json
350 350
351 351 pull_request = PullRequest.get(pull_request_id)
352 352 assert pull_request.is_closed()
353 353
354 354 # check only the latest status, not the review status
355 355 status = ChangesetStatusModel().get_status(
356 356 pull_request.source_repo, pull_request=pull_request)
357 357 assert status == ChangesetStatus.STATUS_REJECTED
358 358
359 359 def test_create_pull_request(self, backend, csrf_token):
360 360 commits = [
361 361 {'message': 'ancestor'},
362 362 {'message': 'change'},
363 363 {'message': 'change2'},
364 364 ]
365 365 commit_ids = backend.create_master_repo(commits)
366 366 target = backend.create_repo(heads=['ancestor'])
367 367 source = backend.create_repo(heads=['change2'])
368 368
369 369 response = self.app.post(
370 370 route_path('pullrequest_create', repo_name=source.repo_name),
371 371 [
372 372 ('source_repo', source.repo_name),
373 373 ('source_ref', 'branch:default:' + commit_ids['change2']),
374 374 ('target_repo', target.repo_name),
375 375 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
376 376 ('common_ancestor', commit_ids['ancestor']),
377 377 ('pullrequest_title', 'Title'),
378 378 ('pullrequest_desc', 'Description'),
379 379 ('description_renderer', 'markdown'),
380 380 ('__start__', 'review_members:sequence'),
381 381 ('__start__', 'reviewer:mapping'),
382 382 ('user_id', '1'),
383 383 ('__start__', 'reasons:sequence'),
384 384 ('reason', 'Some reason'),
385 385 ('__end__', 'reasons:sequence'),
386 386 ('__start__', 'rules:sequence'),
387 387 ('__end__', 'rules:sequence'),
388 388 ('mandatory', 'False'),
389 389 ('__end__', 'reviewer:mapping'),
390 390 ('__end__', 'review_members:sequence'),
391 391 ('__start__', 'revisions:sequence'),
392 392 ('revisions', commit_ids['change']),
393 393 ('revisions', commit_ids['change2']),
394 394 ('__end__', 'revisions:sequence'),
395 395 ('user', ''),
396 396 ('csrf_token', csrf_token),
397 397 ],
398 398 status=302)
399 399
400 400 location = response.headers['Location']
401 401 pull_request_id = location.rsplit('/', 1)[1]
402 402 assert pull_request_id != 'new'
403 403 pull_request = PullRequest.get(int(pull_request_id))
404 404
405 405 # check that we have now both revisions
406 406 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
407 407 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
408 408 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
409 409 assert pull_request.target_ref == expected_target_ref
410 410
411 411 def test_reviewer_notifications(self, backend, csrf_token):
412 412 # We have to use the app.post for this test so it will create the
413 413 # notifications properly with the new PR
414 414 commits = [
415 415 {'message': 'ancestor',
416 416 'added': [FileNode('file_A', content='content_of_ancestor')]},
417 417 {'message': 'change',
418 418 'added': [FileNode('file_a', content='content_of_change')]},
419 419 {'message': 'change-child'},
420 420 {'message': 'ancestor-child', 'parents': ['ancestor'],
421 421 'added': [
422 422 FileNode('file_B', content='content_of_ancestor_child')]},
423 423 {'message': 'ancestor-child-2'},
424 424 ]
425 425 commit_ids = backend.create_master_repo(commits)
426 426 target = backend.create_repo(heads=['ancestor-child'])
427 427 source = backend.create_repo(heads=['change'])
428 428
429 429 response = self.app.post(
430 430 route_path('pullrequest_create', repo_name=source.repo_name),
431 431 [
432 432 ('source_repo', source.repo_name),
433 433 ('source_ref', 'branch:default:' + commit_ids['change']),
434 434 ('target_repo', target.repo_name),
435 435 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
436 436 ('common_ancestor', commit_ids['ancestor']),
437 437 ('pullrequest_title', 'Title'),
438 438 ('pullrequest_desc', 'Description'),
439 439 ('description_renderer', 'markdown'),
440 440 ('__start__', 'review_members:sequence'),
441 441 ('__start__', 'reviewer:mapping'),
442 442 ('user_id', '2'),
443 443 ('__start__', 'reasons:sequence'),
444 444 ('reason', 'Some reason'),
445 445 ('__end__', 'reasons:sequence'),
446 446 ('__start__', 'rules:sequence'),
447 447 ('__end__', 'rules:sequence'),
448 448 ('mandatory', 'False'),
449 449 ('__end__', 'reviewer:mapping'),
450 450 ('__end__', 'review_members:sequence'),
451 451 ('__start__', 'revisions:sequence'),
452 452 ('revisions', commit_ids['change']),
453 453 ('__end__', 'revisions:sequence'),
454 454 ('user', ''),
455 455 ('csrf_token', csrf_token),
456 456 ],
457 457 status=302)
458 458
459 459 location = response.headers['Location']
460 460
461 461 pull_request_id = location.rsplit('/', 1)[1]
462 462 assert pull_request_id != 'new'
463 463 pull_request = PullRequest.get(int(pull_request_id))
464 464
465 465 # Check that a notification was made
466 466 notifications = Notification.query()\
467 467 .filter(Notification.created_by == pull_request.author.user_id,
468 468 Notification.type_ == Notification.TYPE_PULL_REQUEST,
469 469 Notification.subject.contains(
470 470 "wants you to review pull request #%s" % pull_request_id))
471 471 assert len(notifications.all()) == 1
472 472
473 473 # Change reviewers and check that a notification was made
474 474 PullRequestModel().update_reviewers(
475 475 pull_request.pull_request_id, [(1, [], False, [])],
476 476 pull_request.author)
477 477 assert len(notifications.all()) == 2
478 478
479 479 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
480 480 csrf_token):
481 481 commits = [
482 482 {'message': 'ancestor',
483 483 'added': [FileNode('file_A', content='content_of_ancestor')]},
484 484 {'message': 'change',
485 485 'added': [FileNode('file_a', content='content_of_change')]},
486 486 {'message': 'change-child'},
487 487 {'message': 'ancestor-child', 'parents': ['ancestor'],
488 488 'added': [
489 489 FileNode('file_B', content='content_of_ancestor_child')]},
490 490 {'message': 'ancestor-child-2'},
491 491 ]
492 492 commit_ids = backend.create_master_repo(commits)
493 493 target = backend.create_repo(heads=['ancestor-child'])
494 494 source = backend.create_repo(heads=['change'])
495 495
496 496 response = self.app.post(
497 497 route_path('pullrequest_create', repo_name=source.repo_name),
498 498 [
499 499 ('source_repo', source.repo_name),
500 500 ('source_ref', 'branch:default:' + commit_ids['change']),
501 501 ('target_repo', target.repo_name),
502 502 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
503 503 ('common_ancestor', commit_ids['ancestor']),
504 504 ('pullrequest_title', 'Title'),
505 505 ('pullrequest_desc', 'Description'),
506 506 ('description_renderer', 'markdown'),
507 507 ('__start__', 'review_members:sequence'),
508 508 ('__start__', 'reviewer:mapping'),
509 509 ('user_id', '1'),
510 510 ('__start__', 'reasons:sequence'),
511 511 ('reason', 'Some reason'),
512 512 ('__end__', 'reasons:sequence'),
513 513 ('__start__', 'rules:sequence'),
514 514 ('__end__', 'rules:sequence'),
515 515 ('mandatory', 'False'),
516 516 ('__end__', 'reviewer:mapping'),
517 517 ('__end__', 'review_members:sequence'),
518 518 ('__start__', 'revisions:sequence'),
519 519 ('revisions', commit_ids['change']),
520 520 ('__end__', 'revisions:sequence'),
521 521 ('user', ''),
522 522 ('csrf_token', csrf_token),
523 523 ],
524 524 status=302)
525 525
526 526 location = response.headers['Location']
527 527
528 528 pull_request_id = location.rsplit('/', 1)[1]
529 529 assert pull_request_id != 'new'
530 530 pull_request = PullRequest.get(int(pull_request_id))
531 531
532 532 # target_ref has to point to the ancestor's commit_id in order to
533 533 # show the correct diff
534 534 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
535 535 assert pull_request.target_ref == expected_target_ref
536 536
537 537 # Check generated diff contents
538 538 response = response.follow()
539 539 assert 'content_of_ancestor' not in response.body
540 540 assert 'content_of_ancestor-child' not in response.body
541 541 assert 'content_of_change' in response.body
542 542
543 543 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
544 544 # Clear any previous calls to rcextensions
545 545 rhodecode.EXTENSIONS.calls.clear()
546 546
547 547 pull_request = pr_util.create_pull_request(
548 548 approved=True, mergeable=True)
549 549 pull_request_id = pull_request.pull_request_id
550 550 repo_name = pull_request.target_repo.scm_instance().name,
551 551
552 552 response = self.app.post(
553 553 route_path('pullrequest_merge',
554 554 repo_name=str(repo_name[0]),
555 555 pull_request_id=pull_request_id),
556 556 params={'csrf_token': csrf_token}).follow()
557 557
558 558 pull_request = PullRequest.get(pull_request_id)
559 559
560 560 assert response.status_int == 200
561 561 assert pull_request.is_closed()
562 562 assert_pull_request_status(
563 563 pull_request, ChangesetStatus.STATUS_APPROVED)
564 564
565 565 # Check the relevant log entries were added
566 566 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
567 567 actions = [log.action for log in user_logs]
568 568 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
569 569 expected_actions = [
570 570 u'repo.pull_request.close',
571 571 u'repo.pull_request.merge',
572 572 u'repo.pull_request.comment.create'
573 573 ]
574 574 assert actions == expected_actions
575 575
576 576 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
577 577 actions = [log for log in user_logs]
578 578 assert actions[-1].action == 'user.push'
579 579 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
580 580
581 581 # Check post_push rcextension was really executed
582 582 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
583 583 assert len(push_calls) == 1
584 584 unused_last_call_args, last_call_kwargs = push_calls[0]
585 585 assert last_call_kwargs['action'] == 'push'
586 586 assert last_call_kwargs['commit_ids'] == pr_commit_ids
587 587
588 588 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
589 589 pull_request = pr_util.create_pull_request(mergeable=False)
590 590 pull_request_id = pull_request.pull_request_id
591 591 pull_request = PullRequest.get(pull_request_id)
592 592
593 593 response = self.app.post(
594 594 route_path('pullrequest_merge',
595 595 repo_name=pull_request.target_repo.scm_instance().name,
596 596 pull_request_id=pull_request.pull_request_id),
597 597 params={'csrf_token': csrf_token}).follow()
598 598
599 599 assert response.status_int == 200
600 600 response.mustcontain(
601 601 'Merge is not currently possible because of below failed checks.')
602 602 response.mustcontain('Server-side pull request merging is disabled.')
603 603
604 604 @pytest.mark.skip_backends('svn')
605 605 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
606 606 pull_request = pr_util.create_pull_request(mergeable=True)
607 607 pull_request_id = pull_request.pull_request_id
608 608 repo_name = pull_request.target_repo.scm_instance().name
609 609
610 610 response = self.app.post(
611 611 route_path('pullrequest_merge',
612 612 repo_name=repo_name, pull_request_id=pull_request_id),
613 613 params={'csrf_token': csrf_token}).follow()
614 614
615 615 assert response.status_int == 200
616 616
617 617 response.mustcontain(
618 618 'Merge is not currently possible because of below failed checks.')
619 619 response.mustcontain('Pull request reviewer approval is pending.')
620 620
621 621 def test_merge_pull_request_renders_failure_reason(
622 622 self, user_regular, csrf_token, pr_util):
623 623 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
624 624 pull_request_id = pull_request.pull_request_id
625 625 repo_name = pull_request.target_repo.scm_instance().name
626 626
627 627 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
628 628 MergeFailureReason.PUSH_FAILED,
629 629 metadata={'target': 'shadow repo',
630 630 'merge_commit': 'xxx'})
631 631 model_patcher = mock.patch.multiple(
632 632 PullRequestModel,
633 633 merge_repo=mock.Mock(return_value=merge_resp),
634 634 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
635 635
636 636 with model_patcher:
637 637 response = self.app.post(
638 638 route_path('pullrequest_merge',
639 639 repo_name=repo_name,
640 640 pull_request_id=pull_request_id),
641 641 params={'csrf_token': csrf_token}, status=302)
642 642
643 643 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
644 644 metadata={'target': 'shadow repo',
645 645 'merge_commit': 'xxx'})
646 646 assert_session_flash(response, merge_resp.merge_status_message)
647 647
648 648 def test_update_source_revision(self, backend, csrf_token):
649 649 commits = [
650 650 {'message': 'ancestor'},
651 651 {'message': 'change'},
652 652 {'message': 'change-2'},
653 653 ]
654 654 commit_ids = backend.create_master_repo(commits)
655 655 target = backend.create_repo(heads=['ancestor'])
656 656 source = backend.create_repo(heads=['change'])
657 657
658 658 # create pr from a in source to A in target
659 659 pull_request = PullRequest()
660 660
661 661 pull_request.source_repo = source
662 662 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
663 663 branch=backend.default_branch_name, commit_id=commit_ids['change'])
664 664
665 665 pull_request.target_repo = target
666 666 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
667 667 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
668 668
669 669 pull_request.revisions = [commit_ids['change']]
670 670 pull_request.title = u"Test"
671 671 pull_request.description = u"Description"
672 672 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
673 673 pull_request.pull_request_state = PullRequest.STATE_CREATED
674 674 Session().add(pull_request)
675 675 Session().commit()
676 676 pull_request_id = pull_request.pull_request_id
677 677
678 678 # source has ancestor - change - change-2
679 679 backend.pull_heads(source, heads=['change-2'])
680 680
681 681 # update PR
682 682 self.app.post(
683 683 route_path('pullrequest_update',
684 684 repo_name=target.repo_name, pull_request_id=pull_request_id),
685 685 params={'update_commits': 'true', 'csrf_token': csrf_token})
686 686
687 687 response = self.app.get(
688 688 route_path('pullrequest_show',
689 689 repo_name=target.repo_name,
690 690 pull_request_id=pull_request.pull_request_id))
691 691
692 692 assert response.status_int == 200
693 693 assert 'Pull request updated to' in response.body
694 694 assert 'with 1 added, 0 removed commits.' in response.body
695 695
696 696 # check that we have now both revisions
697 697 pull_request = PullRequest.get(pull_request_id)
698 698 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
699 699
700 700 def test_update_target_revision(self, backend, csrf_token):
701 701 commits = [
702 702 {'message': 'ancestor'},
703 703 {'message': 'change'},
704 704 {'message': 'ancestor-new', 'parents': ['ancestor']},
705 705 {'message': 'change-rebased'},
706 706 ]
707 707 commit_ids = backend.create_master_repo(commits)
708 708 target = backend.create_repo(heads=['ancestor'])
709 709 source = backend.create_repo(heads=['change'])
710 710
711 711 # create pr from a in source to A in target
712 712 pull_request = PullRequest()
713 713
714 714 pull_request.source_repo = source
715 715 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
716 716 branch=backend.default_branch_name, commit_id=commit_ids['change'])
717 717
718 718 pull_request.target_repo = target
719 719 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
720 720 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
721 721
722 722 pull_request.revisions = [commit_ids['change']]
723 723 pull_request.title = u"Test"
724 724 pull_request.description = u"Description"
725 725 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
726 726 pull_request.pull_request_state = PullRequest.STATE_CREATED
727 727
728 728 Session().add(pull_request)
729 729 Session().commit()
730 730 pull_request_id = pull_request.pull_request_id
731 731
732 732 # target has ancestor - ancestor-new
733 733 # source has ancestor - ancestor-new - change-rebased
734 734 backend.pull_heads(target, heads=['ancestor-new'])
735 735 backend.pull_heads(source, heads=['change-rebased'])
736 736
737 737 # update PR
738 738 self.app.post(
739 739 route_path('pullrequest_update',
740 740 repo_name=target.repo_name,
741 741 pull_request_id=pull_request_id),
742 742 params={'update_commits': 'true', 'csrf_token': csrf_token},
743 743 status=200)
744 744
745 745 # check that we have now both revisions
746 746 pull_request = PullRequest.get(pull_request_id)
747 747 assert pull_request.revisions == [commit_ids['change-rebased']]
748 748 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
749 749 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
750 750
751 751 response = self.app.get(
752 752 route_path('pullrequest_show',
753 753 repo_name=target.repo_name,
754 754 pull_request_id=pull_request.pull_request_id))
755 755 assert response.status_int == 200
756 756 assert 'Pull request updated to' in response.body
757 757 assert 'with 1 added, 1 removed commits.' in response.body
758 758
759 759 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
760 760 backend = backend_git
761 761 commits = [
762 762 {'message': 'master-commit-1'},
763 763 {'message': 'master-commit-2-change-1'},
764 764 {'message': 'master-commit-3-change-2'},
765 765
766 766 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
767 767 {'message': 'feat-commit-2'},
768 768 ]
769 769 commit_ids = backend.create_master_repo(commits)
770 770 target = backend.create_repo(heads=['master-commit-3-change-2'])
771 771 source = backend.create_repo(heads=['feat-commit-2'])
772 772
773 773 # create pr from a in source to A in target
774 774 pull_request = PullRequest()
775 775 pull_request.source_repo = source
776 776
777 777 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
778 778 branch=backend.default_branch_name,
779 779 commit_id=commit_ids['master-commit-3-change-2'])
780 780
781 781 pull_request.target_repo = target
782 782 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
783 783 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
784 784
785 785 pull_request.revisions = [
786 786 commit_ids['feat-commit-1'],
787 787 commit_ids['feat-commit-2']
788 788 ]
789 789 pull_request.title = u"Test"
790 790 pull_request.description = u"Description"
791 791 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
792 792 pull_request.pull_request_state = PullRequest.STATE_CREATED
793 793 Session().add(pull_request)
794 794 Session().commit()
795 795 pull_request_id = pull_request.pull_request_id
796 796
797 797 # PR is created, now we simulate a force-push into target,
798 798 # that drops a 2 last commits
799 799 vcsrepo = target.scm_instance()
800 800 vcsrepo.config.clear_section('hooks')
801 801 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
802 802
803 803 # update PR
804 804 self.app.post(
805 805 route_path('pullrequest_update',
806 806 repo_name=target.repo_name,
807 807 pull_request_id=pull_request_id),
808 808 params={'update_commits': 'true', 'csrf_token': csrf_token},
809 809 status=200)
810 810
811 811 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
812 812 assert response.status_int == 200
813 813 response.mustcontain('Pull request updated to')
814 814 response.mustcontain('with 0 added, 0 removed commits.')
815 815
816 816 def test_update_of_ancestor_reference(self, backend, csrf_token):
817 817 commits = [
818 818 {'message': 'ancestor'},
819 819 {'message': 'change'},
820 820 {'message': 'change-2'},
821 821 {'message': 'ancestor-new', 'parents': ['ancestor']},
822 822 {'message': 'change-rebased'},
823 823 ]
824 824 commit_ids = backend.create_master_repo(commits)
825 825 target = backend.create_repo(heads=['ancestor'])
826 826 source = backend.create_repo(heads=['change'])
827 827
828 828 # create pr from a in source to A in target
829 829 pull_request = PullRequest()
830 830 pull_request.source_repo = source
831 831
832 832 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
833 833 branch=backend.default_branch_name, commit_id=commit_ids['change'])
834 834 pull_request.target_repo = target
835 835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
836 836 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
837 837 pull_request.revisions = [commit_ids['change']]
838 838 pull_request.title = u"Test"
839 839 pull_request.description = u"Description"
840 840 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
841 841 pull_request.pull_request_state = PullRequest.STATE_CREATED
842 842 Session().add(pull_request)
843 843 Session().commit()
844 844 pull_request_id = pull_request.pull_request_id
845 845
846 846 # target has ancestor - ancestor-new
847 847 # source has ancestor - ancestor-new - change-rebased
848 848 backend.pull_heads(target, heads=['ancestor-new'])
849 849 backend.pull_heads(source, heads=['change-rebased'])
850 850
851 851 # update PR
852 852 self.app.post(
853 853 route_path('pullrequest_update',
854 854 repo_name=target.repo_name, pull_request_id=pull_request_id),
855 855 params={'update_commits': 'true', 'csrf_token': csrf_token},
856 856 status=200)
857 857
858 858 # Expect the target reference to be updated correctly
859 859 pull_request = PullRequest.get(pull_request_id)
860 860 assert pull_request.revisions == [commit_ids['change-rebased']]
861 861 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
862 862 branch=backend.default_branch_name,
863 863 commit_id=commit_ids['ancestor-new'])
864 864 assert pull_request.target_ref == expected_target_ref
865 865
866 866 def test_remove_pull_request_branch(self, backend_git, csrf_token):
867 867 branch_name = 'development'
868 868 commits = [
869 869 {'message': 'initial-commit'},
870 870 {'message': 'old-feature'},
871 871 {'message': 'new-feature', 'branch': branch_name},
872 872 ]
873 873 repo = backend_git.create_repo(commits)
874 repo_name = repo.repo_name
874 875 commit_ids = backend_git.commit_ids
875 876
876 877 pull_request = PullRequest()
877 878 pull_request.source_repo = repo
878 879 pull_request.target_repo = repo
879 880 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
880 881 branch=branch_name, commit_id=commit_ids['new-feature'])
881 882 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
882 883 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
883 884 pull_request.revisions = [commit_ids['new-feature']]
884 885 pull_request.title = u"Test"
885 886 pull_request.description = u"Description"
886 887 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
887 888 pull_request.pull_request_state = PullRequest.STATE_CREATED
888 889 Session().add(pull_request)
889 890 Session().commit()
890 891
892 pull_request_id = pull_request.pull_request_id
893
891 894 vcs = repo.scm_instance()
892 895 vcs.remove_ref('refs/heads/{}'.format(branch_name))
893 896
894 897 response = self.app.get(route_path(
895 898 'pullrequest_show',
896 repo_name=repo.repo_name,
897 pull_request_id=pull_request.pull_request_id))
899 repo_name=repo_name,
900 pull_request_id=pull_request_id))
898 901
899 902 assert response.status_int == 200
900 903
901 904 response.assert_response().element_contains(
902 905 '#changeset_compare_view_content .alert strong',
903 906 'Missing commits')
904 907 response.assert_response().element_contains(
905 908 '#changeset_compare_view_content .alert',
906 909 'This pull request cannot be displayed, because one or more'
907 910 ' commits no longer exist in the source repository.')
908 911
909 912 def test_strip_commits_from_pull_request(
910 913 self, backend, pr_util, csrf_token):
911 914 commits = [
912 915 {'message': 'initial-commit'},
913 916 {'message': 'old-feature'},
914 917 {'message': 'new-feature', 'parents': ['initial-commit']},
915 918 ]
916 919 pull_request = pr_util.create_pull_request(
917 920 commits, target_head='initial-commit', source_head='new-feature',
918 921 revisions=['new-feature'])
919 922
920 923 vcs = pr_util.source_repository.scm_instance()
921 924 if backend.alias == 'git':
922 925 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
923 926 else:
924 927 vcs.strip(pr_util.commit_ids['new-feature'])
925 928
926 929 response = self.app.get(route_path(
927 930 'pullrequest_show',
928 931 repo_name=pr_util.target_repository.repo_name,
929 932 pull_request_id=pull_request.pull_request_id))
930 933
931 934 assert response.status_int == 200
932 935
933 936 response.assert_response().element_contains(
934 937 '#changeset_compare_view_content .alert strong',
935 938 'Missing commits')
936 939 response.assert_response().element_contains(
937 940 '#changeset_compare_view_content .alert',
938 941 'This pull request cannot be displayed, because one or more'
939 942 ' commits no longer exist in the source repository.')
940 943 response.assert_response().element_contains(
941 944 '#update_commits',
942 945 'Update commits')
943 946
944 947 def test_strip_commits_and_update(
945 948 self, backend, pr_util, csrf_token):
946 949 commits = [
947 950 {'message': 'initial-commit'},
948 951 {'message': 'old-feature'},
949 952 {'message': 'new-feature', 'parents': ['old-feature']},
950 953 ]
951 954 pull_request = pr_util.create_pull_request(
952 955 commits, target_head='old-feature', source_head='new-feature',
953 956 revisions=['new-feature'], mergeable=True)
954 957
955 958 vcs = pr_util.source_repository.scm_instance()
956 959 if backend.alias == 'git':
957 960 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
958 961 else:
959 962 vcs.strip(pr_util.commit_ids['new-feature'])
960 963
961 964 response = self.app.post(
962 965 route_path('pullrequest_update',
963 966 repo_name=pull_request.target_repo.repo_name,
964 967 pull_request_id=pull_request.pull_request_id),
965 968 params={'update_commits': 'true',
966 969 'csrf_token': csrf_token})
967 970
968 971 assert response.status_int == 200
969 972 assert response.body == 'true'
970 973
971 974 # Make sure that after update, it won't raise 500 errors
972 975 response = self.app.get(route_path(
973 976 'pullrequest_show',
974 977 repo_name=pr_util.target_repository.repo_name,
975 978 pull_request_id=pull_request.pull_request_id))
976 979
977 980 assert response.status_int == 200
978 981 response.assert_response().element_contains(
979 982 '#changeset_compare_view_content .alert strong',
980 983 'Missing commits')
981 984
982 985 def test_branch_is_a_link(self, pr_util):
983 986 pull_request = pr_util.create_pull_request()
984 987 pull_request.source_ref = 'branch:origin:1234567890abcdef'
985 988 pull_request.target_ref = 'branch:target:abcdef1234567890'
986 989 Session().add(pull_request)
987 990 Session().commit()
988 991
989 992 response = self.app.get(route_path(
990 993 'pullrequest_show',
991 994 repo_name=pull_request.target_repo.scm_instance().name,
992 995 pull_request_id=pull_request.pull_request_id))
993 996 assert response.status_int == 200
994 997
995 998 origin = response.assert_response().get_element('.pr-origininfo .tag')
996 999 origin_children = origin.getchildren()
997 1000 assert len(origin_children) == 1
998 1001 target = response.assert_response().get_element('.pr-targetinfo .tag')
999 1002 target_children = target.getchildren()
1000 1003 assert len(target_children) == 1
1001 1004
1002 1005 expected_origin_link = route_path(
1003 1006 'repo_commits',
1004 1007 repo_name=pull_request.source_repo.scm_instance().name,
1005 1008 params=dict(branch='origin'))
1006 1009 expected_target_link = route_path(
1007 1010 'repo_commits',
1008 1011 repo_name=pull_request.target_repo.scm_instance().name,
1009 1012 params=dict(branch='target'))
1010 1013 assert origin_children[0].attrib['href'] == expected_origin_link
1011 1014 assert origin_children[0].text == 'branch: origin'
1012 1015 assert target_children[0].attrib['href'] == expected_target_link
1013 1016 assert target_children[0].text == 'branch: target'
1014 1017
1015 1018 def test_bookmark_is_not_a_link(self, pr_util):
1016 1019 pull_request = pr_util.create_pull_request()
1017 1020 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1018 1021 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1019 1022 Session().add(pull_request)
1020 1023 Session().commit()
1021 1024
1022 1025 response = self.app.get(route_path(
1023 1026 'pullrequest_show',
1024 1027 repo_name=pull_request.target_repo.scm_instance().name,
1025 1028 pull_request_id=pull_request.pull_request_id))
1026 1029 assert response.status_int == 200
1027 1030
1028 1031 origin = response.assert_response().get_element('.pr-origininfo .tag')
1029 1032 assert origin.text.strip() == 'bookmark: origin'
1030 1033 assert origin.getchildren() == []
1031 1034
1032 1035 target = response.assert_response().get_element('.pr-targetinfo .tag')
1033 1036 assert target.text.strip() == 'bookmark: target'
1034 1037 assert target.getchildren() == []
1035 1038
1036 1039 def test_tag_is_not_a_link(self, pr_util):
1037 1040 pull_request = pr_util.create_pull_request()
1038 1041 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1039 1042 pull_request.target_ref = 'tag:target:abcdef1234567890'
1040 1043 Session().add(pull_request)
1041 1044 Session().commit()
1042 1045
1043 1046 response = self.app.get(route_path(
1044 1047 'pullrequest_show',
1045 1048 repo_name=pull_request.target_repo.scm_instance().name,
1046 1049 pull_request_id=pull_request.pull_request_id))
1047 1050 assert response.status_int == 200
1048 1051
1049 1052 origin = response.assert_response().get_element('.pr-origininfo .tag')
1050 1053 assert origin.text.strip() == 'tag: origin'
1051 1054 assert origin.getchildren() == []
1052 1055
1053 1056 target = response.assert_response().get_element('.pr-targetinfo .tag')
1054 1057 assert target.text.strip() == 'tag: target'
1055 1058 assert target.getchildren() == []
1056 1059
1057 1060 @pytest.mark.parametrize('mergeable', [True, False])
1058 1061 def test_shadow_repository_link(
1059 1062 self, mergeable, pr_util, http_host_only_stub):
1060 1063 """
1061 1064 Check that the pull request summary page displays a link to the shadow
1062 1065 repository if the pull request is mergeable. If it is not mergeable
1063 1066 the link should not be displayed.
1064 1067 """
1065 1068 pull_request = pr_util.create_pull_request(
1066 1069 mergeable=mergeable, enable_notifications=False)
1067 1070 target_repo = pull_request.target_repo.scm_instance()
1068 1071 pr_id = pull_request.pull_request_id
1069 1072 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1070 1073 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1071 1074
1072 1075 response = self.app.get(route_path(
1073 1076 'pullrequest_show',
1074 1077 repo_name=target_repo.name,
1075 1078 pull_request_id=pr_id))
1076 1079
1077 1080 if mergeable:
1078 1081 response.assert_response().element_value_contains(
1079 1082 'input.pr-mergeinfo', shadow_url)
1080 1083 response.assert_response().element_value_contains(
1081 1084 'input.pr-mergeinfo ', 'pr-merge')
1082 1085 else:
1083 1086 response.assert_response().no_element_exists('.pr-mergeinfo')
1084 1087
1085 1088
@pytest.mark.usefixtures('app')
@pytest.mark.backends("git", "hg")
class TestPullrequestsControllerDelete(object):
    """
    Permissions of the delete/edit controls on the pull request summary
    page, and behaviour of the comment delete endpoint.
    """

    def _get_pr_page(self, pull_request):
        # Fetch the summary page of the given pull request.
        return self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

    def test_pull_request_delete_button_permissions_admin(
            self, autologin_user, user_admin, pr_util):
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        response = self._get_pr_page(pull_request)
        response.mustcontain('id="delete_pullrequest"')
        response.mustcontain('Confirm to delete this pull request')

    def test_pull_request_delete_button_permissions_owner(
            self, autologin_regular_user, user_regular, pr_util):
        pull_request = pr_util.create_pull_request(
            author=user_regular.username, enable_notifications=False)

        response = self._get_pr_page(pull_request)
        response.mustcontain('id="delete_pullrequest"')
        response.mustcontain('Confirm to delete this pull request')

    def test_pull_request_delete_button_permissions_forbidden(
            self, autologin_regular_user, user_regular, user_admin, pr_util):
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        response = self._get_pr_page(pull_request)
        response.mustcontain(no=['id="delete_pullrequest"'])
        response.mustcontain(no=['Confirm to delete this pull request'])

    def test_pull_request_delete_button_permissions_can_update_cannot_delete(
            self, autologin_regular_user, user_regular, user_admin, pr_util,
            user_util):
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        # write access allows editing but not deleting
        user_util.grant_user_permission_to_repo(
            pull_request.target_repo, user_regular,
            'repository.write')

        response = self._get_pr_page(pull_request)
        response.mustcontain('id="open_edit_pullrequest"')
        response.mustcontain('id="delete_pullrequest"')
        response.mustcontain(no=['Confirm to delete this pull request'])

    def test_delete_comment_returns_404_if_comment_does_not_exist(
            self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):

        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        self.app.post(
            route_path(
                'pullrequest_comment_delete',
                repo_name=pull_request.target_repo.scm_instance().name,
                pull_request_id=pull_request.pull_request_id,
                comment_id=1024404),
            extra_environ=xhr_header,
            params={'csrf_token': csrf_token},
            status=404
        )

    def test_delete_comment(
            self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):

        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)
        comment_id = pr_util.create_comment().comment_id

        response = self.app.post(
            route_path(
                'pullrequest_comment_delete',
                repo_name=pull_request.target_repo.scm_instance().name,
                pull_request_id=pull_request.pull_request_id,
                comment_id=comment_id),
            extra_environ=xhr_header,
            params={'csrf_token': csrf_token},
            status=200
        )
        assert response.body == 'true'

    @pytest.mark.parametrize('url_type', [
        'pullrequest_new',
        'pullrequest_create',
        'pullrequest_update',
        'pullrequest_merge',
    ])
    def test_pull_request_is_forbidden_on_archived_repo(
            self, autologin_user, backend, xhr_header, user_util, url_type):

        # create a temporary repo and archive it
        source = user_util.create_repo(repo_type=backend.alias)
        repo_name = source.repo_name
        Repository.get_by_repo_name(repo_name).archived = True
        Session().commit()

        response = self.app.get(
            route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)

        assert_session_flash(
            response, 'Action not supported for archived repository.')
1205 1208
1206 1209
def assert_pull_request_status(pull_request, expected_status):
    """Assert that the calculated review status of *pull_request* equals
    *expected_status*."""
    calculated = ChangesetStatusModel().calculated_review_status(
        pull_request=pull_request)
    assert calculated == expected_status
1211 1214
1212 1215
@pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
@pytest.mark.usefixtures("autologin_user")
def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
    # SVN repositories have no pull-request support; both endpoints must 404.
    # NOTE(review): function name carries a typo ('forbidde'); kept so test
    # selection by name keeps working.
    url = route_path(route, repo_name=backend_svn.repo_name)
    app.get(url, status=404)
1218 1221
@@ -1,1902 +1,1902 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33 import functools
34 34
35 35 import mock
36 36 import pyramid.testing
37 37 import pytest
38 38 import colander
39 39 import requests
40 40 import pyramid.paster
41 41
42 42 import rhodecode
43 43 from rhodecode.lib.utils2 import AttributeDict
44 44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 45 from rhodecode.model.comment import CommentsModel
46 46 from rhodecode.model.db import (
47 47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.pull_request import PullRequestModel
51 51 from rhodecode.model.repo import RepoModel
52 52 from rhodecode.model.repo_group import RepoGroupModel
53 53 from rhodecode.model.user import UserModel
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55 from rhodecode.model.user_group import UserGroupModel
56 56 from rhodecode.model.integration import IntegrationModel
57 57 from rhodecode.integrations import integration_type_registry
58 58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 59 from rhodecode.lib.utils import repo2db_mapper
60 60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 61 from rhodecode.lib.vcs.backends import get_backend
62 62 from rhodecode.lib.vcs.nodes import FileNode
63 63 from rhodecode.tests import (
64 64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 66 TEST_USER_REGULAR_PASS)
67 67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 68 from rhodecode.tests.fixture import Fixture
69 69 from rhodecode.config import utils as config_utils
70 70
71 71 def _split_comma(value):
72 72 return value.split(',')
73 73
74 74
def pytest_addoption(parser):
    """Register RhodeCode specific command line options with pytest."""
    parser.addoption(
        '--keep-tmp-path', action='store_true',
        help="Keep the test temporary directories")
    parser.addoption(
        '--backends', action='store', type=_split_comma,
        default=['git', 'hg', 'svn'],
        help="Select which backends to test for backend specific tests.")
    parser.addoption(
        '--dbs', action='store', type=_split_comma,
        default=['sqlite'],
        help="Select which database to test for database specific tests. "
             "Possible options are sqlite,postgres,mysql")
    parser.addoption(
        '--appenlight', '--ae', action='store_true',
        help="Track statistics in appenlight.")
    parser.addoption(
        '--appenlight-api-key', '--ae-key',
        help="API key for Appenlight.")
    parser.addoption(
        '--appenlight-url', '--ae-url',
        default="https://ae.rhodecode.com",
        help="Appenlight service URL, defaults to https://ae.rhodecode.com")
    # one connection-string option per supported database engine
    for engine, label in (('sqlite', 'SQLite'),
                          ('postgres', 'Postgres'),
                          ('mysql', 'MySQL')):
        parser.addoption(
            '--{}-connection-string'.format(engine), action='store',
            default='',
            help="Connection string for the dbs tests with {}".format(label))
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")
110 110
111 111
def pytest_configure(config):
    # Importing the module applies RhodeCode's monkey patches as a side
    # effect; the imported name itself is deliberately unused.
    from rhodecode.config import patches
114 114
115 115
def pytest_collection_modifyitems(session, config, items):
    """Drop collected items whose object is marked ``__test__ = False``.

    Compare nose's ``nottest``; used for the transition from nose to pytest.
    """
    items[:] = [item for item in items
                if getattr(item.obj, '__test__', True)]
121 121
122 122
def pytest_generate_tests(metafunc):
    """Parametrize tests over the backends selected via ``--backends``,
    skipping tests none of the selected backends support."""
    if 'backend_alias' in metafunc.fixturenames:
        enabled_backends = get_backends_from_metafunc(metafunc)
        if not enabled_backends:
            pytest.skip("Not enabled for any of selected backends")
        metafunc.parametrize('backend_alias', enabled_backends, scope=None)
    elif hasattr(metafunc.function, 'backends'):
        if not get_backends_from_metafunc(metafunc):
            pytest.skip("Not enabled for any of selected backends")
134 134 pytest.skip("Not enabled for any of selected backends")
135 135
136 136
def get_backends_from_metafunc(metafunc):
    """Return the backend aliases that are both requested on the command
    line (``--backends``) and supported by the test in question."""
    requested = set(metafunc.config.getoption('--backends'))
    if hasattr(metafunc.function, 'backends'):
        # Supported backends declared via pytest.mark.backends.
        supported = metafunc.definition.get_closest_marker('backends').args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # Legacy class attribute for tests not yet using pytest.mark.backends.
        supported = [metafunc.cls.backend_alias]
    else:
        supported = metafunc.config.getoption('--backends')
    return requested.intersection(supported)
150 150
151 151
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in an example rcextensions module which verifies passed in kwargs.
    """
    from rhodecode.config import rcextensions

    previous_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = rcextensions
    rhodecode.EXTENSIONS.calls = collections.defaultdict(list)

    def _restore():
        # put back whatever was active before the test session
        rhodecode.EXTENSIONS = previous_extensions
    request.addfinalizer(_restore)
166 166
167 167
@pytest.fixture
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.
    """
    calls = rhodecode.EXTENSIONS.calls
    calls.clear()
    # Still the empty dict at this point, but it is filled during the test
    # run and, being a reference, that is enough to make it work.
    return calls
179 179
180 180
@pytest.fixture(scope='session')
def http_environ_session():
    """Session scoped variant of the ``http_environ`` fixture."""
    return plain_http_environ()
187 187
188 188
def plain_http_host_stub():
    """Value of HTTP_HOST (host:port) used in the test run."""
    return 'example.com:80'
194 194
195 195
@pytest.fixture
def http_host_stub():
    """Value of HTTP_HOST (host:port) used in the test run."""
    return plain_http_host_stub()
202 202
203 203
def plain_http_host_only_stub():
    """Host part (without the port) of HTTP_HOST used in the test run."""
    host, _sep, _port = plain_http_host_stub().partition(':')
    return host
209 209
210 210
@pytest.fixture
def http_host_only_stub():
    """Host part (without the port) of HTTP_HOST used in the test run."""
    return plain_http_host_only_stub()
217 217
218 218
def plain_http_environ():
    """
    HTTP extra environ keys.

    Used by the test application and for setting up the pylons environment.
    In the case of the fixture "app" it should be possible to override this
    for a specific test case.
    """
    host_and_port = plain_http_host_stub()
    return {
        'SERVER_NAME': plain_http_host_only_stub(),
        'SERVER_PORT': host_and_port.split(':')[1],
        'HTTP_HOST': host_and_port,
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET'
    }
234 234
235 235
@pytest.fixture
def http_environ():
    """
    HTTP extra environ keys used by the test application; can be overridden
    per test case when combined with the "app" fixture.
    """
    return plain_http_environ()
246 246
247 247
@pytest.fixture(scope='session')
def baseapp(ini_config, vcsserver, http_environ_session):
    """Session scoped pyramid WSGI application built from the test ini."""
    from rhodecode.lib.pyramid_utils import get_app_config
    from rhodecode.config.middleware import make_pyramid_app

    print("Using the RhodeCode configuration:{}".format(ini_config))
    pyramid.paster.setup_logging(ini_config)

    app_conf = get_app_config(ini_config)
    return make_pyramid_app({'__file__': ini_config}, **app_conf)
260 260
261 261
@pytest.fixture(scope='function')
def app(request, config_stub, baseapp, http_environ):
    """Per-test WSGI test app wrapper, also bound as ``self.app`` for
    class based tests."""
    test_app = CustomTestApp(
        baseapp,
        extra_environ=http_environ)
    if request.cls:
        request.cls.app = test_app
    return test_app
270 270
271 271
@pytest.fixture(scope='session')
def app_settings(baseapp, ini_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    return baseapp.config.get_settings()
281 281
282 282
@pytest.fixture(scope='session')
def db_connection(ini_settings):
    """Initialize the database connection for the test session."""
    config_utils.initialize_database(ini_settings)
287 287
288 288
# (csrf_token, user) pair returned by the autologin fixtures
LoginData = collections.namedtuple('LoginData', ['csrf_token', 'user'])
290 290
291 291
def _autologin_user(app, *args):
    """Log a user in (admin by default) and return LoginData(csrf_token, user)."""
    session = login_user_session(app, *args)
    token = rhodecode.lib.auth.get_csrf_token(session)
    return LoginData(token, session['rhodecode_user'])
296 296
297 297
@pytest.fixture
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in
    """
    return _autologin_user(app)
304 304
305 305
@pytest.fixture
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
313 313
314 314
@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    """CSRF token of the auto logged-in admin user."""
    return autologin_user.csrf_token
318 318
319 319
@pytest.fixture(scope='function')
def xhr_header(request):
    """Extra environ marking a request as an XMLHttpRequest."""
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
323 323
324 324
@pytest.fixture
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)
334 334
335 335
336 336 @pytest.fixture(scope='class')
337 337 def index_location(request, baseapp):
338 338 index_location = baseapp.config.get_settings()['search.location']
339 339 if request.cls:
340 340 request.cls.index_location = index_location
341 341 return index_location
342 342
343 343
@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    # `--keep-tmp-path` keeps the directory around for post-mortem debugging.
    keep = request.config.getoption('--keep-tmp-path')
    if not keep:
        request.addfinalizer(lambda: shutil.rmtree(TESTS_TMP_PATH))

    return TESTS_TMP_PATH
358 358
359 359
@pytest.fixture
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(repogroupid)

    request.addfinalizer(lambda: fixture.destroy_repo_group(repogroupid))
    return repo_group


@pytest.fixture
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(usergroupid)

    request.addfinalizer(lambda: fixture.destroy_user_group(user_group))
    return user_group


@pytest.fixture(scope='session')
def test_repo(request):
    """Session-wide container providing read-only test repositories."""
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container
399 399
400 400
class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestContainer()
        repo = test_repo('minimal', 'svn')

    """

    # Per-backend extractor turning a recorded dump into an on-disk repo.
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        # Maps (dump_name, backend_alias) -> repo_id of the created repo.
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        key = (dump_name, backend_alias)
        if key not in self._repos:
            created = self._create_repo(dump_name, backend_alias, config)
            self._repos[key] = created.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias, config):
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend_class = get_backend(backend_alias)
        extract = self.dump_extractors[backend_alias]
        repo_path = extract(dump_name, repo_name)

        vcs_repo = backend_class(repo_path, config=config)
        # Register the freshly extracted repository in the database.
        repo2db_mapper({repo_name: vcs_repo})

        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        # Destroy in reverse creation order.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)
449 449
450 450
def backend_base(request, backend_alias, baseapp, test_repo):
    """Build a `Backend` for `backend_alias`, honouring CLI backend selection."""
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    instance = Backend(
        alias=backend_alias,
        repo_name=repo_name,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(instance.cleanup)
    return instance
466 466
467 467
@pytest.fixture
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)


@pytest.fixture
def backend_git(request, baseapp, test_repo):
    """Git-only variant of the `backend` fixture."""
    return backend_base(request, 'git', baseapp, test_repo)


@pytest.fixture
def backend_hg(request, baseapp, test_repo):
    """Mercurial-only variant of the `backend` fixture."""
    return backend_base(request, 'hg', baseapp, test_repo)


@pytest.fixture
def backend_svn(request, baseapp, test_repo):
    """Subversion-only variant of the `backend` fixture."""
    return backend_base(request, 'svn', baseapp, test_repo)


@pytest.fixture
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend.

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git


@pytest.fixture
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git


@pytest.fixture
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    return backend_stub.create_repo()
532 532
533 533
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    # Runs of characters that are not allowed in repository names.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    _master_repo = None
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        # Shared, read-only test repository from the session container.
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        return vcsrepo.DEFAULT_BRANCH_NAME or vcsrepo.commit_ids[-1]

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        if not commits:
            # Auto-generate the requested number of trivial commits.
            commits = [
                {'message': 'Commit %s of %s' % (x, self.repo_name)}
                for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        fork = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return fork

    def new_repo_name(self, suffix=u''):
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Unique name derived from the test name plus a running counter.
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name),
            len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        # Destroy repositories in reverse creation order.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message, raw_id in self._commit_ids.items():
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(message.replace(' ', ''))
                refs[ref_name] = raw_id
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name, raw_id in refs.items():
            repo.set_refs(ref_name, raw_id)
706 706
707 707
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """Build a `VcsBackend` for `backend_alias`, honouring CLI selection."""
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    instance = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, repo_name),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(instance.cleanup)
    return instance


@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    """Git-only variant of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    """Mercurial-only variant of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    """Subversion-only variant of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
763 763
764 764
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.

    Counterpart of `Backend`, but working purely on the vcs layer: it deals
    with repository paths and scm instances instead of model objects.
    """

    # Runs of characters that are not allowed in repository names.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        # Shared, read-only test repository as a vcs (scm) instance.
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        """
        Create a new vcs repository and record it for later cleanup.

        :param commits: Optional sequence of commit dicts, added one commit
            per entry (see `_add_commits_to_repo`).
        :param number_of_commits: Optional. Auto-generate this many commits
            when `commits` is not given.
        :param _clone_repo: Optional existing repo to clone from.
        :param bare: create the repository as bare (no working copy).
        """
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        # NOTE: `range` (not the py2-only `xrange`) for consistency with
        # `Backend.create_repo` and Python 3 compatibility.
        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in range(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        """Create a new repository as a clone of `repo`."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        # Remove every repository created through this backend instance.
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        """Reserve and return a fresh repository path."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Unique, filesystem-safe name derived from the current test name.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        """Commit a single new file to `repo`."""
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic <automatic@rhodecode.com>')

    def ensure_file(self, filename, content='Test content\n'):
        """Commit `filename` to the current (writable) repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
842 842
843 843
844 844 def _add_commits_to_repo(vcs_repo, commits):
845 845 commit_ids = {}
846 846 if not commits:
847 847 return commit_ids
848 848
849 849 imc = vcs_repo.in_memory_commit
850 850 commit = None
851 851
852 852 for idx, commit in enumerate(commits):
853 853 message = unicode(commit.get('message', 'Commit %s' % idx))
854 854
855 855 for node in commit.get('added', []):
856 856 imc.add(FileNode(node.path, content=node.content))
857 857 for node in commit.get('changed', []):
858 858 imc.change(FileNode(node.path, content=node.content))
859 859 for node in commit.get('removed', []):
860 860 imc.remove(FileNode(node.path))
861 861
862 862 parents = [
863 863 vcs_repo.get_commit(commit_id=commit_ids[p])
864 864 for p in commit.get('parents', [])]
865 865
866 866 operations = ('added', 'changed', 'removed')
867 867 if not any((commit.get(o) for o in operations)):
868 868 imc.add(FileNode('file_%s' % idx, content=message))
869 869
870 870 commit = imc.commit(
871 871 message=message,
872 author=unicode(commit.get('author', 'Automatic')),
872 author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
873 873 date=commit.get('date'),
874 874 branch=commit.get('branch'),
875 875 parents=parents)
876 876
877 877 commit_ids[commit.message] = commit.raw_id
878 878
879 879 return commit_ids
880 880
881 881
@pytest.fixture
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
891 891
892 892
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL of the running server; set once `serve` has been called.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """Start an `svnserve` daemon rooted at `vcsrepo`.

        :raises TypeError: for any non-svn repository.
        """
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        # Terminate every server process started via `serve`.
        for proc in self._cleanup_servers:
            proc.terminate()
918 918
919 919
@pytest.fixture
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """
    helper = PRTestUtility(backend)
    request.addfinalizer(helper.cleanup)
    return helper
935 935
936 936
class PRTestUtility(object):
    """Drives creation and manipulation of a single pull request in tests."""

    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """Create (once) and return the pull request under test."""
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                # Default scenario: three commits, PR brings c2 onto c1.
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Record an "approved" vote for every reviewer."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        # Reference format is "branch:<branch name>:<raw commit id>".
        return '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])

    def _get_reviewers(self):
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
        ]

    def update_source_repository(self, head=None):
        self.backend.pull_heads(self.source_repository, heads=[head or 'c3'])

    def add_one_commit(self, head=None):
        """Pull one more commit into the source repo; return its raw id."""
        self.update_source_repository(head=head)
        previous = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        added = set(self.pull_request.revisions) - previous
        assert len(added) == 1
        return added.pop()

    def remove_one_commit(self):
        """Strip the tip commit from the source repo; return its raw id."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Add a general comment, optionally linked to a PR version."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Add an inline comment, optionally linked to a PR version."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        pull_request = self.create_pull_request()
        return PullRequestModel()._create_version_from_snapshot(pull_request)

    def create_status_votes(self, status, *reviewers):
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """Patch VCS settings so merging reports enabled/disabled per `value`."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1119 1119
1120 1120
@pytest.fixture
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)


@pytest.fixture
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)


@pytest.fixture
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    utility = UserUtility(test_name=request.node.name)
    request.addfinalizer(utility.cleanup)
    return utility
1147 1147
1148 1148
1149 1149 # TODO: johbo: Split this up into utilities per domain or something similar
1150 1150 class UserUtility(object):
1151 1151
1152 1152 def __init__(self, test_name="test"):
1153 1153 self._test_name = self._sanitize_name(test_name)
1154 1154 self.fixture = Fixture()
1155 1155 self.repo_group_ids = []
1156 1156 self.repos_ids = []
1157 1157 self.user_ids = []
1158 1158 self.user_group_ids = []
1159 1159 self.user_repo_permission_ids = []
1160 1160 self.user_group_repo_permission_ids = []
1161 1161 self.user_repo_group_permission_ids = []
1162 1162 self.user_group_repo_group_permission_ids = []
1163 1163 self.user_user_group_permission_ids = []
1164 1164 self.user_group_user_group_permission_ids = []
1165 1165 self.user_permissions = []
1166 1166
1167 1167 def _sanitize_name(self, name):
1168 1168 for char in ['[', ']']:
1169 1169 name = name.replace(char, '_')
1170 1170 return name
1171 1171
1172 1172 def create_repo_group(
1173 1173 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1174 1174 group_name = "{prefix}_repogroup_{count}".format(
1175 1175 prefix=self._test_name,
1176 1176 count=len(self.repo_group_ids))
1177 1177 repo_group = self.fixture.create_repo_group(
1178 1178 group_name, cur_user=owner)
1179 1179 if auto_cleanup:
1180 1180 self.repo_group_ids.append(repo_group.group_id)
1181 1181 return repo_group
1182 1182
1183 1183 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1184 1184 auto_cleanup=True, repo_type='hg', bare=False):
1185 1185 repo_name = "{prefix}_repository_{count}".format(
1186 1186 prefix=self._test_name,
1187 1187 count=len(self.repos_ids))
1188 1188
1189 1189 repository = self.fixture.create_repo(
1190 1190 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1191 1191 if auto_cleanup:
1192 1192 self.repos_ids.append(repository.repo_id)
1193 1193 return repository
1194 1194
1195 1195 def create_user(self, auto_cleanup=True, **kwargs):
1196 1196 user_name = "{prefix}_user_{count}".format(
1197 1197 prefix=self._test_name,
1198 1198 count=len(self.user_ids))
1199 1199 user = self.fixture.create_user(user_name, **kwargs)
1200 1200 if auto_cleanup:
1201 1201 self.user_ids.append(user.user_id)
1202 1202 return user
1203 1203
1204 1204 def create_additional_user_email(self, user, email):
1205 1205 uem = self.fixture.create_additional_user_email(user=user, email=email)
1206 1206 return uem
1207 1207
1208 1208 def create_user_with_group(self):
1209 1209 user = self.create_user()
1210 1210 user_group = self.create_user_group(members=[user])
1211 1211 return user, user_group
1212 1212
1213 1213 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1214 1214 auto_cleanup=True, **kwargs):
1215 1215 group_name = "{prefix}_usergroup_{count}".format(
1216 1216 prefix=self._test_name,
1217 1217 count=len(self.user_group_ids))
1218 1218 user_group = self.fixture.create_user_group(
1219 1219 group_name, cur_user=owner, **kwargs)
1220 1220
1221 1221 if auto_cleanup:
1222 1222 self.user_group_ids.append(user_group.users_group_id)
1223 1223 if members:
1224 1224 for user in members:
1225 1225 UserGroupModel().add_user_to_group(user_group, user)
1226 1226 return user_group
1227 1227
    def grant_user_permission(self, user_name, permission_name):
        # Disable default-permission inheritance so an explicit grant takes
        # effect, and record the pair so cleanup() reverts both steps.
        # NOTE(review): unlike the other grant_* helpers this never performs
        # the actual grant call itself — presumably the caller grants via
        # UserModel separately; confirm against usages.
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))
1231 1231
1232 1232 def grant_user_permission_to_repo_group(
1233 1233 self, repo_group, user, permission_name):
1234 1234 permission = RepoGroupModel().grant_user_permission(
1235 1235 repo_group, user, permission_name)
1236 1236 self.user_repo_group_permission_ids.append(
1237 1237 (repo_group.group_id, user.user_id))
1238 1238 return permission
1239 1239
1240 1240 def grant_user_group_permission_to_repo_group(
1241 1241 self, repo_group, user_group, permission_name):
1242 1242 permission = RepoGroupModel().grant_user_group_permission(
1243 1243 repo_group, user_group, permission_name)
1244 1244 self.user_group_repo_group_permission_ids.append(
1245 1245 (repo_group.group_id, user_group.users_group_id))
1246 1246 return permission
1247 1247
1248 1248 def grant_user_permission_to_repo(
1249 1249 self, repo, user, permission_name):
1250 1250 permission = RepoModel().grant_user_permission(
1251 1251 repo, user, permission_name)
1252 1252 self.user_repo_permission_ids.append(
1253 1253 (repo.repo_id, user.user_id))
1254 1254 return permission
1255 1255
1256 1256 def grant_user_group_permission_to_repo(
1257 1257 self, repo, user_group, permission_name):
1258 1258 permission = RepoModel().grant_user_group_permission(
1259 1259 repo, user_group, permission_name)
1260 1260 self.user_group_repo_permission_ids.append(
1261 1261 (repo.repo_id, user_group.users_group_id))
1262 1262 return permission
1263 1263
1264 1264 def grant_user_permission_to_user_group(
1265 1265 self, target_user_group, user, permission_name):
1266 1266 permission = UserGroupModel().grant_user_permission(
1267 1267 target_user_group, user, permission_name)
1268 1268 self.user_user_group_permission_ids.append(
1269 1269 (target_user_group.users_group_id, user.user_id))
1270 1270 return permission
1271 1271
1272 1272 def grant_user_group_permission_to_user_group(
1273 1273 self, target_user_group, user_group, permission_name):
1274 1274 permission = UserGroupModel().grant_user_group_permission(
1275 1275 target_user_group, user_group, permission_name)
1276 1276 self.user_group_user_group_permission_ids.append(
1277 1277 (target_user_group.users_group_id, user_group.users_group_id))
1278 1278 return permission
1279 1279
    def revoke_user_permission(self, user_name, permission_name):
        # Restore default-permission inheritance first, then drop the
        # explicit permission that grant_user_permission() recorded.
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)
1283 1283
1284 1284 def inherit_default_user_permissions(self, user_name, value):
1285 1285 user = UserModel().get_by_username(user_name)
1286 1286 user.inherit_default_permissions = value
1287 1287 Session().add(user)
1288 1288 Session().commit()
1289 1289
1290 1290 def cleanup(self):
1291 1291 self._cleanup_permissions()
1292 1292 self._cleanup_repos()
1293 1293 self._cleanup_repo_groups()
1294 1294 self._cleanup_user_groups()
1295 1295 self._cleanup_users()
1296 1296
1297 1297 def _cleanup_permissions(self):
1298 1298 if self.user_permissions:
1299 1299 for user_name, permission_name in self.user_permissions:
1300 1300 self.revoke_user_permission(user_name, permission_name)
1301 1301
1302 1302 for permission in self.user_repo_permission_ids:
1303 1303 RepoModel().revoke_user_permission(*permission)
1304 1304
1305 1305 for permission in self.user_group_repo_permission_ids:
1306 1306 RepoModel().revoke_user_group_permission(*permission)
1307 1307
1308 1308 for permission in self.user_repo_group_permission_ids:
1309 1309 RepoGroupModel().revoke_user_permission(*permission)
1310 1310
1311 1311 for permission in self.user_group_repo_group_permission_ids:
1312 1312 RepoGroupModel().revoke_user_group_permission(*permission)
1313 1313
1314 1314 for permission in self.user_user_group_permission_ids:
1315 1315 UserGroupModel().revoke_user_permission(*permission)
1316 1316
1317 1317 for permission in self.user_group_user_group_permission_ids:
1318 1318 UserGroupModel().revoke_user_group_permission(*permission)
1319 1319
1320 1320 def _cleanup_repo_groups(self):
1321 1321 def _repo_group_compare(first_group_id, second_group_id):
1322 1322 """
1323 1323 Gives higher priority to the groups with the most complex paths
1324 1324 """
1325 1325 first_group = RepoGroup.get(first_group_id)
1326 1326 second_group = RepoGroup.get(second_group_id)
1327 1327 first_group_parts = (
1328 1328 len(first_group.group_name.split('/')) if first_group else 0)
1329 1329 second_group_parts = (
1330 1330 len(second_group.group_name.split('/')) if second_group else 0)
1331 1331 return cmp(second_group_parts, first_group_parts)
1332 1332
1333 1333 sorted_repo_group_ids = sorted(
1334 1334 self.repo_group_ids, cmp=_repo_group_compare)
1335 1335 for repo_group_id in sorted_repo_group_ids:
1336 1336 self.fixture.destroy_repo_group(repo_group_id)
1337 1337
1338 1338 def _cleanup_repos(self):
1339 1339 sorted_repos_ids = sorted(self.repos_ids)
1340 1340 for repo_id in sorted_repos_ids:
1341 1341 self.fixture.destroy_repo(repo_id)
1342 1342
1343 1343 def _cleanup_user_groups(self):
1344 1344 def _user_group_compare(first_group_id, second_group_id):
1345 1345 """
1346 1346 Gives higher priority to the groups with the most complex paths
1347 1347 """
1348 1348 first_group = UserGroup.get(first_group_id)
1349 1349 second_group = UserGroup.get(second_group_id)
1350 1350 first_group_parts = (
1351 1351 len(first_group.users_group_name.split('/'))
1352 1352 if first_group else 0)
1353 1353 second_group_parts = (
1354 1354 len(second_group.users_group_name.split('/'))
1355 1355 if second_group else 0)
1356 1356 return cmp(second_group_parts, first_group_parts)
1357 1357
1358 1358 sorted_user_group_ids = sorted(
1359 1359 self.user_group_ids, cmp=_user_group_compare)
1360 1360 for user_group_id in sorted_user_group_ids:
1361 1361 self.fixture.destroy_user_group(user_group_id)
1362 1362
1363 1363 def _cleanup_users(self):
1364 1364 for user_id in self.user_ids:
1365 1365 self.fixture.destroy_user(user_id)
1366 1366
1367 1367
1368 1368 # TODO: Think about moving this into a pytest-pyro package and make it a
1369 1369 # pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adding the remote traceback if the exception has this information.

    VCSServer attaches this information as the attribute `_vcs_server_traceback`
    to the exception instance.
    """
    # hookwrapper: yield lets pytest build the default report first, then we
    # enrich it with the remote traceback when the phase raised.
    outcome = yield
    report = outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(report, call.excinfo.value)
1382 1382
1383 1383
1384 1384 def _add_vcsserver_remote_traceback(report, exc):
1385 1385 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1386 1386
1387 1387 if vcsserver_traceback:
1388 1388 section = 'VCSServer remote traceback ' + report.when
1389 1389 report.sections.append((section, vcsserver_traceback))
1390 1390
1391 1391
@pytest.fixture(scope='session')
def testrun():
    # Session-wide identity of this test run: a unique uuid plus two
    # timestamps (ISO string and integer epoch) taken at fixture creation.
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
1399 1399
1400 1400
@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends them to Appenlight. The option
    ``--ae`` has to be used to enable this fixture and the API key for your
    application has to be provided in ``--ae-key``.
    """
    # NOTE(review): the code below reads --appenlight / --appenlight-api-key /
    # --appenlight-url; the docstring's --ae / --ae-key are presumably option
    # aliases — confirm against the conftest option declarations.
    try:
        # cygwin cannot have yet psutil support.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the baseapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        # NOTE(review): getfuncargvalue is a deprecated alias of
        # getfixturevalue in newer pytest — confirm the pinned version.
        baseapp = request.getfuncargvalue("baseapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    # Snapshot memory of both the VCSServer process and the test process
    # before the test body runs; the finalizer takes the matching "after"
    # snapshots so deltas can be computed server-side.
    server_and_port = baseapp.config.get_settings()['vcs.server']
    protocol = baseapp.config.get_settings()['vcs.server.protocol']
    server = create_vcsserver_proxy(server_and_port, protocol)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

        test_process = psutil.Process()
        mem = test_process.memory_info()
        client.tag_before('test.rss', mem.rss)
        client.tag_before('test.vms', mem.vms)

        client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        # Runs after the test: take "after" snapshots and ship everything.
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

            mem = test_process.memory_info()
            client.tag_after('test.rss', mem.rss)
            client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client
1474 1474
1475 1475
class AppenlightClient():
    """Minimal Appenlight reporting client used by the stats fixture.

    Collects payloads and before/after tags in memory; ``send_stats``
    posts everything in one request.
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.testrun = testrun or {}
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []

    def tag_before(self, tag, value):
        """Record a measurement taken before the test body."""
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        """Record a measurement taken after the test body."""
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one payload, filling in default metadata fields."""
        defaults = []
        if self.add_server:
            defaults.append(('server', self.server))
        if self.add_timestamp:
            defaults.append(('date', datetime.datetime.utcnow().isoformat()))
        if self.namespace:
            defaults.append(('namespace', self.namespace))
        if self.request:
            defaults.append(('request', self.request))
        for field, value in defaults:
            data.setdefault(field, value)
        self.stats.append(data)

    def send_stats(self):
        """POST all collected payloads plus tag deltas to Appenlight."""
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for tag, before_value in self.tags_before.items():
            tags.append((tag + '.before', before_value))
            try:
                # Delta only when an "after" value exists and subtraction
                # is meaningful for the value type.
                tags.append(
                    (tag + '.delta', self.tags_after[tag] - before_value))
            except Exception:
                pass
        for tag, after_value in self.tags_after.items():
            tags.append((tag + '.after', after_value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1545 1545
1546 1546
@pytest.fixture
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    util = GistUtility()
    request.addfinalizer(util.cleanup)
    return util
1555 1555
1556 1556
class GistUtility(object):
    """Creates gists through :class:`Fixture` and destroys them on cleanup()."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist and remember its id for later destruction."""
        new_gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(new_gist.gist_id)
        return new_gist

    def cleanup(self):
        """Destroy every gist created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1570 1570
1571 1571
@pytest.fixture
def enabled_backends(request):
    """Fresh copy of the backend aliases enabled for this test run."""
    return list(request.config.option.backends)
1576 1576
1577 1577
@pytest.fixture
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    util = SettingsUtility()
    request.addfinalizer(util.cleanup)
    return util
1586 1586
1587 1587
class SettingsUtility(object):
    """Creates RhodeCode ui/settings rows and deletes them again in cleanup()."""

    def __init__(self):
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    @staticmethod
    def _persist(row):
        # Write a freshly built row to the database.
        Session().add(row)
        Session().commit()

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Insert a per-repo ui row; ``key`` defaults to a content hash."""
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        self._persist(setting)

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Insert a global ui row; ``key`` defaults to a content hash."""
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        self._persist(setting)

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Insert a per-repo setting row."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        self._persist(setting)

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Insert a global setting row."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        self._persist(setting)

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete every recorded row, then commit once."""
        recorded = (
            (RhodeCodeUi, self.rhodecode_ui_ids),
            (RhodeCodeSetting, self.rhodecode_setting_ids),
            (RepoRhodeCodeUi, self.repo_rhodecode_ui_ids),
            (RepoRhodeCodeSetting, self.repo_rhodecode_setting_ids),
        )
        for model, ids in recorded:
            for id_ in ids:
                Session().delete(model.get(id_))

        Session().commit()
1668 1668
1669 1669
@pytest.fixture
def no_notifications(request):
    """Silence notification creation for the duration of a test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1676 1676
1677 1677
@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    # Value comes straight from the --repeat command line option.
    return request.config.getoption('--repeat')
1687 1687
1688 1688
@pytest.fixture
def rhodecode_fixtures():
    # Fresh Fixture helper for creating test data; callers manage cleanup.
    return Fixture()
1692 1692
1693 1693
@pytest.fixture
def context_stub():
    """
    Stub context object.
    """
    return pyramid.testing.DummyResource()
1701 1701
1702 1702
@pytest.fixture
def request_stub():
    """
    Stub request object.
    """
    from rhodecode.lib.base import bootstrap_request
    return bootstrap_request(scheme='https')
1711 1711
1712 1712
@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    from rhodecode.lib.base import bootstrap_config
    config = bootstrap_config(request=request_stub)
    # Tear the testing registry down once the test is finished.
    request.addfinalizer(pyramid.testing.tearDown)
    return config
1726 1726
1727 1727
@pytest.fixture
def StubIntegrationType():
    # Builds (and registers!) a throwaway integration type class; events sent
    # through it are collected on the instance for later inspection.
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            # Record instead of delivering, so tests can assert on events.
            self.sent_events.append(event)

        def settings_schema(self):
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()


    # Side effect: the stub stays in the global registry for the whole run.
    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1764 1764
@pytest.fixture
def stub_integration_settings():
    # Values matching _StubIntegrationType.settings_schema().
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }
1771 1771
1772 1772
@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Repo-scoped stub integration, removed again after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1786 1786
1787 1787
@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Repo-group stub integration (child repos only), cleaned up afterwards."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1801 1801
1802 1802
@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType,
                                         stub_integration_settings):
    """Repo-group stub integration applying recursively to all children."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1816 1816
1817 1817
@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Globally scoped stub integration, cleaned up after the test."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1831 1831
1832 1832
@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    # Global integration restricted to root-level repos
    # (child_repos_only=True).
    # NOTE(review): the name duplicates global_integration_stub's
    # 'test global integration' — looks like a copy-paste; confirm whether a
    # distinct name is expected by any assertion.
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration
1846 1846
1847 1847
@pytest.fixture
def local_dt_to_utc():
    """Factory converting naive local datetimes to naive UTC datetimes."""
    def _convert(dt):
        localized = dt.replace(tzinfo=dateutil.tz.tzlocal())
        return localized.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None)
    return _convert
1854 1854
1855 1855
@pytest.fixture
def disable_anonymous_user(request, baseapp):
    """Turn off anonymous access for one test, restoring it afterwards."""
    set_anonymous_access(False)
    request.addfinalizer(lambda: set_anonymous_access(True))
1863 1863
1864 1864
@pytest.fixture(scope='module')
def rc_fixture(request):
    # Module-scoped Fixture helper; callers are responsible for cleanup.
    return Fixture()
1868 1868
1869 1869
@pytest.fixture
def repo_groups(request):
    """Create zombie/parent/child repo groups and destroy them afterwards."""
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')
    assert len(session.query(RepoGroup).all()) == 3
    assert child_group.group_parent_id == parent_group.group_id

    def cleanup():
        # Children before parents so non-empty groups never get destroyed.
        for group in (zombie_group, child_group, parent_group):
            fixture.destroy_repo_group(group)

    request.addfinalizer(cleanup)
    return zombie_group, parent_group, child_group
1889 1889
1890 1890
@pytest.fixture(scope="session")
def tmp_path_factory(request):
    """Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session.
    """

    class TempPathFactory:
        """Minimal stand-in for pytest's TempPathFactory."""

        def mktemp(self, basename):
            """Create and return a fresh temporary directory path.

            Uses ``tempfile.mkdtemp`` — which atomically creates the
            directory, matching pytest's real TempPathFactory — instead of
            the deprecated, race-prone ``tempfile.mktemp`` that only
            invented a name without creating anything.
            """
            import tempfile
            return tempfile.mkdtemp(prefix=basename)

    return TempPathFactory()
@@ -1,468 +1,468 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import threading
22 22 import time
23 23 import logging
24 24 import os.path
25 25 import subprocess32
26 26 import tempfile
27 27 import urllib2
28 28 from lxml.html import fromstring, tostring
29 29 from lxml.cssselect import CSSSelector
30 30 from urlparse import urlparse, parse_qsl
31 31 from urllib import unquote_plus
32 32 import webob
33 33
34 34 from webtest.app import TestResponse, TestApp, string_types
35 35 from webtest.compat import print_stderr
36 36
37 37 import pytest
38 38 import rc_testdata
39 39
40 40 from rhodecode.model.db import User, Repository
41 41 from rhodecode.model.meta import Session
42 42 from rhodecode.model.scm import ScmModel
43 43 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
44 44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 45 from rhodecode.tests import login_user_session
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
class CustomTestResponse(TestResponse):
    """TestResponse that saves the body to a temp file on assertion failure."""

    def _save_output(self, out):
        """Dump ``out`` to a persistent temp .html file and return its path."""
        f = tempfile.NamedTemporaryFile(delete=False, prefix='rc-test-', suffix='.html')
        try:
            f.write(out)
        finally:
            # delete=False keeps the file for inspection, but the handle
            # itself must still be closed to flush and avoid a leak.
            f.close()
        return f.name

    def mustcontain(self, *strings, **kw):
        """
        Assert that the response contains all of the strings passed
        in as arguments.

        Equivalent to::

            assert string in res

        Accepts ``no=[...]`` for strings that must NOT appear, and
        ``print_body=True`` to echo the full body on failure.
        """
        print_body = kw.pop('print_body', False)
        if 'no' in kw:
            no = kw['no']
            del kw['no']
            if isinstance(no, string_types):
                no = [no]
        else:
            no = []
        if kw:
            raise TypeError(
                "The only keyword argument allowed is 'no' got %s" % kw)

        # Persist the body up front so both failure paths can reference it.
        f = self._save_output(str(self))

        for s in strings:
            # idiom fix: `s not in self` instead of `not s in self`
            if s not in self:
                print_stderr("Actual response (no %r):" % s)
                print_stderr("body output saved as `%s`" % f)
                if print_body:
                    print_stderr(str(self))
                raise IndexError(
                    "Body does not contain string %r, body output saved as %s" % (s, f))

        for no_s in no:
            if no_s in self:
                print_stderr("Actual response (has %r)" % no_s)
                print_stderr("body output saved as `%s`" % f)
                if print_body:
                    print_stderr(str(self))
                raise IndexError(
                    "Body contains bad string %r, body output saved as %s" % (no_s, f))

    def assert_response(self):
        """Return an :class:`AssertResponse` wrapper for this response."""
        return AssertResponse(self)

    def get_session_from_response(self):
        """
        This returns the session from a response object.
        """
        from rhodecode.lib.rc_beaker import session_factory_from_settings
        session = session_factory_from_settings(self.test_app._pyramid_settings)
        return session(self.request)
108 108
109 109
class TestRequest(webob.BaseRequest):

    # for py.test: prevent this class (named Test*) from being collected
    # as a test case.
    disabled = True
    # Every response built for this request becomes our enriched
    # CustomTestResponse instead of the webob default.
    ResponseClass = CustomTestResponse

    def add_response_callback(self, callback):
        # Response callbacks are irrelevant in tests; deliberately a no-op.
        pass
118 118
119 119
class CustomTestApp(TestApp):
    """
    Custom app to make mustcontain more Useful, and extract special methods
    """
    RequestClass = TestRequest
    # Populated by login(); currently holds only the 'csrf_token' key.
    # NOTE(review): class-level dict is shared across instances — fine for a
    # single TestApp per test, but verify if multiple apps are ever created.
    rc_login_data = {}
    # The beaker session of the most recent login() call.
    rc_current_session = None

    def login(self, username=None, password=None):
        # Log the given user (or the default test user when no credentials
        # are passed) into the app and cache the session CSRF token for
        # later form posts.
        from rhodecode.lib import auth

        if username and password:
            session = login_user_session(self, username, password)
        else:
            session = login_user_session(self)

        self.rc_login_data['csrf_token'] = auth.get_csrf_token(session)
        self.rc_current_session = session
        return session['rhodecode_user']

    @property
    def csrf_token(self):
        # Token captured during login(); KeyError if login() was not called.
        return self.rc_login_data['csrf_token']

    @property
    def _pyramid_registry(self):
        return self.app.config.registry

    @property
    def _pyramid_settings(self):
        return self._pyramid_registry.settings
152 152
def set_anonymous_access(enabled):
    """(Dis)allows anonymous access depending on parameter `enabled`"""
    anon = User.get_default_user()
    anon.active = enabled
    session = Session()
    session.add(anon)
    session.commit()
    time.sleep(1.5)  # must sleep for cache (1s to expire)
    log.info('anonymous access is now: %s', enabled)
    assert enabled == User.get_default_user().active, (
        'Cannot set anonymous access')
163 163
164 164
def check_xfail_backends(node, backend_alias):
    """xfail the test when its ``xfail_backends`` marker lists this backend."""
    # Using "xfail_backends" here intentionally, since this marks work
    # which is "to be done" soon.
    marker = node.get_closest_marker('xfail_backends')
    if not (marker and backend_alias in marker.args):
        return
    default_msg = "Support for backend %s to be developed." % (backend_alias, )
    pytest.xfail(marker.kwargs.get('reason', default_msg))
173 173
174 174
def check_skip_backends(node, backend_alias):
    """Skip the test when its ``skip_backends`` marker lists this backend."""
    # Using "skip_backends" here intentionally, since this marks work which is
    # not supported.
    marker = node.get_closest_marker('skip_backends')
    if not (marker and backend_alias in marker.args):
        return
    default_msg = "Feature not supported for backend %s." % (backend_alias, )
    pytest.skip(marker.kwargs.get('reason', default_msg))
183 183
184 184
def extract_git_repo_from_dump(dump_name, repo_name):
    """Create git repo `repo_name` from dump `dump_name`."""
    target_path = os.path.join(ScmModel().repos_path, repo_name)
    rc_testdata.extract_git_dump(dump_name, target_path)
    return target_path
191 191
192 192
def extract_hg_repo_from_dump(dump_name, repo_name):
    """Create hg repo `repo_name` from dump `dump_name`."""
    target_path = os.path.join(ScmModel().repos_path, repo_name)
    rc_testdata.extract_hg_dump(dump_name, target_path)
    return target_path
199 199
200 200
def extract_svn_repo_from_dump(dump_name, repo_name):
    """Create a svn repo `repo_name` from dump `dump_name`."""
    target_path = os.path.join(ScmModel().repos_path, repo_name)
    # An empty repository must exist before the dump can be loaded into it.
    SubversionRepository(target_path, create=True)
    _load_svn_dump_into_repo(dump_name, target_path)
    return target_path
208 208
209 209
210 210 def assert_message_in_log(log_records, message, levelno, module):
211 211 messages = [
212 212 r.message for r in log_records
213 213 if r.module == module and r.levelno == levelno
214 214 ]
215 215 assert message in messages
216 216
217 217
218 218 def _load_svn_dump_into_repo(dump_name, repo_path):
219 219 """
220 220 Utility to populate a svn repository with a named dump
221 221
222 222 Currently the dumps are in rc_testdata. They might later on be
223 223 integrated with the main repository once they stabilize more.
224 224 """
225 225 dump = rc_testdata.load_svn_dump(dump_name)
226 226 load_dump = subprocess32.Popen(
227 227 ['svnadmin', 'load', repo_path],
228 228 stdin=subprocess32.PIPE, stdout=subprocess32.PIPE,
229 229 stderr=subprocess32.PIPE)
230 230 out, err = load_dump.communicate(dump)
231 231 if load_dump.returncode != 0:
232 232 log.error("Output of load_dump command: %s", out)
233 233 log.error("Error output of load_dump command: %s", err)
234 234 raise Exception(
235 235 'Failed to load dump "%s" into repository at path "%s".'
236 236 % (dump_name, repo_path))
237 237
238 238
239 239 class AssertResponse(object):
240 240 """
241 241 Utility that helps to assert things about a given HTML response.
242 242 """
243 243
244 244 def __init__(self, response):
245 245 self.response = response
246 246
247 247 def get_imports(self):
248 248 return fromstring, tostring, CSSSelector
249 249
250 250 def one_element_exists(self, css_selector):
251 251 self.get_element(css_selector)
252 252
253 253 def no_element_exists(self, css_selector):
254 254 assert not self._get_elements(css_selector)
255 255
256 256 def element_equals_to(self, css_selector, expected_content):
257 257 element = self.get_element(css_selector)
258 258 element_text = self._element_to_string(element)
259 259 assert expected_content in element_text
260 260
261 261 def element_contains(self, css_selector, expected_content):
262 262 element = self.get_element(css_selector)
263 263 assert expected_content in element.text_content()
264 264
265 265 def element_value_contains(self, css_selector, expected_content):
266 266 element = self.get_element(css_selector)
267 267 assert expected_content in element.value
268 268
269 269 def contains_one_link(self, link_text, href):
270 270 fromstring, tostring, CSSSelector = self.get_imports()
271 271 doc = fromstring(self.response.body)
272 272 sel = CSSSelector('a[href]')
273 273 elements = [
274 274 e for e in sel(doc) if e.text_content().strip() == link_text]
275 275 assert len(elements) == 1, "Did not find link or found multiple links"
276 276 self._ensure_url_equal(elements[0].attrib.get('href'), href)
277 277
278 278 def contains_one_anchor(self, anchor_id):
279 279 fromstring, tostring, CSSSelector = self.get_imports()
280 280 doc = fromstring(self.response.body)
281 281 sel = CSSSelector('#' + anchor_id)
282 282 elements = sel(doc)
283 283 assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id)
284 284
285 285 def _ensure_url_equal(self, found, expected):
286 286 assert _Url(found) == _Url(expected)
287 287
288 288 def get_element(self, css_selector):
289 289 elements = self._get_elements(css_selector)
290 290 assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector)
291 291 return elements[0]
292 292
293 293 def get_elements(self, css_selector):
294 294 return self._get_elements(css_selector)
295 295
296 296 def _get_elements(self, css_selector):
297 297 fromstring, tostring, CSSSelector = self.get_imports()
298 298 doc = fromstring(self.response.body)
299 299 sel = CSSSelector(css_selector)
300 300 elements = sel(doc)
301 301 return elements
302 302
303 303 def _element_to_string(self, element):
304 304 fromstring, tostring, CSSSelector = self.get_imports()
305 305 return tostring(element)
306 306
307 307
308 308 class _Url(object):
309 309 """
310 310 A url object that can be compared with other url orbjects
311 311 without regard to the vagaries of encoding, escaping, and ordering
312 312 of parameters in query strings.
313 313
314 314 Inspired by
315 315 http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python
316 316 """
317 317
318 318 def __init__(self, url):
319 319 parts = urlparse(url)
320 320 _query = frozenset(parse_qsl(parts.query))
321 321 _path = unquote_plus(parts.path)
322 322 parts = parts._replace(query=_query, path=_path)
323 323 self.parts = parts
324 324
325 325 def __eq__(self, other):
326 326 return self.parts == other.parts
327 327
328 328 def __hash__(self):
329 329 return hash(self.parts)
330 330
331 331
332 332 def run_test_concurrently(times, raise_catched_exc=True):
333 333 """
334 334 Add this decorator to small pieces of code that you want to test
335 335 concurrently
336 336
337 337 ex:
338 338
339 339 @test_concurrently(25)
340 340 def my_test_function():
341 341 ...
342 342 """
343 343 def test_concurrently_decorator(test_func):
344 344 def wrapper(*args, **kwargs):
345 345 exceptions = []
346 346
347 347 def call_test_func():
348 348 try:
349 349 test_func(*args, **kwargs)
350 350 except Exception as e:
351 351 exceptions.append(e)
352 352 if raise_catched_exc:
353 353 raise
354 354 threads = []
355 355 for i in range(times):
356 356 threads.append(threading.Thread(target=call_test_func))
357 357 for t in threads:
358 358 t.start()
359 359 for t in threads:
360 360 t.join()
361 361 if exceptions:
362 362 raise Exception(
363 363 'test_concurrently intercepted %s exceptions: %s' % (
364 364 len(exceptions), exceptions))
365 365 return wrapper
366 366 return test_concurrently_decorator
367 367
368 368
369 369 def wait_for_url(url, timeout=10):
370 370 """
371 371 Wait until URL becomes reachable.
372 372
373 373 It polls the URL until the timeout is reached or it became reachable.
374 374 If will call to `py.test.fail` in case the URL is not reachable.
375 375 """
376 376 timeout = time.time() + timeout
377 377 last = 0
378 378 wait = 0.1
379 379
380 380 while timeout > last:
381 381 last = time.time()
382 382 if is_url_reachable(url):
383 383 break
384 384 elif (last + wait) > time.time():
385 385 # Go to sleep because not enough time has passed since last check.
386 386 time.sleep(wait)
387 387 else:
388 388 pytest.fail("Timeout while waiting for URL {}".format(url))
389 389
390 390
391 391 def is_url_reachable(url):
392 392 try:
393 393 urllib2.urlopen(url)
394 394 except urllib2.URLError:
395 395 log.exception('URL Reach error')
396 396 return False
397 397 return True
398 398
399 399
400 400 def repo_on_filesystem(repo_name):
401 401 from rhodecode.lib import vcs
402 402 from rhodecode.tests import TESTS_TMP_PATH
403 403 repo = vcs.get_vcs_instance(
404 404 os.path.join(TESTS_TMP_PATH, repo_name), create=False)
405 405 return repo is not None
406 406
407 407
408 408 def commit_change(
409 409 repo, filename, content, message, vcs_type, parent=None, newfile=False):
410 410 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
411 411
412 412 repo = Repository.get_by_repo_name(repo)
413 413 _commit = parent
414 414 if not parent:
415 415 _commit = EmptyCommit(alias=vcs_type)
416 416
417 417 if newfile:
418 418 nodes = {
419 419 filename: {
420 420 'content': content
421 421 }
422 422 }
423 423 commit = ScmModel().create_nodes(
424 424 user=TEST_USER_ADMIN_LOGIN, repo=repo,
425 425 message=message,
426 426 nodes=nodes,
427 427 parent_commit=_commit,
428 author=TEST_USER_ADMIN_LOGIN,
428 author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN),
429 429 )
430 430 else:
431 431 commit = ScmModel().commit_change(
432 432 repo=repo.scm_instance(), repo_name=repo.repo_name,
433 433 commit=parent, user=TEST_USER_ADMIN_LOGIN,
434 author=TEST_USER_ADMIN_LOGIN,
434 author='{} <admin@rhodecode.com>'.format(TEST_USER_ADMIN_LOGIN),
435 435 message=message,
436 436 content=content,
437 437 f_path=filename
438 438 )
439 439 return commit
440 440
441 441
442 442 def permission_update_data_generator(csrf_token, default=None, grant=None, revoke=None):
443 443 if not default:
444 444 raise ValueError('Permission for default user must be given')
445 445 form_data = [(
446 446 'csrf_token', csrf_token
447 447 )]
448 448 # add default
449 449 form_data.extend([
450 450 ('u_perm_1', default)
451 451 ])
452 452
453 453 if grant:
454 454 for cnt, (obj_id, perm, obj_name, obj_type) in enumerate(grant, 1):
455 455 form_data.extend([
456 456 ('perm_new_member_perm_new{}'.format(cnt), perm),
457 457 ('perm_new_member_id_new{}'.format(cnt), obj_id),
458 458 ('perm_new_member_name_new{}'.format(cnt), obj_name),
459 459 ('perm_new_member_type_new{}'.format(cnt), obj_type),
460 460
461 461 ])
462 462 if revoke:
463 463 for obj_id, obj_type in revoke:
464 464 form_data.extend([
465 465 ('perm_del_member_id_{}'.format(obj_id), obj_id),
466 466 ('perm_del_member_type_{}'.format(obj_id), obj_type),
467 467 ])
468 468 return form_data
@@ -1,147 +1,147 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22
23 23 import pytest
24 24
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.tests.vcs.conftest import BackendTestMixin
27 27
28 28
29 29 @pytest.mark.usefixtures("vcs_repository_support")
30 30 class TestBranches(BackendTestMixin):
31 31
32 32 def test_empty_repository_has_no_branches(self, vcsbackend):
33 33 empty_repo = vcsbackend.create_repo()
34 34 assert empty_repo.branches == {}
35 35
36 36 def test_branches_all(self, vcsbackend):
37 37 branch_count = {
38 38 'git': 1,
39 39 'hg': 1,
40 40 'svn': 0,
41 41 }
42 42 assert len(self.repo.branches_all) == branch_count[vcsbackend.alias]
43 43
44 44 def test_closed_branches(self):
45 45 assert len(self.repo.branches_closed) == 0
46 46
47 47 def test_simple(self, local_dt_to_utc):
48 48 tip = self.repo.get_commit()
49 49 assert tip.message == 'Changes...'
50 50 assert tip.date == local_dt_to_utc(datetime.datetime(2010, 1, 1, 21))
51 51
52 52 @pytest.mark.backends("git", "hg")
53 53 def test_new_branch(self):
54 54 # This check must not be removed to ensure the 'branches' LazyProperty
55 55 # gets hit *before* the new 'foobar' branch got created:
56 56 assert 'foobar' not in self.repo.branches
57 57 self.imc.add(FileNode(
58 58 'docs/index.txt',
59 59 content='Documentation\n'))
60 60 foobar_tip = self.imc.commit(
61 61 message=u'New branch: foobar',
62 author=u'joe',
62 author=u'joe <joe@rhodecode.com>',
63 63 branch='foobar',
64 64 )
65 65 assert 'foobar' in self.repo.branches
66 66 assert foobar_tip.branch == 'foobar'
67 67
68 68 @pytest.mark.backends("git", "hg")
69 69 def test_new_head(self):
70 70 tip = self.repo.get_commit()
71 71 self.imc.add(FileNode(
72 72 'docs/index.txt',
73 73 content='Documentation\n'))
74 74 foobar_tip = self.imc.commit(
75 75 message=u'New branch: foobar',
76 author=u'joe',
76 author=u'joe <joe@rhodecode.com>',
77 77 branch='foobar',
78 78 parents=[tip],
79 79 )
80 80 self.imc.change(FileNode(
81 81 'docs/index.txt',
82 82 content='Documentation\nand more...\n'))
83 83 newtip = self.imc.commit(
84 84 message=u'At default branch',
85 author=u'joe',
85 author=u'joe <joe@rhodecode.com>',
86 86 branch=foobar_tip.branch,
87 87 parents=[foobar_tip],
88 88 )
89 89
90 90 newest_tip = self.imc.commit(
91 91 message=u'Merged with %s' % foobar_tip.raw_id,
92 author=u'joe',
92 author=u'joe <joe@rhodecode.com>',
93 93 branch=self.backend_class.DEFAULT_BRANCH_NAME,
94 94 parents=[newtip, foobar_tip],
95 95 )
96 96
97 97 assert newest_tip.branch == \
98 98 self.backend_class.DEFAULT_BRANCH_NAME
99 99
100 100 @pytest.mark.backends("git", "hg")
101 101 def test_branch_with_slash_in_name(self):
102 102 self.imc.add(FileNode('extrafile', content='Some data\n'))
103 103 self.imc.commit(
104 u'Branch with a slash!', author=u'joe',
104 u'Branch with a slash!', author=u'joe <joe@rhodecode.com>',
105 105 branch='issue/123')
106 106 assert 'issue/123' in self.repo.branches
107 107
108 108 @pytest.mark.backends("git", "hg")
109 109 def test_branch_with_slash_in_name_and_similar_without(self):
110 110 self.imc.add(FileNode('extrafile', content='Some data\n'))
111 111 self.imc.commit(
112 u'Branch with a slash!', author=u'joe',
112 u'Branch with a slash!', author=u'joe <joe@rhodecode.com>',
113 113 branch='issue/123')
114 114 self.imc.add(FileNode('extrafile II', content='Some data\n'))
115 115 self.imc.commit(
116 u'Branch without a slash...', author=u'joe',
116 u'Branch without a slash...', author=u'joe <joe@rhodecode.com>',
117 117 branch='123')
118 118 assert 'issue/123' in self.repo.branches
119 119 assert '123' in self.repo.branches
120 120
121 121
122 122 class TestSvnBranches(object):
123 123
124 124 def test_empty_repository_has_no_tags_and_branches(self, vcsbackend_svn):
125 125 empty_repo = vcsbackend_svn.create_repo()
126 126 assert empty_repo.branches == {}
127 127 assert empty_repo.tags == {}
128 128
129 129 def test_missing_structure_has_no_tags_and_branches(self, vcsbackend_svn):
130 130 repo = vcsbackend_svn.create_repo(number_of_commits=1)
131 131 assert repo.branches == {}
132 132 assert repo.tags == {}
133 133
134 134 def test_discovers_ordered_branches(self, vcsbackend_svn):
135 135 repo = vcsbackend_svn['svn-simple-layout']
136 136 expected_branches = [
137 137 'branches/add-docs',
138 138 'branches/argparse',
139 139 'trunk',
140 140 ]
141 141 assert repo.branches.keys() == expected_branches
142 142
143 143 def test_discovers_ordered_tags(self, vcsbackend_svn):
144 144 repo = vcsbackend_svn['svn-simple-layout']
145 145 expected_tags = [
146 146 'tags/v0.1', 'tags/v0.2', 'tags/v0.3', 'tags/v0.5']
147 147 assert repo.tags.keys() == expected_tags
@@ -1,596 +1,596 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 import time
23 23
24 24 import pytest
25 25
26 26 from rhodecode.lib.vcs.backends.base import (
27 27 CollectionGenerator, FILEMODE_DEFAULT, EmptyCommit)
28 28 from rhodecode.lib.vcs.exceptions import (
29 29 BranchDoesNotExistError, CommitDoesNotExistError,
30 30 RepositoryError, EmptyRepositoryError)
31 31 from rhodecode.lib.vcs.nodes import (
32 32 FileNode, AddedFileNodesGenerator,
33 33 ChangedFileNodesGenerator, RemovedFileNodesGenerator)
34 34 from rhodecode.tests import get_new_dir
35 35 from rhodecode.tests.vcs.conftest import BackendTestMixin
36 36
37 37
38 38 class TestBaseChangeset(object):
39 39
40 40 def test_is_deprecated(self):
41 41 from rhodecode.lib.vcs.backends.base import BaseChangeset
42 42 pytest.deprecated_call(BaseChangeset)
43 43
44 44
45 45 class TestEmptyCommit(object):
46 46
47 47 def test_branch_without_alias_returns_none(self):
48 48 commit = EmptyCommit()
49 49 assert commit.branch is None
50 50
51 51
52 52 @pytest.mark.usefixtures("vcs_repository_support")
53 53 class TestCommitsInNonEmptyRepo(BackendTestMixin):
54 54 recreate_repo_per_test = True
55 55
56 56 @classmethod
57 57 def _get_commits(cls):
58 58 start_date = datetime.datetime(2010, 1, 1, 20)
59 59 for x in xrange(5):
60 60 yield {
61 61 'message': 'Commit %d' % x,
62 62 'author': 'Joe Doe <joe.doe@example.com>',
63 63 'date': start_date + datetime.timedelta(hours=12 * x),
64 64 'added': [
65 65 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
66 66 ],
67 67 }
68 68
69 69 def test_walk_returns_empty_list_in_case_of_file(self):
70 70 result = list(self.tip.walk('file_0.txt'))
71 71 assert result == []
72 72
73 73 @pytest.mark.backends("git", "hg")
74 74 def test_new_branch(self):
75 75 self.imc.add(FileNode('docs/index.txt',
76 76 content='Documentation\n'))
77 77 foobar_tip = self.imc.commit(
78 78 message=u'New branch: foobar',
79 author=u'joe',
79 author=u'joe <joe@rhodecode.com>',
80 80 branch='foobar',
81 81 )
82 82 assert 'foobar' in self.repo.branches
83 83 assert foobar_tip.branch == 'foobar'
84 84 # 'foobar' should be the only branch that contains the new commit
85 85 branch = self.repo.branches.values()
86 86 assert branch[0] != branch[1]
87 87
88 88 @pytest.mark.backends("git", "hg")
89 89 def test_new_head_in_default_branch(self):
90 90 tip = self.repo.get_commit()
91 91 self.imc.add(FileNode('docs/index.txt',
92 92 content='Documentation\n'))
93 93 foobar_tip = self.imc.commit(
94 94 message=u'New branch: foobar',
95 author=u'joe',
95 author=u'joe <joe@rhodecode.com>',
96 96 branch='foobar',
97 97 parents=[tip],
98 98 )
99 99 self.imc.change(FileNode('docs/index.txt',
100 100 content='Documentation\nand more...\n'))
101 101 newtip = self.imc.commit(
102 102 message=u'At default branch',
103 author=u'joe',
103 author=u'joe <joe@rhodecode.com>',
104 104 branch=foobar_tip.branch,
105 105 parents=[foobar_tip],
106 106 )
107 107
108 108 newest_tip = self.imc.commit(
109 109 message=u'Merged with %s' % foobar_tip.raw_id,
110 author=u'joe',
110 author=u'joe <joe@rhodecode.com>',
111 111 branch=self.backend_class.DEFAULT_BRANCH_NAME,
112 112 parents=[newtip, foobar_tip],
113 113 )
114 114
115 115 assert newest_tip.branch == self.backend_class.DEFAULT_BRANCH_NAME
116 116
117 117 @pytest.mark.backends("git", "hg")
118 118 def test_get_commits_respects_branch_name(self):
119 119 """
120 120 * e1930d0 (HEAD, master) Back in default branch
121 121 | * e1930d0 (docs) New Branch: docs2
122 122 | * dcc14fa New branch: docs
123 123 |/
124 124 * e63c41a Initial commit
125 125 ...
126 126 * 624d3db Commit 0
127 127
128 128 :return:
129 129 """
130 130 DEFAULT_BRANCH = self.repo.DEFAULT_BRANCH_NAME
131 131 TEST_BRANCH = 'docs'
132 132 org_tip = self.repo.get_commit()
133 133
134 134 self.imc.add(FileNode('readme.txt', content='Document\n'))
135 135 initial = self.imc.commit(
136 136 message=u'Initial commit',
137 author=u'joe',
137 author=u'joe <joe@rhodecode.com>',
138 138 parents=[org_tip],
139 139 branch=DEFAULT_BRANCH,)
140 140
141 141 self.imc.add(FileNode('newdoc.txt', content='foobar\n'))
142 142 docs_branch_commit1 = self.imc.commit(
143 143 message=u'New branch: docs',
144 author=u'joe',
144 author=u'joe <joe@rhodecode.com>',
145 145 parents=[initial],
146 146 branch=TEST_BRANCH,)
147 147
148 148 self.imc.add(FileNode('newdoc2.txt', content='foobar2\n'))
149 149 docs_branch_commit2 = self.imc.commit(
150 150 message=u'New branch: docs2',
151 author=u'joe',
151 author=u'joe <joe@rhodecode.com>',
152 152 parents=[docs_branch_commit1],
153 153 branch=TEST_BRANCH,)
154 154
155 155 self.imc.add(FileNode('newfile', content='hello world\n'))
156 156 self.imc.commit(
157 157 message=u'Back in default branch',
158 author=u'joe',
158 author=u'joe <joe@rhodecode.com>',
159 159 parents=[initial],
160 160 branch=DEFAULT_BRANCH,)
161 161
162 162 default_branch_commits = self.repo.get_commits(branch_name=DEFAULT_BRANCH)
163 163 assert docs_branch_commit1 not in list(default_branch_commits)
164 164 assert docs_branch_commit2 not in list(default_branch_commits)
165 165
166 166 docs_branch_commits = self.repo.get_commits(
167 167 start_id=self.repo.commit_ids[0], end_id=self.repo.commit_ids[-1],
168 168 branch_name=TEST_BRANCH)
169 169 assert docs_branch_commit1 in list(docs_branch_commits)
170 170 assert docs_branch_commit2 in list(docs_branch_commits)
171 171
172 172 @pytest.mark.backends("svn")
173 173 def test_get_commits_respects_branch_name_svn(self, vcsbackend_svn):
174 174 repo = vcsbackend_svn['svn-simple-layout']
175 175 commits = repo.get_commits(branch_name='trunk')
176 176 commit_indexes = [c.idx for c in commits]
177 177 assert commit_indexes == [1, 2, 3, 7, 12, 15]
178 178
179 179 def test_get_commit_by_index(self):
180 180 for idx in [1, 2, 3, 4]:
181 181 assert idx == self.repo.get_commit(commit_idx=idx).idx
182 182
183 183 def test_get_commit_by_branch(self):
184 184 for branch, commit_id in self.repo.branches.iteritems():
185 185 assert commit_id == self.repo.get_commit(branch).raw_id
186 186
187 187 def test_get_commit_by_tag(self):
188 188 for tag, commit_id in self.repo.tags.iteritems():
189 189 assert commit_id == self.repo.get_commit(tag).raw_id
190 190
191 191 def test_get_commit_parents(self):
192 192 repo = self.repo
193 193 for test_idx in [1, 2, 3]:
194 194 commit = repo.get_commit(commit_idx=test_idx - 1)
195 195 assert [commit] == repo.get_commit(commit_idx=test_idx).parents
196 196
197 197 def test_get_commit_children(self):
198 198 repo = self.repo
199 199 for test_idx in [1, 2, 3]:
200 200 commit = repo.get_commit(commit_idx=test_idx + 1)
201 201 assert [commit] == repo.get_commit(commit_idx=test_idx).children
202 202
203 203
204 204 @pytest.mark.usefixtures("vcs_repository_support")
205 205 class TestCommits(BackendTestMixin):
206 206 recreate_repo_per_test = False
207 207
208 208 @classmethod
209 209 def _get_commits(cls):
210 210 start_date = datetime.datetime(2010, 1, 1, 20)
211 211 for x in xrange(5):
212 212 yield {
213 213 'message': u'Commit %d' % x,
214 214 'author': u'Joe Doe <joe.doe@example.com>',
215 215 'date': start_date + datetime.timedelta(hours=12 * x),
216 216 'added': [
217 217 FileNode('file_%d.txt' % x, content='Foobar %d' % x),
218 218 ],
219 219 }
220 220
221 221 def test_simple(self):
222 222 tip = self.repo.get_commit()
223 223 assert tip.date, datetime.datetime(2010, 1, 3 == 20)
224 224
225 225 def test_simple_serialized_commit(self):
226 226 tip = self.repo.get_commit()
227 227 # json.dumps(tip) uses .__json__() method
228 228 data = tip.__json__()
229 229 assert 'branch' in data
230 230 assert data['revision']
231 231
232 232 def test_retrieve_tip(self):
233 233 tip = self.repo.get_commit('tip')
234 234 assert tip == self.repo.get_commit()
235 235
236 236 def test_invalid(self):
237 237 with pytest.raises(CommitDoesNotExistError):
238 238 self.repo.get_commit(commit_idx=123456789)
239 239
240 240 def test_idx(self):
241 241 commit = self.repo[0]
242 242 assert commit.idx == 0
243 243
244 244 def test_negative_idx(self):
245 245 commit = self.repo.get_commit(commit_idx=-1)
246 246 assert commit.idx >= 0
247 247
248 248 def test_revision_is_deprecated(self):
249 249 def get_revision(commit):
250 250 return commit.revision
251 251
252 252 commit = self.repo[0]
253 253 pytest.deprecated_call(get_revision, commit)
254 254
255 255 def test_size(self):
256 256 tip = self.repo.get_commit()
257 257 size = 5 * len('Foobar N') # Size of 5 files
258 258 assert tip.size == size
259 259
260 260 def test_size_at_commit(self):
261 261 tip = self.repo.get_commit()
262 262 size = 5 * len('Foobar N') # Size of 5 files
263 263 assert self.repo.size_at_commit(tip.raw_id) == size
264 264
265 265 def test_size_at_first_commit(self):
266 266 commit = self.repo[0]
267 267 size = len('Foobar N') # Size of 1 file
268 268 assert self.repo.size_at_commit(commit.raw_id) == size
269 269
270 270 def test_author(self):
271 271 tip = self.repo.get_commit()
272 272 assert_text_equal(tip.author, u'Joe Doe <joe.doe@example.com>')
273 273
274 274 def test_author_name(self):
275 275 tip = self.repo.get_commit()
276 276 assert_text_equal(tip.author_name, u'Joe Doe')
277 277
278 278 def test_author_email(self):
279 279 tip = self.repo.get_commit()
280 280 assert_text_equal(tip.author_email, u'joe.doe@example.com')
281 281
282 282 def test_message(self):
283 283 tip = self.repo.get_commit()
284 284 assert_text_equal(tip.message, u'Commit 4')
285 285
286 286 def test_diff(self):
287 287 tip = self.repo.get_commit()
288 288 diff = tip.diff()
289 289 assert "+Foobar 4" in diff.raw
290 290
291 291 def test_prev(self):
292 292 tip = self.repo.get_commit()
293 293 prev_commit = tip.prev()
294 294 assert prev_commit.message == 'Commit 3'
295 295
296 296 def test_prev_raises_on_first_commit(self):
297 297 commit = self.repo.get_commit(commit_idx=0)
298 298 with pytest.raises(CommitDoesNotExistError):
299 299 commit.prev()
300 300
301 301 def test_prev_works_on_second_commit_issue_183(self):
302 302 commit = self.repo.get_commit(commit_idx=1)
303 303 prev_commit = commit.prev()
304 304 assert prev_commit.idx == 0
305 305
306 306 def test_next(self):
307 307 commit = self.repo.get_commit(commit_idx=2)
308 308 next_commit = commit.next()
309 309 assert next_commit.message == 'Commit 3'
310 310
311 311 def test_next_raises_on_tip(self):
312 312 commit = self.repo.get_commit()
313 313 with pytest.raises(CommitDoesNotExistError):
314 314 commit.next()
315 315
316 316 def test_get_path_commit(self):
317 317 commit = self.repo.get_commit()
318 318 commit.get_path_commit('file_4.txt')
319 319 assert commit.message == 'Commit 4'
320 320
321 321 def test_get_filenodes_generator(self):
322 322 tip = self.repo.get_commit()
323 323 filepaths = [node.path for node in tip.get_filenodes_generator()]
324 324 assert filepaths == ['file_%d.txt' % x for x in xrange(5)]
325 325
326 326 def test_get_file_annotate(self):
327 327 file_added_commit = self.repo.get_commit(commit_idx=3)
328 328 annotations = list(file_added_commit.get_file_annotate('file_3.txt'))
329 329
330 330 line_no, commit_id, commit_loader, line = annotations[0]
331 331
332 332 assert line_no == 1
333 333 assert commit_id == file_added_commit.raw_id
334 334 assert commit_loader() == file_added_commit
335 335 assert 'Foobar 3' in line
336 336
337 337 def test_get_file_annotate_does_not_exist(self):
338 338 file_added_commit = self.repo.get_commit(commit_idx=2)
339 339 # TODO: Should use a specific exception class here?
340 340 with pytest.raises(Exception):
341 341 list(file_added_commit.get_file_annotate('file_3.txt'))
342 342
343 343 def test_get_file_annotate_tip(self):
344 344 tip = self.repo.get_commit()
345 345 commit = self.repo.get_commit(commit_idx=3)
346 346 expected_values = list(commit.get_file_annotate('file_3.txt'))
347 347 annotations = list(tip.get_file_annotate('file_3.txt'))
348 348
349 349 # Note: Skip index 2 because the loader function is not the same
350 350 for idx in (0, 1, 3):
351 351 assert annotations[0][idx] == expected_values[0][idx]
352 352
353 353 def test_get_commits_is_ordered_by_date(self):
354 354 commits = self.repo.get_commits()
355 355 assert isinstance(commits, CollectionGenerator)
356 356 assert len(commits) == 0 or len(commits) != 0
357 357 commits = list(commits)
358 358 ordered_by_date = sorted(commits, key=lambda commit: commit.date)
359 359 assert commits == ordered_by_date
360 360
361 361 def test_get_commits_respects_start(self):
362 362 second_id = self.repo.commit_ids[1]
363 363 commits = self.repo.get_commits(start_id=second_id)
364 364 assert isinstance(commits, CollectionGenerator)
365 365 commits = list(commits)
366 366 assert len(commits) == 4
367 367
368 368 def test_get_commits_includes_start_commit(self):
369 369 second_id = self.repo.commit_ids[1]
370 370 commits = self.repo.get_commits(start_id=second_id)
371 371 assert isinstance(commits, CollectionGenerator)
372 372 commits = list(commits)
373 373 assert commits[0].raw_id == second_id
374 374
375 375 def test_get_commits_respects_end(self):
376 376 second_id = self.repo.commit_ids[1]
377 377 commits = self.repo.get_commits(end_id=second_id)
378 378 assert isinstance(commits, CollectionGenerator)
379 379 commits = list(commits)
380 380 assert commits[-1].raw_id == second_id
381 381 assert len(commits) == 2
382 382
383 383 def test_get_commits_respects_both_start_and_end(self):
384 384 second_id = self.repo.commit_ids[1]
385 385 third_id = self.repo.commit_ids[2]
386 386 commits = self.repo.get_commits(start_id=second_id, end_id=third_id)
387 387 assert isinstance(commits, CollectionGenerator)
388 388 commits = list(commits)
389 389 assert len(commits) == 2
390 390
391 391 def test_get_commits_on_empty_repo_raises_EmptyRepository_error(self):
392 392 repo_path = get_new_dir(str(time.time()))
393 393 repo = self.Backend(repo_path, create=True)
394 394
395 395 with pytest.raises(EmptyRepositoryError):
396 396 list(repo.get_commits(start_id='foobar'))
397 397
398 398 def test_get_commits_respects_hidden(self):
399 399 commits = self.repo.get_commits(show_hidden=True)
400 400 assert isinstance(commits, CollectionGenerator)
401 401 assert len(commits) == 5
402 402
403 403 def test_get_commits_includes_end_commit(self):
404 404 second_id = self.repo.commit_ids[1]
405 405 commits = self.repo.get_commits(end_id=second_id)
406 406 assert isinstance(commits, CollectionGenerator)
407 407 assert len(commits) == 2
408 408 commits = list(commits)
409 409 assert commits[-1].raw_id == second_id
410 410
411 411 def test_get_commits_respects_start_date(self):
412 412 start_date = datetime.datetime(2010, 1, 2)
413 413 commits = self.repo.get_commits(start_date=start_date)
414 414 assert isinstance(commits, CollectionGenerator)
415 415 # Should be 4 commits after 2010-01-02 00:00:00
416 416 assert len(commits) == 4
417 417 for c in commits:
418 418 assert c.date >= start_date
419 419
420 420 def test_get_commits_respects_start_date_with_branch(self):
421 421 start_date = datetime.datetime(2010, 1, 2)
422 422 commits = self.repo.get_commits(
423 423 start_date=start_date, branch_name=self.repo.DEFAULT_BRANCH_NAME)
424 424 assert isinstance(commits, CollectionGenerator)
425 425 # Should be 4 commits after 2010-01-02 00:00:00
426 426 assert len(commits) == 4
427 427 for c in commits:
428 428 assert c.date >= start_date
429 429
430 430 def test_get_commits_respects_start_date_and_end_date(self):
431 431 start_date = datetime.datetime(2010, 1, 2)
432 432 end_date = datetime.datetime(2010, 1, 3)
433 433 commits = self.repo.get_commits(start_date=start_date,
434 434 end_date=end_date)
435 435 assert isinstance(commits, CollectionGenerator)
436 436 assert len(commits) == 2
437 437 for c in commits:
438 438 assert c.date >= start_date
439 439 assert c.date <= end_date
440 440
441 441 def test_get_commits_respects_end_date(self):
442 442 end_date = datetime.datetime(2010, 1, 2)
443 443 commits = self.repo.get_commits(end_date=end_date)
444 444 assert isinstance(commits, CollectionGenerator)
445 445 assert len(commits) == 1
446 446 for c in commits:
447 447 assert c.date <= end_date
448 448
449 449 def test_get_commits_respects_reverse(self):
450 450 commits = self.repo.get_commits() # no longer reverse support
451 451 assert isinstance(commits, CollectionGenerator)
452 452 assert len(commits) == 5
453 453 commit_ids = reversed([c.raw_id for c in commits])
454 454 assert list(commit_ids) == list(reversed(self.repo.commit_ids))
455 455
456 456 def test_get_commits_slice_generator(self):
457 457 commits = self.repo.get_commits(
458 458 branch_name=self.repo.DEFAULT_BRANCH_NAME)
459 459 assert isinstance(commits, CollectionGenerator)
460 460 commit_slice = list(commits[1:3])
461 461 assert len(commit_slice) == 2
462 462
463 463 def test_get_commits_raise_commitdoesnotexist_for_wrong_start(self):
464 464 with pytest.raises(CommitDoesNotExistError):
465 465 list(self.repo.get_commits(start_id='foobar'))
466 466
467 467 def test_get_commits_raise_commitdoesnotexist_for_wrong_end(self):
468 468 with pytest.raises(CommitDoesNotExistError):
469 469 list(self.repo.get_commits(end_id='foobar'))
470 470
471 471 def test_get_commits_raise_branchdoesnotexist_for_wrong_branch_name(self):
472 472 with pytest.raises(BranchDoesNotExistError):
473 473 list(self.repo.get_commits(branch_name='foobar'))
474 474
475 475 def test_get_commits_raise_repositoryerror_for_wrong_start_end(self):
476 476 start_id = self.repo.commit_ids[-1]
477 477 end_id = self.repo.commit_ids[0]
478 478 with pytest.raises(RepositoryError):
479 479 list(self.repo.get_commits(start_id=start_id, end_id=end_id))
480 480
481 481 def test_get_commits_raises_for_numerical_ids(self):
482 482 with pytest.raises(TypeError):
483 483 self.repo.get_commits(start_id=1, end_id=2)
484 484
485 485 def test_commit_equality(self):
486 486 commit1 = self.repo.get_commit(self.repo.commit_ids[0])
487 487 commit2 = self.repo.get_commit(self.repo.commit_ids[1])
488 488
489 489 assert commit1 == commit1
490 490 assert commit2 == commit2
491 491 assert commit1 != commit2
492 492 assert commit2 != commit1
493 493 assert commit1 != None
494 494 assert None != commit1
495 495 assert 1 != commit1
496 496 assert 'string' != commit1
497 497
498 498
499 499 @pytest.mark.parametrize("filename, expected", [
500 500 ("README.rst", False),
501 501 ("README", True),
502 502 ])
503 503 def test_commit_is_link(vcsbackend, filename, expected):
504 504 commit = vcsbackend.repo.get_commit()
505 505 link_status = commit.is_link(filename)
506 506 assert link_status is expected
507 507
508 508
509 509 @pytest.mark.usefixtures("vcs_repository_support")
510 510 class TestCommitsChanges(BackendTestMixin):
511 511 recreate_repo_per_test = False
512 512
513 513 @classmethod
514 514 def _get_commits(cls):
515 515 return [
516 516 {
517 517 'message': u'Initial',
518 518 'author': u'Joe Doe <joe.doe@example.com>',
519 519 'date': datetime.datetime(2010, 1, 1, 20),
520 520 'added': [
521 521 FileNode('foo/bar', content='foo'),
522 522 FileNode('foo/baΕ‚', content='foo'),
523 523 FileNode('foobar', content='foo'),
524 524 FileNode('qwe', content='foo'),
525 525 ],
526 526 },
527 527 {
528 528 'message': u'Massive changes',
529 529 'author': u'Joe Doe <joe.doe@example.com>',
530 530 'date': datetime.datetime(2010, 1, 1, 22),
531 531 'added': [FileNode('fallout', content='War never changes')],
532 532 'changed': [
533 533 FileNode('foo/bar', content='baz'),
534 534 FileNode('foobar', content='baz'),
535 535 ],
536 536 'removed': [FileNode('qwe')],
537 537 },
538 538 ]
539 539
540 540 def test_initial_commit(self, local_dt_to_utc):
541 541 commit = self.repo.get_commit(commit_idx=0)
542 542 assert set(commit.added) == set([
543 543 commit.get_node('foo/bar'),
544 544 commit.get_node('foo/baΕ‚'),
545 545 commit.get_node('foobar'),
546 546 commit.get_node('qwe'),
547 547 ])
548 548 assert set(commit.changed) == set()
549 549 assert set(commit.removed) == set()
550 550 assert set(commit.affected_files) == set(
551 551 ['foo/bar', 'foo/baΕ‚', 'foobar', 'qwe'])
552 552 assert commit.date == local_dt_to_utc(
553 553 datetime.datetime(2010, 1, 1, 20, 0))
554 554
555 555 def test_head_added(self):
556 556 commit = self.repo.get_commit()
557 557 assert isinstance(commit.added, AddedFileNodesGenerator)
558 558 assert set(commit.added) == set([commit.get_node('fallout')])
559 559 assert isinstance(commit.changed, ChangedFileNodesGenerator)
560 560 assert set(commit.changed) == set([
561 561 commit.get_node('foo/bar'),
562 562 commit.get_node('foobar'),
563 563 ])
564 564 assert isinstance(commit.removed, RemovedFileNodesGenerator)
565 565 assert len(commit.removed) == 1
566 566 assert list(commit.removed)[0].path == 'qwe'
567 567
568 568 def test_get_filemode(self):
569 569 commit = self.repo.get_commit()
570 570 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/bar')
571 571
572 572 def test_get_filemode_non_ascii(self):
573 573 commit = self.repo.get_commit()
574 574 assert FILEMODE_DEFAULT == commit.get_file_mode('foo/baΕ‚')
575 575 assert FILEMODE_DEFAULT == commit.get_file_mode(u'foo/baΕ‚')
576 576
577 577 def test_get_path_history(self):
578 578 commit = self.repo.get_commit()
579 579 history = commit.get_path_history('foo/bar')
580 580 assert len(history) == 2
581 581
582 582 def test_get_path_history_with_limit(self):
583 583 commit = self.repo.get_commit()
584 584 history = commit.get_path_history('foo/bar', limit=1)
585 585 assert len(history) == 1
586 586
587 587 def test_get_path_history_first_commit(self):
588 588 commit = self.repo[0]
589 589 history = commit.get_path_history('foo/bar')
590 590 assert len(history) == 1
591 591
592 592
593 593 def assert_text_equal(expected, given):
594 594 assert expected == given
595 595 assert isinstance(expected, unicode)
596 596 assert isinstance(given, unicode)
@@ -1,1188 +1,1188 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.lib.utils import make_db_config
27 27 from rhodecode.lib.vcs import backends
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 Reference, MergeResponse, MergeFailureReason)
30 30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 31 from rhodecode.lib.vcs.exceptions import (
32 32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
33 33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
34 34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
35 35
36 36
37 37 pytestmark = pytest.mark.backends("hg")
38 38
39 39
40 40 def repo_path_generator():
41 41 """
42 42 Return a different path to be used for cloning repos.
43 43 """
44 44 i = 0
45 45 while True:
46 46 i += 1
47 47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
48 48
49 49 REPO_PATH_GENERATOR = repo_path_generator()
50 50
51 51
52 52 @pytest.fixture(scope='class', autouse=True)
53 53 def repo(request, baseapp):
54 54 repo = MercurialRepository(TEST_HG_REPO)
55 55 if request.cls:
56 56 request.cls.repo = repo
57 57 return repo
58 58
59 59
60 60 class TestMercurialRepository:
61 61
62 62 # pylint: disable=protected-access
63 63
64 64 def get_clone_repo(self):
65 65 """
66 66 Return a clone of the base repo.
67 67 """
68 68 clone_path = next(REPO_PATH_GENERATOR)
69 69 repo_clone = MercurialRepository(
70 70 clone_path, create=True, src_url=self.repo.path)
71 71
72 72 return repo_clone
73 73
74 74 def get_empty_repo(self):
75 75 """
76 76 Return an empty repo.
77 77 """
78 78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
79 79
80 80 def test_wrong_repo_path(self):
81 81 wrong_repo_path = '/tmp/errorrepo_hg'
82 82 with pytest.raises(RepositoryError):
83 83 MercurialRepository(wrong_repo_path)
84 84
85 85 def test_unicode_path_repo(self):
86 86 with pytest.raises(VCSError):
87 87 MercurialRepository(u'iShouldFail')
88 88
89 89 def test_unicode_commit_id(self):
90 90 with pytest.raises(CommitDoesNotExistError):
91 91 self.repo.get_commit(u'unicode-commit-id')
92 92 with pytest.raises(CommitDoesNotExistError):
93 93 self.repo.get_commit(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-commit-id')
94 94
95 95 def test_unicode_bookmark(self):
96 96 self.repo.bookmark(u'unicode-bookmark')
97 97 self.repo.bookmark(u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-bookmark')
98 98
99 99 def test_unicode_branch(self):
100 100 with pytest.raises(KeyError):
101 101 self.repo.branches[u'unicode-branch']
102 102 with pytest.raises(KeyError):
103 103 self.repo.branches[u'unΓ­cΓΈde-spéçial-chΓ€rΓ₯cter-branch']
104 104
105 105 def test_repo_clone(self):
106 106 if os.path.exists(TEST_HG_REPO_CLONE):
107 107 self.fail(
108 108 'Cannot test mercurial clone repo as location %s already '
109 109 'exists. You should manually remove it first.'
110 110 % TEST_HG_REPO_CLONE)
111 111
112 112 repo = MercurialRepository(TEST_HG_REPO)
113 113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
114 114 src_url=TEST_HG_REPO)
115 115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 116 # Checking hashes of commits should be enough
117 117 for commit in repo.get_commits():
118 118 raw_id = commit.raw_id
119 119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
120 120
121 121 def test_repo_clone_with_update(self):
122 122 repo = MercurialRepository(TEST_HG_REPO)
123 123 repo_clone = MercurialRepository(
124 124 TEST_HG_REPO_CLONE + '_w_update',
125 125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
126 126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127 127
128 128 # check if current workdir was updated
129 129 assert os.path.isfile(
130 130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
131 131
132 132 def test_repo_clone_without_update(self):
133 133 repo = MercurialRepository(TEST_HG_REPO)
134 134 repo_clone = MercurialRepository(
135 135 TEST_HG_REPO_CLONE + '_wo_update',
136 136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
137 137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
138 138 assert not os.path.isfile(
139 139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
140 140
141 141 def test_commit_ids(self):
142 142 # there are 21 commits at bitbucket now
143 143 # so we can assume they would be available from now on
144 144 subset = set([
145 145 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
146 146 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
147 147 '6cba7170863a2411822803fa77a0a264f1310b35',
148 148 '56349e29c2af3ac913b28bde9a2c6154436e615b',
149 149 '2dda4e345facb0ccff1a191052dd1606dba6781d',
150 150 '6fff84722075f1607a30f436523403845f84cd9e',
151 151 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
152 152 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
153 153 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
154 154 'be90031137367893f1c406e0a8683010fd115b79',
155 155 'db8e58be770518cbb2b1cdfa69146e47cd481481',
156 156 '84478366594b424af694a6c784cb991a16b87c21',
157 157 '17f8e105dddb9f339600389c6dc7175d395a535c',
158 158 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
159 159 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
160 160 '786facd2c61deb9cf91e9534735124fb8fc11842',
161 161 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
162 162 'aa6a0de05b7612707db567078e130a6cd114a9a7',
163 163 'eada5a770da98ab0dd7325e29d00e0714f228d09'
164 164 ])
165 165 assert subset.issubset(set(self.repo.commit_ids))
166 166
167 167 # check if we have the proper order of commits
168 168 org = [
169 169 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
170 170 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
171 171 '6cba7170863a2411822803fa77a0a264f1310b35',
172 172 '56349e29c2af3ac913b28bde9a2c6154436e615b',
173 173 '2dda4e345facb0ccff1a191052dd1606dba6781d',
174 174 '6fff84722075f1607a30f436523403845f84cd9e',
175 175 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
176 176 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
177 177 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
178 178 'be90031137367893f1c406e0a8683010fd115b79',
179 179 'db8e58be770518cbb2b1cdfa69146e47cd481481',
180 180 '84478366594b424af694a6c784cb991a16b87c21',
181 181 '17f8e105dddb9f339600389c6dc7175d395a535c',
182 182 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
183 183 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
184 184 '786facd2c61deb9cf91e9534735124fb8fc11842',
185 185 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
186 186 'aa6a0de05b7612707db567078e130a6cd114a9a7',
187 187 'eada5a770da98ab0dd7325e29d00e0714f228d09',
188 188 '2c1885c735575ca478bf9e17b0029dca68824458',
189 189 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
190 190 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
191 191 '4fb8326d78e5120da2c7468dcf7098997be385da',
192 192 '62b4a097164940bd66030c4db51687f3ec035eed',
193 193 '536c1a19428381cfea92ac44985304f6a8049569',
194 194 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
195 195 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
196 196 'f8940bcb890a98c4702319fbe36db75ea309b475',
197 197 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
198 198 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
199 199 'ee87846a61c12153b51543bf860e1026c6d3dcba',
200 200 ]
201 201 assert org == self.repo.commit_ids[:31]
202 202
203 203 def test_iter_slice(self):
204 204 sliced = list(self.repo[:10])
205 205 itered = list(self.repo)[:10]
206 206 assert sliced == itered
207 207
208 208 def test_slicing(self):
209 209 # 4 1 5 10 95
210 210 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
211 211 (10, 20, 10), (5, 100, 95)]:
212 212 indexes = list(self.repo[sfrom:sto])
213 213 assert len(indexes) == size
214 214 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
215 215 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
216 216
217 217 def test_branches(self):
218 218 # TODO: Need more tests here
219 219
220 220 # active branches
221 221 assert 'default' in self.repo.branches
222 222 assert 'stable' in self.repo.branches
223 223
224 224 # closed
225 225 assert 'git' in self.repo._get_branches(closed=True)
226 226 assert 'web' in self.repo._get_branches(closed=True)
227 227
228 228 for name, id in self.repo.branches.items():
229 229 assert isinstance(self.repo.get_commit(id), MercurialCommit)
230 230
231 231 def test_tip_in_tags(self):
232 232 # tip is always a tag
233 233 assert 'tip' in self.repo.tags
234 234
235 235 def test_tip_commit_in_tags(self):
236 236 tip = self.repo.get_commit()
237 237 assert self.repo.tags['tip'] == tip.raw_id
238 238
239 239 def test_initial_commit(self):
240 240 init_commit = self.repo.get_commit(commit_idx=0)
241 241 init_author = init_commit.author
242 242
243 243 assert init_commit.message == 'initial import'
244 244 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
245 245 assert init_author == init_commit.committer
246 246 assert sorted(init_commit._file_paths) == sorted([
247 247 'vcs/__init__.py',
248 248 'vcs/backends/BaseRepository.py',
249 249 'vcs/backends/__init__.py',
250 250 ])
251 251 assert sorted(init_commit._dir_paths) == sorted(
252 252 ['', 'vcs', 'vcs/backends'])
253 253
254 254 assert init_commit._dir_paths + init_commit._file_paths == \
255 255 init_commit._paths
256 256
257 257 with pytest.raises(NodeDoesNotExistError):
258 258 init_commit.get_node(path='foobar')
259 259
260 260 node = init_commit.get_node('vcs/')
261 261 assert hasattr(node, 'kind')
262 262 assert node.kind == NodeKind.DIR
263 263
264 264 node = init_commit.get_node('vcs')
265 265 assert hasattr(node, 'kind')
266 266 assert node.kind == NodeKind.DIR
267 267
268 268 node = init_commit.get_node('vcs/__init__.py')
269 269 assert hasattr(node, 'kind')
270 270 assert node.kind == NodeKind.FILE
271 271
272 272 def test_not_existing_commit(self):
273 273 # rawid
274 274 with pytest.raises(RepositoryError):
275 275 self.repo.get_commit('abcd' * 10)
276 276 # shortid
277 277 with pytest.raises(RepositoryError):
278 278 self.repo.get_commit('erro' * 4)
279 279 # numeric
280 280 with pytest.raises(RepositoryError):
281 281 self.repo.get_commit(commit_idx=self.repo.count() + 1)
282 282
283 283 # Small chance we ever get to this one
284 284 idx = pow(2, 30)
285 285 with pytest.raises(RepositoryError):
286 286 self.repo.get_commit(commit_idx=idx)
287 287
288 288 def test_commit10(self):
289 289 commit10 = self.repo.get_commit(commit_idx=10)
290 290 README = """===
291 291 VCS
292 292 ===
293 293
294 294 Various Version Control System management abstraction layer for Python.
295 295
296 296 Introduction
297 297 ------------
298 298
299 299 TODO: To be written...
300 300
301 301 """
302 302 node = commit10.get_node('README.rst')
303 303 assert node.kind == NodeKind.FILE
304 304 assert node.content == README
305 305
306 306 def test_local_clone(self):
307 307 clone_path = next(REPO_PATH_GENERATOR)
308 308 self.repo._local_clone(clone_path)
309 309 repo_clone = MercurialRepository(clone_path)
310 310
311 311 assert self.repo.commit_ids == repo_clone.commit_ids
312 312
313 313 def test_local_clone_fails_if_target_exists(self):
314 314 with pytest.raises(RepositoryError):
315 315 self.repo._local_clone(self.repo.path)
316 316
317 317 def test_update(self):
318 318 repo_clone = self.get_clone_repo()
319 319 branches = repo_clone.branches
320 320
321 321 repo_clone._update('default')
322 322 assert branches['default'] == repo_clone._identify()
323 323 repo_clone._update('stable')
324 324 assert branches['stable'] == repo_clone._identify()
325 325
326 326 def test_local_pull_branch(self):
327 327 target_repo = self.get_empty_repo()
328 328 source_repo = self.get_clone_repo()
329 329
330 330 default = Reference(
331 331 'branch', 'default', source_repo.branches['default'])
332 332 target_repo._local_pull(source_repo.path, default)
333 333 target_repo = MercurialRepository(target_repo.path)
334 334 assert (target_repo.branches['default'] ==
335 335 source_repo.branches['default'])
336 336
337 337 stable = Reference('branch', 'stable', source_repo.branches['stable'])
338 338 target_repo._local_pull(source_repo.path, stable)
339 339 target_repo = MercurialRepository(target_repo.path)
340 340 assert target_repo.branches['stable'] == source_repo.branches['stable']
341 341
342 342 def test_local_pull_bookmark(self):
343 343 target_repo = self.get_empty_repo()
344 344 source_repo = self.get_clone_repo()
345 345
346 346 commits = list(source_repo.get_commits(branch_name='default'))
347 347 foo1_id = commits[-5].raw_id
348 348 foo1 = Reference('book', 'foo1', foo1_id)
349 349 source_repo._update(foo1_id)
350 350 source_repo.bookmark('foo1')
351 351
352 352 foo2_id = commits[-3].raw_id
353 353 foo2 = Reference('book', 'foo2', foo2_id)
354 354 source_repo._update(foo2_id)
355 355 source_repo.bookmark('foo2')
356 356
357 357 target_repo._local_pull(source_repo.path, foo1)
358 358 target_repo = MercurialRepository(target_repo.path)
359 359 assert target_repo.branches['default'] == commits[-5].raw_id
360 360
361 361 target_repo._local_pull(source_repo.path, foo2)
362 362 target_repo = MercurialRepository(target_repo.path)
363 363 assert target_repo.branches['default'] == commits[-3].raw_id
364 364
365 365 def test_local_pull_commit(self):
366 366 target_repo = self.get_empty_repo()
367 367 source_repo = self.get_clone_repo()
368 368
369 369 commits = list(source_repo.get_commits(branch_name='default'))
370 370 commit_id = commits[-5].raw_id
371 371 commit = Reference('rev', commit_id, commit_id)
372 372 target_repo._local_pull(source_repo.path, commit)
373 373 target_repo = MercurialRepository(target_repo.path)
374 374 assert target_repo.branches['default'] == commit_id
375 375
376 376 commit_id = commits[-3].raw_id
377 377 commit = Reference('rev', commit_id, commit_id)
378 378 target_repo._local_pull(source_repo.path, commit)
379 379 target_repo = MercurialRepository(target_repo.path)
380 380 assert target_repo.branches['default'] == commit_id
381 381
382 382 def test_local_pull_from_same_repo(self):
383 383 reference = Reference('branch', 'default', None)
384 384 with pytest.raises(ValueError):
385 385 self.repo._local_pull(self.repo.path, reference)
386 386
387 387 def test_validate_pull_reference_raises_on_missing_reference(
388 388 self, vcsbackend_hg):
389 389 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
390 390 reference = Reference(
391 391 'book', 'invalid_reference', 'a' * 40)
392 392
393 393 with pytest.raises(CommitDoesNotExistError):
394 394 target_repo._validate_pull_reference(reference)
395 395
396 396 def test_heads(self):
397 397 assert set(self.repo._heads()) == set(self.repo.branches.values())
398 398
399 399 def test_ancestor(self):
400 400 commits = [
401 401 c.raw_id for c in self.repo.get_commits(branch_name='default')]
402 402 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
403 403 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
404 404
405 405 def test_local_push(self):
406 406 target_repo = self.get_empty_repo()
407 407
408 408 revisions = list(self.repo.get_commits(branch_name='default'))
409 409 revision = revisions[-5].raw_id
410 410 self.repo._local_push(revision, target_repo.path)
411 411
412 412 target_repo = MercurialRepository(target_repo.path)
413 413
414 414 assert target_repo.branches['default'] == revision
415 415
416 416 def test_hooks_can_be_enabled_for_local_push(self):
417 417 revision = 'deadbeef'
418 418 repo_path = 'test_group/test_repo'
419 419 with mock.patch.object(self.repo, '_remote') as remote_mock:
420 420 self.repo._local_push(revision, repo_path, enable_hooks=True)
421 421 remote_mock.push.assert_called_once_with(
422 422 [revision], repo_path, hooks=True, push_branches=False)
423 423
424 424 def test_local_merge(self, vcsbackend_hg):
425 425 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
426 426 source_repo = vcsbackend_hg.clone_repo(target_repo)
427 427 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
428 428 target_repo = MercurialRepository(target_repo.path)
429 429 target_rev = target_repo.branches['default']
430 430 target_ref = Reference(
431 431 type='branch', name='default', commit_id=target_rev)
432 432 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
433 433 source_repo = MercurialRepository(source_repo.path)
434 434 source_rev = source_repo.branches['default']
435 435 source_ref = Reference(
436 436 type='branch', name='default', commit_id=source_rev)
437 437
438 438 target_repo._local_pull(source_repo.path, source_ref)
439 439
440 440 merge_message = 'Merge message\n\nDescription:...'
441 441 user_name = 'Albert Einstein'
442 442 user_email = 'albert@einstein.com'
443 443 merge_commit_id, needs_push = target_repo._local_merge(
444 444 target_ref, merge_message, user_name, user_email, source_ref)
445 445 assert needs_push
446 446
447 447 target_repo = MercurialRepository(target_repo.path)
448 448 assert target_repo.commit_ids[-3] == target_rev
449 449 assert target_repo.commit_ids[-2] == source_rev
450 450 last_commit = target_repo.get_commit(merge_commit_id)
451 451 assert last_commit.message.strip() == merge_message
452 452 assert last_commit.author == '%s <%s>' % (user_name, user_email)
453 453
454 454 assert not os.path.exists(
455 455 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
456 456
457 457 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
458 458 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
459 459 source_repo = vcsbackend_hg.clone_repo(target_repo)
460 460 target_rev = target_repo.branches['default']
461 461 target_ref = Reference(
462 462 type='branch', name='default', commit_id=target_rev)
463 463 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
464 464 source_repo = MercurialRepository(source_repo.path)
465 465 source_rev = source_repo.branches['default']
466 466 source_ref = Reference(
467 467 type='branch', name='default', commit_id=source_rev)
468 468
469 469 target_repo._local_pull(source_repo.path, source_ref)
470 470
471 471 merge_message = 'Merge message\n\nDescription:...'
472 472 user_name = 'Albert Einstein'
473 473 user_email = 'albert@einstein.com'
474 474 merge_commit_id, needs_push = target_repo._local_merge(
475 475 target_ref, merge_message, user_name, user_email, source_ref)
476 476 assert merge_commit_id == source_rev
477 477 assert needs_push
478 478
479 479 target_repo = MercurialRepository(target_repo.path)
480 480 assert target_repo.commit_ids[-2] == target_rev
481 481 assert target_repo.commit_ids[-1] == source_rev
482 482
483 483 assert not os.path.exists(
484 484 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
485 485
486 486 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
487 487 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
488 488 target_rev = target_repo.branches['default']
489 489 target_ref = Reference(
490 490 type='branch', name='default', commit_id=target_rev)
491 491
492 492 merge_message = 'Merge message\n\nDescription:...'
493 493 user_name = 'Albert Einstein'
494 494 user_email = 'albert@einstein.com'
495 495 merge_commit_id, needs_push = target_repo._local_merge(
496 496 target_ref, merge_message, user_name, user_email, target_ref)
497 497 assert merge_commit_id == target_rev
498 498 assert not needs_push
499 499
500 500 target_repo = MercurialRepository(target_repo.path)
501 501 assert target_repo.commit_ids[-1] == target_rev
502 502
503 503 assert not os.path.exists(
504 504 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
505 505
506 506 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
507 507 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
508 508 source_repo = vcsbackend_hg.clone_repo(target_repo)
509 509 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
510 510 target_repo = MercurialRepository(target_repo.path)
511 511 target_rev = target_repo.branches['default']
512 512 target_ref = Reference(
513 513 type='branch', name='default', commit_id=target_rev)
514 514 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
515 515 source_repo = MercurialRepository(source_repo.path)
516 516 source_rev = source_repo.branches['default']
517 517 source_ref = Reference(
518 518 type='branch', name='default', commit_id=source_rev)
519 519
520 520 target_repo._local_pull(source_repo.path, source_ref)
521 521 with pytest.raises(RepositoryError):
522 522 target_repo._local_merge(
523 523 target_ref, 'merge_message', 'user name', 'user@name.com',
524 524 source_ref)
525 525
526 526 # Check we are not left in an intermediate merge state
527 527 assert not os.path.exists(
528 528 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
529 529
530 530 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
531 531 commits = [
532 532 {'message': 'a'},
533 533 {'message': 'b', 'branch': 'b'},
534 534 ]
535 535 repo = backend_hg.create_repo(commits)
536 536 commit_ids = backend_hg.commit_ids
537 537 target_ref = Reference(
538 538 type='branch', name='default', commit_id=commit_ids['a'])
539 539 source_ref = Reference(
540 540 type='branch', name='b', commit_id=commit_ids['b'])
541 541 merge_message = 'Merge message\n\nDescription:...'
542 542 user_name = 'Albert Einstein'
543 543 user_email = 'albert@einstein.com'
544 544 vcs_repo = repo.scm_instance()
545 545 merge_commit_id, needs_push = vcs_repo._local_merge(
546 546 target_ref, merge_message, user_name, user_email, source_ref)
547 547 assert merge_commit_id != source_ref.commit_id
548 548 assert needs_push is True
549 549 commit = vcs_repo.get_commit(merge_commit_id)
550 550 assert commit.merge is True
551 551 assert commit.message == merge_message
552 552
553 553 def test_maybe_prepare_merge_workspace(self):
554 554 workspace = self.repo._maybe_prepare_merge_workspace(
555 555 1, 'pr2', 'unused', 'unused2')
556 556
557 557 assert os.path.isdir(workspace)
558 558 workspace_repo = MercurialRepository(workspace)
559 559 assert workspace_repo.branches == self.repo.branches
560 560
561 561 # Calling it a second time should also succeed
562 562 workspace = self.repo._maybe_prepare_merge_workspace(
563 563 1, 'pr2', 'unused', 'unused2')
564 564 assert os.path.isdir(workspace)
565 565
566 566 def test_cleanup_merge_workspace(self):
567 567 workspace = self.repo._maybe_prepare_merge_workspace(
568 568 1, 'pr3', 'unused', 'unused2')
569 569
570 570 assert os.path.isdir(workspace)
571 571 self.repo.cleanup_merge_workspace(1, 'pr3')
572 572
573 573 assert not os.path.exists(workspace)
574 574
575 575 def test_cleanup_merge_workspace_invalid_workspace_id(self):
576 576 # No assert: because in case of an inexistent workspace this function
577 577 # should still succeed.
578 578 self.repo.cleanup_merge_workspace(1, 'pr4')
579 579
580 580 def test_merge_target_is_bookmark(self, vcsbackend_hg):
581 581 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
582 582 source_repo = vcsbackend_hg.clone_repo(target_repo)
583 583 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
584 584 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
585 585 imc = source_repo.in_memory_commit
586 586 imc.add(FileNode('file_x', content=source_repo.name))
587 587 imc.commit(
588 588 message=u'Automatic commit from repo merge test',
589 author=u'Automatic')
589 author=u'Automatic <automatic@rhodecode.com>')
590 590 target_commit = target_repo.get_commit()
591 591 source_commit = source_repo.get_commit()
592 592 default_branch = target_repo.DEFAULT_BRANCH_NAME
593 593 bookmark_name = 'bookmark'
594 594 target_repo._update(default_branch)
595 595 target_repo.bookmark(bookmark_name)
596 596 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
597 597 source_ref = Reference('branch', default_branch, source_commit.raw_id)
598 598 workspace_id = 'test-merge'
599 599 repo_id = repo_id_generator(target_repo.path)
600 600 merge_response = target_repo.merge(
601 601 repo_id, workspace_id, target_ref, source_repo, source_ref,
602 602 'test user', 'test@rhodecode.com', 'merge message 1',
603 603 dry_run=False)
604 604 expected_merge_response = MergeResponse(
605 605 True, True, merge_response.merge_ref,
606 606 MergeFailureReason.NONE)
607 607 assert merge_response == expected_merge_response
608 608
609 609 target_repo = backends.get_backend(vcsbackend_hg.alias)(
610 610 target_repo.path)
611 611 target_commits = list(target_repo.get_commits())
612 612 commit_ids = [c.raw_id for c in target_commits[:-1]]
613 613 assert source_ref.commit_id in commit_ids
614 614 assert target_ref.commit_id in commit_ids
615 615
616 616 merge_commit = target_commits[-1]
617 617 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
618 618 assert merge_commit.message.strip() == 'merge message 1'
619 619 assert merge_commit.author == 'test user <test@rhodecode.com>'
620 620
621 621 # Check the bookmark was updated in the target repo
622 622 assert (
623 623 target_repo.bookmarks[bookmark_name] ==
624 624 merge_response.merge_ref.commit_id)
625 625
626 626 def test_merge_source_is_bookmark(self, vcsbackend_hg):
627 627 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
628 628 source_repo = vcsbackend_hg.clone_repo(target_repo)
629 629 imc = source_repo.in_memory_commit
630 630 imc.add(FileNode('file_x', content=source_repo.name))
631 631 imc.commit(
632 632 message=u'Automatic commit from repo merge test',
633 author=u'Automatic')
633 author=u'Automatic <automatic@rhodecode.com>')
634 634 target_commit = target_repo.get_commit()
635 635 source_commit = source_repo.get_commit()
636 636 default_branch = target_repo.DEFAULT_BRANCH_NAME
637 637 bookmark_name = 'bookmark'
638 638 target_ref = Reference('branch', default_branch, target_commit.raw_id)
639 639 source_repo._update(default_branch)
640 640 source_repo.bookmark(bookmark_name)
641 641 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
642 642 workspace_id = 'test-merge'
643 643 repo_id = repo_id_generator(target_repo.path)
644 644 merge_response = target_repo.merge(
645 645 repo_id, workspace_id, target_ref, source_repo, source_ref,
646 646 'test user', 'test@rhodecode.com', 'merge message 1',
647 647 dry_run=False)
648 648 expected_merge_response = MergeResponse(
649 649 True, True, merge_response.merge_ref,
650 650 MergeFailureReason.NONE)
651 651 assert merge_response == expected_merge_response
652 652
653 653 target_repo = backends.get_backend(vcsbackend_hg.alias)(
654 654 target_repo.path)
655 655 target_commits = list(target_repo.get_commits())
656 656 commit_ids = [c.raw_id for c in target_commits]
657 657 assert source_ref.commit_id == commit_ids[-1]
658 658 assert target_ref.commit_id == commit_ids[-2]
659 659
660 660 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
661 661 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
662 662 source_repo = vcsbackend_hg.clone_repo(target_repo)
663 663 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
664 664 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
665 665
666 666 # add an extra head to the target repo
667 667 imc = target_repo.in_memory_commit
668 668 imc.add(FileNode('file_x', content='foo'))
669 669 commits = list(target_repo.get_commits())
670 670 imc.commit(
671 671 message=u'Automatic commit from repo merge test',
672 author=u'Automatic', parents=commits[0:1])
672 author=u'Automatic <automatic@rhodecode.com>', parents=commits[0:1])
673 673
674 674 target_commit = target_repo.get_commit()
675 675 source_commit = source_repo.get_commit()
676 676 default_branch = target_repo.DEFAULT_BRANCH_NAME
677 677 target_repo._update(default_branch)
678 678
679 679 target_ref = Reference('branch', default_branch, target_commit.raw_id)
680 680 source_ref = Reference('branch', default_branch, source_commit.raw_id)
681 681 workspace_id = 'test-merge'
682 682
683 683 assert len(target_repo._heads(branch='default')) == 2
684 684 heads = target_repo._heads(branch='default')
685 685 expected_merge_response = MergeResponse(
686 686 False, False, None,
687 687 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
688 688 metadata={'heads': heads})
689 689 repo_id = repo_id_generator(target_repo.path)
690 690 merge_response = target_repo.merge(
691 691 repo_id, workspace_id, target_ref, source_repo, source_ref,
692 692 'test user', 'test@rhodecode.com', 'merge message 1',
693 693 dry_run=False)
694 694 assert merge_response == expected_merge_response
695 695
696 696 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
697 697 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
698 698 source_repo = vcsbackend_hg.clone_repo(target_repo)
699 699 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
700 700 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
701 701 imc = source_repo.in_memory_commit
702 702 imc.add(FileNode('file_x', content=source_repo.name))
703 703 imc.commit(
704 704 message=u'Automatic commit from repo merge test',
705 author=u'Automatic')
705 author=u'Automatic <automatic@rhodecode.com>')
706 706 target_commit = target_repo.get_commit()
707 707 source_commit = source_repo.get_commit()
708 708
709 709 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
710 710
711 711 default_branch = target_repo.DEFAULT_BRANCH_NAME
712 712 bookmark_name = 'bookmark'
713 713 source_repo._update(default_branch)
714 714 source_repo.bookmark(bookmark_name)
715 715
716 716 target_ref = Reference('branch', default_branch, target_commit.raw_id)
717 717 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
718 718 repo_id = repo_id_generator(target_repo.path)
719 719 workspace_id = 'test-merge'
720 720
721 721 merge_response = target_repo.merge(
722 722 repo_id, workspace_id, target_ref, source_repo, source_ref,
723 723 'test user', 'test@rhodecode.com', 'merge message 1',
724 724 dry_run=False, use_rebase=True)
725 725
726 726 expected_merge_response = MergeResponse(
727 727 True, True, merge_response.merge_ref,
728 728 MergeFailureReason.NONE)
729 729 assert merge_response == expected_merge_response
730 730
731 731 target_repo = backends.get_backend(vcsbackend_hg.alias)(
732 732 target_repo.path)
733 733 last_commit = target_repo.get_commit()
734 734 assert last_commit.message == source_commit.message
735 735 assert last_commit.author == source_commit.author
736 736 # This checks that we effectively did a rebase
737 737 assert last_commit.raw_id != source_commit.raw_id
738 738
739 739 # Check the target has only 4 commits: 2 were already in target and
740 740 # only two should have been added
741 741 assert len(target_repo.commit_ids) == 2 + 2
742 742
743 743
744 744 class TestGetShadowInstance(object):
745 745
746 746 @pytest.fixture
747 747 def repo(self, vcsbackend_hg, monkeypatch):
748 748 repo = vcsbackend_hg.repo
749 749 monkeypatch.setattr(repo, 'config', mock.Mock())
750 750 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
751 751 return repo
752 752
753 753 def test_passes_config(self, repo):
754 754 shadow = repo._get_shadow_instance(repo.path)
755 755 assert shadow.config == repo.config.copy()
756 756
757 757 def test_disables_hooks(self, repo):
758 758 shadow = repo._get_shadow_instance(repo.path)
759 759 shadow.config.clear_section.assert_called_once_with('hooks')
760 760
761 761 def test_allows_to_keep_hooks(self, repo):
762 762 shadow = repo._get_shadow_instance(repo.path, enable_hooks=True)
763 763 assert not shadow.config.clear_section.called
764 764
765 765
766 766 class TestMercurialCommit(object):
767 767
768 768 def _test_equality(self, commit):
769 769 idx = commit.idx
770 770 assert commit == self.repo.get_commit(commit_idx=idx)
771 771
772 772 def test_equality(self):
773 773 indexes = [0, 10, 20]
774 774 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
775 775 for commit in commits:
776 776 self._test_equality(commit)
777 777
778 778 def test_default_commit(self):
779 779 tip = self.repo.get_commit('tip')
780 780 assert tip == self.repo.get_commit()
781 781 assert tip == self.repo.get_commit(commit_id=None)
782 782 assert tip == self.repo.get_commit(commit_idx=None)
783 783 assert tip == list(self.repo[-1:])[0]
784 784
785 785 def test_root_node(self):
786 786 tip = self.repo.get_commit('tip')
787 787 assert tip.root is tip.get_node('')
788 788
789 789 def test_lazy_fetch(self):
790 790 """
791 791 Test if commit's nodes expands and are cached as we walk through
792 792 the commit. This test is somewhat hard to write as order of tests
793 793 is a key here. Written by running command after command in a shell.
794 794 """
795 795 commit = self.repo.get_commit(commit_idx=45)
796 796 assert len(commit.nodes) == 0
797 797 root = commit.root
798 798 assert len(commit.nodes) == 1
799 799 assert len(root.nodes) == 8
800 800 # accessing root.nodes updates commit.nodes
801 801 assert len(commit.nodes) == 9
802 802
803 803 docs = root.get_node('docs')
804 804 # we haven't yet accessed anything new as docs dir was already cached
805 805 assert len(commit.nodes) == 9
806 806 assert len(docs.nodes) == 8
807 807 # accessing docs.nodes updates commit.nodes
808 808 assert len(commit.nodes) == 17
809 809
810 810 assert docs is commit.get_node('docs')
811 811 assert docs is root.nodes[0]
812 812 assert docs is root.dirs[0]
813 813 assert docs is commit.get_node('docs')
814 814
815 815 def test_nodes_with_commit(self):
816 816 commit = self.repo.get_commit(commit_idx=45)
817 817 root = commit.root
818 818 docs = root.get_node('docs')
819 819 assert docs is commit.get_node('docs')
820 820 api = docs.get_node('api')
821 821 assert api is commit.get_node('docs/api')
822 822 index = api.get_node('index.rst')
823 823 assert index is commit.get_node('docs/api/index.rst')
824 824 assert index is commit.get_node(
825 825 'docs').get_node('api').get_node('index.rst')
826 826
827 827 def test_branch_and_tags(self):
828 828 commit0 = self.repo.get_commit(commit_idx=0)
829 829 assert commit0.branch == 'default'
830 830 assert commit0.tags == []
831 831
832 832 commit10 = self.repo.get_commit(commit_idx=10)
833 833 assert commit10.branch == 'default'
834 834 assert commit10.tags == []
835 835
836 836 commit44 = self.repo.get_commit(commit_idx=44)
837 837 assert commit44.branch == 'web'
838 838
839 839 tip = self.repo.get_commit('tip')
840 840 assert 'tip' in tip.tags
841 841
842 842 def test_bookmarks(self):
843 843 commit0 = self.repo.get_commit(commit_idx=0)
844 844 assert commit0.bookmarks == []
845 845
846 846 def _test_file_size(self, idx, path, size):
847 847 node = self.repo.get_commit(commit_idx=idx).get_node(path)
848 848 assert node.is_file()
849 849 assert node.size == size
850 850
851 851 def test_file_size(self):
852 852 to_check = (
853 853 (10, 'setup.py', 1068),
854 854 (20, 'setup.py', 1106),
855 855 (60, 'setup.py', 1074),
856 856
857 857 (10, 'vcs/backends/base.py', 2921),
858 858 (20, 'vcs/backends/base.py', 3936),
859 859 (60, 'vcs/backends/base.py', 6189),
860 860 )
861 861 for idx, path, size in to_check:
862 862 self._test_file_size(idx, path, size)
863 863
864 864 def test_file_history_from_commits(self):
865 865 node = self.repo[10].get_node('setup.py')
866 866 commit_ids = [commit.raw_id for commit in node.history]
867 867 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
868 868
869 869 node = self.repo[20].get_node('setup.py')
870 870 node_ids = [commit.raw_id for commit in node.history]
871 871 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
872 872 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
873 873
874 874 # special case we check history from commit that has this particular
875 875 # file changed this means we check if it's included as well
876 876 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
877 877 .get_node('setup.py')
878 878 node_ids = [commit.raw_id for commit in node.history]
879 879 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
880 880 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
881 881
882 882 def test_file_history(self):
883 883 # we can only check if those commits are present in the history
884 884 # as we cannot update this test every time file is changed
885 885 files = {
886 886 'setup.py': [7, 18, 45, 46, 47, 69, 77],
887 887 'vcs/nodes.py': [
888 888 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
889 889 'vcs/backends/hg.py': [
890 890 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
891 891 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
892 892 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
893 893 }
894 894 for path, indexes in files.items():
895 895 tip = self.repo.get_commit(commit_idx=indexes[-1])
896 896 node = tip.get_node(path)
897 897 node_indexes = [commit.idx for commit in node.history]
898 898 assert set(indexes).issubset(set(node_indexes)), (
899 899 "We assumed that %s is subset of commits for which file %s "
900 900 "has been changed, and history of that node returned: %s"
901 901 % (indexes, path, node_indexes))
902 902
903 903 def test_file_annotate(self):
904 904 files = {
905 905 'vcs/backends/__init__.py': {
906 906 89: {
907 907 'lines_no': 31,
908 908 'commits': [
909 909 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
910 910 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
911 911 32, 32, 32, 32, 37, 32, 37, 37, 32,
912 912 32, 32
913 913 ]
914 914 },
915 915 20: {
916 916 'lines_no': 1,
917 917 'commits': [4]
918 918 },
919 919 55: {
920 920 'lines_no': 31,
921 921 'commits': [
922 922 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
923 923 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
924 924 32, 32, 32, 32, 37, 32, 37, 37, 32,
925 925 32, 32
926 926 ]
927 927 }
928 928 },
929 929 'vcs/exceptions.py': {
930 930 89: {
931 931 'lines_no': 18,
932 932 'commits': [
933 933 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
934 934 16, 16, 17, 16, 16, 18, 18, 18
935 935 ]
936 936 },
937 937 20: {
938 938 'lines_no': 18,
939 939 'commits': [
940 940 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
941 941 16, 16, 17, 16, 16, 18, 18, 18
942 942 ]
943 943 },
944 944 55: {
945 945 'lines_no': 18,
946 946 'commits': [
947 947 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
948 948 17, 16, 16, 18, 18, 18
949 949 ]
950 950 }
951 951 },
952 952 'MANIFEST.in': {
953 953 89: {
954 954 'lines_no': 5,
955 955 'commits': [7, 7, 7, 71, 71]
956 956 },
957 957 20: {
958 958 'lines_no': 3,
959 959 'commits': [7, 7, 7]
960 960 },
961 961 55: {
962 962 'lines_no': 3,
963 963 'commits': [7, 7, 7]
964 964 }
965 965 }
966 966 }
967 967
968 968 for fname, commit_dict in files.items():
969 969 for idx, __ in commit_dict.items():
970 970 commit = self.repo.get_commit(commit_idx=idx)
971 971 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
972 972 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
973 973 assert l1_1 == l1_2
974 974 l1 = l1_2 = [
975 975 x[2]().idx for x in commit.get_file_annotate(fname)]
976 976 l2 = files[fname][idx]['commits']
977 977 assert l1 == l2, (
978 978 "The lists of commit for %s@commit_id%s"
979 979 "from annotation list should match each other,"
980 980 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
981 981
982 982 def test_commit_state(self):
983 983 """
984 984 Tests which files have been added/changed/removed at particular commit
985 985 """
986 986
987 987 # commit_id 46ad32a4f974:
988 988 # hg st --rev 46ad32a4f974
989 989 # changed: 13
990 990 # added: 20
991 991 # removed: 1
992 992 changed = set([
993 993 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
994 994 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
995 995 'vcs/__init__.py', 'vcs/backends/__init__.py',
996 996 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
997 997 'vcs/utils/__init__.py'])
998 998
999 999 added = set([
1000 1000 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
1001 1001 'docs/api/index.rst', 'docs/api/nodes.rst',
1002 1002 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
1003 1003 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
1004 1004 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1005 1005 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1006 1006 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1007 1007 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1008 1008 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1009 1009 'vcs/web/simplevcs/views.py'])
1010 1010
1011 1011 removed = set(['docs/api.rst'])
1012 1012
1013 1013 commit64 = self.repo.get_commit('46ad32a4f974')
1014 1014 assert set((node.path for node in commit64.added)) == added
1015 1015 assert set((node.path for node in commit64.changed)) == changed
1016 1016 assert set((node.path for node in commit64.removed)) == removed
1017 1017
1018 1018 # commit_id b090f22d27d6:
1019 1019 # hg st --rev b090f22d27d6
1020 1020 # changed: 13
1021 1021 # added: 20
1022 1022 # removed: 1
1023 1023 commit88 = self.repo.get_commit('b090f22d27d6')
1024 1024 assert set((node.path for node in commit88.added)) == set()
1025 1025 assert set((node.path for node in commit88.changed)) == \
1026 1026 set(['.hgignore'])
1027 1027 assert set((node.path for node in commit88.removed)) == set()
1028 1028
1029 1029 #
1030 1030 # 85:
1031 1031 # added: 2 [
1032 1032 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1033 1033 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1034 1034 # removed: 1 ['vcs/utils/web.py']
1035 1035 commit85 = self.repo.get_commit(commit_idx=85)
1036 1036 assert set((node.path for node in commit85.added)) == set([
1037 1037 'vcs/utils/diffs.py',
1038 1038 'vcs/web/simplevcs/views/diffs.py'])
1039 1039 assert set((node.path for node in commit85.changed)) == set([
1040 1040 'vcs/web/simplevcs/models.py',
1041 1041 'vcs/web/simplevcs/utils.py',
1042 1042 'vcs/web/simplevcs/views/__init__.py',
1043 1043 'vcs/web/simplevcs/views/repository.py',
1044 1044 ])
1045 1045 assert set((node.path for node in commit85.removed)) == \
1046 1046 set(['vcs/utils/web.py'])
1047 1047
1048 1048 def test_files_state(self):
1049 1049 """
1050 1050 Tests state of FileNodes.
1051 1051 """
1052 1052 commit = self.repo.get_commit(commit_idx=85)
1053 1053 node = commit.get_node('vcs/utils/diffs.py')
1054 1054 assert node.state, NodeState.ADDED
1055 1055 assert node.added
1056 1056 assert not node.changed
1057 1057 assert not node.not_changed
1058 1058 assert not node.removed
1059 1059
1060 1060 commit = self.repo.get_commit(commit_idx=88)
1061 1061 node = commit.get_node('.hgignore')
1062 1062 assert node.state, NodeState.CHANGED
1063 1063 assert not node.added
1064 1064 assert node.changed
1065 1065 assert not node.not_changed
1066 1066 assert not node.removed
1067 1067
1068 1068 commit = self.repo.get_commit(commit_idx=85)
1069 1069 node = commit.get_node('setup.py')
1070 1070 assert node.state, NodeState.NOT_CHANGED
1071 1071 assert not node.added
1072 1072 assert not node.changed
1073 1073 assert node.not_changed
1074 1074 assert not node.removed
1075 1075
1076 1076 # If node has REMOVED state then trying to fetch it would raise
1077 1077 # CommitError exception
1078 1078 commit = self.repo.get_commit(commit_idx=2)
1079 1079 path = 'vcs/backends/BaseRepository.py'
1080 1080 with pytest.raises(NodeDoesNotExistError):
1081 1081 commit.get_node(path)
1082 1082 # but it would be one of ``removed`` (commit's attribute)
1083 1083 assert path in [rf.path for rf in commit.removed]
1084 1084
1085 1085 def test_commit_message_is_unicode(self):
1086 1086 for cm in self.repo:
1087 1087 assert type(cm.message) == unicode
1088 1088
1089 1089 def test_commit_author_is_unicode(self):
1090 1090 for cm in self.repo:
1091 1091 assert type(cm.author) == unicode
1092 1092
1093 1093 def test_repo_files_content_is_unicode(self):
1094 1094 test_commit = self.repo.get_commit(commit_idx=100)
1095 1095 for node in test_commit.get_node('/'):
1096 1096 if node.is_file():
1097 1097 assert type(node.content) == unicode
1098 1098
1099 1099 def test_wrong_path(self):
1100 1100 # There is 'setup.py' in the root dir but not there:
1101 1101 path = 'foo/bar/setup.py'
1102 1102 with pytest.raises(VCSError):
1103 1103 self.repo.get_commit().get_node(path)
1104 1104
1105 1105 def test_author_email(self):
1106 1106 assert 'marcin@python-blog.com' == \
1107 1107 self.repo.get_commit('b986218ba1c9').author_email
1108 1108 assert 'lukasz.balcerzak@python-center.pl' == \
1109 1109 self.repo.get_commit('3803844fdbd3').author_email
1110 1110 assert '' == self.repo.get_commit('84478366594b').author_email
1111 1111
1112 1112 def test_author_username(self):
1113 1113 assert 'Marcin Kuzminski' == \
1114 1114 self.repo.get_commit('b986218ba1c9').author_name
1115 1115 assert 'Lukasz Balcerzak' == \
1116 1116 self.repo.get_commit('3803844fdbd3').author_name
1117 1117 assert 'marcink' == \
1118 1118 self.repo.get_commit('84478366594b').author_name
1119 1119
1120 1120
1121 1121 class TestLargeFileRepo(object):
1122 1122
1123 1123 def test_large_file(self, backend_hg):
1124 1124 repo = backend_hg.create_test_repo('largefiles', make_db_config())
1125 1125
1126 1126 tip = repo.scm_instance().get_commit()
1127 1127 node = tip.get_node('.hglf/thisfileislarge')
1128 1128
1129 1129 lf_node = node.get_largefile_node()
1130 1130
1131 1131 assert lf_node.is_largefile() is True
1132 1132 assert lf_node.size == 1024000
1133 1133 assert lf_node.name == '.hglf/thisfileislarge'
1134 1134
1135 1135
1136 1136 class TestGetBranchName(object):
1137 1137 def test_returns_ref_name_when_type_is_branch(self):
1138 1138 ref = self._create_ref('branch', 'fake-name')
1139 1139 result = self.repo._get_branch_name(ref)
1140 1140 assert result == ref.name
1141 1141
1142 1142 @pytest.mark.parametrize("type_", ("book", "tag"))
1143 1143 def test_queries_remote_when_type_is_not_branch(self, type_):
1144 1144 ref = self._create_ref(type_, 'wrong-fake-name')
1145 1145 with mock.patch.object(self.repo, "_remote") as remote_mock:
1146 1146 remote_mock.ctx_branch.return_value = "fake-name"
1147 1147 result = self.repo._get_branch_name(ref)
1148 1148 assert result == "fake-name"
1149 1149 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1150 1150
1151 1151 def _create_ref(self, type_, name):
1152 1152 ref = mock.Mock()
1153 1153 ref.type = type_
1154 1154 ref.name = 'wrong-fake-name'
1155 1155 ref.commit_id = "deadbeef"
1156 1156 return ref
1157 1157
1158 1158
1159 1159 class TestIsTheSameBranch(object):
1160 1160 def test_returns_true_when_branches_are_equal(self):
1161 1161 source_ref = mock.Mock(name="source-ref")
1162 1162 target_ref = mock.Mock(name="target-ref")
1163 1163 branch_name_patcher = mock.patch.object(
1164 1164 self.repo, "_get_branch_name", return_value="default")
1165 1165 with branch_name_patcher as branch_name_mock:
1166 1166 result = self.repo._is_the_same_branch(source_ref, target_ref)
1167 1167
1168 1168 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1169 1169 assert branch_name_mock.call_args_list == expected_calls
1170 1170 assert result is True
1171 1171
1172 1172 def test_returns_false_when_branches_are_not_equal(self):
1173 1173 source_ref = mock.Mock(name="source-ref")
1174 1174 source_ref.name = "source-branch"
1175 1175 target_ref = mock.Mock(name="target-ref")
1176 1176 source_ref.name = "target-branch"
1177 1177
1178 1178 def side_effect(ref):
1179 1179 return ref.name
1180 1180
1181 1181 branch_name_patcher = mock.patch.object(
1182 1182 self.repo, "_get_branch_name", side_effect=side_effect)
1183 1183 with branch_name_patcher as branch_name_mock:
1184 1184 result = self.repo._is_the_same_branch(source_ref, target_ref)
1185 1185
1186 1186 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1187 1187 assert branch_name_mock.call_args_list == expected_calls
1188 1188 assert result is False
@@ -1,353 +1,351 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Tests so called "in memory commits" commit API of vcs.
23 23 """
24 24 import datetime
25 25
26 26 import pytest
27 27
28 28 from rhodecode.lib.utils2 import safe_unicode
29 29 from rhodecode.lib.vcs.exceptions import (
30 30 EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyExistsError,
31 31 NodeAlreadyRemovedError, NodeAlreadyChangedError, NodeDoesNotExistError,
32 32 NodeNotChangedError)
33 33 from rhodecode.lib.vcs.nodes import DirNode, FileNode
34 34 from rhodecode.tests.vcs.conftest import BackendTestMixin
35 35
36 36
37 37 @pytest.fixture
38 38 def nodes():
39 39 nodes = [
40 40 FileNode('foobar', content='Foo & bar'),
41 41 FileNode('foobar2', content='Foo & bar, doubled!'),
42 42 FileNode('foo bar with spaces', content=''),
43 43 FileNode('foo/bar/baz', content='Inside'),
44 44 FileNode(
45 45 'foo/bar/file.bin',
46 46 content=(
47 47 '\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00'
48 48 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe'
49 49 '\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
50 50 '\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00'
51 51 '\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00'
52 52 '\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff'
53 53 '\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
54 54 )
55 55 ),
56 56 ]
57 57 return nodes
58 58
59 59
60 60 @pytest.mark.usefixtures("vcs_repository_support")
61 61 class TestInMemoryCommit(BackendTestMixin):
62 62 """
63 63 This is a backend independent test case class which should be created
64 64 with ``type`` method.
65 65
66 66 It is required to set following attributes at subclass:
67 67
68 68 - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
69 69 """
70 70
71 71 @classmethod
72 72 def _get_commits(cls):
73 73 return []
74 74
75 75 def test_add(self, nodes):
76 76 for node in nodes:
77 77 self.imc.add(node)
78 78
79 79 self.commit()
80 80 self.assert_succesful_commit(nodes)
81 81
82 82 @pytest.mark.backends("hg")
83 83 def test_add_on_branch_hg(self, nodes):
84 84 for node in nodes:
85 85 self.imc.add(node)
86 86 self.commit(branch=u'stable')
87 87 self.assert_succesful_commit(nodes)
88 88
89 89 @pytest.mark.backends("git")
90 90 def test_add_on_branch_git(self, nodes):
91 91 self.repo._checkout('stable', create=True)
92 92
93 93 for node in nodes:
94 94 self.imc.add(node)
95 95 self.commit(branch=u'stable')
96 96 self.assert_succesful_commit(nodes)
97 97
98 98 def test_add_in_bulk(self, nodes):
99 99 self.imc.add(*nodes)
100 100
101 101 self.commit()
102 102 self.assert_succesful_commit(nodes)
103 103
104 104 def test_add_non_ascii_files(self):
105 105 nodes = [
106 106 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko_utf8_str', content='Δ‡Δ‡Δ‡Δ‡'),
107 107 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_unicode', content=u'Δ‡Δ‡Δ‡Δ‡'),
108 108 ]
109 109
110 110 for node in nodes:
111 111 self.imc.add(node)
112 112
113 113 self.commit()
114 114 self.assert_succesful_commit(nodes)
115 115
116 116 def commit(self, branch=None):
117 117 self.old_commit_count = len(self.repo.commit_ids)
118 118 self.commit_message = u'Test commit with unicode: ΕΌΓ³Ε‚wik'
119 self.commit_author = unicode(self.__class__)
119 self.commit_author = u'{} <foo@email.com>'.format(self.__class__.__name__)
120 120 self.commit = self.imc.commit(
121 121 message=self.commit_message, author=self.commit_author,
122 122 branch=branch)
123 123
124 124 def test_add_actually_adds_all_nodes_at_second_commit_too(self):
125 125 to_add = [
126 126 FileNode('foo/bar/image.png', content='\0'),
127 127 FileNode('foo/README.txt', content='readme!'),
128 128 ]
129 129 self.imc.add(*to_add)
130 commit = self.imc.commit(u'Initial', u'joe.doe@example.com')
130 commit = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
131 131 assert isinstance(commit.get_node('foo'), DirNode)
132 132 assert isinstance(commit.get_node('foo/bar'), DirNode)
133 133 self.assert_nodes_in_commit(commit, to_add)
134 134
135 135 # commit some more files again
136 136 to_add = [
137 137 FileNode('foo/bar/foobaz/bar', content='foo'),
138 138 FileNode('foo/bar/another/bar', content='foo'),
139 139 FileNode('foo/baz.txt', content='foo'),
140 140 FileNode('foobar/foobaz/file', content='foo'),
141 141 FileNode('foobar/barbaz', content='foo'),
142 142 ]
143 143 self.imc.add(*to_add)
144 commit = self.imc.commit(u'Another', u'joe.doe@example.com')
144 commit = self.imc.commit(u'Another', u'joe doe <joe.doe@example.com>')
145 145 self.assert_nodes_in_commit(commit, to_add)
146 146
147 147 def test_add_raise_already_added(self):
148 148 node = FileNode('foobar', content='baz')
149 149 self.imc.add(node)
150 150 with pytest.raises(NodeAlreadyAddedError):
151 151 self.imc.add(node)
152 152
153 153 def test_check_integrity_raise_already_exist(self):
154 154 node = FileNode('foobar', content='baz')
155 155 self.imc.add(node)
156 self.imc.commit(message=u'Added foobar', author=unicode(self))
156 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
157 157 self.imc.add(node)
158 158 with pytest.raises(NodeAlreadyExistsError):
159 self.imc.commit(message='new message', author=str(self))
159 self.imc.commit(message='new message', author=u'{} <foo@bar.com>'.format(self))
160 160
161 161 def test_change(self):
162 162 self.imc.add(FileNode('foo/bar/baz', content='foo'))
163 163 self.imc.add(FileNode('foo/fbar', content='foobar'))
164 tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
164 tip = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
165 165
166 166 # Change node's content
167 167 node = FileNode('foo/bar/baz', content='My **changed** content')
168 168 self.imc.change(node)
169 self.imc.commit(u'Changed %s' % node.path, u'joe.doe@example.com')
169 self.imc.commit(u'Changed %s' % node.path, u'joe doe <joe.doe@example.com>')
170 170
171 171 newtip = self.repo.get_commit()
172 172 assert tip != newtip
173 173 assert tip.id != newtip.id
174 174 self.assert_nodes_in_commit(newtip, (node,))
175 175
176 176 def test_change_non_ascii(self):
177 177 to_add = [
178 178 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='Δ‡Δ‡Δ‡Δ‡'),
179 179 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'Δ‡Δ‡Δ‡Δ‡'),
180 180 ]
181 181 for node in to_add:
182 182 self.imc.add(node)
183 183
184 tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
184 tip = self.imc.commit(u'Initial', u'joe doe <joe.doe@example.com>')
185 185
186 186 # Change node's content
187 187 node = FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='My **changed** content')
188 188 self.imc.change(node)
189 189 self.imc.commit(u'Changed %s' % safe_unicode(node.path),
190 u'joe.doe@example.com')
190 author=u'joe doe <joe.doe@example.com>')
191 191
192 192 node_uni = FileNode(
193 193 u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'My **changed** content')
194 194 self.imc.change(node_uni)
195 195 self.imc.commit(u'Changed %s' % safe_unicode(node_uni.path),
196 u'joe.doe@example.com')
196 author=u'joe doe <joe.doe@example.com>')
197 197
198 198 newtip = self.repo.get_commit()
199 199 assert tip != newtip
200 200 assert tip.id != newtip.id
201 201
202 202 self.assert_nodes_in_commit(newtip, (node, node_uni))
203 203
204 204 def test_change_raise_empty_repository(self):
205 205 node = FileNode('foobar')
206 206 with pytest.raises(EmptyRepositoryError):
207 207 self.imc.change(node)
208 208
209 209 def test_check_integrity_change_raise_node_does_not_exist(self):
210 210 node = FileNode('foobar', content='baz')
211 211 self.imc.add(node)
212 self.imc.commit(message=u'Added foobar', author=unicode(self))
212 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
213 213 node = FileNode('not-foobar', content='')
214 214 self.imc.change(node)
215 215 with pytest.raises(NodeDoesNotExistError):
216 self.imc.commit(
217 message='Changed not existing node',
218 author=str(self))
216 self.imc.commit(message='Changed not existing node', author=u'{} <foo@bar.com>'.format(self))
219 217
220 218 def test_change_raise_node_already_changed(self):
221 219 node = FileNode('foobar', content='baz')
222 220 self.imc.add(node)
223 self.imc.commit(message=u'Added foobar', author=unicode(self))
221 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
224 222 node = FileNode('foobar', content='more baz')
225 223 self.imc.change(node)
226 224 with pytest.raises(NodeAlreadyChangedError):
227 225 self.imc.change(node)
228 226
229 227 def test_check_integrity_change_raise_node_not_changed(self, nodes):
230 228 self.test_add(nodes) # Performs first commit
231 229
232 230 node = FileNode(nodes[0].path, content=nodes[0].content)
233 231 self.imc.change(node)
234 232 with pytest.raises(NodeNotChangedError):
235 233 self.imc.commit(
236 234 message=u'Trying to mark node as changed without touching it',
237 author=unicode(self))
235 author=u'{} <foo@bar.com>'.format(self))
238 236
239 237 def test_change_raise_node_already_removed(self):
240 238 node = FileNode('foobar', content='baz')
241 239 self.imc.add(node)
242 self.imc.commit(message=u'Added foobar', author=unicode(self))
240 self.imc.commit(message=u'Added foobar', author=u'{} <foo@bar.com>'.format(self))
243 241 self.imc.remove(FileNode('foobar'))
244 242 with pytest.raises(NodeAlreadyRemovedError):
245 243 self.imc.change(node)
246 244
247 245 def test_remove(self, nodes):
248 246 self.test_add(nodes) # Performs first commit
249 247
250 248 tip = self.repo.get_commit()
251 249 node = nodes[0]
252 250 assert node.content == tip.get_node(node.path).content
253 251 self.imc.remove(node)
254 252 self.imc.commit(
255 message=u'Removed %s' % node.path, author=unicode(self))
253 message=u'Removed %s' % node.path, author=u'{} <foo@bar.com>'.format(self))
256 254
257 255 newtip = self.repo.get_commit()
258 256 assert tip != newtip
259 257 assert tip.id != newtip.id
260 258 with pytest.raises(NodeDoesNotExistError):
261 259 newtip.get_node(node.path)
262 260
263 261 def test_remove_last_file_from_directory(self):
264 262 node = FileNode('omg/qwe/foo/bar', content='foobar')
265 263 self.imc.add(node)
266 self.imc.commit(u'added', u'joe doe')
264 self.imc.commit(u'added', author=u'joe doe <joe@doe.com>')
267 265
268 266 self.imc.remove(node)
269 tip = self.imc.commit(u'removed', u'joe doe')
267 tip = self.imc.commit(u'removed', u'joe doe <joe@doe.com>')
270 268 with pytest.raises(NodeDoesNotExistError):
271 269 tip.get_node('omg/qwe/foo/bar')
272 270
273 271 def test_remove_raise_node_does_not_exist(self, nodes):
274 272 self.imc.remove(nodes[0])
275 273 with pytest.raises(NodeDoesNotExistError):
276 274 self.imc.commit(
277 275 message='Trying to remove node at empty repository',
278 author=str(self))
276 author=u'{} <foo@bar.com>'.format(self))
279 277
280 278 def test_check_integrity_remove_raise_node_does_not_exist(self, nodes):
281 279 self.test_add(nodes) # Performs first commit
282 280
283 281 node = FileNode('no-such-file')
284 282 self.imc.remove(node)
285 283 with pytest.raises(NodeDoesNotExistError):
286 284 self.imc.commit(
287 285 message=u'Trying to remove not existing node',
288 author=unicode(self))
286 author=u'{} <foo@bar.com>'.format(self))
289 287
290 288 def test_remove_raise_node_already_removed(self, nodes):
291 289 self.test_add(nodes) # Performs first commit
292 290
293 291 node = FileNode(nodes[0].path)
294 292 self.imc.remove(node)
295 293 with pytest.raises(NodeAlreadyRemovedError):
296 294 self.imc.remove(node)
297 295
298 296 def test_remove_raise_node_already_changed(self, nodes):
299 297 self.test_add(nodes) # Performs first commit
300 298
301 299 node = FileNode(nodes[0].path, content='Bending time')
302 300 self.imc.change(node)
303 301 with pytest.raises(NodeAlreadyChangedError):
304 302 self.imc.remove(node)
305 303
306 304 def test_reset(self):
307 305 self.imc.add(FileNode('foo', content='bar'))
308 306 # self.imc.change(FileNode('baz', content='new'))
309 307 # self.imc.remove(FileNode('qwe'))
310 308 self.imc.reset()
311 309 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
312 310
313 311 def test_multiple_commits(self):
314 312 N = 3 # number of commits to perform
315 313 last = None
316 314 for x in xrange(N):
317 315 fname = 'file%s' % str(x).rjust(5, '0')
318 316 content = 'foobar\n' * x
319 317 node = FileNode(fname, content=content)
320 318 self.imc.add(node)
321 commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs')
319 commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs <foo@bar.com>')
322 320 assert last != commit
323 321 last = commit
324 322
325 323 # Check commit number for same repo
326 324 assert len(self.repo.commit_ids) == N
327 325
328 326 # Check commit number for recreated repo
329 327 repo = self.Backend(self.repo_path)
330 328 assert len(repo.commit_ids) == N
331 329
332 330 def test_date_attr(self, local_dt_to_utc):
333 331 node = FileNode('foobar.txt', content='Foobared!')
334 332 self.imc.add(node)
335 333 date = datetime.datetime(1985, 1, 30, 1, 45)
336 334 commit = self.imc.commit(
337 335 u"Committed at time when I was born ;-)",
338 author=u'lb', date=date)
336 author=u'{} <foo@bar.com>'.format(self), date=date)
339 337
340 338 assert commit.date == local_dt_to_utc(date)
341 339
342 340 def assert_succesful_commit(self, added_nodes):
343 341 newtip = self.repo.get_commit()
344 342 assert self.commit == newtip
345 343 assert self.old_commit_count + 1 == len(self.repo.commit_ids)
346 344 assert newtip.message == self.commit_message
347 345 assert newtip.author == self.commit_author
348 346 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
349 347 self.assert_nodes_in_commit(newtip, added_nodes)
350 348
351 349 def assert_nodes_in_commit(self, commit, nodes):
352 350 for node in nodes:
353 351 assert commit.get_node(node.path).content == node.content
@@ -1,552 +1,552 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 from urllib2 import URLError
23 23
24 24 import mock
25 25 import pytest
26 26
27 27 from rhodecode.lib.vcs import backends
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 Config, BaseInMemoryCommit, Reference, MergeResponse, MergeFailureReason)
30 30 from rhodecode.lib.vcs.exceptions import VCSError, RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.tests.vcs.conftest import BackendTestMixin
33 33 from rhodecode.tests import repo_id_generator
34 34
35 35
36 36 @pytest.mark.usefixtures("vcs_repository_support")
37 37 class TestRepositoryBase(BackendTestMixin):
38 38 recreate_repo_per_test = False
39 39
40 40 def test_init_accepts_unicode_path(self, tmpdir):
41 41 path = unicode(tmpdir.join(u'unicode Γ€'))
42 42 self.Backend(path, create=True)
43 43
44 44 def test_init_accepts_str_path(self, tmpdir):
45 45 path = str(tmpdir.join('str Γ€'))
46 46 self.Backend(path, create=True)
47 47
48 48 def test_init_fails_if_path_does_not_exist(self, tmpdir):
49 49 path = unicode(tmpdir.join('i-do-not-exist'))
50 50 with pytest.raises(VCSError):
51 51 self.Backend(path)
52 52
53 53 def test_init_fails_if_path_is_not_a_valid_repository(self, tmpdir):
54 54 path = unicode(tmpdir.mkdir(u'unicode Γ€'))
55 55 with pytest.raises(VCSError):
56 56 self.Backend(path)
57 57
58 58 def test_has_commits_attribute(self):
59 59 self.repo.commit_ids
60 60
61 61 def test_name(self):
62 62 assert self.repo.name.startswith('vcs-test')
63 63
64 64 @pytest.mark.backends("hg", "git")
65 65 def test_has_default_branch_name(self):
66 66 assert self.repo.DEFAULT_BRANCH_NAME is not None
67 67
68 68 @pytest.mark.backends("svn")
69 69 def test_has_no_default_branch_name(self):
70 70 assert self.repo.DEFAULT_BRANCH_NAME is None
71 71
72 72 def test_has_empty_commit(self):
73 73 assert self.repo.EMPTY_COMMIT_ID is not None
74 74 assert self.repo.EMPTY_COMMIT is not None
75 75
76 76 def test_empty_changeset_is_deprecated(self):
77 77 def get_empty_changeset(repo):
78 78 return repo.EMPTY_CHANGESET
79 79 pytest.deprecated_call(get_empty_changeset, self.repo)
80 80
81 81 def test_bookmarks(self):
82 82 assert len(self.repo.bookmarks) == 0
83 83
84 84 # TODO: Cover two cases: Local repo path, remote URL
85 85 def test_check_url(self):
86 86 config = Config()
87 87 assert self.Backend.check_url(self.repo.path, config)
88 88
89 89 def test_check_url_invalid(self):
90 90 config = Config()
91 91 with pytest.raises(URLError):
92 92 self.Backend.check_url(self.repo.path + "invalid", config)
93 93
94 94 def test_get_contact(self):
95 95 assert self.repo.contact
96 96
97 97 def test_get_description(self):
98 98 assert self.repo.description
99 99
100 100 def test_get_hook_location(self):
101 101 assert len(self.repo.get_hook_location()) != 0
102 102
103 103 def test_last_change(self, local_dt_to_utc):
104 104 assert self.repo.last_change >= local_dt_to_utc(
105 105 datetime.datetime(2010, 1, 1, 21, 0))
106 106
107 107 def test_last_change_in_empty_repository(self, vcsbackend, local_dt_to_utc):
108 108 delta = datetime.timedelta(seconds=1)
109 109
110 110 start = local_dt_to_utc(datetime.datetime.now())
111 111 empty_repo = vcsbackend.create_repo()
112 112 now = local_dt_to_utc(datetime.datetime.now())
113 113 assert empty_repo.last_change >= start - delta
114 114 assert empty_repo.last_change <= now + delta
115 115
116 116 def test_repo_equality(self):
117 117 assert self.repo == self.repo
118 118
119 119 def test_repo_equality_broken_object(self):
120 120 import copy
121 121 _repo = copy.copy(self.repo)
122 122 delattr(_repo, 'path')
123 123 assert self.repo != _repo
124 124
125 125 def test_repo_equality_other_object(self):
126 126 class dummy(object):
127 127 path = self.repo.path
128 128 assert self.repo != dummy()
129 129
130 130 def test_get_commit_is_implemented(self):
131 131 self.repo.get_commit()
132 132
133 133 def test_get_commits_is_implemented(self):
134 134 commit_iter = iter(self.repo.get_commits())
135 135 commit = next(commit_iter)
136 136 assert commit.idx == 0
137 137
138 138 def test_supports_iteration(self):
139 139 repo_iter = iter(self.repo)
140 140 commit = next(repo_iter)
141 141 assert commit.idx == 0
142 142
143 143 def test_in_memory_commit(self):
144 144 imc = self.repo.in_memory_commit
145 145 assert isinstance(imc, BaseInMemoryCommit)
146 146
147 147 @pytest.mark.backends("hg")
148 148 def test__get_url_unicode(self):
149 149 url = u'/home/repos/malmΓΆ'
150 150 assert self.repo._get_url(url)
151 151
152 152
153 153 @pytest.mark.usefixtures("vcs_repository_support")
154 154 class TestDeprecatedRepositoryAPI(BackendTestMixin):
155 155 recreate_repo_per_test = False
156 156
157 157 def test_revisions_is_deprecated(self):
158 158 def get_revisions(repo):
159 159 return repo.revisions
160 160 pytest.deprecated_call(get_revisions, self.repo)
161 161
162 162 def test_get_changeset_is_deprecated(self):
163 163 pytest.deprecated_call(self.repo.get_changeset)
164 164
165 165 def test_get_changesets_is_deprecated(self):
166 166 pytest.deprecated_call(self.repo.get_changesets)
167 167
168 168 def test_in_memory_changeset_is_deprecated(self):
169 169 def get_imc(repo):
170 170 return repo.in_memory_changeset
171 171 pytest.deprecated_call(get_imc, self.repo)
172 172
173 173
174 174 # TODO: these tests are incomplete, must check the resulting compare result for
175 175 # correcteness
176 176 class TestRepositoryCompare:
177 177
178 178 @pytest.mark.parametrize('merge', [True, False])
179 179 def test_compare_commits_of_same_repository(self, vcsbackend, merge):
180 180 target_repo = vcsbackend.create_repo(number_of_commits=5)
181 181 target_repo.compare(
182 182 target_repo[1].raw_id, target_repo[3].raw_id, target_repo,
183 183 merge=merge)
184 184
185 185 @pytest.mark.xfail_backends('svn')
186 186 @pytest.mark.parametrize('merge', [True, False])
187 187 def test_compare_cloned_repositories(self, vcsbackend, merge):
188 188 target_repo = vcsbackend.create_repo(number_of_commits=5)
189 189 source_repo = vcsbackend.clone_repo(target_repo)
190 190 assert target_repo != source_repo
191 191
192 192 vcsbackend.add_file(source_repo, 'newfile', 'somecontent')
193 193 source_commit = source_repo.get_commit()
194 194
195 195 target_repo.compare(
196 196 target_repo[1].raw_id, source_repo[3].raw_id, source_repo,
197 197 merge=merge)
198 198
199 199 @pytest.mark.xfail_backends('svn')
200 200 @pytest.mark.parametrize('merge', [True, False])
201 201 def test_compare_unrelated_repositories(self, vcsbackend, merge):
202 202 orig = vcsbackend.create_repo(number_of_commits=5)
203 203 unrelated = vcsbackend.create_repo(number_of_commits=5)
204 204 assert orig != unrelated
205 205
206 206 orig.compare(
207 207 orig[1].raw_id, unrelated[3].raw_id, unrelated, merge=merge)
208 208
209 209
210 210 class TestRepositoryGetCommonAncestor:
211 211
212 212 def test_get_common_ancestor_from_same_repo_existing(self, vcsbackend):
213 213 target_repo = vcsbackend.create_repo(number_of_commits=5)
214 214
215 215 expected_ancestor = target_repo[2].raw_id
216 216
217 217 assert target_repo.get_common_ancestor(
218 218 commit_id1=target_repo[2].raw_id,
219 219 commit_id2=target_repo[4].raw_id,
220 220 repo2=target_repo
221 221 ) == expected_ancestor
222 222
223 223 assert target_repo.get_common_ancestor(
224 224 commit_id1=target_repo[4].raw_id,
225 225 commit_id2=target_repo[2].raw_id,
226 226 repo2=target_repo
227 227 ) == expected_ancestor
228 228
229 229 @pytest.mark.xfail_backends("svn")
230 230 def test_get_common_ancestor_from_cloned_repo_existing(self, vcsbackend):
231 231 target_repo = vcsbackend.create_repo(number_of_commits=5)
232 232 source_repo = vcsbackend.clone_repo(target_repo)
233 233 assert target_repo != source_repo
234 234
235 235 vcsbackend.add_file(source_repo, 'newfile', 'somecontent')
236 236 source_commit = source_repo.get_commit()
237 237
238 238 expected_ancestor = target_repo[4].raw_id
239 239
240 240 assert target_repo.get_common_ancestor(
241 241 commit_id1=target_repo[4].raw_id,
242 242 commit_id2=source_commit.raw_id,
243 243 repo2=source_repo
244 244 ) == expected_ancestor
245 245
246 246 assert target_repo.get_common_ancestor(
247 247 commit_id1=source_commit.raw_id,
248 248 commit_id2=target_repo[4].raw_id,
249 249 repo2=target_repo
250 250 ) == expected_ancestor
251 251
252 252 @pytest.mark.xfail_backends("svn")
253 253 def test_get_common_ancestor_from_unrelated_repo_missing(self, vcsbackend):
254 254 original = vcsbackend.create_repo(number_of_commits=5)
255 255 unrelated = vcsbackend.create_repo(number_of_commits=5)
256 256 assert original != unrelated
257 257
258 258 assert original.get_common_ancestor(
259 259 commit_id1=original[0].raw_id,
260 260 commit_id2=unrelated[0].raw_id,
261 261 repo2=unrelated
262 262 ) is None
263 263
264 264 assert original.get_common_ancestor(
265 265 commit_id1=original[-1].raw_id,
266 266 commit_id2=unrelated[-1].raw_id,
267 267 repo2=unrelated
268 268 ) is None
269 269
270 270
271 271 @pytest.mark.backends("git", "hg")
272 272 class TestRepositoryMerge(object):
273 273 def prepare_for_success(self, vcsbackend):
274 274 self.target_repo = vcsbackend.create_repo(number_of_commits=1)
275 275 self.source_repo = vcsbackend.clone_repo(self.target_repo)
276 276 vcsbackend.add_file(self.target_repo, 'README_MERGE1', 'Version 1')
277 277 vcsbackend.add_file(self.source_repo, 'README_MERGE2', 'Version 2')
278 278 imc = self.source_repo.in_memory_commit
279 279 imc.add(FileNode('file_x', content=self.source_repo.name))
280 280 imc.commit(
281 281 message=u'Automatic commit from repo merge test',
282 author=u'Automatic')
282 author=u'Automatic <automatic@rhodecode.com>')
283 283 self.target_commit = self.target_repo.get_commit()
284 284 self.source_commit = self.source_repo.get_commit()
285 285 # This only works for Git and Mercurial
286 286 default_branch = self.target_repo.DEFAULT_BRANCH_NAME
287 287 self.target_ref = Reference('branch', default_branch, self.target_commit.raw_id)
288 288 self.source_ref = Reference('branch', default_branch, self.source_commit.raw_id)
289 289 self.workspace_id = 'test-merge-{}'.format(vcsbackend.alias)
290 290 self.repo_id = repo_id_generator(self.target_repo.path)
291 291
292 292 def prepare_for_conflict(self, vcsbackend):
293 293 self.target_repo = vcsbackend.create_repo(number_of_commits=1)
294 294 self.source_repo = vcsbackend.clone_repo(self.target_repo)
295 295 vcsbackend.add_file(self.target_repo, 'README_MERGE', 'Version 1')
296 296 vcsbackend.add_file(self.source_repo, 'README_MERGE', 'Version 2')
297 297 self.target_commit = self.target_repo.get_commit()
298 298 self.source_commit = self.source_repo.get_commit()
299 299 # This only works for Git and Mercurial
300 300 default_branch = self.target_repo.DEFAULT_BRANCH_NAME
301 301 self.target_ref = Reference('branch', default_branch, self.target_commit.raw_id)
302 302 self.source_ref = Reference('branch', default_branch, self.source_commit.raw_id)
303 303 self.workspace_id = 'test-merge-{}'.format(vcsbackend.alias)
304 304 self.repo_id = repo_id_generator(self.target_repo.path)
305 305
306 306 def test_merge_success(self, vcsbackend):
307 307 self.prepare_for_success(vcsbackend)
308 308
309 309 merge_response = self.target_repo.merge(
310 310 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
311 311 self.source_ref,
312 312 'test user', 'test@rhodecode.com', 'merge message 1',
313 313 dry_run=False)
314 314 expected_merge_response = MergeResponse(
315 315 True, True, merge_response.merge_ref,
316 316 MergeFailureReason.NONE)
317 317 assert merge_response == expected_merge_response
318 318
319 319 target_repo = backends.get_backend(vcsbackend.alias)(
320 320 self.target_repo.path)
321 321 target_commits = list(target_repo.get_commits())
322 322 commit_ids = [c.raw_id for c in target_commits[:-1]]
323 323 assert self.source_ref.commit_id in commit_ids
324 324 assert self.target_ref.commit_id in commit_ids
325 325
326 326 merge_commit = target_commits[-1]
327 327 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
328 328 assert merge_commit.message.strip() == 'merge message 1'
329 329 assert merge_commit.author == 'test user <test@rhodecode.com>'
330 330
331 331 # We call it twice so to make sure we can handle updates
332 332 target_ref = Reference(
333 333 self.target_ref.type, self.target_ref.name,
334 334 merge_response.merge_ref.commit_id)
335 335
336 336 merge_response = target_repo.merge(
337 337 self.repo_id, self.workspace_id, target_ref, self.source_repo, self.source_ref,
338 338 'test user', 'test@rhodecode.com', 'merge message 2',
339 339 dry_run=False)
340 340 expected_merge_response = MergeResponse(
341 341 True, True, merge_response.merge_ref,
342 342 MergeFailureReason.NONE)
343 343 assert merge_response == expected_merge_response
344 344
345 345 target_repo = backends.get_backend(
346 346 vcsbackend.alias)(self.target_repo.path)
347 347 merge_commit = target_repo.get_commit(
348 348 merge_response.merge_ref.commit_id)
349 349 assert merge_commit.message.strip() == 'merge message 1'
350 350 assert merge_commit.author == 'test user <test@rhodecode.com>'
351 351
352 352 def test_merge_success_dry_run(self, vcsbackend):
353 353 self.prepare_for_success(vcsbackend)
354 354
355 355 merge_response = self.target_repo.merge(
356 356 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
357 357 self.source_ref, dry_run=True)
358 358
359 359 # We call it twice so to make sure we can handle updates
360 360 merge_response_update = self.target_repo.merge(
361 361 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
362 362 self.source_ref, dry_run=True)
363 363
364 364 # Multiple merges may differ in their commit id. Therefore we set the
365 365 # commit id to `None` before comparing the merge responses.
366 366 new_merge_ref = merge_response.merge_ref._replace(commit_id=None)
367 367 merge_response.merge_ref = new_merge_ref
368 368
369 369 new_update_merge_ref = merge_response_update.merge_ref._replace(commit_id=None)
370 370 merge_response_update.merge_ref = new_update_merge_ref
371 371
372 372 assert merge_response == merge_response_update
373 373 assert merge_response.possible is True
374 374 assert merge_response.executed is False
375 375 assert merge_response.merge_ref
376 376 assert merge_response.failure_reason is MergeFailureReason.NONE
377 377
378 378 @pytest.mark.parametrize('dry_run', [True, False])
379 379 def test_merge_conflict(self, vcsbackend, dry_run):
380 380 self.prepare_for_conflict(vcsbackend)
381 381
382 382 expected_merge_response = MergeResponse(
383 383 False, False, None, MergeFailureReason.MERGE_FAILED)
384 384
385 385 merge_response = self.target_repo.merge(
386 386 self.repo_id, self.workspace_id, self.target_ref,
387 387 self.source_repo, self.source_ref,
388 388 'test_user', 'test@rhodecode.com', 'test message', dry_run=dry_run)
389 389 assert merge_response == expected_merge_response
390 390
391 391 # We call it twice so to make sure we can handle updates
392 392 merge_response = self.target_repo.merge(
393 393 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
394 394 self.source_ref,
395 395 'test_user', 'test@rhodecode.com', 'test message', dry_run=dry_run)
396 396 assert merge_response == expected_merge_response
397 397
398 398 def test_merge_target_is_not_head(self, vcsbackend):
399 399 self.prepare_for_success(vcsbackend)
400 400 target_ref = Reference(
401 401 self.target_ref.type, self.target_ref.name, '0' * 40)
402 402 expected_merge_response = MergeResponse(
403 403 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
404 404 metadata={'target_ref': target_ref})
405 405 merge_response = self.target_repo.merge(
406 406 self.repo_id, self.workspace_id, target_ref, self.source_repo,
407 407 self.source_ref, dry_run=True)
408 408
409 409 assert merge_response == expected_merge_response
410 410
411 411 def test_merge_missing_source_reference(self, vcsbackend):
412 412 self.prepare_for_success(vcsbackend)
413 413
414 414 source_ref = Reference(
415 415 self.source_ref.type, 'not_existing', self.source_ref.commit_id)
416 416 expected_merge_response = MergeResponse(
417 417 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
418 418 metadata={'source_ref': source_ref})
419 419
420 420 merge_response = self.target_repo.merge(
421 421 self.repo_id, self.workspace_id, self.target_ref,
422 422 self.source_repo, source_ref,
423 423 dry_run=True)
424 424
425 425 assert merge_response == expected_merge_response
426 426
427 427 def test_merge_raises_exception(self, vcsbackend):
428 428 self.prepare_for_success(vcsbackend)
429 429 expected_merge_response = MergeResponse(
430 430 False, False, None, MergeFailureReason.UNKNOWN,
431 431 metadata={'exception': 'ErrorForTest'})
432 432
433 433 with mock.patch.object(self.target_repo, '_merge_repo',
434 434 side_effect=RepositoryError()):
435 435 merge_response = self.target_repo.merge(
436 436 self.repo_id, self.workspace_id, self.target_ref,
437 437 self.source_repo, self.source_ref,
438 438 dry_run=True)
439 439
440 440 assert merge_response == expected_merge_response
441 441
442 442 def test_merge_invalid_user_name(self, vcsbackend):
443 443 repo = vcsbackend.create_repo(number_of_commits=1)
444 444 ref = Reference('branch', 'master', 'not_used')
445 445 workspace_id = 'test-errors-in-merge'
446 446 repo_id = repo_id_generator(workspace_id)
447 447 with pytest.raises(ValueError):
448 448 repo.merge(repo_id, workspace_id, ref, self, ref)
449 449
450 450 def test_merge_invalid_user_email(self, vcsbackend):
451 451 repo = vcsbackend.create_repo(number_of_commits=1)
452 452 ref = Reference('branch', 'master', 'not_used')
453 453 workspace_id = 'test-errors-in-merge'
454 454 repo_id = repo_id_generator(workspace_id)
455 455 with pytest.raises(ValueError):
456 456 repo.merge(
457 457 repo_id, workspace_id, ref, self, ref, 'user name')
458 458
459 459 def test_merge_invalid_message(self, vcsbackend):
460 460 repo = vcsbackend.create_repo(number_of_commits=1)
461 461 ref = Reference('branch', 'master', 'not_used')
462 462 workspace_id = 'test-errors-in-merge'
463 463 repo_id = repo_id_generator(workspace_id)
464 464 with pytest.raises(ValueError):
465 465 repo.merge(
466 466 repo_id, workspace_id, ref, self, ref,
467 467 'user name', 'user@email.com')
468 468
469 469
470 470 @pytest.mark.usefixtures("vcs_repository_support")
471 471 class TestRepositoryStrip(BackendTestMixin):
472 472 recreate_repo_per_test = True
473 473
474 474 @classmethod
475 475 def _get_commits(cls):
476 476 commits = [
477 477 {
478 478 'message': 'Initial commit',
479 479 'author': 'Joe Doe <joe.doe@example.com>',
480 480 'date': datetime.datetime(2010, 1, 1, 20),
481 481 'branch': 'master',
482 482 'added': [
483 483 FileNode('foobar', content='foobar'),
484 484 FileNode('foobar2', content='foobar2'),
485 485 ],
486 486 },
487 487 ]
488 488 for x in xrange(10):
489 489 commit_data = {
490 490 'message': 'Changed foobar - commit%s' % x,
491 491 'author': 'Jane Doe <jane.doe@example.com>',
492 492 'date': datetime.datetime(2010, 1, 1, 21, x),
493 493 'branch': 'master',
494 494 'changed': [
495 495 FileNode('foobar', 'FOOBAR - %s' % x),
496 496 ],
497 497 }
498 498 commits.append(commit_data)
499 499 return commits
500 500
501 501 @pytest.mark.backends("git", "hg")
502 502 def test_strip_commit(self):
503 503 tip = self.repo.get_commit()
504 504 assert tip.idx == 10
505 505 self.repo.strip(tip.raw_id, self.repo.DEFAULT_BRANCH_NAME)
506 506
507 507 tip = self.repo.get_commit()
508 508 assert tip.idx == 9
509 509
510 510 @pytest.mark.backends("git", "hg")
511 511 def test_strip_multiple_commits(self):
512 512 tip = self.repo.get_commit()
513 513 assert tip.idx == 10
514 514
515 515 old = self.repo.get_commit(commit_idx=5)
516 516 self.repo.strip(old.raw_id, self.repo.DEFAULT_BRANCH_NAME)
517 517
518 518 tip = self.repo.get_commit()
519 519 assert tip.idx == 4
520 520
521 521
522 522 @pytest.mark.backends('hg', 'git')
523 523 class TestRepositoryPull(object):
524 524
525 525 def test_pull(self, vcsbackend):
526 526 source_repo = vcsbackend.repo
527 527 target_repo = vcsbackend.create_repo()
528 528 assert len(source_repo.commit_ids) > len(target_repo.commit_ids)
529 529
530 530 target_repo.pull(source_repo.path)
531 531 # Note: Get a fresh instance, avoids caching trouble
532 532 target_repo = vcsbackend.backend(target_repo.path)
533 533 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
534 534
535 535 def test_pull_wrong_path(self, vcsbackend):
536 536 target_repo = vcsbackend.create_repo()
537 537 with pytest.raises(RepositoryError):
538 538 target_repo.pull(target_repo.path + "wrong")
539 539
540 540 def test_pull_specific_commits(self, vcsbackend):
541 541 source_repo = vcsbackend.repo
542 542 target_repo = vcsbackend.create_repo()
543 543
544 544 second_commit = source_repo[1].raw_id
545 545 if vcsbackend.alias == 'git':
546 546 second_commit_ref = 'refs/test-refs/a'
547 547 source_repo.set_refs(second_commit_ref, second_commit)
548 548
549 549 target_repo.pull(source_repo.path, commit_ids=[second_commit])
550 550 target_repo = vcsbackend.backend(target_repo.path)
551 551 assert 2 == len(target_repo.commit_ids)
552 552 assert second_commit == target_repo.get_commit().raw_id
@@ -1,67 +1,67 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Test suite for making push/pull operations, on specially modified INI files
23 23
24 24 .. important::
25 25
26 26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
27 27 to redirect things to stderr instead of stdout.
28 28 """
29 29
30 30 import os
31 31 import pytest
32 32
33 33 from rhodecode.lib.vcs.backends.git.repository import GitRepository
34 34 from rhodecode.lib.vcs.nodes import FileNode
35 35 from rhodecode.tests import GIT_REPO
36 36 from rhodecode.tests.vcs_operations import Command
37 37 from .test_vcs_operations import _check_proper_clone, _check_proper_git_push
38 38
39 39
40 40 def test_git_clone_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
41 41 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
42 42 cmd = Command('/tmp')
43 43 stdout, stderr = cmd.execute(
44 44 'git -c http.postBuffer=1024 clone', clone_url, tmpdir.strpath)
45 45 _check_proper_clone(stdout, stderr, 'git')
46 46 cmd.assert_returncode_success()
47 47
48 48
49 49 def test_git_push_with_small_push_buffer(backend_git, rc_web_server, tmpdir):
50 50 empty_repo = backend_git.create_repo()
51 51
52 52 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
53 53
54 54 cmd = Command(tmpdir.strpath)
55 55 cmd.execute('git clone', clone_url)
56 56
57 57 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
58 58 repo.in_memory_commit.add(FileNode('readme.md', content='## Hello'))
59 59 repo.in_memory_commit.commit(
60 60 message='Commit on branch Master',
61 author='Automatic test',
61 author='Automatic test <automatic@rhodecode.com>',
62 62 branch='master')
63 63
64 64 repo_cmd = Command(repo.path)
65 65 stdout, stderr = repo_cmd.execute(
66 66 'git -c http.postBuffer=1024 push --verbose origin master')
67 67 _check_proper_git_push(stdout, stderr, branch='master')
@@ -1,282 +1,282 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 import pytest
24 24
25 25 from rhodecode.lib.vcs.backends.git.repository import GitRepository
26 26 from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository
27 27 from rhodecode.lib.vcs.nodes import FileNode
28 28 from rhodecode.model.db import Repository
29 29 from rhodecode.model.meta import Session
30 30 from rhodecode.tests import GIT_REPO, HG_REPO
31 31
32 32 from rhodecode.tests.vcs_operations import (
33 33 Command, _check_proper_clone, _check_proper_git_push, _check_proper_hg_push,
34 34 _add_files_and_push)
35 35
36 36
37 37 @pytest.mark.usefixtures("disable_locking")
38 38 class TestVCSOperationsSpecial(object):
39 39
40 40 def test_git_sets_default_branch_if_not_master(
41 41 self, backend_git, tmpdir, rc_web_server):
42 42 empty_repo = backend_git.create_repo()
43 43 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
44 44
45 45 cmd = Command(tmpdir.strpath)
46 46 cmd.execute('git clone', clone_url)
47 47
48 48 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
49 49 repo._checkout('test', create=True)
50 50 repo.in_memory_commit.add(FileNode('file', content=''))
51 51 repo.in_memory_commit.commit(
52 52 message='Commit on branch test',
53 author='Automatic test',
53 author='Automatic test <automatic@rhodecode.com>',
54 54 branch='test')
55 55
56 56 repo_cmd = Command(repo.path)
57 57 stdout, stderr = repo_cmd.execute('git push --verbose origin test')
58 58 _check_proper_git_push(
59 59 stdout, stderr, branch='test', should_set_default_branch=True)
60 60
61 61 stdout, stderr = cmd.execute(
62 62 'git clone', clone_url, empty_repo.repo_name + '-clone')
63 63 _check_proper_clone(stdout, stderr, 'git')
64 64
65 65 # Doing an explicit commit in order to get latest user logs on MySQL
66 66 Session().commit()
67 67
68 68 def test_git_fetches_from_remote_repository_with_annotated_tags(
69 69 self, backend_git, rc_web_server):
70 70 # Note: This is a test specific to the git backend. It checks the
71 71 # integration of fetching from a remote repository which contains
72 72 # annotated tags.
73 73
74 74 # Dulwich shows this specific behavior only when
75 75 # operating against a remote repository.
76 76 source_repo = backend_git['annotated-tag']
77 77 target_vcs_repo = backend_git.create_repo().scm_instance()
78 78 target_vcs_repo.fetch(rc_web_server.repo_clone_url(source_repo.repo_name))
79 79
80 80 def test_git_push_shows_pull_request_refs(self, backend_git, rc_web_server, tmpdir):
81 81 """
82 82 test if remote info about refs is visible
83 83 """
84 84 empty_repo = backend_git.create_repo()
85 85
86 86 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
87 87
88 88 cmd = Command(tmpdir.strpath)
89 89 cmd.execute('git clone', clone_url)
90 90
91 91 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
92 92 repo.in_memory_commit.add(FileNode('readme.md', content='## Hello'))
93 93 repo.in_memory_commit.commit(
94 94 message='Commit on branch Master',
95 author='Automatic test',
95 author='Automatic test <automatic@rhodecode.com>',
96 96 branch='master')
97 97
98 98 repo_cmd = Command(repo.path)
99 99 stdout, stderr = repo_cmd.execute('git push --verbose origin master')
100 100 _check_proper_git_push(stdout, stderr, branch='master')
101 101
102 102 ref = '{}/{}/pull-request/new?branch=master'.format(
103 103 rc_web_server.host_url(), empty_repo.repo_name)
104 104 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
105 105 assert 'remote: RhodeCode: push completed' in stderr
106 106
107 107 # push on the same branch
108 108 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
109 109 repo.in_memory_commit.add(FileNode('setup.py', content='print\n'))
110 110 repo.in_memory_commit.commit(
111 111 message='Commit2 on branch Master',
112 author='Automatic test2',
112 author='Automatic test2 <automatic@rhodecode.com>',
113 113 branch='master')
114 114
115 115 repo_cmd = Command(repo.path)
116 116 stdout, stderr = repo_cmd.execute('git push --verbose origin master')
117 117 _check_proper_git_push(stdout, stderr, branch='master')
118 118
119 119 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
120 120 assert 'remote: RhodeCode: push completed' in stderr
121 121
122 122 # new Branch
123 123 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
124 124 repo.in_memory_commit.add(FileNode('feature1.py', content='## Hello world'))
125 125 repo.in_memory_commit.commit(
126 126 message='Commit on branch feature',
127 author='Automatic test',
127 author='Automatic test <automatic@rhodecode.com>',
128 128 branch='feature')
129 129
130 130 repo_cmd = Command(repo.path)
131 131 stdout, stderr = repo_cmd.execute('git push --verbose origin feature')
132 132 _check_proper_git_push(stdout, stderr, branch='feature')
133 133
134 134 ref = '{}/{}/pull-request/new?branch=feature'.format(
135 135 rc_web_server.host_url(), empty_repo.repo_name)
136 136 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
137 137 assert 'remote: RhodeCode: push completed' in stderr
138 138
139 139 def test_hg_push_shows_pull_request_refs(self, backend_hg, rc_web_server, tmpdir):
140 140 empty_repo = backend_hg.create_repo()
141 141
142 142 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
143 143
144 144 cmd = Command(tmpdir.strpath)
145 145 cmd.execute('hg clone', clone_url)
146 146
147 147 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
148 148 repo.in_memory_commit.add(FileNode(u'readme.md', content=u'## Hello'))
149 149 repo.in_memory_commit.commit(
150 150 message=u'Commit on branch default',
151 151 author=u'Automatic test',
152 152 branch='default')
153 153
154 154 repo_cmd = Command(repo.path)
155 155 repo_cmd.execute('hg checkout default')
156 156
157 157 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
158 158 _check_proper_hg_push(stdout, stderr, branch='default')
159 159
160 160 ref = '{}/{}/pull-request/new?branch=default'.format(
161 161 rc_web_server.host_url(), empty_repo.repo_name)
162 162 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
163 163 assert 'remote: RhodeCode: push completed' in stdout
164 164
165 165 # push on the same branch
166 166 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
167 167 repo.in_memory_commit.add(FileNode(u'setup.py', content=u'print\n'))
168 168 repo.in_memory_commit.commit(
169 169 message=u'Commit2 on branch default',
170 170 author=u'Automatic test2',
171 171 branch=u'default')
172 172
173 173 repo_cmd = Command(repo.path)
174 174 repo_cmd.execute('hg checkout default')
175 175
176 176 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
177 177 _check_proper_hg_push(stdout, stderr, branch='default')
178 178
179 179 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
180 180 assert 'remote: RhodeCode: push completed' in stdout
181 181
182 182 # new Branch
183 183 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
184 184 repo.in_memory_commit.add(FileNode(u'feature1.py', content=u'## Hello world'))
185 185 repo.in_memory_commit.commit(
186 186 message=u'Commit on branch feature',
187 187 author=u'Automatic test',
188 188 branch=u'feature')
189 189
190 190 repo_cmd = Command(repo.path)
191 191 repo_cmd.execute('hg checkout feature')
192 192
193 193 stdout, stderr = repo_cmd.execute('hg push --new-branch --verbose', clone_url)
194 194 _check_proper_hg_push(stdout, stderr, branch='feature')
195 195
196 196 ref = '{}/{}/pull-request/new?branch=feature'.format(
197 197 rc_web_server.host_url(), empty_repo.repo_name)
198 198 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
199 199 assert 'remote: RhodeCode: push completed' in stdout
200 200
201 201 def test_hg_push_shows_pull_request_refs_book(self, backend_hg, rc_web_server, tmpdir):
202 202 empty_repo = backend_hg.create_repo()
203 203
204 204 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
205 205
206 206 cmd = Command(tmpdir.strpath)
207 207 cmd.execute('hg clone', clone_url)
208 208
209 209 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
210 210 repo.in_memory_commit.add(FileNode(u'readme.md', content=u'## Hello'))
211 211 repo.in_memory_commit.commit(
212 212 message=u'Commit on branch default',
213 213 author=u'Automatic test',
214 214 branch='default')
215 215
216 216 repo_cmd = Command(repo.path)
217 217 repo_cmd.execute('hg checkout default')
218 218
219 219 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
220 220 _check_proper_hg_push(stdout, stderr, branch='default')
221 221
222 222 ref = '{}/{}/pull-request/new?branch=default'.format(
223 223 rc_web_server.host_url(), empty_repo.repo_name)
224 224 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
225 225 assert 'remote: RhodeCode: push completed' in stdout
226 226
227 227 # add bookmark
228 228 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
229 229 repo.in_memory_commit.add(FileNode(u'setup.py', content=u'print\n'))
230 230 repo.in_memory_commit.commit(
231 231 message=u'Commit2 on branch default',
232 232 author=u'Automatic test2',
233 233 branch=u'default')
234 234
235 235 repo_cmd = Command(repo.path)
236 236 repo_cmd.execute('hg checkout default')
237 237 repo_cmd.execute('hg bookmark feature2')
238 238 stdout, stderr = repo_cmd.execute('hg push -B feature2 --verbose', clone_url)
239 239 _check_proper_hg_push(stdout, stderr, branch='default')
240 240
241 241 ref = '{}/{}/pull-request/new?branch=default'.format(
242 242 rc_web_server.host_url(), empty_repo.repo_name)
243 243 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
244 244 ref = '{}/{}/pull-request/new?bookmark=feature2'.format(
245 245 rc_web_server.host_url(), empty_repo.repo_name)
246 246 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
247 247 assert 'remote: RhodeCode: push completed' in stdout
248 248 assert 'exporting bookmark feature2' in stdout
249 249
250 250 def test_push_is_forbidden_on_archived_repo_hg(self, backend_hg, rc_web_server, tmpdir):
251 251 empty_repo = backend_hg.create_repo()
252 252 repo_name = empty_repo.repo_name
253 253
254 254 repo = Repository.get_by_repo_name(repo_name)
255 255 repo.archived = True
256 256 Session().commit()
257 257
258 258 clone_url = rc_web_server.repo_clone_url(repo_name)
259 259 stdout, stderr = Command('/tmp').execute(
260 260 'hg clone', clone_url, tmpdir.strpath)
261 261
262 262 stdout, stderr = _add_files_and_push(
263 263 'hg', tmpdir.strpath, clone_url=clone_url)
264 264
265 265 assert 'abort: HTTP Error 403: Forbidden' in stderr
266 266
267 267 def test_push_is_forbidden_on_archived_repo_git(self, backend_git, rc_web_server, tmpdir):
268 268 empty_repo = backend_git.create_repo()
269 269 repo_name = empty_repo.repo_name
270 270
271 271 repo = Repository.get_by_repo_name(repo_name)
272 272 repo.archived = True
273 273 Session().commit()
274 274
275 275 clone_url = rc_web_server.repo_clone_url(repo_name)
276 276 stdout, stderr = Command('/tmp').execute(
277 277 'git clone', clone_url, tmpdir.strpath)
278 278
279 279 stdout, stderr = _add_files_and_push(
280 280 'git', tmpdir.strpath, clone_url=clone_url)
281 281
282 282 assert "The requested URL returned error: 403" in stderr
General Comments 0
You need to be logged in to leave comments. Login now