tests: fixed further tests
marcink
r3777:aceb6b16 new-ui
@@ -1,1218 +1,1218 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.tests import (
34 34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 35
36 36
37 37 def route_path(name, params=None, **kwargs):
38 38 import urllib
39 39
40 40 base_url = {
41 41 'repo_changelog': '/{repo_name}/changelog',
42 42 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43    - 'repo_commits': '/{repo_name}/changelog',
44    - 'repo_commits_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43    + 'repo_commits': '/{repo_name}/commits',
44    + 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
45 45 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
46 46 'pullrequest_show_all': '/{repo_name}/pull-request',
47 47 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
48 48 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
49 49 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
50 50 'pullrequest_new': '/{repo_name}/pull-request/new',
51 51 'pullrequest_create': '/{repo_name}/pull-request/create',
52 52 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
53 53 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
54 54 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
55 55 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
56 56 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
57 57 }[name].format(**kwargs)
58 58
59 59 if params:
60 60 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
61 61 return base_url
62 62
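# A minimal usage sketch of route_path above; the repo name 'vcs_test' and
# pull request id 1 are illustrative values, not fixtures from this module:
#
#   route_path('pullrequest_show', repo_name='vcs_test', pull_request_id=1)
#   # -> '/vcs_test/pull-request/1'
#   route_path('repo_commits', repo_name='vcs_test', params={'branch': 'default'})
#   # -> '/vcs_test/commits?branch=default'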
63 63
64 64 @pytest.mark.usefixtures('app', 'autologin_user')
65 65 @pytest.mark.backends("git", "hg")
66 66 class TestPullrequestsView(object):
67 67
68 68 def test_index(self, backend):
69 69 self.app.get(route_path(
70 70 'pullrequest_new',
71 71 repo_name=backend.repo_name))
72 72
73 73 def test_option_menu_create_pull_request_exists(self, backend):
74 74 repo_name = backend.repo_name
75 75 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
76 76
77 77 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
78 78 'pullrequest_new', repo_name=repo_name)
79 79 response.mustcontain(create_pr_link)
80 80
81 81 def test_create_pr_form_with_raw_commit_id(self, backend):
82 82 repo = backend.repo
83 83
84 84 self.app.get(
85 85 route_path('pullrequest_new', repo_name=repo.repo_name,
86 86 commit=repo.get_commit().raw_id),
87 87 status=200)
88 88
89 89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 90 @pytest.mark.parametrize('range_diff', ["0", "1"])
91 91 def test_show(self, pr_util, pr_merge_enabled, range_diff):
92 92 pull_request = pr_util.create_pull_request(
93 93 mergeable=pr_merge_enabled, enable_notifications=False)
94 94
95 95 response = self.app.get(route_path(
96 96 'pullrequest_show',
97 97 repo_name=pull_request.target_repo.scm_instance().name,
98 98 pull_request_id=pull_request.pull_request_id,
99 99 params={'range-diff': range_diff}))
100 100
101 101 for commit_id in pull_request.revisions:
102 102 response.mustcontain(commit_id)
103 103
104 104 assert pull_request.target_ref_parts.type in response
105 105 assert pull_request.target_ref_parts.name in response
106 106 target_clone_url = pull_request.target_repo.clone_url()
107 107 assert target_clone_url in response
108 108
109 109 assert 'class="pull-request-merge"' in response
110 110 if pr_merge_enabled:
111 111 response.mustcontain('Pull request reviewer approval is pending')
112 112 else:
113 113 response.mustcontain('Server-side pull request merging is disabled.')
114 114
115 115 if range_diff == "1":
116 116 response.mustcontain('Turn off: Show the diff as commit range')
117 117
118 118 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
119 119 # Logout
120 120 response = self.app.post(
121 121 h.route_path('logout'),
122 122 params={'csrf_token': csrf_token})
123 123 # Login as regular user
124 124 response = self.app.post(h.route_path('login'),
125 125 {'username': TEST_USER_REGULAR_LOGIN,
126 126 'password': 'test12'})
127 127
128 128 pull_request = pr_util.create_pull_request(
129 129 author=TEST_USER_REGULAR_LOGIN)
130 130
131 131 response = self.app.get(route_path(
132 132 'pullrequest_show',
133 133 repo_name=pull_request.target_repo.scm_instance().name,
134 134 pull_request_id=pull_request.pull_request_id))
135 135
136 136 response.mustcontain('Server-side pull request merging is disabled.')
137 137
138 138 assert_response = response.assert_response()
139 139 # a regular user without merge permissions doesn't see it
140 140 assert_response.no_element_exists('#close-pull-request-action')
141 141
142 142 user_util.grant_user_permission_to_repo(
143 143 pull_request.target_repo,
144 144 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
145 145 'repository.write')
146 146 response = self.app.get(route_path(
147 147 'pullrequest_show',
148 148 repo_name=pull_request.target_repo.scm_instance().name,
149 149 pull_request_id=pull_request.pull_request_id))
150 150
151 151 response.mustcontain('Server-side pull request merging is disabled.')
152 152
153 153 assert_response = response.assert_response()
154 154 # now that the regular user has merge permissions, the CLOSE button is shown
155 155 assert_response.one_element_exists('#close-pull-request-action')
156 156
157 157 def test_show_invalid_commit_id(self, pr_util):
158 158 # Simulating invalid revisions which will cause a lookup error
159 159 pull_request = pr_util.create_pull_request()
160 160 pull_request.revisions = ['invalid']
161 161 Session().add(pull_request)
162 162 Session().commit()
163 163
164 164 response = self.app.get(route_path(
165 165 'pullrequest_show',
166 166 repo_name=pull_request.target_repo.scm_instance().name,
167 167 pull_request_id=pull_request.pull_request_id))
168 168
169 169 for commit_id in pull_request.revisions:
170 170 response.mustcontain(commit_id)
171 171
172 172 def test_show_invalid_source_reference(self, pr_util):
173 173 pull_request = pr_util.create_pull_request()
174 174 pull_request.source_ref = 'branch:b:invalid'
175 175 Session().add(pull_request)
176 176 Session().commit()
177 177
178 178 self.app.get(route_path(
179 179 'pullrequest_show',
180 180 repo_name=pull_request.target_repo.scm_instance().name,
181 181 pull_request_id=pull_request.pull_request_id))
182 182
183 183 def test_edit_title_description(self, pr_util, csrf_token):
184 184 pull_request = pr_util.create_pull_request()
185 185 pull_request_id = pull_request.pull_request_id
186 186
187 187 response = self.app.post(
188 188 route_path('pullrequest_update',
189 189 repo_name=pull_request.target_repo.repo_name,
190 190 pull_request_id=pull_request_id),
191 191 params={
192 192 'edit_pull_request': 'true',
193 193 'title': 'New title',
194 194 'description': 'New description',
195 195 'csrf_token': csrf_token})
196 196
197 197 assert_session_flash(
198 198 response, u'Pull request title & description updated.',
199 199 category='success')
200 200
201 201 pull_request = PullRequest.get(pull_request_id)
202 202 assert pull_request.title == 'New title'
203 203 assert pull_request.description == 'New description'
204 204
205 205 def test_edit_title_description_closed(self, pr_util, csrf_token):
206 206 pull_request = pr_util.create_pull_request()
207 207 pull_request_id = pull_request.pull_request_id
208 208 repo_name = pull_request.target_repo.repo_name
209 209 pr_util.close()
210 210
211 211 response = self.app.post(
212 212 route_path('pullrequest_update',
213 213 repo_name=repo_name, pull_request_id=pull_request_id),
214 214 params={
215 215 'edit_pull_request': 'true',
216 216 'title': 'New title',
217 217 'description': 'New description',
218 218 'csrf_token': csrf_token}, status=200)
219 219 assert_session_flash(
220 220 response, u'Cannot update closed pull requests.',
221 221 category='error')
222 222
223 223 def test_update_invalid_source_reference(self, pr_util, csrf_token):
224 224 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
225 225
226 226 pull_request = pr_util.create_pull_request()
227 227 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
228 228 Session().add(pull_request)
229 229 Session().commit()
230 230
231 231 pull_request_id = pull_request.pull_request_id
232 232
233 233 response = self.app.post(
234 234 route_path('pullrequest_update',
235 235 repo_name=pull_request.target_repo.repo_name,
236 236 pull_request_id=pull_request_id),
237 237 params={'update_commits': 'true', 'csrf_token': csrf_token})
238 238
239 239 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
240 240 UpdateFailureReason.MISSING_SOURCE_REF])
241 241 assert_session_flash(response, expected_msg, category='error')
242 242
243 243 def test_missing_target_reference(self, pr_util, csrf_token):
244 244 from rhodecode.lib.vcs.backends.base import MergeFailureReason
245 245 pull_request = pr_util.create_pull_request(
246 246 approved=True, mergeable=True)
247 247 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
248 248 pull_request.target_ref = unicode_reference
249 249 Session().add(pull_request)
250 250 Session().commit()
251 251
252 252 pull_request_id = pull_request.pull_request_id
253 253 pull_request_url = route_path(
254 254 'pullrequest_show',
255 255 repo_name=pull_request.target_repo.repo_name,
256 256 pull_request_id=pull_request_id)
257 257
258 258 response = self.app.get(pull_request_url)
259 259 target_ref_id = 'invalid-branch'
260 260 merge_resp = MergeResponse(
261 261 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
262 262 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
263 263 response.assert_response().element_contains(
264 264 'span[data-role="merge-message"]', merge_resp.merge_status_message)
265 265
266 266 def test_comment_and_close_pull_request_custom_message_approved(
267 267 self, pr_util, csrf_token, xhr_header):
268 268
269 269 pull_request = pr_util.create_pull_request(approved=True)
270 270 pull_request_id = pull_request.pull_request_id
271 271 author = pull_request.user_id
272 272 repo = pull_request.target_repo.repo_id
273 273
274 274 self.app.post(
275 275 route_path('pullrequest_comment_create',
276 276 repo_name=pull_request.target_repo.scm_instance().name,
277 277 pull_request_id=pull_request_id),
278 278 params={
279 279 'close_pull_request': '1',
280 280 'text': 'Closing a PR',
281 281 'csrf_token': csrf_token},
282 282 extra_environ=xhr_header,)
283 283
284 284 journal = UserLog.query()\
285 285 .filter(UserLog.user_id == author)\
286 286 .filter(UserLog.repository_id == repo) \
287 287 .order_by('user_log_id') \
288 288 .all()
289 289 assert journal[-1].action == 'repo.pull_request.close'
290 290
291 291 pull_request = PullRequest.get(pull_request_id)
292 292 assert pull_request.is_closed()
293 293
294 294 status = ChangesetStatusModel().get_status(
295 295 pull_request.source_repo, pull_request=pull_request)
296 296 assert status == ChangesetStatus.STATUS_APPROVED
297 297 comments = ChangesetComment().query() \
298 298 .filter(ChangesetComment.pull_request == pull_request) \
299 299 .order_by(ChangesetComment.comment_id.asc())\
300 300 .all()
301 301 assert comments[-1].text == 'Closing a PR'
302 302
303 303 def test_comment_force_close_pull_request_rejected(
304 304 self, pr_util, csrf_token, xhr_header):
305 305 pull_request = pr_util.create_pull_request()
306 306 pull_request_id = pull_request.pull_request_id
307 307 PullRequestModel().update_reviewers(
308 308 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
309 309 pull_request.author)
310 310 author = pull_request.user_id
311 311 repo = pull_request.target_repo.repo_id
312 312
313 313 self.app.post(
314 314 route_path('pullrequest_comment_create',
315 315 repo_name=pull_request.target_repo.scm_instance().name,
316 316 pull_request_id=pull_request_id),
317 317 params={
318 318 'close_pull_request': '1',
319 319 'csrf_token': csrf_token},
320 320 extra_environ=xhr_header)
321 321
322 322 pull_request = PullRequest.get(pull_request_id)
323 323
324 324 journal = UserLog.query()\
325 325 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
326 326 .order_by('user_log_id') \
327 327 .all()
328 328 assert journal[-1].action == 'repo.pull_request.close'
329 329
330 330 # check only the latest status, not the review status
331 331 status = ChangesetStatusModel().get_status(
332 332 pull_request.source_repo, pull_request=pull_request)
333 333 assert status == ChangesetStatus.STATUS_REJECTED
334 334
335 335 def test_comment_and_close_pull_request(
336 336 self, pr_util, csrf_token, xhr_header):
337 337 pull_request = pr_util.create_pull_request()
338 338 pull_request_id = pull_request.pull_request_id
339 339
340 340 response = self.app.post(
341 341 route_path('pullrequest_comment_create',
342 342 repo_name=pull_request.target_repo.scm_instance().name,
343 343 pull_request_id=pull_request.pull_request_id),
344 344 params={
345 345 'close_pull_request': 'true',
346 346 'csrf_token': csrf_token},
347 347 extra_environ=xhr_header)
348 348
349 349 assert response.json
350 350
351 351 pull_request = PullRequest.get(pull_request_id)
352 352 assert pull_request.is_closed()
353 353
354 354 # check only the latest status, not the review status
355 355 status = ChangesetStatusModel().get_status(
356 356 pull_request.source_repo, pull_request=pull_request)
357 357 assert status == ChangesetStatus.STATUS_REJECTED
358 358
359 359 def test_create_pull_request(self, backend, csrf_token):
360 360 commits = [
361 361 {'message': 'ancestor'},
362 362 {'message': 'change'},
363 363 {'message': 'change2'},
364 364 ]
365 365 commit_ids = backend.create_master_repo(commits)
366 366 target = backend.create_repo(heads=['ancestor'])
367 367 source = backend.create_repo(heads=['change2'])
368 368
369 369 response = self.app.post(
370 370 route_path('pullrequest_create', repo_name=source.repo_name),
371 371 [
372 372 ('source_repo', source.repo_name),
373 373 ('source_ref', 'branch:default:' + commit_ids['change2']),
374 374 ('target_repo', target.repo_name),
375 375 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
376 376 ('common_ancestor', commit_ids['ancestor']),
377 377 ('pullrequest_title', 'Title'),
378 378 ('pullrequest_desc', 'Description'),
379 379 ('description_renderer', 'markdown'),
380 380 ('__start__', 'review_members:sequence'),
381 381 ('__start__', 'reviewer:mapping'),
382 382 ('user_id', '1'),
383 383 ('__start__', 'reasons:sequence'),
384 384 ('reason', 'Some reason'),
385 385 ('__end__', 'reasons:sequence'),
386 386 ('__start__', 'rules:sequence'),
387 387 ('__end__', 'rules:sequence'),
388 388 ('mandatory', 'False'),
389 389 ('__end__', 'reviewer:mapping'),
390 390 ('__end__', 'review_members:sequence'),
391 391 ('__start__', 'revisions:sequence'),
392 392 ('revisions', commit_ids['change']),
393 393 ('revisions', commit_ids['change2']),
394 394 ('__end__', 'revisions:sequence'),
395 395 ('user', ''),
396 396 ('csrf_token', csrf_token),
397 397 ],
398 398 status=302)
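# The '__start__'/'__end__' pairs in the payload above appear to follow the
# peppercorn-style form serialization convention: they delimit the
# 'review_members' sequence and each nested 'reviewer' mapping so the server
# can rebuild the reviewer structure from the flat form fields.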
399 399
400 400 location = response.headers['Location']
401 401 pull_request_id = location.rsplit('/', 1)[1]
402 402 assert pull_request_id != 'new'
403 403 pull_request = PullRequest.get(int(pull_request_id))
404 404
405 405 # check that we now have both revisions
406 406 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
407 407 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
408 408 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
409 409 assert pull_request.target_ref == expected_target_ref
410 410
411 411 def test_reviewer_notifications(self, backend, csrf_token):
412 412 # We have to use app.post for this test so that the notifications are
413 413 # created properly along with the new PR
414 414 commits = [
415 415 {'message': 'ancestor',
416 416 'added': [FileNode('file_A', content='content_of_ancestor')]},
417 417 {'message': 'change',
418 418 'added': [FileNode('file_a', content='content_of_change')]},
419 419 {'message': 'change-child'},
420 420 {'message': 'ancestor-child', 'parents': ['ancestor'],
421 421 'added': [
422 422 FileNode('file_B', content='content_of_ancestor_child')]},
423 423 {'message': 'ancestor-child-2'},
424 424 ]
425 425 commit_ids = backend.create_master_repo(commits)
426 426 target = backend.create_repo(heads=['ancestor-child'])
427 427 source = backend.create_repo(heads=['change'])
428 428
429 429 response = self.app.post(
430 430 route_path('pullrequest_create', repo_name=source.repo_name),
431 431 [
432 432 ('source_repo', source.repo_name),
433 433 ('source_ref', 'branch:default:' + commit_ids['change']),
434 434 ('target_repo', target.repo_name),
435 435 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
436 436 ('common_ancestor', commit_ids['ancestor']),
437 437 ('pullrequest_title', 'Title'),
438 438 ('pullrequest_desc', 'Description'),
439 439 ('description_renderer', 'markdown'),
440 440 ('__start__', 'review_members:sequence'),
441 441 ('__start__', 'reviewer:mapping'),
442 442 ('user_id', '2'),
443 443 ('__start__', 'reasons:sequence'),
444 444 ('reason', 'Some reason'),
445 445 ('__end__', 'reasons:sequence'),
446 446 ('__start__', 'rules:sequence'),
447 447 ('__end__', 'rules:sequence'),
448 448 ('mandatory', 'False'),
449 449 ('__end__', 'reviewer:mapping'),
450 450 ('__end__', 'review_members:sequence'),
451 451 ('__start__', 'revisions:sequence'),
452 452 ('revisions', commit_ids['change']),
453 453 ('__end__', 'revisions:sequence'),
454 454 ('user', ''),
455 455 ('csrf_token', csrf_token),
456 456 ],
457 457 status=302)
458 458
459 459 location = response.headers['Location']
460 460
461 461 pull_request_id = location.rsplit('/', 1)[1]
462 462 assert pull_request_id != 'new'
463 463 pull_request = PullRequest.get(int(pull_request_id))
464 464
465 465 # Check that a notification was made
466 466 notifications = Notification.query()\
467 467 .filter(Notification.created_by == pull_request.author.user_id,
468 468 Notification.type_ == Notification.TYPE_PULL_REQUEST,
469 469 Notification.subject.contains(
470 470 "wants you to review pull request #%s" % pull_request_id))
471 471 assert len(notifications.all()) == 1
472 472
473 473 # Change reviewers and check that a notification was made
474 474 PullRequestModel().update_reviewers(
475 475 pull_request.pull_request_id, [(1, [], False, [])],
476 476 pull_request.author)
477 477 assert len(notifications.all()) == 2
478 478
479 479 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
480 480 csrf_token):
481 481 commits = [
482 482 {'message': 'ancestor',
483 483 'added': [FileNode('file_A', content='content_of_ancestor')]},
484 484 {'message': 'change',
485 485 'added': [FileNode('file_a', content='content_of_change')]},
486 486 {'message': 'change-child'},
487 487 {'message': 'ancestor-child', 'parents': ['ancestor'],
488 488 'added': [
489 489 FileNode('file_B', content='content_of_ancestor_child')]},
490 490 {'message': 'ancestor-child-2'},
491 491 ]
492 492 commit_ids = backend.create_master_repo(commits)
493 493 target = backend.create_repo(heads=['ancestor-child'])
494 494 source = backend.create_repo(heads=['change'])
495 495
496 496 response = self.app.post(
497 497 route_path('pullrequest_create', repo_name=source.repo_name),
498 498 [
499 499 ('source_repo', source.repo_name),
500 500 ('source_ref', 'branch:default:' + commit_ids['change']),
501 501 ('target_repo', target.repo_name),
502 502 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
503 503 ('common_ancestor', commit_ids['ancestor']),
504 504 ('pullrequest_title', 'Title'),
505 505 ('pullrequest_desc', 'Description'),
506 506 ('description_renderer', 'markdown'),
507 507 ('__start__', 'review_members:sequence'),
508 508 ('__start__', 'reviewer:mapping'),
509 509 ('user_id', '1'),
510 510 ('__start__', 'reasons:sequence'),
511 511 ('reason', 'Some reason'),
512 512 ('__end__', 'reasons:sequence'),
513 513 ('__start__', 'rules:sequence'),
514 514 ('__end__', 'rules:sequence'),
515 515 ('mandatory', 'False'),
516 516 ('__end__', 'reviewer:mapping'),
517 517 ('__end__', 'review_members:sequence'),
518 518 ('__start__', 'revisions:sequence'),
519 519 ('revisions', commit_ids['change']),
520 520 ('__end__', 'revisions:sequence'),
521 521 ('user', ''),
522 522 ('csrf_token', csrf_token),
523 523 ],
524 524 status=302)
525 525
526 526 location = response.headers['Location']
527 527
528 528 pull_request_id = location.rsplit('/', 1)[1]
529 529 assert pull_request_id != 'new'
530 530 pull_request = PullRequest.get(int(pull_request_id))
531 531
532 532 # target_ref has to point to the ancestor's commit_id in order to
533 533 # show the correct diff
534 534 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
535 535 assert pull_request.target_ref == expected_target_ref
536 536
537 537 # Check generated diff contents
538 538 response = response.follow()
539 539 assert 'content_of_ancestor' not in response.body
540 540 assert 'content_of_ancestor-child' not in response.body
541 541 assert 'content_of_change' in response.body
542 542
543 543 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
544 544 # Clear any previous calls to rcextensions
545 545 rhodecode.EXTENSIONS.calls.clear()
546 546
547 547 pull_request = pr_util.create_pull_request(
548 548 approved=True, mergeable=True)
549 549 pull_request_id = pull_request.pull_request_id
550 550 repo_name = pull_request.target_repo.scm_instance().name,
551 551
552 552 response = self.app.post(
553 553 route_path('pullrequest_merge',
554 554 repo_name=str(repo_name[0]),
555 555 pull_request_id=pull_request_id),
556 556 params={'csrf_token': csrf_token}).follow()
557 557
558 558 pull_request = PullRequest.get(pull_request_id)
559 559
560 560 assert response.status_int == 200
561 561 assert pull_request.is_closed()
562 562 assert_pull_request_status(
563 563 pull_request, ChangesetStatus.STATUS_APPROVED)
564 564
565 565 # Check the relevant log entries were added
566 566 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
567 567 actions = [log.action for log in user_logs]
568 568 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
569 569 expected_actions = [
570 570 u'repo.pull_request.close',
571 571 u'repo.pull_request.merge',
572 572 u'repo.pull_request.comment.create'
573 573 ]
574 574 assert actions == expected_actions
575 575
576 576 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
577 577 actions = [log for log in user_logs]
578 578 assert actions[-1].action == 'user.push'
579 579 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
580 580
581 581 # Check post_push rcextension was really executed
582 582 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
583 583 assert len(push_calls) == 1
584 584 unused_last_call_args, last_call_kwargs = push_calls[0]
585 585 assert last_call_kwargs['action'] == 'push'
586 586 assert last_call_kwargs['commit_ids'] == pr_commit_ids
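# (rhodecode.EXTENSIONS.calls records each hook invocation as an
# (args, kwargs) pair, which is why the capture is cleared at the start of
# this test before the merge triggers the push hook)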
587 587
588 588 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
589 589 pull_request = pr_util.create_pull_request(mergeable=False)
590 590 pull_request_id = pull_request.pull_request_id
591 591 pull_request = PullRequest.get(pull_request_id)
592 592
593 593 response = self.app.post(
594 594 route_path('pullrequest_merge',
595 595 repo_name=pull_request.target_repo.scm_instance().name,
596 596 pull_request_id=pull_request.pull_request_id),
597 597 params={'csrf_token': csrf_token}).follow()
598 598
599 599 assert response.status_int == 200
600 600 response.mustcontain(
601 601 'Merge is not currently possible because of below failed checks.')
602 602 response.mustcontain('Server-side pull request merging is disabled.')
603 603
604 604 @pytest.mark.skip_backends('svn')
605 605 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
606 606 pull_request = pr_util.create_pull_request(mergeable=True)
607 607 pull_request_id = pull_request.pull_request_id
608 608 repo_name = pull_request.target_repo.scm_instance().name
609 609
610 610 response = self.app.post(
611 611 route_path('pullrequest_merge',
612 612 repo_name=repo_name, pull_request_id=pull_request_id),
613 613 params={'csrf_token': csrf_token}).follow()
614 614
615 615 assert response.status_int == 200
616 616
617 617 response.mustcontain(
618 618 'Merge is not currently possible because of below failed checks.')
619 619 response.mustcontain('Pull request reviewer approval is pending.')
620 620
621 621 def test_merge_pull_request_renders_failure_reason(
622 622 self, user_regular, csrf_token, pr_util):
623 623 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
624 624 pull_request_id = pull_request.pull_request_id
625 625 repo_name = pull_request.target_repo.scm_instance().name
626 626
627 627 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
628 628 MergeFailureReason.PUSH_FAILED,
629 629 metadata={'target': 'shadow repo',
630 630 'merge_commit': 'xxx'})
631 631 model_patcher = mock.patch.multiple(
632 632 PullRequestModel,
633 633 merge_repo=mock.Mock(return_value=merge_resp),
634 634 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
635 635
636 636 with model_patcher:
637 637 response = self.app.post(
638 638 route_path('pullrequest_merge',
639 639 repo_name=repo_name,
640 640 pull_request_id=pull_request_id),
641 641 params={'csrf_token': csrf_token}, status=302)
642 642
643 643 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
644 644 metadata={'target': 'shadow repo',
645 645 'merge_commit': 'xxx'})
646 646 assert_session_flash(response, merge_resp.merge_status_message)
647 647
648 648 def test_update_source_revision(self, backend, csrf_token):
649 649 commits = [
650 650 {'message': 'ancestor'},
651 651 {'message': 'change'},
652 652 {'message': 'change-2'},
653 653 ]
654 654 commit_ids = backend.create_master_repo(commits)
655 655 target = backend.create_repo(heads=['ancestor'])
656 656 source = backend.create_repo(heads=['change'])
657 657
658 658 # create a PR from the 'change' commit in source to the 'ancestor' commit in target
659 659 pull_request = PullRequest()
660 660
661 661 pull_request.source_repo = source
662 662 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
663 663 branch=backend.default_branch_name, commit_id=commit_ids['change'])
664 664
665 665 pull_request.target_repo = target
666 666 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
667 667 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
668 668
669 669 pull_request.revisions = [commit_ids['change']]
670 670 pull_request.title = u"Test"
671 671 pull_request.description = u"Description"
672 672 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
673 673 pull_request.pull_request_state = PullRequest.STATE_CREATED
674 674 Session().add(pull_request)
675 675 Session().commit()
676 676 pull_request_id = pull_request.pull_request_id
677 677
678 678 # source has ancestor - change - change-2
679 679 backend.pull_heads(source, heads=['change-2'])
680 680
681 681 # update PR
682 682 self.app.post(
683 683 route_path('pullrequest_update',
684 684 repo_name=target.repo_name, pull_request_id=pull_request_id),
685 685 params={'update_commits': 'true', 'csrf_token': csrf_token})
686 686
687 687 response = self.app.get(
688 688 route_path('pullrequest_show',
689 689 repo_name=target.repo_name,
690 690 pull_request_id=pull_request.pull_request_id))
691 691
692 692 assert response.status_int == 200
693 693 assert 'Pull request updated to' in response.body
694 694 assert 'with 1 added, 0 removed commits.' in response.body
695 695
696 696 # check that we now have both revisions
697 697 pull_request = PullRequest.get(pull_request_id)
698 698 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
699 699
700 700 def test_update_target_revision(self, backend, csrf_token):
701 701 commits = [
702 702 {'message': 'ancestor'},
703 703 {'message': 'change'},
704 704 {'message': 'ancestor-new', 'parents': ['ancestor']},
705 705 {'message': 'change-rebased'},
706 706 ]
707 707 commit_ids = backend.create_master_repo(commits)
708 708 target = backend.create_repo(heads=['ancestor'])
709 709 source = backend.create_repo(heads=['change'])
710 710
711 711 # create a PR from the 'change' commit in source to the 'ancestor' commit in target
712 712 pull_request = PullRequest()
713 713
714 714 pull_request.source_repo = source
715 715 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
716 716 branch=backend.default_branch_name, commit_id=commit_ids['change'])
717 717
718 718 pull_request.target_repo = target
719 719 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
720 720 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
721 721
722 722 pull_request.revisions = [commit_ids['change']]
723 723 pull_request.title = u"Test"
724 724 pull_request.description = u"Description"
725 725 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
726 726 pull_request.pull_request_state = PullRequest.STATE_CREATED
727 727
728 728 Session().add(pull_request)
729 729 Session().commit()
730 730 pull_request_id = pull_request.pull_request_id
731 731
732 732 # target has ancestor - ancestor-new
733 733 # source has ancestor - ancestor-new - change-rebased
734 734 backend.pull_heads(target, heads=['ancestor-new'])
735 735 backend.pull_heads(source, heads=['change-rebased'])
736 736
737 737 # update PR
738 738 self.app.post(
739 739 route_path('pullrequest_update',
740 740 repo_name=target.repo_name,
741 741 pull_request_id=pull_request_id),
742 742 params={'update_commits': 'true', 'csrf_token': csrf_token},
743 743 status=200)
744 744
745 745 # check that we now have both revisions
746 746 pull_request = PullRequest.get(pull_request_id)
747 747 assert pull_request.revisions == [commit_ids['change-rebased']]
748 748 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
749 749 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
750 750
751 751 response = self.app.get(
752 752 route_path('pullrequest_show',
753 753 repo_name=target.repo_name,
754 754 pull_request_id=pull_request.pull_request_id))
755 755 assert response.status_int == 200
756 756 assert 'Pull request updated to' in response.body
757 757 assert 'with 1 added, 1 removed commits.' in response.body
758 758
759 759 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
760 760 backend = backend_git
761 761 commits = [
762 762 {'message': 'master-commit-1'},
763 763 {'message': 'master-commit-2-change-1'},
764 764 {'message': 'master-commit-3-change-2'},
765 765
766 766 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
767 767 {'message': 'feat-commit-2'},
768 768 ]
769 769 commit_ids = backend.create_master_repo(commits)
770 770 target = backend.create_repo(heads=['master-commit-3-change-2'])
771 771 source = backend.create_repo(heads=['feat-commit-2'])
772 772
773 773 # create a PR from source to target (refs are set explicitly below)
774 774 pull_request = PullRequest()
775 775 pull_request.source_repo = source
776 776
777 777 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
778 778 branch=backend.default_branch_name,
779 779 commit_id=commit_ids['master-commit-3-change-2'])
780 780
781 781 pull_request.target_repo = target
782 782 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
783 783 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
784 784
785 785 pull_request.revisions = [
786 786 commit_ids['feat-commit-1'],
787 787 commit_ids['feat-commit-2']
788 788 ]
789 789 pull_request.title = u"Test"
790 790 pull_request.description = u"Description"
791 791 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
792 792 pull_request.pull_request_state = PullRequest.STATE_CREATED
793 793 Session().add(pull_request)
794 794 Session().commit()
795 795 pull_request_id = pull_request.pull_request_id
796 796
797 797 # The PR is created; now we simulate a force-push into the target
798 798 # repo that drops the last 2 commits
799 799 vcsrepo = target.scm_instance()
800 800 vcsrepo.config.clear_section('hooks')
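# clearing the 'hooks' config section appears to disable repo-level VCS
# hooks, so the direct history rewrite below does not trigger them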
801 801 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
802 802
803 803 # update PR
804 804 self.app.post(
805 805 route_path('pullrequest_update',
806 806 repo_name=target.repo_name,
807 807 pull_request_id=pull_request_id),
808 808 params={'update_commits': 'true', 'csrf_token': csrf_token},
809 809 status=200)
810 810
811 811 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
812 812 assert response.status_int == 200
813 813 response.mustcontain('Pull request updated to')
814 814 response.mustcontain('with 0 added, 0 removed commits.')
815 815
816 816 def test_update_of_ancestor_reference(self, backend, csrf_token):
817 817 commits = [
818 818 {'message': 'ancestor'},
819 819 {'message': 'change'},
820 820 {'message': 'change-2'},
821 821 {'message': 'ancestor-new', 'parents': ['ancestor']},
822 822 {'message': 'change-rebased'},
823 823 ]
824 824 commit_ids = backend.create_master_repo(commits)
825 825 target = backend.create_repo(heads=['ancestor'])
826 826 source = backend.create_repo(heads=['change'])
827 827
828 828 # create a PR from the 'change' commit in source to the 'ancestor' commit in target
829 829 pull_request = PullRequest()
830 830 pull_request.source_repo = source
831 831
832 832 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
833 833 branch=backend.default_branch_name, commit_id=commit_ids['change'])
834 834 pull_request.target_repo = target
835 835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
836 836 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
837 837 pull_request.revisions = [commit_ids['change']]
838 838 pull_request.title = u"Test"
839 839 pull_request.description = u"Description"
840 840 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
841 841 pull_request.pull_request_state = PullRequest.STATE_CREATED
842 842 Session().add(pull_request)
843 843 Session().commit()
844 844 pull_request_id = pull_request.pull_request_id
845 845
846 846 # target has ancestor - ancestor-new
847 847 # source has ancestor - ancestor-new - change-rebased
848 848 backend.pull_heads(target, heads=['ancestor-new'])
849 849 backend.pull_heads(source, heads=['change-rebased'])
850 850
851 851 # update PR
852 852 self.app.post(
853 853 route_path('pullrequest_update',
854 854 repo_name=target.repo_name, pull_request_id=pull_request_id),
855 855 params={'update_commits': 'true', 'csrf_token': csrf_token},
856 856 status=200)
857 857
858 858 # Expect the target reference to be updated correctly
859 859 pull_request = PullRequest.get(pull_request_id)
860 860 assert pull_request.revisions == [commit_ids['change-rebased']]
861 861 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
862 862 branch=backend.default_branch_name,
863 863 commit_id=commit_ids['ancestor-new'])
864 864 assert pull_request.target_ref == expected_target_ref
865 865
866 866 def test_remove_pull_request_branch(self, backend_git, csrf_token):
867 867 branch_name = 'development'
868 868 commits = [
869 869 {'message': 'initial-commit'},
870 870 {'message': 'old-feature'},
871 871 {'message': 'new-feature', 'branch': branch_name},
872 872 ]
873 873 repo = backend_git.create_repo(commits)
874 874 commit_ids = backend_git.commit_ids
875 875
876 876 pull_request = PullRequest()
877 877 pull_request.source_repo = repo
878 878 pull_request.target_repo = repo
879 879 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
880 880 branch=branch_name, commit_id=commit_ids['new-feature'])
881 881 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
882 882 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
883 883 pull_request.revisions = [commit_ids['new-feature']]
884 884 pull_request.title = u"Test"
885 885 pull_request.description = u"Description"
886 886 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
887 887 pull_request.pull_request_state = PullRequest.STATE_CREATED
888 888 Session().add(pull_request)
889 889 Session().commit()
890 890
891 891 vcs = repo.scm_instance()
892 892 vcs.remove_ref('refs/heads/{}'.format(branch_name))
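# removing the branch ref makes the PR's 'new-feature' commit unreachable
# in the source repository, so the show view is expected to render the
# 'Missing commits' warning asserted below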
893 893
894 894 response = self.app.get(route_path(
895 895 'pullrequest_show',
896 896 repo_name=repo.repo_name,
897 897 pull_request_id=pull_request.pull_request_id))
898 898
899 899 assert response.status_int == 200
900 900
901 901 response.assert_response().element_contains(
902 902 '#changeset_compare_view_content .alert strong',
903 903 'Missing commits')
904 904 response.assert_response().element_contains(
905 905 '#changeset_compare_view_content .alert',
906 906 'This pull request cannot be displayed, because one or more'
907 907 ' commits no longer exist in the source repository.')
908 908
909 909 def test_strip_commits_from_pull_request(
910 910 self, backend, pr_util, csrf_token):
911 911 commits = [
912 912 {'message': 'initial-commit'},
913 913 {'message': 'old-feature'},
914 914 {'message': 'new-feature', 'parents': ['initial-commit']},
915 915 ]
916 916 pull_request = pr_util.create_pull_request(
917 917 commits, target_head='initial-commit', source_head='new-feature',
918 918 revisions=['new-feature'])
919 919
920 920 vcs = pr_util.source_repository.scm_instance()
921 921 if backend.alias == 'git':
922 922 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
923 923 else:
924 924 vcs.strip(pr_util.commit_ids['new-feature'])
925 925
926 926 response = self.app.get(route_path(
927 927 'pullrequest_show',
928 928 repo_name=pr_util.target_repository.repo_name,
929 929 pull_request_id=pull_request.pull_request_id))
930 930
931 931 assert response.status_int == 200
932 932
933 933 response.assert_response().element_contains(
934 934 '#changeset_compare_view_content .alert strong',
935 935 'Missing commits')
936 936 response.assert_response().element_contains(
937 937 '#changeset_compare_view_content .alert',
938 938 'This pull request cannot be displayed, because one or more'
939 939 ' commits no longer exist in the source repository.')
940 940 response.assert_response().element_contains(
941 941 '#update_commits',
942 942 'Update commits')
943 943
944 944 def test_strip_commits_and_update(
945 945 self, backend, pr_util, csrf_token):
946 946 commits = [
947 947 {'message': 'initial-commit'},
948 948 {'message': 'old-feature'},
949 949 {'message': 'new-feature', 'parents': ['old-feature']},
950 950 ]
951 951 pull_request = pr_util.create_pull_request(
952 952 commits, target_head='old-feature', source_head='new-feature',
953 953 revisions=['new-feature'], mergeable=True)
954 954
955 955 vcs = pr_util.source_repository.scm_instance()
956 956 if backend.alias == 'git':
957 957 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
958 958 else:
959 959 vcs.strip(pr_util.commit_ids['new-feature'])
960 960
961 961 response = self.app.post(
962 962 route_path('pullrequest_update',
963 963 repo_name=pull_request.target_repo.repo_name,
964 964 pull_request_id=pull_request.pull_request_id),
965 965 params={'update_commits': 'true',
966 966 'csrf_token': csrf_token})
967 967
968 968 assert response.status_int == 200
969 969 assert response.body == 'true'
970 970
971 971 # Make sure that after update, it won't raise 500 errors
972 972 response = self.app.get(route_path(
973 973 'pullrequest_show',
974 974 repo_name=pr_util.target_repository.repo_name,
975 975 pull_request_id=pull_request.pull_request_id))
976 976
977 977 assert response.status_int == 200
978 978 response.assert_response().element_contains(
979 979 '#changeset_compare_view_content .alert strong',
980 980 'Missing commits')
981 981
982 982 def test_branch_is_a_link(self, pr_util):
983 983 pull_request = pr_util.create_pull_request()
984 984 pull_request.source_ref = 'branch:origin:1234567890abcdef'
985 985 pull_request.target_ref = 'branch:target:abcdef1234567890'
986 986 Session().add(pull_request)
987 987 Session().commit()
988 988
989 989 response = self.app.get(route_path(
990 990 'pullrequest_show',
991 991 repo_name=pull_request.target_repo.scm_instance().name,
992 992 pull_request_id=pull_request.pull_request_id))
993 993 assert response.status_int == 200
994 994
995 995 origin = response.assert_response().get_element('.pr-origininfo .tag')
996 996 origin_children = origin.getchildren()
997 997 assert len(origin_children) == 1
998 998 target = response.assert_response().get_element('.pr-targetinfo .tag')
999 999 target_children = target.getchildren()
1000 1000 assert len(target_children) == 1
1001 1001
1002 1002 expected_origin_link = route_path(
1003 1003 'repo_commits',
1004 1004 repo_name=pull_request.source_repo.scm_instance().name,
1005 1005 params=dict(branch='origin'))
1006 1006 expected_target_link = route_path(
1007 1007 'repo_commits',
1008 1008 repo_name=pull_request.target_repo.scm_instance().name,
1009 1009 params=dict(branch='target'))
1010 1010 assert origin_children[0].attrib['href'] == expected_origin_link
1011 1011 assert origin_children[0].text == 'branch: origin'
1012 1012 assert target_children[0].attrib['href'] == expected_target_link
1013 1013 assert target_children[0].text == 'branch: target'
1014 1014
1015 1015 def test_bookmark_is_not_a_link(self, pr_util):
1016 1016 pull_request = pr_util.create_pull_request()
1017 1017 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1018 1018 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1019 1019 Session().add(pull_request)
1020 1020 Session().commit()
1021 1021
1022 1022 response = self.app.get(route_path(
1023 1023 'pullrequest_show',
1024 1024 repo_name=pull_request.target_repo.scm_instance().name,
1025 1025 pull_request_id=pull_request.pull_request_id))
1026 1026 assert response.status_int == 200
1027 1027
1028 1028 origin = response.assert_response().get_element('.pr-origininfo .tag')
1029 1029 assert origin.text.strip() == 'bookmark: origin'
1030 1030 assert origin.getchildren() == []
1031 1031
1032 1032 target = response.assert_response().get_element('.pr-targetinfo .tag')
1033 1033 assert target.text.strip() == 'bookmark: target'
1034 1034 assert target.getchildren() == []
1035 1035
1036 1036 def test_tag_is_not_a_link(self, pr_util):
1037 1037 pull_request = pr_util.create_pull_request()
1038 1038 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1039 1039 pull_request.target_ref = 'tag:target:abcdef1234567890'
1040 1040 Session().add(pull_request)
1041 1041 Session().commit()
1042 1042
1043 1043 response = self.app.get(route_path(
1044 1044 'pullrequest_show',
1045 1045 repo_name=pull_request.target_repo.scm_instance().name,
1046 1046 pull_request_id=pull_request.pull_request_id))
1047 1047 assert response.status_int == 200
1048 1048
1049 1049 origin = response.assert_response().get_element('.pr-origininfo .tag')
1050 1050 assert origin.text.strip() == 'tag: origin'
1051 1051 assert origin.getchildren() == []
1052 1052
1053 1053 target = response.assert_response().get_element('.pr-targetinfo .tag')
1054 1054 assert target.text.strip() == 'tag: target'
1055 1055 assert target.getchildren() == []
1056 1056
1057 1057 @pytest.mark.parametrize('mergeable', [True, False])
1058 1058 def test_shadow_repository_link(
1059 1059 self, mergeable, pr_util, http_host_only_stub):
1060 1060 """
1061 1061 Check that the pull request summary page displays a link to the shadow
1062 1062 repository if the pull request is mergeable. If it is not mergeable
1063 1063 the link should not be displayed.
1064 1064 """
1065 1065 pull_request = pr_util.create_pull_request(
1066 1066 mergeable=mergeable, enable_notifications=False)
1067 1067 target_repo = pull_request.target_repo.scm_instance()
1068 1068 pr_id = pull_request.pull_request_id
1069 1069 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1070 1070 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1071 1071
1072 1072 response = self.app.get(route_path(
1073 1073 'pullrequest_show',
1074 1074 repo_name=target_repo.name,
1075 1075 pull_request_id=pr_id))
1076 1076
1077 1077 if mergeable:
1078 1078 response.assert_response().element_value_contains(
1079 1079 'input.pr-mergeinfo', shadow_url)
1080 1080 response.assert_response().element_value_contains(
1081 1081 'input.pr-mergeinfo ', 'pr-merge')
1082 1082 else:
1083 1083 response.assert_response().no_element_exists('.pr-mergeinfo')
1084 1084
1085 1085
1086 1086 @pytest.mark.usefixtures('app')
1087 1087 @pytest.mark.backends("git", "hg")
1088 1088 class TestPullrequestsControllerDelete(object):
1089 1089 def test_pull_request_delete_button_permissions_admin(
1090 1090 self, autologin_user, user_admin, pr_util):
1091 1091 pull_request = pr_util.create_pull_request(
1092 1092 author=user_admin.username, enable_notifications=False)
1093 1093
1094 1094 response = self.app.get(route_path(
1095 1095 'pullrequest_show',
1096 1096 repo_name=pull_request.target_repo.scm_instance().name,
1097 1097 pull_request_id=pull_request.pull_request_id))
1098 1098
1099 1099 response.mustcontain('id="delete_pullrequest"')
1100 1100 response.mustcontain('Confirm to delete this pull request')
1101 1101
1102 1102 def test_pull_request_delete_button_permissions_owner(
1103 1103 self, autologin_regular_user, user_regular, pr_util):
1104 1104 pull_request = pr_util.create_pull_request(
1105 1105 author=user_regular.username, enable_notifications=False)
1106 1106
1107 1107 response = self.app.get(route_path(
1108 1108 'pullrequest_show',
1109 1109 repo_name=pull_request.target_repo.scm_instance().name,
1110 1110 pull_request_id=pull_request.pull_request_id))
1111 1111
1112 1112 response.mustcontain('id="delete_pullrequest"')
1113 1113 response.mustcontain('Confirm to delete this pull request')
1114 1114
1115 1115 def test_pull_request_delete_button_permissions_forbidden(
1116 1116 self, autologin_regular_user, user_regular, user_admin, pr_util):
1117 1117 pull_request = pr_util.create_pull_request(
1118 1118 author=user_admin.username, enable_notifications=False)
1119 1119
1120 1120 response = self.app.get(route_path(
1121 1121 'pullrequest_show',
1122 1122 repo_name=pull_request.target_repo.scm_instance().name,
1123 1123 pull_request_id=pull_request.pull_request_id))
1124 1124 response.mustcontain(no=['id="delete_pullrequest"'])
1125 1125 response.mustcontain(no=['Confirm to delete this pull request'])
1126 1126
1127 1127 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1128 1128 self, autologin_regular_user, user_regular, user_admin, pr_util,
1129 1129 user_util):
1130 1130
1131 1131 pull_request = pr_util.create_pull_request(
1132 1132 author=user_admin.username, enable_notifications=False)
1133 1133
1134 1134 user_util.grant_user_permission_to_repo(
1135 1135 pull_request.target_repo, user_regular,
1136 1136 'repository.write')
1137 1137
1138 1138 response = self.app.get(route_path(
1139 1139 'pullrequest_show',
1140 1140 repo_name=pull_request.target_repo.scm_instance().name,
1141 1141 pull_request_id=pull_request.pull_request_id))
1142 1142
1143 1143 response.mustcontain('id="open_edit_pullrequest"')
1144 1144 response.mustcontain('id="delete_pullrequest"')
1145 1145 response.mustcontain(no=['Confirm to delete this pull request'])
1146 1146
1147 1147 def test_delete_comment_returns_404_if_comment_does_not_exist(
1148 1148 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1149 1149
1150 1150 pull_request = pr_util.create_pull_request(
1151 1151 author=user_admin.username, enable_notifications=False)
1152 1152
1153 1153 self.app.post(
1154 1154 route_path(
1155 1155 'pullrequest_comment_delete',
1156 1156 repo_name=pull_request.target_repo.scm_instance().name,
1157 1157 pull_request_id=pull_request.pull_request_id,
1158 1158 comment_id=1024404),
1159 1159 extra_environ=xhr_header,
1160 1160 params={'csrf_token': csrf_token},
1161 1161 status=404
1162 1162 )
1163 1163
1164 1164 def test_delete_comment(
1165 1165 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1166 1166
1167 1167 pull_request = pr_util.create_pull_request(
1168 1168 author=user_admin.username, enable_notifications=False)
1169 1169 comment = pr_util.create_comment()
1170 1170 comment_id = comment.comment_id
1171 1171
1172 1172 response = self.app.post(
1173 1173 route_path(
1174 1174 'pullrequest_comment_delete',
1175 1175 repo_name=pull_request.target_repo.scm_instance().name,
1176 1176 pull_request_id=pull_request.pull_request_id,
1177 1177 comment_id=comment_id),
1178 1178 extra_environ=xhr_header,
1179 1179 params={'csrf_token': csrf_token},
1180 1180 status=200
1181 1181 )
1182 1182 assert response.body == 'true'
1183 1183
1184 1184 @pytest.mark.parametrize('url_type', [
1185 1185 'pullrequest_new',
1186 1186 'pullrequest_create',
1187 1187 'pullrequest_update',
1188 1188 'pullrequest_merge',
1189 1189 ])
1190 1190 def test_pull_request_is_forbidden_on_archived_repo(
1191 1191 self, autologin_user, backend, xhr_header, user_util, url_type):
1192 1192
1193 1193 # create a temporary repo
1194 1194 source = user_util.create_repo(repo_type=backend.alias)
1195 1195 repo_name = source.repo_name
1196 1196 repo = Repository.get_by_repo_name(repo_name)
1197 1197 repo.archived = True
1198 1198 Session().commit()
1199 1199
1200 1200 response = self.app.get(
1201 1201 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1202 1202
1203 1203 msg = 'Action not supported for archived repository.'
1204 1204 assert_session_flash(response, msg)
1205 1205
1206 1206
1207 1207 def assert_pull_request_status(pull_request, expected_status):
1208 1208 status = ChangesetStatusModel().calculated_review_status(
1209 1209 pull_request=pull_request)
1210 1210 assert status == expected_status
1211 1211
1212 1212
1213 1213 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1214 1214 @pytest.mark.usefixtures("autologin_user")
1215 1215 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1216 1216 response = app.get(
1217 1217 route_path(route, repo_name=backend_svn.repo_name), status=404)
1218 1218
@@ -1,523 +1,524 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import re
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
27 27 from rhodecode.lib import helpers as h
28 28 from rhodecode.lib.compat import OrderedDict
29 29 from rhodecode.lib.utils2 import AttributeDict, safe_str
30 30 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
31 31 from rhodecode.model.db import Repository
32 32 from rhodecode.model.meta import Session
33 33 from rhodecode.model.repo import RepoModel
34 34 from rhodecode.model.scm import ScmModel
35 35 from rhodecode.tests import assert_session_flash
36 36 from rhodecode.tests.fixture import Fixture
37 37 from rhodecode.tests.utils import AssertResponse, repo_on_filesystem
38 38
39 39
40 40 fixture = Fixture()
41 41
42 42
43 43 def route_path(name, params=None, **kwargs):
44 44 import urllib
45 45
46 46 base_url = {
47 47 'repo_summary': '/{repo_name}',
48 48 'repo_stats': '/{repo_name}/repo_stats/{commit_id}',
49 49 'repo_refs_data': '/{repo_name}/refs-data',
50 50 'repo_refs_changelog_data': '/{repo_name}/refs-data-changelog',
51 51 'repo_creating_check': '/{repo_name}/repo_creating_check',
52 52 }[name].format(**kwargs)
53 53
54 54 if params:
55 55 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
56 56 return base_url
57 57
58 58
59 59 def assert_clone_url(response, server, repo, disabled=False):
60 60
61 61 response.mustcontain(
62 62 '<input type="text" class="input-monospace clone_url_input" '
63 63 '{disabled}readonly="readonly" '
64 64 'value="http://test_admin@{server}/{repo}"/>'.format(
65 65 server=server, repo=repo, disabled='disabled ' if disabled else ' ')
66 66 )
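# For example, with server='example.com' and repo='vcs_test' (illustrative
# values), this expects a readonly clone-URL input whose value is
# 'http://test_admin@example.com/vcs_test'; with disabled=True the input
# additionally carries the 'disabled' attribute.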
67 67
68 68
69 69 @pytest.mark.usefixtures('app')
70 70 class TestSummaryView(object):
71 71 def test_index(self, autologin_user, backend, http_host_only_stub):
72 72 repo_id = backend.repo.repo_id
73 73 repo_name = backend.repo_name
74 74 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
75 75 return_value=False):
76 76 response = self.app.get(
77 77 route_path('repo_summary', repo_name=repo_name))
78 78
79 79 # repo type
80 80 response.mustcontain(
81 81 '<i class="icon-%s">' % (backend.alias, )
82 82 )
83 83 # public/private
84 84 response.mustcontain(
85 85 """<i class="icon-unlock-alt">"""
86 86 )
87 87
88 88 # clone url...
89 89 assert_clone_url(response, http_host_only_stub, repo_name)
90 90 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
91 91
92 92 def test_index_svn_without_proxy(
93 93 self, autologin_user, backend_svn, http_host_only_stub):
94 94 repo_id = backend_svn.repo.repo_id
95 95 repo_name = backend_svn.repo_name
96 96 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
97 97 # clone url...
98 98
99 99 assert_clone_url(response, http_host_only_stub, repo_name, disabled=True)
100 100 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id), disabled=True)
101 101
102 102 def test_index_with_trailing_slash(
103 103 self, autologin_user, backend, http_host_only_stub):
104 104
105 105 repo_id = backend.repo.repo_id
106 106 repo_name = backend.repo_name
107 107 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
108 108 return_value=False):
109 109 response = self.app.get(
110 110 route_path('repo_summary', repo_name=repo_name) + '/',
111 111 status=200)
112 112
113 113 # clone url...
114 114 assert_clone_url(response, http_host_only_stub, repo_name)
115 115 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
116 116
117 117 def test_index_by_id(self, autologin_user, backend):
118 118 repo_id = backend.repo.repo_id
119 119 response = self.app.get(
120 120 route_path('repo_summary', repo_name='_%s' % (repo_id,)))
121 121
122 122 # repo type
123 123 response.mustcontain(
124 124 '<i class="icon-%s">' % (backend.alias, )
125 125 )
126 126 # public/private
127 127 response.mustcontain(
128 128 """<i class="icon-unlock-alt">"""
129 129 )
130 130
131 131 def test_index_by_repo_having_id_path_in_name_hg(self, autologin_user):
132 132 fixture.create_repo(name='repo_1')
133 133 response = self.app.get(route_path('repo_summary', repo_name='repo_1'))
134 134
135 135 try:
136 136 response.mustcontain("repo_1")
137 137 finally:
138 138 RepoModel().delete(Repository.get_by_repo_name('repo_1'))
139 139 Session().commit()
140 140
141 141 def test_index_with_anonymous_access_disabled(
142 142 self, backend, disable_anonymous_user):
143 143 response = self.app.get(
144 144 route_path('repo_summary', repo_name=backend.repo_name), status=302)
145 145 assert 'login' in response.location
146 146
147 147 def _enable_stats(self, repo):
148 148 r = Repository.get_by_repo_name(repo)
149 149 r.enable_statistics = True
150 150 Session().add(r)
151 151 Session().commit()
152 152
153 153 expected_trending = {
154 154 'hg': {
155 155 "py": {"count": 68, "desc": ["Python"]},
156 156 "rst": {"count": 16, "desc": ["Rst"]},
157 157 "css": {"count": 2, "desc": ["Css"]},
158 158 "sh": {"count": 2, "desc": ["Bash"]},
159 159 "bat": {"count": 1, "desc": ["Batch"]},
160 160 "cfg": {"count": 1, "desc": ["Ini"]},
161 161 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
162 162 "ini": {"count": 1, "desc": ["Ini"]},
163 163 "js": {"count": 1, "desc": ["Javascript"]},
164 164 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
165 165 },
166 166 'git': {
167 167 "py": {"count": 68, "desc": ["Python"]},
168 168 "rst": {"count": 16, "desc": ["Rst"]},
169 169 "css": {"count": 2, "desc": ["Css"]},
170 170 "sh": {"count": 2, "desc": ["Bash"]},
171 171 "bat": {"count": 1, "desc": ["Batch"]},
172 172 "cfg": {"count": 1, "desc": ["Ini"]},
173 173 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
174 174 "ini": {"count": 1, "desc": ["Ini"]},
175 175 "js": {"count": 1, "desc": ["Javascript"]},
176 176 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
177 177 },
178 178 'svn': {
179 179 "py": {"count": 75, "desc": ["Python"]},
180 180 "rst": {"count": 16, "desc": ["Rst"]},
181 181 "html": {"count": 11, "desc": ["EvoqueHtml", "Html"]},
182 182 "css": {"count": 2, "desc": ["Css"]},
183 183 "bat": {"count": 1, "desc": ["Batch"]},
184 184 "cfg": {"count": 1, "desc": ["Ini"]},
185 185 "ini": {"count": 1, "desc": ["Ini"]},
186 186 "js": {"count": 1, "desc": ["Javascript"]},
187 187 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]},
188 188 "sh": {"count": 1, "desc": ["Bash"]}
189 189 },
190 190 }
191 191
192 192 def test_repo_stats(self, autologin_user, backend, xhr_header):
193 193 response = self.app.get(
194 194 route_path(
195 195 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
196 196 extra_environ=xhr_header,
197 197 status=200)
198 198 assert re.match(r'6[\d\.]+ KiB', response.json['size'])
199 199
200 200 def test_repo_stats_code_stats_enabled(self, autologin_user, backend, xhr_header):
201 201 repo_name = backend.repo_name
202 202
203 203         # code stats
204 204 self._enable_stats(repo_name)
205 205 ScmModel().mark_for_invalidation(repo_name)
206 206
207 207 response = self.app.get(
208 208 route_path(
209 209 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
210 210 extra_environ=xhr_header,
211 211 status=200)
212 212
213 213 expected_data = self.expected_trending[backend.alias]
214 214 returned_stats = response.json['code_stats']
215 215 for k, v in expected_data.items():
216 216 assert v == returned_stats[k]
217 217
218 218 def test_repo_refs_data(self, backend):
219 219 response = self.app.get(
220 220 route_path('repo_refs_data', repo_name=backend.repo_name),
221 221 status=200)
222 222
223 223 # Ensure that there is the correct amount of items in the result
224 224 repo = backend.repo.scm_instance()
225 225 data = response.json['results']
226 226 items = sum(len(section['children']) for section in data)
227 227 repo_refs = len(repo.branches) + len(repo.tags) + len(repo.bookmarks)
228 228 assert items == repo_refs
229 229
230 230 def test_index_shows_missing_requirements_message(
231 231 self, backend, autologin_user):
232 232 repo_name = backend.repo_name
233 233 scm_patcher = mock.patch.object(
234 234 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
235 235
236 236 with scm_patcher:
237 237 response = self.app.get(
238 238 route_path('repo_summary', repo_name=repo_name))
239 239 assert_response = AssertResponse(response)
240 240 assert_response.element_contains(
241 241 '.main .alert-warning strong', 'Missing requirements')
242 242 assert_response.element_contains(
243 243 '.main .alert-warning',
244 244 'Commits cannot be displayed, because this repository '
245 245 'uses one or more extensions, which was not enabled.')
246 246
247 247 def test_missing_requirements_page_does_not_contains_switch_to(
248 248 self, autologin_user, backend):
249 249 repo_name = backend.repo_name
250 250 scm_patcher = mock.patch.object(
251 251 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
252 252
253 253 with scm_patcher:
254 254 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
255 255 response.mustcontain(no='Switch To')
256 256
257 257
258 258 @pytest.mark.usefixtures('app')
259 259 class TestRepoLocation(object):
260 260
261 261 @pytest.mark.parametrize("suffix", [u'', u'Δ…Δ™Ε‚'], ids=['', 'non-ascii'])
262 262 def test_missing_filesystem_repo(
263 263 self, autologin_user, backend, suffix, csrf_token):
264 264 repo = backend.create_repo(name_suffix=suffix)
265 265 repo_name = repo.repo_name
266 266
267 267 # delete from file system
268 268 RepoModel()._delete_filesystem_repo(repo)
269 269
270 270 # test if the repo is still in the database
271 271 new_repo = RepoModel().get_by_repo_name(repo_name)
272 272 assert new_repo.repo_name == repo_name
273 273
274 274 # check if repo is not in the filesystem
275 275 assert not repo_on_filesystem(repo_name)
276 276
277 277 response = self.app.get(
278 278 route_path('repo_summary', repo_name=safe_str(repo_name)), status=302)
279 279
280 280 msg = 'The repository `%s` cannot be loaded in filesystem. ' \
281 281 'Please check if it exist, or is not damaged.' % repo_name
282 282 assert_session_flash(response, msg)
283 283
284 284 @pytest.mark.parametrize("suffix", [u'', u'Δ…Δ™Ε‚'], ids=['', 'non-ascii'])
285 285 def test_missing_filesystem_repo_on_repo_check(
286 286 self, autologin_user, backend, suffix, csrf_token):
287 287 repo = backend.create_repo(name_suffix=suffix)
288 288 repo_name = repo.repo_name
289 289
290 290 # delete from file system
291 291 RepoModel()._delete_filesystem_repo(repo)
292 292
293 293 # test if the repo is still in the database
294 294 new_repo = RepoModel().get_by_repo_name(repo_name)
295 295 assert new_repo.repo_name == repo_name
296 296
297 297 # check if repo is not in the filesystem
298 298 assert not repo_on_filesystem(repo_name)
299 299
300 300 # flush the session
301 301 self.app.get(
302 302 route_path('repo_summary', repo_name=safe_str(repo_name)),
303 303 status=302)
304 304
305 305 response = self.app.get(
306 306 route_path('repo_creating_check', repo_name=safe_str(repo_name)),
307 307 status=200)
308 308 msg = 'The repository `%s` cannot be loaded in filesystem. ' \
309 309 'Please check if it exist, or is not damaged.' % repo_name
310 310         assert_session_flash(response, msg)
311 311
312 312
313 313 @pytest.fixture()
314 314 def summary_view(context_stub, request_stub, user_util):
315 315 """
316 316 Bootstrap view to test the view functions
317 317 """
318 318 request_stub.matched_route = AttributeDict(name='test_view')
319 319
320 320 request_stub.user = user_util.create_user().AuthUser()
321 321 request_stub.db_repo = user_util.create_repo()
322 322
323 323 view = RepoSummaryView(context=context_stub, request=request_stub)
324 324 return view
325 325
326 326
327 327 @pytest.mark.usefixtures('app')
328 328 class TestCreateReferenceData(object):
329 329
330 330 @pytest.fixture
331 331 def example_refs(self):
332 332 section_1_refs = OrderedDict((('a', 'a_id'), ('b', 'b_id')))
333 333 example_refs = [
334 334 ('section_1', section_1_refs, 't1'),
335 335 ('section_2', {'c': 'c_id'}, 't2'),
336 336 ]
337 337 return example_refs
338 338
339 339 def test_generates_refs_based_on_commit_ids(self, example_refs, summary_view):
340 340 repo = mock.Mock()
341 341 repo.name = 'test-repo'
342 342 repo.alias = 'git'
343 343 full_repo_name = 'pytest-repo-group/' + repo.name
344 344
345 345 result = summary_view._create_reference_data(
346 346 repo, full_repo_name, example_refs)
347 347
348 348 expected_files_url = '/{}/files/'.format(full_repo_name)
349 349 expected_result = [
350 350 {
351 351 'children': [
352 352 {
353 'id': 'a', 'raw_id': 'a_id', 'text': 'a', 'type': 't1',
353 'id': 'a', 'idx': 0, 'raw_id': 'a_id', 'text': 'a', 'type': 't1',
354 354 'files_url': expected_files_url + 'a/?at=a',
355 355 },
356 356 {
357 'id': 'b', 'raw_id': 'b_id', 'text': 'b', 'type': 't1',
357 'id': 'b', 'idx': 0, 'raw_id': 'b_id', 'text': 'b', 'type': 't1',
358 358 'files_url': expected_files_url + 'b/?at=b',
359 359 }
360 360 ],
361 361 'text': 'section_1'
362 362 },
363 363 {
364 364 'children': [
365 365 {
366 'id': 'c', 'raw_id': 'c_id', 'text': 'c', 'type': 't2',
366 'id': 'c', 'idx': 0, 'raw_id': 'c_id', 'text': 'c', 'type': 't2',
367 367 'files_url': expected_files_url + 'c/?at=c',
368 368 }
369 369 ],
370 370 'text': 'section_2'
371 371 }]
372 372 assert result == expected_result
373 373
374 374 def test_generates_refs_with_path_for_svn(self, example_refs, summary_view):
375 375 repo = mock.Mock()
376 376 repo.name = 'test-repo'
377 377 repo.alias = 'svn'
378 378 full_repo_name = 'pytest-repo-group/' + repo.name
379 379
380 380 result = summary_view._create_reference_data(
381 381 repo, full_repo_name, example_refs)
382 382
383 383 expected_files_url = '/{}/files/'.format(full_repo_name)
384 384 expected_result = [
385 385 {
386 386 'children': [
387 387 {
388 'id': 'a@a_id', 'raw_id': 'a_id',
388 'id': 'a@a_id', 'idx': 0, 'raw_id': 'a_id',
389 389 'text': 'a', 'type': 't1',
390 390 'files_url': expected_files_url + 'a_id/a?at=a',
391 391 },
392 392 {
393 'id': 'b@b_id', 'raw_id': 'b_id',
393 'id': 'b@b_id', 'idx': 0, 'raw_id': 'b_id',
394 394 'text': 'b', 'type': 't1',
395 395 'files_url': expected_files_url + 'b_id/b?at=b',
396 396 }
397 397 ],
398 398 'text': 'section_1'
399 399 },
400 400 {
401 401 'children': [
402 402 {
403 'id': 'c@c_id', 'raw_id': 'c_id',
403 'id': 'c@c_id', 'idx': 0, 'raw_id': 'c_id',
404 404 'text': 'c', 'type': 't2',
405 405 'files_url': expected_files_url + 'c_id/c?at=c',
406 406 }
407 407 ],
408 408 'text': 'section_2'
409 409 }
410 410 ]
411 411 assert result == expected_result
412 412
413 413
414 414 class TestCreateFilesUrl(object):
415 415
416 416 def test_creates_non_svn_url(self, app, summary_view):
417 417 repo = mock.Mock()
418 418 repo.name = 'abcde'
419 419 full_repo_name = 'test-repo-group/' + repo.name
420 420 ref_name = 'branch1'
421 421 raw_id = 'deadbeef0123456789'
422 422 is_svn = False
423 423
424 424 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
425 425 result = summary_view._create_files_url(
426 426 repo, full_repo_name, ref_name, raw_id, is_svn)
427 427 url_mock.assert_called_once_with(
428 428 'repo_files', repo_name=full_repo_name, commit_id=ref_name,
429 429 f_path='', _query=dict(at=ref_name))
430 430 assert result == url_mock.return_value
431 431
432 432 def test_creates_svn_url(self, app, summary_view):
433 433 repo = mock.Mock()
434 434 repo.name = 'abcde'
435 435 full_repo_name = 'test-repo-group/' + repo.name
436 436 ref_name = 'branch1'
437 437 raw_id = 'deadbeef0123456789'
438 438 is_svn = True
439 439
440 440 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
441 441 result = summary_view._create_files_url(
442 442 repo, full_repo_name, ref_name, raw_id, is_svn)
443 443 url_mock.assert_called_once_with(
444 444 'repo_files', repo_name=full_repo_name, f_path=ref_name,
445 445 commit_id=raw_id, _query=dict(at=ref_name))
446 446 assert result == url_mock.return_value
447 447
448 448 def test_name_has_slashes(self, app, summary_view):
449 449 repo = mock.Mock()
450 450 repo.name = 'abcde'
451 451 full_repo_name = 'test-repo-group/' + repo.name
452 452 ref_name = 'branch1/branch2'
453 453 raw_id = 'deadbeef0123456789'
454 454 is_svn = False
455 455
456 456 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
457 457 result = summary_view._create_files_url(
458 458 repo, full_repo_name, ref_name, raw_id, is_svn)
459 459 url_mock.assert_called_once_with(
460 460 'repo_files', repo_name=full_repo_name, commit_id=raw_id,
461 461 f_path='', _query=dict(at=ref_name))
462 462 assert result == url_mock.return_value
463 463
464 464
465 465 class TestReferenceItems(object):
466 466 repo = mock.Mock()
467 467 repo.name = 'pytest-repo'
468 468 repo_full_name = 'pytest-repo-group/' + repo.name
469 469 ref_type = 'branch'
470 470 fake_url = '/abcde/'
471 471
472 472 @staticmethod
473 473 def _format_function(name, id_):
474 474 return 'format_function_{}_{}'.format(name, id_)
475 475
476 476 def test_creates_required_amount_of_items(self, summary_view):
477 477 amount = 100
478 478 refs = {
479 479 'ref{}'.format(i): '{0:040d}'.format(i)
480 480 for i in range(amount)
481 481 }
482 482
483 483 url_patcher = mock.patch.object(summary_view, '_create_files_url')
484 484 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
485 485 return_value=False)
486 486
487 487 with url_patcher as url_mock, svn_patcher:
488 488 result = summary_view._create_reference_items(
489 489 self.repo, self.repo_full_name, refs, self.ref_type,
490 490 self._format_function)
491 491 assert len(result) == amount
492 492 assert url_mock.call_count == amount
493 493
494 494 def test_single_item_details(self, summary_view):
495 495 ref_name = 'ref1'
496 496 ref_id = 'deadbeef'
497 497 refs = {
498 498 ref_name: ref_id
499 499 }
500 500
501 501 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
502 502 return_value=False)
503 503
504 504 url_patcher = mock.patch.object(
505 505 summary_view, '_create_files_url', return_value=self.fake_url)
506 506
507 507 with url_patcher as url_mock, svn_patcher:
508 508 result = summary_view._create_reference_items(
509 509 self.repo, self.repo_full_name, refs, self.ref_type,
510 510 self._format_function)
511 511
512 512 url_mock.assert_called_once_with(
513 513 self.repo, self.repo_full_name, ref_name, ref_id, False)
514 514 expected_result = [
515 515 {
516 516 'text': ref_name,
517 517 'id': self._format_function(ref_name, ref_id),
518 518 'raw_id': ref_id,
519 'idx': 0,
519 520 'type': self.ref_type,
520 521 'files_url': self.fake_url
521 522 }
522 523 ]
523 524 assert result == expected_result
@@ -1,1288 +1,1293 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 import mock
23 23 import os
24 24 import sys
25 25 import shutil
26 26
27 27 import pytest
28 28
29 29 from rhodecode.lib.utils import make_db_config
30 30 from rhodecode.lib.vcs.backends.base import Reference
31 31 from rhodecode.lib.vcs.backends.git import (
32 32 GitRepository, GitCommit, discover_git_version)
33 33 from rhodecode.lib.vcs.exceptions import (
34 34 RepositoryError, VCSError, NodeDoesNotExistError)
35 35 from rhodecode.lib.vcs.nodes import (
36 36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39 39
40 40
41 41 pytestmark = pytest.mark.backends("git")
42 42
43 43
44 44 def repo_path_generator():
45 45 """
46 46 Return a different path to be used for cloning repos.
47 47 """
48 48 i = 0
49 49 while True:
50 50 i += 1
51 51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
52 52
53 53
54 54 REPO_PATH_GENERATOR = repo_path_generator()
55 55
56 56
57 57 class TestGitRepository:
58 58
59 59 # pylint: disable=protected-access
60 60
61 61 def __check_for_existing_repo(self):
62 62 if os.path.exists(TEST_GIT_REPO_CLONE):
63 63 self.fail('Cannot test git clone repo as location %s already '
64 64 'exists. You should manually remove it first.'
65 65 % TEST_GIT_REPO_CLONE)
66 66
67 67 @pytest.fixture(autouse=True)
68 68 def prepare(self, request, baseapp):
69 69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
70 70
71 71 def get_clone_repo(self):
72 72 """
73 73         Return a non-bare clone of the base repo.
74 74 """
75 75 clone_path = next(REPO_PATH_GENERATOR)
76 76 repo_clone = GitRepository(
77 77 clone_path, create=True, src_url=self.repo.path, bare=False)
78 78
79 79 return repo_clone
80 80
81 81 def get_empty_repo(self, bare=False):
82 82 """
83 83         Return an empty repo, non-bare by default.
84 84 """
85 85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
86 86
87 87 def test_wrong_repo_path(self):
88 88 wrong_repo_path = '/tmp/errorrepo_git'
89 89 with pytest.raises(RepositoryError):
90 90 GitRepository(wrong_repo_path)
91 91
92 92 def test_repo_clone(self):
93 93 self.__check_for_existing_repo()
94 94 repo = GitRepository(TEST_GIT_REPO)
95 95 repo_clone = GitRepository(
96 96 TEST_GIT_REPO_CLONE,
97 97 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
98 98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
99 99 # Checking hashes of commits should be enough
100 100 for commit in repo.get_commits():
101 101 raw_id = commit.raw_id
102 102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
103 103
104 104 def test_repo_clone_without_create(self):
105 105 with pytest.raises(RepositoryError):
106 106 GitRepository(
107 107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
108 108
109 109 def test_repo_clone_with_update(self):
110 110 repo = GitRepository(TEST_GIT_REPO)
111 111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
112 112 repo_clone = GitRepository(
113 113 clone_path,
114 114 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
115 115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 116
117 117 # check if current workdir was updated
118 118 fpath = os.path.join(clone_path, 'MANIFEST.in')
119 119 assert os.path.isfile(fpath)
120 120
121 121 def test_repo_clone_without_update(self):
122 122 repo = GitRepository(TEST_GIT_REPO)
123 123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
124 124 repo_clone = GitRepository(
125 125 clone_path,
126 126 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
127 127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
128 128 # check if current workdir was *NOT* updated
129 129 fpath = os.path.join(clone_path, 'MANIFEST.in')
130 130 # Make sure it's not bare repo
131 131 assert not repo_clone.bare
132 132 assert not os.path.isfile(fpath)
133 133
134 134 def test_repo_clone_into_bare_repo(self):
135 135 repo = GitRepository(TEST_GIT_REPO)
136 136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
137 137 repo_clone = GitRepository(
138 138 clone_path, create=True, src_url=repo.path, bare=True)
139 139 assert repo_clone.bare
140 140
141 141 def test_create_repo_is_not_bare_by_default(self):
142 142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
143 143 assert not repo.bare
144 144
145 145 def test_create_bare_repo(self):
146 146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
147 147 assert repo.bare
148 148
149 149 def test_update_server_info(self):
150 150 self.repo._update_server_info()
151 151
152 152 def test_fetch(self, vcsbackend_git):
153 153 # Note: This is a git specific part of the API, it's only implemented
154 154 # by the git backend.
155 155 source_repo = vcsbackend_git.repo
156 156 target_repo = vcsbackend_git.create_repo(bare=True)
157 157 target_repo.fetch(source_repo.path)
158 158 # Note: Get a fresh instance, avoids caching trouble
159 159 target_repo = vcsbackend_git.backend(target_repo.path)
160 160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
161 161
162 162 def test_commit_ids(self):
163 163         # there are 112 commits (by now),
164 164         # so we can assume they will be available from now on
165 165 subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
166 166 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
167 167 'fa6600f6848800641328adbf7811fd2372c02ab2',
168 168 '102607b09cdd60e2793929c4f90478be29f85a17',
169 169 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
170 170 '2d1028c054665b962fa3d307adfc923ddd528038',
171 171 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
172 172 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
173 173 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
174 174 '8430a588b43b5d6da365400117c89400326e7992',
175 175 'd955cd312c17b02143c04fa1099a352b04368118',
176 176 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
177 177 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
178 178 'f298fe1189f1b69779a4423f40b48edf92a703fc',
179 179 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
180 180 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
181 181 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
182 182 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
183 183 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
184 184 '45223f8f114c64bf4d6f853e3c35a369a6305520',
185 185 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
186 186 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
187 187 '27d48942240f5b91dfda77accd2caac94708cc7d',
188 188 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
189 189 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
190 190 assert subset.issubset(set(self.repo.commit_ids))
191 191
192 192 def test_slicing(self):
193 193 # 4 1 5 10 95
194 194 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
195 195 (10, 20, 10), (5, 100, 95)]:
196 196 commit_ids = list(self.repo[sfrom:sto])
197 197 assert len(commit_ids) == size
198 198 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
199 199 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
200 200
201 201 def test_branches(self):
202 202 # TODO: Need more tests here
203 203 # Removed (those are 'remotes' branches for cloned repo)
204 204 # assert 'master' in self.repo.branches
205 205 # assert 'gittree' in self.repo.branches
206 206 # assert 'web-branch' in self.repo.branches
207 207 for __, commit_id in self.repo.branches.items():
208 208 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
209 209
210 210 def test_tags(self):
211 211 # TODO: Need more tests here
212 212 assert 'v0.1.1' in self.repo.tags
213 213 assert 'v0.1.2' in self.repo.tags
214 214 for __, commit_id in self.repo.tags.items():
215 215 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
216 216
217 217 def _test_single_commit_cache(self, commit_id):
218 218 commit = self.repo.get_commit(commit_id)
219 219 assert commit_id in self.repo.commits
220 220 assert commit is self.repo.commits[commit_id]
221 221
222 222 def test_initial_commit(self):
223 223 commit_id = self.repo.commit_ids[0]
224 224 init_commit = self.repo.get_commit(commit_id)
225 225 init_author = init_commit.author
226 226
227 227 assert init_commit.message == 'initial import\n'
228 228 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
229 229 assert init_author == init_commit.committer
230 230 for path in ('vcs/__init__.py',
231 231 'vcs/backends/BaseRepository.py',
232 232 'vcs/backends/__init__.py'):
233 233 assert isinstance(init_commit.get_node(path), FileNode)
234 234 for path in ('', 'vcs', 'vcs/backends'):
235 235 assert isinstance(init_commit.get_node(path), DirNode)
236 236
237 237 with pytest.raises(NodeDoesNotExistError):
238 238 init_commit.get_node(path='foobar')
239 239
240 240 node = init_commit.get_node('vcs/')
241 241 assert hasattr(node, 'kind')
242 242 assert node.kind == NodeKind.DIR
243 243
244 244 node = init_commit.get_node('vcs')
245 245 assert hasattr(node, 'kind')
246 246 assert node.kind == NodeKind.DIR
247 247
248 248 node = init_commit.get_node('vcs/__init__.py')
249 249 assert hasattr(node, 'kind')
250 250 assert node.kind == NodeKind.FILE
251 251
252 252 def test_not_existing_commit(self):
253 253 with pytest.raises(RepositoryError):
254 254 self.repo.get_commit('f' * 40)
255 255
256 256 def test_commit10(self):
257 257
258 258 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
259 259 README = """===
260 260 VCS
261 261 ===
262 262
263 263 Various Version Control System management abstraction layer for Python.
264 264
265 265 Introduction
266 266 ------------
267 267
268 268 TODO: To be written...
269 269
270 270 """
271 271 node = commit10.get_node('README.rst')
272 272 assert node.kind == NodeKind.FILE
273 273 assert node.content == README
274 274
275 275 def test_head(self):
276 276 assert self.repo.head == self.repo.get_commit().raw_id
277 277
278 278 def test_checkout_with_create(self):
279 279 repo_clone = self.get_clone_repo()
280 280
281 281 new_branch = 'new_branch'
282 282 assert repo_clone._current_branch() == 'master'
283 283 assert set(repo_clone.branches) == {'master'}
284 284 repo_clone._checkout(new_branch, create=True)
285 285
286 286         # Branches is a lazy property, so we need to recreate the Repo object.
287 287 repo_clone = GitRepository(repo_clone.path)
288 288 assert set(repo_clone.branches) == {'master', new_branch}
289 289 assert repo_clone._current_branch() == new_branch
290 290
291 291 def test_checkout(self):
292 292 repo_clone = self.get_clone_repo()
293 293
294 294 repo_clone._checkout('new_branch', create=True)
295 295 repo_clone._checkout('master')
296 296
297 297 assert repo_clone._current_branch() == 'master'
298 298
299 299 def test_checkout_same_branch(self):
300 300 repo_clone = self.get_clone_repo()
301 301
302 302 repo_clone._checkout('master')
303 303 assert repo_clone._current_branch() == 'master'
304 304
305 305 def test_checkout_branch_already_exists(self):
306 306 repo_clone = self.get_clone_repo()
307 307
308 308 with pytest.raises(RepositoryError):
309 309 repo_clone._checkout('master', create=True)
310 310
311 311 def test_checkout_bare_repo(self):
312 312 with pytest.raises(RepositoryError):
313 313 self.repo._checkout('master')
314 314
315 315 def test_current_branch_bare_repo(self):
316 316 with pytest.raises(RepositoryError):
317 317 self.repo._current_branch()
318 318
319 319 def test_current_branch_empty_repo(self):
320 320 repo = self.get_empty_repo()
321 321 assert repo._current_branch() is None
322 322
323 323 def test_local_clone(self):
324 324 clone_path = next(REPO_PATH_GENERATOR)
325 325 self.repo._local_clone(clone_path, 'master')
326 326 repo_clone = GitRepository(clone_path)
327 327
328 328 assert self.repo.commit_ids == repo_clone.commit_ids
329 329
330 330 def test_local_clone_with_specific_branch(self):
331 331 source_repo = self.get_clone_repo()
332 332
333 333 # Create a new branch in source repo
334 334 new_branch_commit = source_repo.commit_ids[-3]
335 335 source_repo._checkout(new_branch_commit)
336 336 source_repo._checkout('new_branch', create=True)
337 337
338 338 clone_path = next(REPO_PATH_GENERATOR)
339 339 source_repo._local_clone(clone_path, 'new_branch')
340 340 repo_clone = GitRepository(clone_path)
341 341
342 342 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
343 343
344 344 clone_path = next(REPO_PATH_GENERATOR)
345 345 source_repo._local_clone(clone_path, 'master')
346 346 repo_clone = GitRepository(clone_path)
347 347
348 348 assert source_repo.commit_ids == repo_clone.commit_ids
349 349
350 350 def test_local_clone_fails_if_target_exists(self):
351 351 with pytest.raises(RepositoryError):
352 352 self.repo._local_clone(self.repo.path, 'master')
353 353
354 354 def test_local_fetch(self):
355 355 target_repo = self.get_empty_repo()
356 356 source_repo = self.get_clone_repo()
357 357
358 358 # Create a new branch in source repo
359 359 master_commit = source_repo.commit_ids[-1]
360 360 new_branch_commit = source_repo.commit_ids[-3]
361 361 source_repo._checkout(new_branch_commit)
362 362 source_repo._checkout('new_branch', create=True)
363 363
364 364 target_repo._local_fetch(source_repo.path, 'new_branch')
365 365 assert target_repo._last_fetch_heads() == [new_branch_commit]
366 366
367 367 target_repo._local_fetch(source_repo.path, 'master')
368 368 assert target_repo._last_fetch_heads() == [master_commit]
369 369
370 370 def test_local_fetch_from_bare_repo(self):
371 371 target_repo = self.get_empty_repo()
372 372 target_repo._local_fetch(self.repo.path, 'master')
373 373
374 374 master_commit = self.repo.commit_ids[-1]
375 375 assert target_repo._last_fetch_heads() == [master_commit]
376 376
377 377 def test_local_fetch_from_same_repo(self):
378 378 with pytest.raises(ValueError):
379 379 self.repo._local_fetch(self.repo.path, 'master')
380 380
381 381 def test_local_fetch_branch_does_not_exist(self):
382 382 target_repo = self.get_empty_repo()
383 383
384 384 with pytest.raises(RepositoryError):
385 385 target_repo._local_fetch(self.repo.path, 'new_branch')
386 386
387 387 def test_local_pull(self):
388 388 target_repo = self.get_empty_repo()
389 389 source_repo = self.get_clone_repo()
390 390
391 391 # Create a new branch in source repo
392 392 master_commit = source_repo.commit_ids[-1]
393 393 new_branch_commit = source_repo.commit_ids[-3]
394 394 source_repo._checkout(new_branch_commit)
395 395 source_repo._checkout('new_branch', create=True)
396 396
397 397 target_repo._local_pull(source_repo.path, 'new_branch')
398 398 target_repo = GitRepository(target_repo.path)
399 399 assert target_repo.head == new_branch_commit
400 400
401 401 target_repo._local_pull(source_repo.path, 'master')
402 402 target_repo = GitRepository(target_repo.path)
403 403 assert target_repo.head == master_commit
404 404
405 405 def test_local_pull_in_bare_repo(self):
406 406 with pytest.raises(RepositoryError):
407 407 self.repo._local_pull(self.repo.path, 'master')
408 408
409 409 def test_local_merge(self):
410 410 target_repo = self.get_empty_repo()
411 411 source_repo = self.get_clone_repo()
412 412
413 413 # Create a new branch in source repo
414 414 master_commit = source_repo.commit_ids[-1]
415 415 new_branch_commit = source_repo.commit_ids[-3]
416 416 source_repo._checkout(new_branch_commit)
417 417 source_repo._checkout('new_branch', create=True)
418 418
419 419         # This is required as one cannot do a --ff-only merge in an empty repo.
420 420 target_repo._local_pull(source_repo.path, 'new_branch')
421 421
422 422 target_repo._local_fetch(source_repo.path, 'master')
423 423 merge_message = 'Merge message\n\nDescription:...'
424 424 user_name = 'Albert Einstein'
425 425 user_email = 'albert@einstein.com'
426 426 target_repo._local_merge(merge_message, user_name, user_email,
427 427 target_repo._last_fetch_heads())
428 428
429 429 target_repo = GitRepository(target_repo.path)
430 430 assert target_repo.commit_ids[-2] == master_commit
431 431 last_commit = target_repo.get_commit(target_repo.head)
432 432 assert last_commit.message.strip() == merge_message
433 433 assert last_commit.author == '%s <%s>' % (user_name, user_email)
434 434
435 435 assert not os.path.exists(
436 436 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
437 437
438 438 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
439 439 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
440 440 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
441 441
442 442 target_repo._local_fetch(self.repo.path, 'master')
443 443 with pytest.raises(RepositoryError):
444 444 target_repo._local_merge(
445 445 'merge_message', 'user name', 'user@name.com',
446 446 target_repo._last_fetch_heads())
447 447
448 448 # Check we are not left in an intermediate merge state
449 449 assert not os.path.exists(
450 450 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
451 451
452 452 def test_local_merge_into_empty_repo(self):
453 453 target_repo = self.get_empty_repo()
454 454
455 455         # This is required as one cannot do a --ff-only merge in an empty repo.
456 456 target_repo._local_fetch(self.repo.path, 'master')
457 457 with pytest.raises(RepositoryError):
458 458 target_repo._local_merge(
459 459 'merge_message', 'user name', 'user@name.com',
460 460 target_repo._last_fetch_heads())
461 461
462 462 def test_local_merge_in_bare_repo(self):
463 463 with pytest.raises(RepositoryError):
464 464 self.repo._local_merge(
465 465 'merge_message', 'user name', 'user@name.com', None)
466 466
467 467 def test_local_push_non_bare(self):
468 468 target_repo = self.get_empty_repo()
469 469
470 470 pushed_branch = 'pushed_branch'
471 471 self.repo._local_push('master', target_repo.path, pushed_branch)
472 472         # Fix the HEAD of the target repo, otherwise GitRepository won't
473 473 # report any branches.
474 474 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
475 475 f.write('ref: refs/heads/%s' % pushed_branch)
476 476
477 477 target_repo = GitRepository(target_repo.path)
478 478
479 479 assert (target_repo.branches[pushed_branch] ==
480 480 self.repo.branches['master'])
481 481
482 482 def test_local_push_bare(self):
483 483 target_repo = self.get_empty_repo(bare=True)
484 484
485 485 pushed_branch = 'pushed_branch'
486 486 self.repo._local_push('master', target_repo.path, pushed_branch)
487 487         # Fix the HEAD of the target repo, otherwise GitRepository won't
488 488 # report any branches.
489 489 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
490 490 f.write('ref: refs/heads/%s' % pushed_branch)
491 491
492 492 target_repo = GitRepository(target_repo.path)
493 493
494 494 assert (target_repo.branches[pushed_branch] ==
495 495 self.repo.branches['master'])
496 496
497 497 def test_local_push_non_bare_target_branch_is_checked_out(self):
498 498 target_repo = self.get_clone_repo()
499 499
500 500 pushed_branch = 'pushed_branch'
501 501 # Create a new branch in source repo
502 502 new_branch_commit = target_repo.commit_ids[-3]
503 503 target_repo._checkout(new_branch_commit)
504 504 target_repo._checkout(pushed_branch, create=True)
505 505
506 506 self.repo._local_push('master', target_repo.path, pushed_branch)
507 507
508 508 target_repo = GitRepository(target_repo.path)
509 509
510 510 assert (target_repo.branches[pushed_branch] ==
511 511 self.repo.branches['master'])
512 512
513 513 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
514 514 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
515 515 with pytest.raises(RepositoryError):
516 516 self.repo._local_push('master', target_repo.path, 'master')
517 517
518 518 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
519 519 target_repo = self.get_empty_repo(bare=True)
520 520
521 521 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
522 522 self.repo._local_push(
523 523 'master', target_repo.path, 'master', enable_hooks=True)
524 524 env = run_mock.call_args[1]['extra_env']
525 525 assert 'RC_SKIP_HOOKS' not in env
526 526
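    # Helper used by the hook tests below: installs an executable hook script
    # that exits non-zero unless RC_SKIP_HOOKS is set in the environment.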
527 527 def _add_failing_hook(self, repo_path, hook_name, bare=False):
528 528 path_components = (
529 529 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
530 530 hook_path = os.path.join(repo_path, *path_components)
531 531 with open(hook_path, 'w') as f:
532 532 script_lines = [
533 533 '#!%s' % sys.executable,
534 534 'import os',
535 535 'import sys',
536 536 'if os.environ.get("RC_SKIP_HOOKS"):',
537 537 ' sys.exit(0)',
538 538 'sys.exit(1)',
539 539 ]
540 540 f.write('\n'.join(script_lines))
541 541 os.chmod(hook_path, 0o755)
542 542
543 543 def test_local_push_does_not_execute_hook(self):
544 544 target_repo = self.get_empty_repo()
545 545
546 546 pushed_branch = 'pushed_branch'
547 547 self._add_failing_hook(target_repo.path, 'pre-receive')
548 548 self.repo._local_push('master', target_repo.path, pushed_branch)
549 549         # Fix the HEAD of the target repo, otherwise GitRepository won't
550 550 # report any branches.
551 551 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
552 552 f.write('ref: refs/heads/%s' % pushed_branch)
553 553
554 554 target_repo = GitRepository(target_repo.path)
555 555
556 556 assert (target_repo.branches[pushed_branch] ==
557 557 self.repo.branches['master'])
558 558
559 559 def test_local_push_executes_hook(self):
560 560 target_repo = self.get_empty_repo(bare=True)
561 561 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
562 562 with pytest.raises(RepositoryError):
563 563 self.repo._local_push(
564 564 'master', target_repo.path, 'master', enable_hooks=True)
565 565
566 566 def test_maybe_prepare_merge_workspace(self):
567 567 workspace = self.repo._maybe_prepare_merge_workspace(
568 568 2, 'pr2', Reference('branch', 'master', 'unused'),
569 569 Reference('branch', 'master', 'unused'))
570 570
571 571 assert os.path.isdir(workspace)
572 572 workspace_repo = GitRepository(workspace)
573 573 assert workspace_repo.branches == self.repo.branches
574 574
575 575 # Calling it a second time should also succeed
576 576 workspace = self.repo._maybe_prepare_merge_workspace(
577 577 2, 'pr2', Reference('branch', 'master', 'unused'),
578 578 Reference('branch', 'master', 'unused'))
579 579 assert os.path.isdir(workspace)
580 580
581 581 def test_maybe_prepare_merge_workspace_different_refs(self):
582 582 workspace = self.repo._maybe_prepare_merge_workspace(
583 583 2, 'pr2', Reference('branch', 'master', 'unused'),
584 584 Reference('branch', 'develop', 'unused'))
585 585
586 586 assert os.path.isdir(workspace)
587 587 workspace_repo = GitRepository(workspace)
588 588 assert workspace_repo.branches == self.repo.branches
589 589
590 590 # Calling it a second time should also succeed
591 591 workspace = self.repo._maybe_prepare_merge_workspace(
592 592 2, 'pr2', Reference('branch', 'master', 'unused'),
593 593 Reference('branch', 'develop', 'unused'))
594 594 assert os.path.isdir(workspace)
595 595
596 596 def test_cleanup_merge_workspace(self):
597 597 workspace = self.repo._maybe_prepare_merge_workspace(
598 598 2, 'pr3', Reference('branch', 'master', 'unused'),
599 599 Reference('branch', 'master', 'unused'))
600 600 self.repo.cleanup_merge_workspace(2, 'pr3')
601 601
602 602 assert not os.path.exists(workspace)
603 603
604 604 def test_cleanup_merge_workspace_invalid_workspace_id(self):
605 605         # No assert: in case of a nonexistent workspace this function
606 606 # should still succeed.
607 607 self.repo.cleanup_merge_workspace(1, 'pr4')
608 608
609 609 def test_set_refs(self):
610 610 test_ref = 'refs/test-refs/abcde'
611 611 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
612 612
613 613 self.repo.set_refs(test_ref, test_commit_id)
614 614 stdout, _ = self.repo.run_git_command(['show-ref'])
615 615 assert test_ref in stdout
616 616 assert test_commit_id in stdout
617 617
618 618 def test_remove_ref(self):
619 619 test_ref = 'refs/test-refs/abcde'
620 620 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
621 621 self.repo.set_refs(test_ref, test_commit_id)
622 622 stdout, _ = self.repo.run_git_command(['show-ref'])
623 623 assert test_ref in stdout
624 624 assert test_commit_id in stdout
625 625
626 626 self.repo.remove_ref(test_ref)
627 627 stdout, _ = self.repo.run_git_command(['show-ref'])
628 628 assert test_ref not in stdout
629 629 assert test_commit_id not in stdout
630 630
631 631
632 632 class TestGitCommit(object):
633 633
634 634 @pytest.fixture(autouse=True)
635 635 def prepare(self):
636 636 self.repo = GitRepository(TEST_GIT_REPO)
637 637
638 638 def test_default_commit(self):
639 639 tip = self.repo.get_commit()
640 640 assert tip == self.repo.get_commit(None)
641 641 assert tip == self.repo.get_commit('tip')
642 642
643 643 def test_root_node(self):
644 644 tip = self.repo.get_commit()
645 645 assert tip.root is tip.get_node('')
646 646
647 647 def test_lazy_fetch(self):
648 648 """
649 649         Test that a commit's nodes expand and are cached as we walk through
650 650         the commit. This test is somewhat hard to write, as the order of
651 651         operations is key here. Written by running command after command in a shell.
652 652 """
653 653 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
654 654 assert commit_id in self.repo.commit_ids
655 655 commit = self.repo.get_commit(commit_id)
656 656 assert len(commit.nodes) == 0
657 657 root = commit.root
658 658 assert len(commit.nodes) == 1
659 659 assert len(root.nodes) == 8
660 660 # accessing root.nodes updates commit.nodes
661 661 assert len(commit.nodes) == 9
662 662
663 663 docs = root.get_node('docs')
664 664 # we haven't yet accessed anything new as docs dir was already cached
665 665 assert len(commit.nodes) == 9
666 666 assert len(docs.nodes) == 8
667 667 # accessing docs.nodes updates commit.nodes
668 668 assert len(commit.nodes) == 17
669 669
670 670 assert docs is commit.get_node('docs')
671 671 assert docs is root.nodes[0]
672 672 assert docs is root.dirs[0]
673 673 assert docs is commit.get_node('docs')
674 674
675 675 def test_nodes_with_commit(self):
676 676 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
677 677 commit = self.repo.get_commit(commit_id)
678 678 root = commit.root
679 679 docs = root.get_node('docs')
680 680 assert docs is commit.get_node('docs')
681 681 api = docs.get_node('api')
682 682 assert api is commit.get_node('docs/api')
683 683 index = api.get_node('index.rst')
684 684 assert index is commit.get_node('docs/api/index.rst')
685 685 assert index is commit.get_node('docs')\
686 686 .get_node('api')\
687 687 .get_node('index.rst')
688 688
689 689 def test_branch_and_tags(self):
690 690 """
691 691 rev0 = self.repo.commit_ids[0]
692 692 commit0 = self.repo.get_commit(rev0)
693 693 assert commit0.branch == 'master'
694 694 assert commit0.tags == []
695 695
696 696 rev10 = self.repo.commit_ids[10]
697 697 commit10 = self.repo.get_commit(rev10)
698 698 assert commit10.branch == 'master'
699 699 assert commit10.tags == []
700 700
701 701 rev44 = self.repo.commit_ids[44]
702 702 commit44 = self.repo.get_commit(rev44)
703 703 assert commit44.branch == 'web-branch'
704 704
705 705 tip = self.repo.get_commit('tip')
706 706 assert 'tip' in tip.tags
707 707 """
708 708         # Those tests would fail - branch handling is going
709 709         # to be changed in the main API in order to support the git backend
710 710 pass
711 711
712 712 def test_file_size(self):
713 713 to_check = (
714 714 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
715 715 'vcs/backends/BaseRepository.py', 502),
716 716 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
717 717 'vcs/backends/hg.py', 854),
718 718 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
719 719 'setup.py', 1068),
720 720
721 721 ('d955cd312c17b02143c04fa1099a352b04368118',
722 722 'vcs/backends/base.py', 2921),
723 723 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
724 724 'vcs/backends/base.py', 3936),
725 725 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
726 726 'vcs/backends/base.py', 6189),
727 727 )
728 728 for commit_id, path, size in to_check:
729 729 node = self.repo.get_commit(commit_id).get_node(path)
730 730 assert node.is_file()
731 731 assert node.size == size
732 732
733 733 def test_file_history_from_commits(self):
734 734 node = self.repo[10].get_node('setup.py')
735 735 commit_ids = [commit.raw_id for commit in node.history]
736 736 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
737 737
738 738 node = self.repo[20].get_node('setup.py')
739 739 node_ids = [commit.raw_id for commit in node.history]
740 740 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
741 741 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
742 742
743 743         # special case: we check history from a commit that changed this
744 744         # particular file; this means we check that it is included as well
745 745 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
746 746 .get_node('setup.py')
747 747 node_ids = [commit.raw_id for commit in node.history]
748 748 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
749 749 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
750 750
751 751 def test_file_history(self):
752 752 # we can only check if those commits are present in the history
753 753         # as we cannot update this test every time the file is changed
754 754 files = {
755 755 'setup.py': [
756 756 '54386793436c938cff89326944d4c2702340037d',
757 757 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
758 758 '998ed409c795fec2012b1c0ca054d99888b22090',
759 759 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
760 760 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
761 761 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
762 762 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
763 763 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
764 764 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
765 765 ],
766 766 'vcs/nodes.py': [
767 767 '33fa3223355104431402a888fa77a4e9956feb3e',
768 768 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
769 769 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
770 770 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
771 771 'c877b68d18e792a66b7f4c529ea02c8f80801542',
772 772 '4313566d2e417cb382948f8d9d7c765330356054',
773 773 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
774 774 '54386793436c938cff89326944d4c2702340037d',
775 775 '54000345d2e78b03a99d561399e8e548de3f3203',
776 776 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
777 777 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
778 778 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
779 779 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
780 780 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
781 781 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
782 782 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
783 783 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
784 784 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
785 785 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
786 786 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
787 787 'f15c21f97864b4f071cddfbf2750ec2e23859414',
788 788 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
789 789 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
790 790 '84dec09632a4458f79f50ddbbd155506c460b4f9',
791 791 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
792 792 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
793 793 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
794 794 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
795 795 '6970b057cffe4aab0a792aa634c89f4bebf01441',
796 796 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
797 797 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
798 798 ],
799 799 'vcs/backends/git.py': [
800 800 '4cf116ad5a457530381135e2f4c453e68a1b0105',
801 801 '9a751d84d8e9408e736329767387f41b36935153',
802 802 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
803 803 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
804 804 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
805 805 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
806 806 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
807 807 '54000345d2e78b03a99d561399e8e548de3f3203',
808 808 ],
809 809 }
810 810 for path, commit_ids in files.items():
811 811 node = self.repo.get_commit(commit_ids[0]).get_node(path)
812 812 node_ids = [commit.raw_id for commit in node.history]
813 813 assert set(commit_ids).issubset(set(node_ids)), (
814 814 "We assumed that %s is subset of commit_ids for which file %s "
815 815 "has been changed, and history of that node returned: %s"
816 816 % (commit_ids, path, node_ids))
817 817
818 818 def test_file_annotate(self):
819 819 files = {
820 820 'vcs/backends/__init__.py': {
821 821 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
822 822 'lines_no': 1,
823 823 'commits': [
824 824 'c1214f7e79e02fc37156ff215cd71275450cffc3',
825 825 ],
826 826 },
827 827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
828 828 'lines_no': 21,
829 829 'commits': [
830 830 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
831 831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 834 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
835 835 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
836 836 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
837 837 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
838 838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 849 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
850 850 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
851 851 ],
852 852 },
853 853 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
854 854 'lines_no': 32,
855 855 'commits': [
856 856 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
857 857 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
858 858 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
859 859 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 861 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
862 862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 863 '54000345d2e78b03a99d561399e8e548de3f3203',
864 864 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
866 866 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
867 867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
868 868 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
869 869 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
870 870 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
871 871 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
872 872 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
873 873 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
874 874 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
875 875 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
876 876 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
877 877 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
878 878 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
879 879 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
880 880 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
881 881 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
882 882 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
883 883 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
884 884 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
885 885 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
886 886 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
887 887 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
888 888 ],
889 889 },
890 890 },
891 891 }
892 892
893 893 for fname, commit_dict in files.items():
894 894 for commit_id, __ in commit_dict.items():
895 895 commit = self.repo.get_commit(commit_id)
896 896
897 897 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
898 898 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
899 899 assert l1_1 == l1_2
900 900 l1 = l1_1
901 901 l2 = files[fname][commit_id]['commits']
902 902 assert l1 == l2, (
903 903                     "The lists of commit_ids for %s@commit_id %s "
904 904 "from annotation list should match each other, "
905 905 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
906 906
907 907 def test_files_state(self):
908 908 """
909 909 Tests state of FileNodes.
910 910 """
911 911 node = self.repo\
912 912 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
913 913 .get_node('vcs/utils/diffs.py')
914 914         assert node.state == NodeState.ADDED
915 915 assert node.added
916 916 assert not node.changed
917 917 assert not node.not_changed
918 918 assert not node.removed
919 919
920 920 node = self.repo\
921 921 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
922 922 .get_node('.hgignore')
923 923         assert node.state == NodeState.CHANGED
924 924 assert not node.added
925 925 assert node.changed
926 926 assert not node.not_changed
927 927 assert not node.removed
928 928
929 929 node = self.repo\
930 930 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
931 931 .get_node('setup.py')
932 932         assert node.state == NodeState.NOT_CHANGED
933 933 assert not node.added
934 934 assert not node.changed
935 935 assert node.not_changed
936 936 assert not node.removed
937 937
938 938         # If a node has the REMOVED state, trying to fetch it raises a
939 939         # NodeDoesNotExistError exception
940 940 commit = self.repo.get_commit(
941 941 'fa6600f6848800641328adbf7811fd2372c02ab2')
942 942 path = 'vcs/backends/BaseRepository.py'
943 943 with pytest.raises(NodeDoesNotExistError):
944 944 commit.get_node(path)
945 945 # but it would be one of ``removed`` (commit's attribute)
946 946 assert path in [rf.path for rf in commit.removed]
947 947
948 948 commit = self.repo.get_commit(
949 949 '54386793436c938cff89326944d4c2702340037d')
950 950 changed = [
951 951 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
952 952 'vcs/nodes.py']
953 953 assert set(changed) == set([f.path for f in commit.changed])
954 954
955 955 def test_unicode_branch_refs(self):
956 956 unicode_branches = {
957 957 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
958 958 u'refs/heads/uniΓ§ΓΆβˆ‚e': 'ΓΌrl',
959 959 }
960 960 with mock.patch(
961 961 ("rhodecode.lib.vcs.backends.git.repository"
962 962 ".GitRepository._refs"),
963 963 unicode_branches):
964 964 branches = self.repo.branches
965 965
966 966 assert 'unicode' in branches
967 967 assert u'uniΓ§ΓΆβˆ‚e' in branches
968 968
969 969 def test_unicode_tag_refs(self):
970 970 unicode_tags = {
971 971 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
972 972 u'refs/tags/uniΓ§ΓΆβˆ‚e': '6c0ce52b229aa978889e91b38777f800e85f330b',
973 973 }
974 974 with mock.patch(
975 975 ("rhodecode.lib.vcs.backends.git.repository"
976 976 ".GitRepository._refs"),
977 977 unicode_tags):
978 978 tags = self.repo.tags
979 979
980 980 assert 'unicode' in tags
981 981 assert u'uniΓ§ΓΆβˆ‚e' in tags
982 982
983 983 def test_commit_message_is_unicode(self):
984 984 for commit in self.repo:
985 985 assert type(commit.message) == unicode
986 986
987 987 def test_commit_author_is_unicode(self):
988 988 for commit in self.repo:
989 989 assert type(commit.author) == unicode
990 990
991 991 def test_repo_files_content_is_unicode(self):
992 992 commit = self.repo.get_commit()
993 993 for node in commit.get_node('/'):
994 994 if node.is_file():
995 995 assert type(node.content) == unicode
996 996
997 997 def test_wrong_path(self):
998 998 # There is 'setup.py' in the root dir but not there:
999 999 path = 'foo/bar/setup.py'
1000 1000 tip = self.repo.get_commit()
1001 1001 with pytest.raises(VCSError):
1002 1002 tip.get_node(path)
1003 1003
1004 1004 @pytest.mark.parametrize("author_email, commit_id", [
1005 1005 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1006 1006 ('lukasz.balcerzak@python-center.pl',
1007 1007 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1008 1008 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1009 1009 ])
1010 1010 def test_author_email(self, author_email, commit_id):
1011 1011 commit = self.repo.get_commit(commit_id)
1012 1012 assert author_email == commit.author_email
1013 1013
1014 1014 @pytest.mark.parametrize("author, commit_id", [
1015 1015 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1016 1016 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1017 1017 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1018 1018 ])
1019 1019 def test_author_username(self, author, commit_id):
1020 1020 commit = self.repo.get_commit(commit_id)
1021 1021 assert author == commit.author_name
1022 1022
1023 1023
1024 1024 class TestLargeFileRepo(object):
1025 1025
1026 1026 def test_large_file(self, backend_git):
1027 1027 conf = make_db_config()
1028 1028 repo = backend_git.create_test_repo('largefiles', conf)
1029 1029
1030 1030 tip = repo.scm_instance().get_commit()
1031 1031
1032 1032 # extract stored LF node into the origin cache
1033 1033 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1034 1034
1035 1035 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1036 1036 oid_path = os.path.join(lfs_store, oid)
1037 1037 oid_destination = os.path.join(
1038 1038 conf.get('vcs_git_lfs', 'store_location'), oid)
1039 1039 shutil.copy(oid_path, oid_destination)
1040 1040
1041 1041 node = tip.get_node('1MB.zip')
1042 1042
1043 1043 lf_node = node.get_largefile_node()
1044 1044
1045 1045 assert lf_node.is_largefile() is True
1046 1046 assert lf_node.size == 1024000
1047 1047 assert lf_node.name == '1MB.zip'
1048 1048
1049 1049
1050 1050 @pytest.mark.usefixtures("vcs_repository_support")
1051 1051 class TestGitSpecificWithRepo(BackendTestMixin):
1052 1052
1053 1053 @classmethod
1054 1054 def _get_commits(cls):
1055 1055 return [
1056 1056 {
1057 1057 'message': 'Initial',
1058 1058 'author': 'Joe Doe <joe.doe@example.com>',
1059 1059 'date': datetime.datetime(2010, 1, 1, 20),
1060 1060 'added': [
1061 1061 FileNode('foobar/static/js/admin/base.js', content='base'),
1062 1062 FileNode(
1063 1063 'foobar/static/admin', content='admin',
1064 1064 mode=0o120000), # this is a link
1065 1065 FileNode('foo', content='foo'),
1066 1066 ],
1067 1067 },
1068 1068 {
1069 1069 'message': 'Second',
1070 1070 'author': 'Joe Doe <joe.doe@example.com>',
1071 1071 'date': datetime.datetime(2010, 1, 1, 22),
1072 1072 'added': [
1073 1073 FileNode('foo2', content='foo2'),
1074 1074 ],
1075 1075 },
1076 1076 ]
1077 1077
1078 1078 def test_paths_slow_traversing(self):
1079 1079 commit = self.repo.get_commit()
1080 1080 assert commit.get_node('foobar').get_node('static').get_node('js')\
1081 1081 .get_node('admin').get_node('base.js').content == 'base'
1082 1082
1083 1083 def test_paths_fast_traversing(self):
1084 1084 commit = self.repo.get_commit()
1085 1085 assert (
1086 1086 commit.get_node('foobar/static/js/admin/base.js').content ==
1087 1087 'base')
1088 1088
1089 1089 def test_get_diff_runs_git_command_with_hashes(self):
1090 comm1 = self.repo[0]
1091 comm2 = self.repo[1]
1090 1092 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1091 self.repo.get_diff(self.repo[0], self.repo[1])
1093 self.repo.get_diff(comm1, comm2)
1094
1095 self.repo.run_git_command.assert_called_once_with(
1096 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1097 '--abbrev=40', comm1.raw_id, comm2.raw_id])
1098
1099 def test_get_diff_runs_git_command_with_str_hashes(self):
1100 comm2 = self.repo[1]
1101 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1102 self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2)
1103 self.repo.run_git_command.assert_called_once_with(
1104 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1105 '--abbrev=40', comm2.raw_id])
1106
1107 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1108 comm1 = self.repo[0]
1109 comm2 = self.repo[1]
1110 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1111 self.repo.get_diff(comm1, comm2, 'foo')
1092 1112 self.repo.run_git_command.assert_called_once_with(
1093 1113 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1094 1114 '--abbrev=40', self.repo._lookup_commit(0),
1095 self.repo._lookup_commit(1)])
1096
1097 def test_get_diff_runs_git_command_with_str_hashes(self):
1098 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1099 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1100 self.repo.run_git_command.assert_called_once_with(
1101 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1102 '--abbrev=40', self.repo._lookup_commit(1)])
1103
1104 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1105 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1106 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1107 self.repo.run_git_command.assert_called_once_with(
1108 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1109 '--abbrev=40', self.repo._lookup_commit(0),
1110 self.repo._lookup_commit(1), '--', 'foo'])
1115 comm2.raw_id, '--', 'foo'])
1111 1116
1112 1117
1113 1118 @pytest.mark.usefixtures("vcs_repository_support")
1114 1119 class TestGitRegression(BackendTestMixin):
1115 1120
1116 1121 @classmethod
1117 1122 def _get_commits(cls):
1118 1123 return [
1119 1124 {
1120 1125 'message': 'Initial',
1121 1126 'author': 'Joe Doe <joe.doe@example.com>',
1122 1127 'date': datetime.datetime(2010, 1, 1, 20),
1123 1128 'added': [
1124 1129 FileNode('bot/__init__.py', content='base'),
1125 1130 FileNode('bot/templates/404.html', content='base'),
1126 1131 FileNode('bot/templates/500.html', content='base'),
1127 1132 ],
1128 1133 },
1129 1134 {
1130 1135 'message': 'Second',
1131 1136 'author': 'Joe Doe <joe.doe@example.com>',
1132 1137 'date': datetime.datetime(2010, 1, 1, 22),
1133 1138 'added': [
1134 1139 FileNode('bot/build/migrations/1.py', content='foo2'),
1135 1140 FileNode('bot/build/migrations/2.py', content='foo2'),
1136 1141 FileNode(
1137 1142 'bot/build/static/templates/f.html', content='foo2'),
1138 1143 FileNode(
1139 1144 'bot/build/static/templates/f1.html', content='foo2'),
1140 1145 FileNode('bot/build/templates/err.html', content='foo2'),
1141 1146 FileNode('bot/build/templates/err2.html', content='foo2'),
1142 1147 ],
1143 1148 },
1144 1149 ]
1145 1150
1146 1151 @pytest.mark.parametrize("path, expected_paths", [
1147 1152 ('bot', [
1148 1153 'bot/build',
1149 1154 'bot/templates',
1150 1155 'bot/__init__.py']),
1151 1156 ('bot/build', [
1152 1157 'bot/build/migrations',
1153 1158 'bot/build/static',
1154 1159 'bot/build/templates']),
1155 1160 ('bot/build/static', [
1156 1161 'bot/build/static/templates']),
1157 1162 ('bot/build/static/templates', [
1158 1163 'bot/build/static/templates/f.html',
1159 1164 'bot/build/static/templates/f1.html']),
1160 1165 ('bot/build/templates', [
1161 1166 'bot/build/templates/err.html',
1162 1167 'bot/build/templates/err2.html']),
1163 1168 ('bot/templates/', [
1164 1169 'bot/templates/404.html',
1165 1170 'bot/templates/500.html']),
1166 1171 ])
1167 1172 def test_similar_paths(self, path, expected_paths):
1168 1173 commit = self.repo.get_commit()
1169 1174 paths = [n.path for n in commit.get_nodes(path)]
1170 1175 assert paths == expected_paths
1171 1176
1172 1177
1173 1178 class TestDiscoverGitVersion(object):
1174 1179
1175 1180 def test_returns_git_version(self, baseapp):
1176 1181 version = discover_git_version()
1177 1182 assert version
1178 1183
1179 1184 def test_returns_empty_string_without_vcsserver(self):
1180 1185 mock_connection = mock.Mock()
1181 1186 mock_connection.discover_git_version = mock.Mock(
1182 1187 side_effect=Exception)
1183 1188 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1184 1189 version = discover_git_version()
1185 1190 assert version == ''
1186 1191
1187 1192
1188 1193 class TestGetSubmoduleUrl(object):
1189 1194 def test_submodules_file_found(self):
1190 1195 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1191 1196 node = mock.Mock()
1192 1197 with mock.patch.object(
1193 1198 commit, 'get_node', return_value=node) as get_node_mock:
1194 1199 node.content = (
1195 1200 '[submodule "subrepo1"]\n'
1196 1201 '\tpath = subrepo1\n'
1197 1202 '\turl = https://code.rhodecode.com/dulwich\n'
1198 1203 )
1199 1204 result = commit._get_submodule_url('subrepo1')
1200 1205 get_node_mock.assert_called_once_with('.gitmodules')
1201 1206 assert result == 'https://code.rhodecode.com/dulwich'
1202 1207
1203 1208 def test_complex_submodule_path(self):
1204 1209 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1205 1210 node = mock.Mock()
1206 1211 with mock.patch.object(
1207 1212 commit, 'get_node', return_value=node) as get_node_mock:
1208 1213 node.content = (
1209 1214 '[submodule "complex/subrepo/path"]\n'
1210 1215 '\tpath = complex/subrepo/path\n'
1211 1216 '\turl = https://code.rhodecode.com/dulwich\n'
1212 1217 )
1213 1218 result = commit._get_submodule_url('complex/subrepo/path')
1214 1219 get_node_mock.assert_called_once_with('.gitmodules')
1215 1220 assert result == 'https://code.rhodecode.com/dulwich'
1216 1221
1217 1222 def test_submodules_file_not_found(self):
1218 1223 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1219 1224 with mock.patch.object(
1220 1225 commit, 'get_node', side_effect=NodeDoesNotExistError):
1221 1226 result = commit._get_submodule_url('complex/subrepo/path')
1222 1227 assert result is None
1223 1228
1224 1229 def test_path_not_found(self):
1225 1230 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1226 1231 node = mock.Mock()
1227 1232 with mock.patch.object(
1228 1233 commit, 'get_node', return_value=node) as get_node_mock:
1229 1234 node.content = (
1230 1235 '[submodule "subrepo1"]\n'
1231 1236 '\tpath = subrepo1\n'
1232 1237 '\turl = https://code.rhodecode.com/dulwich\n'
1233 1238 )
1234 1239 result = commit._get_submodule_url('subrepo2')
1235 1240 get_node_mock.assert_called_once_with('.gitmodules')
1236 1241 assert result is None
1237 1242
1238 1243 def test_returns_cached_values(self):
1239 1244 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1240 1245 node = mock.Mock()
1241 1246 with mock.patch.object(
1242 1247 commit, 'get_node', return_value=node) as get_node_mock:
1243 1248 node.content = (
1244 1249 '[submodule "subrepo1"]\n'
1245 1250 '\tpath = subrepo1\n'
1246 1251 '\turl = https://code.rhodecode.com/dulwich\n'
1247 1252 )
1248 1253 for _ in range(3):
1249 1254 commit._get_submodule_url('subrepo1')
1250 1255 get_node_mock.assert_called_once_with('.gitmodules')
1251 1256
1252 1257 def test_get_node_returns_a_link(self):
1253 1258 repository = mock.Mock()
1254 1259 repository.alias = 'git'
1255 1260 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1256 1261 submodule_url = 'https://code.rhodecode.com/dulwich'
1257 1262 get_id_patch = mock.patch.object(
1258 1263 commit, '_get_id_for_path', return_value=(1, 'link'))
1259 1264 get_submodule_patch = mock.patch.object(
1260 1265 commit, '_get_submodule_url', return_value=submodule_url)
1261 1266
1262 1267 with get_id_patch, get_submodule_patch as submodule_mock:
1263 1268 node = commit.get_node('/abcde')
1264 1269
1265 1270 submodule_mock.assert_called_once_with('/abcde')
1266 1271 assert type(node) == SubModuleNode
1267 1272 assert node.url == submodule_url
1268 1273
1269 1274 def test_get_nodes_returns_links(self):
1270 1275 repository = mock.MagicMock()
1271 1276 repository.alias = 'git'
1272 1277 repository._remote.tree_items.return_value = [
1273 1278 ('subrepo', 'stat', 1, 'link')
1274 1279 ]
1275 1280 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1276 1281 submodule_url = 'https://code.rhodecode.com/dulwich'
1277 1282 get_id_patch = mock.patch.object(
1278 1283 commit, '_get_id_for_path', return_value=(1, 'tree'))
1279 1284 get_submodule_patch = mock.patch.object(
1280 1285 commit, '_get_submodule_url', return_value=submodule_url)
1281 1286
1282 1287 with get_id_patch, get_submodule_patch as submodule_mock:
1283 1288 nodes = commit.get_nodes('/abcde')
1284 1289
1285 1290 submodule_mock.assert_called_once_with('/abcde/subrepo')
1286 1291 assert len(nodes) == 1
1287 1292 assert type(nodes[0]) == SubModuleNode
1288 1293 assert nodes[0].url == submodule_url