pull-requests: added nicer formatting for merge conflicting files
marcink
r4087:697a75c3 default
@@ -1,1217 +1,1217 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.tests import (
34 34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 35
36 36
37 37 def route_path(name, params=None, **kwargs):
38 38 import urllib
39 39
40 40 base_url = {
41 41 'repo_changelog': '/{repo_name}/changelog',
42 42 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43 43 'repo_commits': '/{repo_name}/commits',
44 44 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
45 45 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
46 46 'pullrequest_show_all': '/{repo_name}/pull-request',
47 47 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
48 48 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
49 49 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
50 50 'pullrequest_new': '/{repo_name}/pull-request/new',
51 51 'pullrequest_create': '/{repo_name}/pull-request/create',
52 52 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
53 53 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
54 54 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
55 55 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
56 56 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
57 57 }[name].format(**kwargs)
58 58
59 59 if params:
60 60 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
61 61 return base_url
62 62
63 63
64 64 @pytest.mark.usefixtures('app', 'autologin_user')
65 65 @pytest.mark.backends("git", "hg")
66 66 class TestPullrequestsView(object):
67 67
68 68 def test_index(self, backend):
69 69 self.app.get(route_path(
70 70 'pullrequest_new',
71 71 repo_name=backend.repo_name))
72 72
73 73 def test_option_menu_create_pull_request_exists(self, backend):
74 74 repo_name = backend.repo_name
75 75 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
76 76
77 77 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
78 78 'pullrequest_new', repo_name=repo_name)
79 79 response.mustcontain(create_pr_link)
80 80
81 81 def test_create_pr_form_with_raw_commit_id(self, backend):
82 82 repo = backend.repo
83 83
84 84 self.app.get(
85 85 route_path('pullrequest_new', repo_name=repo.repo_name,
86 86 commit=repo.get_commit().raw_id),
87 87 status=200)
88 88
89 89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 90 @pytest.mark.parametrize('range_diff', ["0", "1"])
91 91 def test_show(self, pr_util, pr_merge_enabled, range_diff):
92 92 pull_request = pr_util.create_pull_request(
93 93 mergeable=pr_merge_enabled, enable_notifications=False)
94 94
95 95 response = self.app.get(route_path(
96 96 'pullrequest_show',
97 97 repo_name=pull_request.target_repo.scm_instance().name,
98 98 pull_request_id=pull_request.pull_request_id,
99 99 params={'range-diff': range_diff}))
100 100
101 101 for commit_id in pull_request.revisions:
102 102 response.mustcontain(commit_id)
103 103
104 104 assert pull_request.target_ref_parts.type in response
105 105 assert pull_request.target_ref_parts.name in response
106 106 target_clone_url = pull_request.target_repo.clone_url()
107 107 assert target_clone_url in response
108 108
109 109 assert 'class="pull-request-merge"' in response
110 110 if pr_merge_enabled:
111 111 response.mustcontain('Pull request reviewer approval is pending')
112 112 else:
113 113 response.mustcontain('Server-side pull request merging is disabled.')
114 114
115 115 if range_diff == "1":
116 116 response.mustcontain('Turn off: Show the diff as commit range')
117 117
118 118 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
119 119 # Logout
120 120 response = self.app.post(
121 121 h.route_path('logout'),
122 122 params={'csrf_token': csrf_token})
123 123 # Login as regular user
124 124 response = self.app.post(h.route_path('login'),
125 125 {'username': TEST_USER_REGULAR_LOGIN,
126 126 'password': 'test12'})
127 127
128 128 pull_request = pr_util.create_pull_request(
129 129 author=TEST_USER_REGULAR_LOGIN)
130 130
131 131 response = self.app.get(route_path(
132 132 'pullrequest_show',
133 133 repo_name=pull_request.target_repo.scm_instance().name,
134 134 pull_request_id=pull_request.pull_request_id))
135 135
136 136 response.mustcontain('Server-side pull request merging is disabled.')
137 137
138 138 assert_response = response.assert_response()
139 139 # for a regular user without merge permissions, we don't see it
140 140 assert_response.no_element_exists('#close-pull-request-action')
141 141
142 142 user_util.grant_user_permission_to_repo(
143 143 pull_request.target_repo,
144 144 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
145 145 'repository.write')
146 146 response = self.app.get(route_path(
147 147 'pullrequest_show',
148 148 repo_name=pull_request.target_repo.scm_instance().name,
149 149 pull_request_id=pull_request.pull_request_id))
150 150
151 151 response.mustcontain('Server-side pull request merging is disabled.')
152 152
153 153 assert_response = response.assert_response()
154 154 # now the regular user has merge permissions, we see the CLOSE button
155 155 assert_response.one_element_exists('#close-pull-request-action')
156 156
157 157 def test_show_invalid_commit_id(self, pr_util):
158 158 # Simulating invalid revisions which will cause a lookup error
159 159 pull_request = pr_util.create_pull_request()
160 160 pull_request.revisions = ['invalid']
161 161 Session().add(pull_request)
162 162 Session().commit()
163 163
164 164 response = self.app.get(route_path(
165 165 'pullrequest_show',
166 166 repo_name=pull_request.target_repo.scm_instance().name,
167 167 pull_request_id=pull_request.pull_request_id))
168 168
169 169 for commit_id in pull_request.revisions:
170 170 response.mustcontain(commit_id)
171 171
172 172 def test_show_invalid_source_reference(self, pr_util):
173 173 pull_request = pr_util.create_pull_request()
174 174 pull_request.source_ref = 'branch:b:invalid'
175 175 Session().add(pull_request)
176 176 Session().commit()
177 177
178 178 self.app.get(route_path(
179 179 'pullrequest_show',
180 180 repo_name=pull_request.target_repo.scm_instance().name,
181 181 pull_request_id=pull_request.pull_request_id))
182 182
183 183 def test_edit_title_description(self, pr_util, csrf_token):
184 184 pull_request = pr_util.create_pull_request()
185 185 pull_request_id = pull_request.pull_request_id
186 186
187 187 response = self.app.post(
188 188 route_path('pullrequest_update',
189 189 repo_name=pull_request.target_repo.repo_name,
190 190 pull_request_id=pull_request_id),
191 191 params={
192 192 'edit_pull_request': 'true',
193 193 'title': 'New title',
194 194 'description': 'New description',
195 195 'csrf_token': csrf_token})
196 196
197 197 assert_session_flash(
198 198 response, u'Pull request title & description updated.',
199 199 category='success')
200 200
201 201 pull_request = PullRequest.get(pull_request_id)
202 202 assert pull_request.title == 'New title'
203 203 assert pull_request.description == 'New description'
204 204
205 205 def test_edit_title_description_closed(self, pr_util, csrf_token):
206 206 pull_request = pr_util.create_pull_request()
207 207 pull_request_id = pull_request.pull_request_id
208 208 repo_name = pull_request.target_repo.repo_name
209 209 pr_util.close()
210 210
211 211 response = self.app.post(
212 212 route_path('pullrequest_update',
213 213 repo_name=repo_name, pull_request_id=pull_request_id),
214 214 params={
215 215 'edit_pull_request': 'true',
216 216 'title': 'New title',
217 217 'description': 'New description',
218 218 'csrf_token': csrf_token}, status=200)
219 219 assert_session_flash(
220 220 response, u'Cannot update closed pull requests.',
221 221 category='error')
222 222
223 223 def test_update_invalid_source_reference(self, pr_util, csrf_token):
224 224 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
225 225
226 226 pull_request = pr_util.create_pull_request()
227 227 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
228 228 Session().add(pull_request)
229 229 Session().commit()
230 230
231 231 pull_request_id = pull_request.pull_request_id
232 232
233 233 response = self.app.post(
234 234 route_path('pullrequest_update',
235 235 repo_name=pull_request.target_repo.repo_name,
236 236 pull_request_id=pull_request_id),
237 237 params={'update_commits': 'true', 'csrf_token': csrf_token})
238 238
239 239 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
240 240 UpdateFailureReason.MISSING_SOURCE_REF])
241 241 assert_session_flash(response, expected_msg, category='error')
242 242
243 243 def test_missing_target_reference(self, pr_util, csrf_token):
244 244 from rhodecode.lib.vcs.backends.base import MergeFailureReason
245 245 pull_request = pr_util.create_pull_request(
246 246 approved=True, mergeable=True)
247 247 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
248 248 pull_request.target_ref = unicode_reference
249 249 Session().add(pull_request)
250 250 Session().commit()
251 251
252 252 pull_request_id = pull_request.pull_request_id
253 253 pull_request_url = route_path(
254 254 'pullrequest_show',
255 255 repo_name=pull_request.target_repo.repo_name,
256 256 pull_request_id=pull_request_id)
257 257
258 258 response = self.app.get(pull_request_url)
259 259 target_ref_id = 'invalid-branch'
260 260 merge_resp = MergeResponse(
261 261 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
262 262 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
263 263 response.assert_response().element_contains(
264 'span[data-role="merge-message"]', merge_resp.merge_status_message)
264 'div[data-role="merge-message"]', merge_resp.merge_status_message)
265 265
266 266 def test_comment_and_close_pull_request_custom_message_approved(
267 267 self, pr_util, csrf_token, xhr_header):
268 268
269 269 pull_request = pr_util.create_pull_request(approved=True)
270 270 pull_request_id = pull_request.pull_request_id
271 271 author = pull_request.user_id
272 272 repo = pull_request.target_repo.repo_id
273 273
274 274 self.app.post(
275 275 route_path('pullrequest_comment_create',
276 276 repo_name=pull_request.target_repo.scm_instance().name,
277 277 pull_request_id=pull_request_id),
278 278 params={
279 279 'close_pull_request': '1',
280 280 'text': 'Closing a PR',
281 281 'csrf_token': csrf_token},
282 282 extra_environ=xhr_header,)
283 283
284 284 journal = UserLog.query()\
285 285 .filter(UserLog.user_id == author)\
286 286 .filter(UserLog.repository_id == repo) \
287 287 .order_by(UserLog.user_log_id.asc()) \
288 288 .all()
289 289 assert journal[-1].action == 'repo.pull_request.close'
290 290
291 291 pull_request = PullRequest.get(pull_request_id)
292 292 assert pull_request.is_closed()
293 293
294 294 status = ChangesetStatusModel().get_status(
295 295 pull_request.source_repo, pull_request=pull_request)
296 296 assert status == ChangesetStatus.STATUS_APPROVED
297 297 comments = ChangesetComment().query() \
298 298 .filter(ChangesetComment.pull_request == pull_request) \
299 299 .order_by(ChangesetComment.comment_id.asc())\
300 300 .all()
301 301 assert comments[-1].text == 'Closing a PR'
302 302
303 303 def test_comment_force_close_pull_request_rejected(
304 304 self, pr_util, csrf_token, xhr_header):
305 305 pull_request = pr_util.create_pull_request()
306 306 pull_request_id = pull_request.pull_request_id
307 307 PullRequestModel().update_reviewers(
308 308 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
309 309 pull_request.author)
310 310 author = pull_request.user_id
311 311 repo = pull_request.target_repo.repo_id
312 312
313 313 self.app.post(
314 314 route_path('pullrequest_comment_create',
315 315 repo_name=pull_request.target_repo.scm_instance().name,
316 316 pull_request_id=pull_request_id),
317 317 params={
318 318 'close_pull_request': '1',
319 319 'csrf_token': csrf_token},
320 320 extra_environ=xhr_header)
321 321
322 322 pull_request = PullRequest.get(pull_request_id)
323 323
324 324 journal = UserLog.query()\
325 325 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
326 326 .order_by(UserLog.user_log_id.asc()) \
327 327 .all()
328 328 assert journal[-1].action == 'repo.pull_request.close'
329 329
330 330 # check only the latest status, not the review status
331 331 status = ChangesetStatusModel().get_status(
332 332 pull_request.source_repo, pull_request=pull_request)
333 333 assert status == ChangesetStatus.STATUS_REJECTED
334 334
335 335 def test_comment_and_close_pull_request(
336 336 self, pr_util, csrf_token, xhr_header):
337 337 pull_request = pr_util.create_pull_request()
338 338 pull_request_id = pull_request.pull_request_id
339 339
340 340 response = self.app.post(
341 341 route_path('pullrequest_comment_create',
342 342 repo_name=pull_request.target_repo.scm_instance().name,
343 343 pull_request_id=pull_request.pull_request_id),
344 344 params={
345 345 'close_pull_request': 'true',
346 346 'csrf_token': csrf_token},
347 347 extra_environ=xhr_header)
348 348
349 349 assert response.json
350 350
351 351 pull_request = PullRequest.get(pull_request_id)
352 352 assert pull_request.is_closed()
353 353
354 354 # check only the latest status, not the review status
355 355 status = ChangesetStatusModel().get_status(
356 356 pull_request.source_repo, pull_request=pull_request)
357 357 assert status == ChangesetStatus.STATUS_REJECTED
358 358
359 359 def test_create_pull_request(self, backend, csrf_token):
360 360 commits = [
361 361 {'message': 'ancestor'},
362 362 {'message': 'change'},
363 363 {'message': 'change2'},
364 364 ]
365 365 commit_ids = backend.create_master_repo(commits)
366 366 target = backend.create_repo(heads=['ancestor'])
367 367 source = backend.create_repo(heads=['change2'])
368 368
369 369 response = self.app.post(
370 370 route_path('pullrequest_create', repo_name=source.repo_name),
371 371 [
372 372 ('source_repo', source.repo_name),
373 373 ('source_ref', 'branch:default:' + commit_ids['change2']),
374 374 ('target_repo', target.repo_name),
375 375 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
376 376 ('common_ancestor', commit_ids['ancestor']),
377 377 ('pullrequest_title', 'Title'),
378 378 ('pullrequest_desc', 'Description'),
379 379 ('description_renderer', 'markdown'),
380 380 ('__start__', 'review_members:sequence'),
381 381 ('__start__', 'reviewer:mapping'),
382 382 ('user_id', '1'),
383 383 ('__start__', 'reasons:sequence'),
384 384 ('reason', 'Some reason'),
385 385 ('__end__', 'reasons:sequence'),
386 386 ('__start__', 'rules:sequence'),
387 387 ('__end__', 'rules:sequence'),
388 388 ('mandatory', 'False'),
389 389 ('__end__', 'reviewer:mapping'),
390 390 ('__end__', 'review_members:sequence'),
391 391 ('__start__', 'revisions:sequence'),
392 392 ('revisions', commit_ids['change']),
393 393 ('revisions', commit_ids['change2']),
394 394 ('__end__', 'revisions:sequence'),
395 395 ('user', ''),
396 396 ('csrf_token', csrf_token),
397 397 ],
398 398 status=302)
399 399
400 400 location = response.headers['Location']
401 401 pull_request_id = location.rsplit('/', 1)[1]
402 402 assert pull_request_id != 'new'
403 403 pull_request = PullRequest.get(int(pull_request_id))
404 404
405 405 # check that we now have both revisions
406 406 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
407 407 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
408 408 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
409 409 assert pull_request.target_ref == expected_target_ref
410 410
411 411 def test_reviewer_notifications(self, backend, csrf_token):
412 412 # We have to use the app.post for this test so it will create the
413 413 # notifications properly with the new PR
414 414 commits = [
415 415 {'message': 'ancestor',
416 416 'added': [FileNode('file_A', content='content_of_ancestor')]},
417 417 {'message': 'change',
418 418 'added': [FileNode('file_a', content='content_of_change')]},
419 419 {'message': 'change-child'},
420 420 {'message': 'ancestor-child', 'parents': ['ancestor'],
421 421 'added': [
422 422 FileNode('file_B', content='content_of_ancestor_child')]},
423 423 {'message': 'ancestor-child-2'},
424 424 ]
425 425 commit_ids = backend.create_master_repo(commits)
426 426 target = backend.create_repo(heads=['ancestor-child'])
427 427 source = backend.create_repo(heads=['change'])
428 428
429 429 response = self.app.post(
430 430 route_path('pullrequest_create', repo_name=source.repo_name),
431 431 [
432 432 ('source_repo', source.repo_name),
433 433 ('source_ref', 'branch:default:' + commit_ids['change']),
434 434 ('target_repo', target.repo_name),
435 435 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
436 436 ('common_ancestor', commit_ids['ancestor']),
437 437 ('pullrequest_title', 'Title'),
438 438 ('pullrequest_desc', 'Description'),
439 439 ('description_renderer', 'markdown'),
440 440 ('__start__', 'review_members:sequence'),
441 441 ('__start__', 'reviewer:mapping'),
442 442 ('user_id', '2'),
443 443 ('__start__', 'reasons:sequence'),
444 444 ('reason', 'Some reason'),
445 445 ('__end__', 'reasons:sequence'),
446 446 ('__start__', 'rules:sequence'),
447 447 ('__end__', 'rules:sequence'),
448 448 ('mandatory', 'False'),
449 449 ('__end__', 'reviewer:mapping'),
450 450 ('__end__', 'review_members:sequence'),
451 451 ('__start__', 'revisions:sequence'),
452 452 ('revisions', commit_ids['change']),
453 453 ('__end__', 'revisions:sequence'),
454 454 ('user', ''),
455 455 ('csrf_token', csrf_token),
456 456 ],
457 457 status=302)
458 458
459 459 location = response.headers['Location']
460 460
461 461 pull_request_id = location.rsplit('/', 1)[1]
462 462 assert pull_request_id != 'new'
463 463 pull_request = PullRequest.get(int(pull_request_id))
464 464
465 465 # Check that a notification was made
466 466 notifications = Notification.query()\
467 467 .filter(Notification.created_by == pull_request.author.user_id,
468 468 Notification.type_ == Notification.TYPE_PULL_REQUEST,
469 469 Notification.subject.contains(
470 470 "requested a pull request review. !%s" % pull_request_id))
471 471 assert len(notifications.all()) == 1
472 472
473 473 # Change reviewers and check that a notification was made
474 474 PullRequestModel().update_reviewers(
475 475 pull_request.pull_request_id, [(1, [], False, [])],
476 476 pull_request.author)
477 477 assert len(notifications.all()) == 2
478 478
479 479 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
480 480 csrf_token):
481 481 commits = [
482 482 {'message': 'ancestor',
483 483 'added': [FileNode('file_A', content='content_of_ancestor')]},
484 484 {'message': 'change',
485 485 'added': [FileNode('file_a', content='content_of_change')]},
486 486 {'message': 'change-child'},
487 487 {'message': 'ancestor-child', 'parents': ['ancestor'],
488 488 'added': [
489 489 FileNode('file_B', content='content_of_ancestor_child')]},
490 490 {'message': 'ancestor-child-2'},
491 491 ]
492 492 commit_ids = backend.create_master_repo(commits)
493 493 target = backend.create_repo(heads=['ancestor-child'])
494 494 source = backend.create_repo(heads=['change'])
495 495
496 496 response = self.app.post(
497 497 route_path('pullrequest_create', repo_name=source.repo_name),
498 498 [
499 499 ('source_repo', source.repo_name),
500 500 ('source_ref', 'branch:default:' + commit_ids['change']),
501 501 ('target_repo', target.repo_name),
502 502 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
503 503 ('common_ancestor', commit_ids['ancestor']),
504 504 ('pullrequest_title', 'Title'),
505 505 ('pullrequest_desc', 'Description'),
506 506 ('description_renderer', 'markdown'),
507 507 ('__start__', 'review_members:sequence'),
508 508 ('__start__', 'reviewer:mapping'),
509 509 ('user_id', '1'),
510 510 ('__start__', 'reasons:sequence'),
511 511 ('reason', 'Some reason'),
512 512 ('__end__', 'reasons:sequence'),
513 513 ('__start__', 'rules:sequence'),
514 514 ('__end__', 'rules:sequence'),
515 515 ('mandatory', 'False'),
516 516 ('__end__', 'reviewer:mapping'),
517 517 ('__end__', 'review_members:sequence'),
518 518 ('__start__', 'revisions:sequence'),
519 519 ('revisions', commit_ids['change']),
520 520 ('__end__', 'revisions:sequence'),
521 521 ('user', ''),
522 522 ('csrf_token', csrf_token),
523 523 ],
524 524 status=302)
525 525
526 526 location = response.headers['Location']
527 527
528 528 pull_request_id = location.rsplit('/', 1)[1]
529 529 assert pull_request_id != 'new'
530 530 pull_request = PullRequest.get(int(pull_request_id))
531 531
532 532 # target_ref has to point to the ancestor's commit_id in order to
533 533 # show the correct diff
534 534 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
535 535 assert pull_request.target_ref == expected_target_ref
536 536
537 537 # Check generated diff contents
538 538 response = response.follow()
539 539 assert 'content_of_ancestor' not in response.body
540 540 assert 'content_of_ancestor-child' not in response.body
541 541 assert 'content_of_change' in response.body
542 542
543 543 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
544 544 # Clear any previous calls to rcextensions
545 545 rhodecode.EXTENSIONS.calls.clear()
546 546
547 547 pull_request = pr_util.create_pull_request(
548 548 approved=True, mergeable=True)
549 549 pull_request_id = pull_request.pull_request_id
550 550 repo_name = pull_request.target_repo.scm_instance().name,
551 551
552 552 url = route_path('pullrequest_merge',
553 553 repo_name=str(repo_name[0]),
554 554 pull_request_id=pull_request_id)
555 555 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
556 556
557 557 pull_request = PullRequest.get(pull_request_id)
558 558
559 559 assert response.status_int == 200
560 560 assert pull_request.is_closed()
561 561 assert_pull_request_status(
562 562 pull_request, ChangesetStatus.STATUS_APPROVED)
563 563
564 564 # Check the relevant log entries were added
565 565 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
566 566 actions = [log.action for log in user_logs]
567 567 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
568 568 expected_actions = [
569 569 u'repo.pull_request.close',
570 570 u'repo.pull_request.merge',
571 571 u'repo.pull_request.comment.create'
572 572 ]
573 573 assert actions == expected_actions
574 574
575 575 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
576 576 actions = [log for log in user_logs]
577 577 assert actions[-1].action == 'user.push'
578 578 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
579 579
580 580 # Check post_push rcextension was really executed
581 581 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
582 582 assert len(push_calls) == 1
583 583 unused_last_call_args, last_call_kwargs = push_calls[0]
584 584 assert last_call_kwargs['action'] == 'push'
585 585 assert last_call_kwargs['commit_ids'] == pr_commit_ids
586 586
587 587 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
588 588 pull_request = pr_util.create_pull_request(mergeable=False)
589 589 pull_request_id = pull_request.pull_request_id
590 590 pull_request = PullRequest.get(pull_request_id)
591 591
592 592 response = self.app.post(
593 593 route_path('pullrequest_merge',
594 594 repo_name=pull_request.target_repo.scm_instance().name,
595 595 pull_request_id=pull_request.pull_request_id),
596 596 params={'csrf_token': csrf_token}).follow()
597 597
598 598 assert response.status_int == 200
599 599 response.mustcontain(
600 600 'Merge is not currently possible because of below failed checks.')
601 601 response.mustcontain('Server-side pull request merging is disabled.')
602 602
603 603 @pytest.mark.skip_backends('svn')
604 604 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
605 605 pull_request = pr_util.create_pull_request(mergeable=True)
606 606 pull_request_id = pull_request.pull_request_id
607 607 repo_name = pull_request.target_repo.scm_instance().name
608 608
609 609 response = self.app.post(
610 610 route_path('pullrequest_merge',
611 611 repo_name=repo_name, pull_request_id=pull_request_id),
612 612 params={'csrf_token': csrf_token}).follow()
613 613
614 614 assert response.status_int == 200
615 615
616 616 response.mustcontain(
617 617 'Merge is not currently possible because of below failed checks.')
618 618 response.mustcontain('Pull request reviewer approval is pending.')
619 619
620 620 def test_merge_pull_request_renders_failure_reason(
621 621 self, user_regular, csrf_token, pr_util):
622 622 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
623 623 pull_request_id = pull_request.pull_request_id
624 624 repo_name = pull_request.target_repo.scm_instance().name
625 625
626 626 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
627 627 MergeFailureReason.PUSH_FAILED,
628 628 metadata={'target': 'shadow repo',
629 629 'merge_commit': 'xxx'})
630 630 model_patcher = mock.patch.multiple(
631 631 PullRequestModel,
632 632 merge_repo=mock.Mock(return_value=merge_resp),
633 633 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
634 634
635 635 with model_patcher:
636 636 response = self.app.post(
637 637 route_path('pullrequest_merge',
638 638 repo_name=repo_name,
639 639 pull_request_id=pull_request_id),
640 640 params={'csrf_token': csrf_token}, status=302)
641 641
642 642 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
643 643 metadata={'target': 'shadow repo',
644 644 'merge_commit': 'xxx'})
645 645 assert_session_flash(response, merge_resp.merge_status_message)
646 646
647 647 def test_update_source_revision(self, backend, csrf_token):
648 648 commits = [
649 649 {'message': 'ancestor'},
650 650 {'message': 'change'},
651 651 {'message': 'change-2'},
652 652 ]
653 653 commit_ids = backend.create_master_repo(commits)
654 654 target = backend.create_repo(heads=['ancestor'])
655 655 source = backend.create_repo(heads=['change'])
656 656
657 657 # create pr from a in source to A in target
658 658 pull_request = PullRequest()
659 659
660 660 pull_request.source_repo = source
661 661 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
662 662 branch=backend.default_branch_name, commit_id=commit_ids['change'])
663 663
664 664 pull_request.target_repo = target
665 665 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
666 666 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
667 667
668 668 pull_request.revisions = [commit_ids['change']]
669 669 pull_request.title = u"Test"
670 670 pull_request.description = u"Description"
671 671 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
672 672 pull_request.pull_request_state = PullRequest.STATE_CREATED
673 673 Session().add(pull_request)
674 674 Session().commit()
675 675 pull_request_id = pull_request.pull_request_id
676 676
677 677 # source has ancestor - change - change-2
678 678 backend.pull_heads(source, heads=['change-2'])
679 679
680 680 # update PR
681 681 self.app.post(
682 682 route_path('pullrequest_update',
683 683 repo_name=target.repo_name, pull_request_id=pull_request_id),
684 684 params={'update_commits': 'true', 'csrf_token': csrf_token})
685 685
686 686 response = self.app.get(
687 687 route_path('pullrequest_show',
688 688 repo_name=target.repo_name,
689 689 pull_request_id=pull_request.pull_request_id))
690 690
691 691 assert response.status_int == 200
692 692 assert 'Pull request updated to' in response.body
693 693 assert 'with 1 added, 0 removed commits.' in response.body
694 694
695 695 # check that we now have both revisions
696 696 pull_request = PullRequest.get(pull_request_id)
697 697 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
698 698
699 699 def test_update_target_revision(self, backend, csrf_token):
700 700 commits = [
701 701 {'message': 'ancestor'},
702 702 {'message': 'change'},
703 703 {'message': 'ancestor-new', 'parents': ['ancestor']},
704 704 {'message': 'change-rebased'},
705 705 ]
706 706 commit_ids = backend.create_master_repo(commits)
707 707 target = backend.create_repo(heads=['ancestor'])
708 708 source = backend.create_repo(heads=['change'])
709 709
710 710 # create pr from a in source to A in target
711 711 pull_request = PullRequest()
712 712
713 713 pull_request.source_repo = source
714 714 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
715 715 branch=backend.default_branch_name, commit_id=commit_ids['change'])
716 716
717 717 pull_request.target_repo = target
718 718 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
719 719 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
720 720
721 721 pull_request.revisions = [commit_ids['change']]
722 722 pull_request.title = u"Test"
723 723 pull_request.description = u"Description"
724 724 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
725 725 pull_request.pull_request_state = PullRequest.STATE_CREATED
726 726
727 727 Session().add(pull_request)
728 728 Session().commit()
729 729 pull_request_id = pull_request.pull_request_id
730 730
731 731 # target has ancestor - ancestor-new
732 732 # source has ancestor - ancestor-new - change-rebased
733 733 backend.pull_heads(target, heads=['ancestor-new'])
734 734 backend.pull_heads(source, heads=['change-rebased'])
735 735
736 736 # update PR
737 737 url = route_path('pullrequest_update',
738 738 repo_name=target.repo_name,
739 739 pull_request_id=pull_request_id)
740 740 self.app.post(url,
741 741 params={'update_commits': 'true', 'csrf_token': csrf_token},
742 742 status=200)
743 743
744 744 # check that we now have both revisions
745 745 pull_request = PullRequest.get(pull_request_id)
746 746 assert pull_request.revisions == [commit_ids['change-rebased']]
747 747 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
748 748 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
749 749
750 750 response = self.app.get(
751 751 route_path('pullrequest_show',
752 752 repo_name=target.repo_name,
753 753 pull_request_id=pull_request.pull_request_id))
754 754 assert response.status_int == 200
755 755 assert 'Pull request updated to' in response.body
756 756 assert 'with 1 added, 1 removed commits.' in response.body
757 757
758 758 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
759 759 backend = backend_git
760 760 commits = [
761 761 {'message': 'master-commit-1'},
762 762 {'message': 'master-commit-2-change-1'},
763 763 {'message': 'master-commit-3-change-2'},
764 764
765 765 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
766 766 {'message': 'feat-commit-2'},
767 767 ]
768 768 commit_ids = backend.create_master_repo(commits)
769 769 target = backend.create_repo(heads=['master-commit-3-change-2'])
770 770 source = backend.create_repo(heads=['feat-commit-2'])
771 771
772 772 # create pr from a in source to A in target
773 773 pull_request = PullRequest()
774 774 pull_request.source_repo = source
775 775
776 776 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
777 777 branch=backend.default_branch_name,
778 778 commit_id=commit_ids['master-commit-3-change-2'])
779 779
780 780 pull_request.target_repo = target
781 781 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
782 782 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
783 783
784 784 pull_request.revisions = [
785 785 commit_ids['feat-commit-1'],
786 786 commit_ids['feat-commit-2']
787 787 ]
788 788 pull_request.title = u"Test"
789 789 pull_request.description = u"Description"
790 790 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
791 791 pull_request.pull_request_state = PullRequest.STATE_CREATED
792 792 Session().add(pull_request)
793 793 Session().commit()
794 794 pull_request_id = pull_request.pull_request_id
795 795
796 796 # PR is created, now we simulate a force-push into target,
797 797 # that drops the last 2 commits
798 798 vcsrepo = target.scm_instance()
799 799 vcsrepo.config.clear_section('hooks')
800 800 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
801 801
802 802 # update PR
803 803 url = route_path('pullrequest_update',
804 804 repo_name=target.repo_name,
805 805 pull_request_id=pull_request_id)
806 806 self.app.post(url,
807 807 params={'update_commits': 'true', 'csrf_token': csrf_token},
808 808 status=200)
809 809
810 810 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
811 811 assert response.status_int == 200
812 812 response.mustcontain('Pull request updated to')
813 813 response.mustcontain('with 0 added, 0 removed commits.')
814 814
815 815 def test_update_of_ancestor_reference(self, backend, csrf_token):
816 816 commits = [
817 817 {'message': 'ancestor'},
818 818 {'message': 'change'},
819 819 {'message': 'change-2'},
820 820 {'message': 'ancestor-new', 'parents': ['ancestor']},
821 821 {'message': 'change-rebased'},
822 822 ]
823 823 commit_ids = backend.create_master_repo(commits)
824 824 target = backend.create_repo(heads=['ancestor'])
825 825 source = backend.create_repo(heads=['change'])
826 826
827 827 # create pr from a in source to A in target
828 828 pull_request = PullRequest()
829 829 pull_request.source_repo = source
830 830
831 831 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
832 832 branch=backend.default_branch_name, commit_id=commit_ids['change'])
833 833 pull_request.target_repo = target
834 834 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
835 835 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
836 836 pull_request.revisions = [commit_ids['change']]
837 837 pull_request.title = u"Test"
838 838 pull_request.description = u"Description"
839 839 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
840 840 pull_request.pull_request_state = PullRequest.STATE_CREATED
841 841 Session().add(pull_request)
842 842 Session().commit()
843 843 pull_request_id = pull_request.pull_request_id
844 844
845 845 # target has ancestor - ancestor-new
846 846 # source has ancestor - ancestor-new - change-rebased
847 847 backend.pull_heads(target, heads=['ancestor-new'])
848 848 backend.pull_heads(source, heads=['change-rebased'])
849 849
850 850 # update PR
851 851 self.app.post(
852 852 route_path('pullrequest_update',
853 853 repo_name=target.repo_name, pull_request_id=pull_request_id),
854 854 params={'update_commits': 'true', 'csrf_token': csrf_token},
855 855 status=200)
856 856
857 857 # Expect the target reference to be updated correctly
858 858 pull_request = PullRequest.get(pull_request_id)
859 859 assert pull_request.revisions == [commit_ids['change-rebased']]
860 860 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
861 861 branch=backend.default_branch_name,
862 862 commit_id=commit_ids['ancestor-new'])
863 863 assert pull_request.target_ref == expected_target_ref
864 864
865 865 def test_remove_pull_request_branch(self, backend_git, csrf_token):
866 866 branch_name = 'development'
867 867 commits = [
868 868 {'message': 'initial-commit'},
869 869 {'message': 'old-feature'},
870 870 {'message': 'new-feature', 'branch': branch_name},
871 871 ]
872 872 repo = backend_git.create_repo(commits)
873 873 repo_name = repo.repo_name
874 874 commit_ids = backend_git.commit_ids
875 875
876 876 pull_request = PullRequest()
877 877 pull_request.source_repo = repo
878 878 pull_request.target_repo = repo
879 879 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
880 880 branch=branch_name, commit_id=commit_ids['new-feature'])
881 881 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
882 882 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
883 883 pull_request.revisions = [commit_ids['new-feature']]
884 884 pull_request.title = u"Test"
885 885 pull_request.description = u"Description"
886 886 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
887 887 pull_request.pull_request_state = PullRequest.STATE_CREATED
888 888 Session().add(pull_request)
889 889 Session().commit()
890 890
891 891 pull_request_id = pull_request.pull_request_id
892 892
893 893 vcs = repo.scm_instance()
894 894 vcs.remove_ref('refs/heads/{}'.format(branch_name))
895 895
896 896 response = self.app.get(route_path(
897 897 'pullrequest_show',
898 898 repo_name=repo_name,
899 899 pull_request_id=pull_request_id))
900 900
901 901 assert response.status_int == 200
902 902
903 903 response.assert_response().element_contains(
904 904 '#changeset_compare_view_content .alert strong',
905 905 'Missing commits')
906 906 response.assert_response().element_contains(
907 907 '#changeset_compare_view_content .alert',
908 908 'This pull request cannot be displayed, because one or more'
909 909 ' commits no longer exist in the source repository.')
910 910
911 911 def test_strip_commits_from_pull_request(
912 912 self, backend, pr_util, csrf_token):
913 913 commits = [
914 914 {'message': 'initial-commit'},
915 915 {'message': 'old-feature'},
916 916 {'message': 'new-feature', 'parents': ['initial-commit']},
917 917 ]
918 918 pull_request = pr_util.create_pull_request(
919 919 commits, target_head='initial-commit', source_head='new-feature',
920 920 revisions=['new-feature'])
921 921
922 922 vcs = pr_util.source_repository.scm_instance()
923 923 if backend.alias == 'git':
924 924 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
925 925 else:
926 926 vcs.strip(pr_util.commit_ids['new-feature'])
927 927
928 928 response = self.app.get(route_path(
929 929 'pullrequest_show',
930 930 repo_name=pr_util.target_repository.repo_name,
931 931 pull_request_id=pull_request.pull_request_id))
932 932
933 933 assert response.status_int == 200
934 934
935 935 response.assert_response().element_contains(
936 936 '#changeset_compare_view_content .alert strong',
937 937 'Missing commits')
938 938 response.assert_response().element_contains(
939 939 '#changeset_compare_view_content .alert',
940 940 'This pull request cannot be displayed, because one or more'
941 941 ' commits no longer exist in the source repository.')
942 942 response.assert_response().element_contains(
943 943 '#update_commits',
944 944 'Update commits')
945 945
946 946 def test_strip_commits_and_update(
947 947 self, backend, pr_util, csrf_token):
948 948 commits = [
949 949 {'message': 'initial-commit'},
950 950 {'message': 'old-feature'},
951 951 {'message': 'new-feature', 'parents': ['old-feature']},
952 952 ]
953 953 pull_request = pr_util.create_pull_request(
954 954 commits, target_head='old-feature', source_head='new-feature',
955 955 revisions=['new-feature'], mergeable=True)
956 956
957 957 vcs = pr_util.source_repository.scm_instance()
958 958 if backend.alias == 'git':
959 959 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
960 960 else:
961 961 vcs.strip(pr_util.commit_ids['new-feature'])
962 962
963 963 url = route_path('pullrequest_update',
964 964 repo_name=pull_request.target_repo.repo_name,
965 965 pull_request_id=pull_request.pull_request_id)
966 966 response = self.app.post(url,
967 967 params={'update_commits': 'true',
968 968 'csrf_token': csrf_token})
969 969
970 970 assert response.status_int == 200
971 971 assert response.body == 'true'
972 972
973 973 # Make sure that after update, it won't raise 500 errors
974 974 response = self.app.get(route_path(
975 975 'pullrequest_show',
976 976 repo_name=pr_util.target_repository.repo_name,
977 977 pull_request_id=pull_request.pull_request_id))
978 978
979 979 assert response.status_int == 200
980 980 response.assert_response().element_contains(
981 981 '#changeset_compare_view_content .alert strong',
982 982 'Missing commits')
983 983
984 984 def test_branch_is_a_link(self, pr_util):
985 985 pull_request = pr_util.create_pull_request()
986 986 pull_request.source_ref = 'branch:origin:1234567890abcdef'
987 987 pull_request.target_ref = 'branch:target:abcdef1234567890'
988 988 Session().add(pull_request)
989 989 Session().commit()
990 990
991 991 response = self.app.get(route_path(
992 992 'pullrequest_show',
993 993 repo_name=pull_request.target_repo.scm_instance().name,
994 994 pull_request_id=pull_request.pull_request_id))
995 995 assert response.status_int == 200
996 996
997 997 origin = response.assert_response().get_element('.pr-origininfo .tag')
998 998 origin_children = origin.getchildren()
999 999 assert len(origin_children) == 1
1000 1000 target = response.assert_response().get_element('.pr-targetinfo .tag')
1001 1001 target_children = target.getchildren()
1002 1002 assert len(target_children) == 1
1003 1003
1004 1004 expected_origin_link = route_path(
1005 1005 'repo_commits',
1006 1006 repo_name=pull_request.source_repo.scm_instance().name,
1007 1007 params=dict(branch='origin'))
1008 1008 expected_target_link = route_path(
1009 1009 'repo_commits',
1010 1010 repo_name=pull_request.target_repo.scm_instance().name,
1011 1011 params=dict(branch='target'))
1012 1012 assert origin_children[0].attrib['href'] == expected_origin_link
1013 1013 assert origin_children[0].text == 'branch: origin'
1014 1014 assert target_children[0].attrib['href'] == expected_target_link
1015 1015 assert target_children[0].text == 'branch: target'
1016 1016
1017 1017 def test_bookmark_is_not_a_link(self, pr_util):
1018 1018 pull_request = pr_util.create_pull_request()
1019 1019 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1020 1020 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1021 1021 Session().add(pull_request)
1022 1022 Session().commit()
1023 1023
1024 1024 response = self.app.get(route_path(
1025 1025 'pullrequest_show',
1026 1026 repo_name=pull_request.target_repo.scm_instance().name,
1027 1027 pull_request_id=pull_request.pull_request_id))
1028 1028 assert response.status_int == 200
1029 1029
1030 1030 origin = response.assert_response().get_element('.pr-origininfo .tag')
1031 1031 assert origin.text.strip() == 'bookmark: origin'
1032 1032 assert origin.getchildren() == []
1033 1033
1034 1034 target = response.assert_response().get_element('.pr-targetinfo .tag')
1035 1035 assert target.text.strip() == 'bookmark: target'
1036 1036 assert target.getchildren() == []
1037 1037
1038 1038 def test_tag_is_not_a_link(self, pr_util):
1039 1039 pull_request = pr_util.create_pull_request()
1040 1040 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1041 1041 pull_request.target_ref = 'tag:target:abcdef1234567890'
1042 1042 Session().add(pull_request)
1043 1043 Session().commit()
1044 1044
1045 1045 response = self.app.get(route_path(
1046 1046 'pullrequest_show',
1047 1047 repo_name=pull_request.target_repo.scm_instance().name,
1048 1048 pull_request_id=pull_request.pull_request_id))
1049 1049 assert response.status_int == 200
1050 1050
1051 1051 origin = response.assert_response().get_element('.pr-origininfo .tag')
1052 1052 assert origin.text.strip() == 'tag: origin'
1053 1053 assert origin.getchildren() == []
1054 1054
1055 1055 target = response.assert_response().get_element('.pr-targetinfo .tag')
1056 1056 assert target.text.strip() == 'tag: target'
1057 1057 assert target.getchildren() == []
1058 1058
1059 1059 @pytest.mark.parametrize('mergeable', [True, False])
1060 1060 def test_shadow_repository_link(
1061 1061 self, mergeable, pr_util, http_host_only_stub):
1062 1062 """
1063 1063 Check that the pull request summary page displays a link to the shadow
1064 1064 repository if the pull request is mergeable. If it is not mergeable
1065 1065 the link should not be displayed.
1066 1066 """
1067 1067 pull_request = pr_util.create_pull_request(
1068 1068 mergeable=mergeable, enable_notifications=False)
1069 1069 target_repo = pull_request.target_repo.scm_instance()
1070 1070 pr_id = pull_request.pull_request_id
1071 1071 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1072 1072 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1073 1073
1074 1074 response = self.app.get(route_path(
1075 1075 'pullrequest_show',
1076 1076 repo_name=target_repo.name,
1077 1077 pull_request_id=pr_id))
1078 1078
1079 1079 if mergeable:
1080 1080 response.assert_response().element_value_contains(
1081 1081 'input.pr-mergeinfo', shadow_url)
1082 1082 response.assert_response().element_value_contains(
1083 1083 'input.pr-mergeinfo ', 'pr-merge')
1084 1084 else:
1085 1085 response.assert_response().no_element_exists('.pr-mergeinfo')
1086 1086
1087 1087
1088 1088 @pytest.mark.usefixtures('app')
1089 1089 @pytest.mark.backends("git", "hg")
1090 1090 class TestPullrequestsControllerDelete(object):
1091 1091 def test_pull_request_delete_button_permissions_admin(
1092 1092 self, autologin_user, user_admin, pr_util):
1093 1093 pull_request = pr_util.create_pull_request(
1094 1094 author=user_admin.username, enable_notifications=False)
1095 1095
1096 1096 response = self.app.get(route_path(
1097 1097 'pullrequest_show',
1098 1098 repo_name=pull_request.target_repo.scm_instance().name,
1099 1099 pull_request_id=pull_request.pull_request_id))
1100 1100
1101 1101 response.mustcontain('id="delete_pullrequest"')
1102 1102 response.mustcontain('Confirm to delete this pull request')
1103 1103
1104 1104 def test_pull_request_delete_button_permissions_owner(
1105 1105 self, autologin_regular_user, user_regular, pr_util):
1106 1106 pull_request = pr_util.create_pull_request(
1107 1107 author=user_regular.username, enable_notifications=False)
1108 1108
1109 1109 response = self.app.get(route_path(
1110 1110 'pullrequest_show',
1111 1111 repo_name=pull_request.target_repo.scm_instance().name,
1112 1112 pull_request_id=pull_request.pull_request_id))
1113 1113
1114 1114 response.mustcontain('id="delete_pullrequest"')
1115 1115 response.mustcontain('Confirm to delete this pull request')
1116 1116
1117 1117 def test_pull_request_delete_button_permissions_forbidden(
1118 1118 self, autologin_regular_user, user_regular, user_admin, pr_util):
1119 1119 pull_request = pr_util.create_pull_request(
1120 1120 author=user_admin.username, enable_notifications=False)
1121 1121
1122 1122 response = self.app.get(route_path(
1123 1123 'pullrequest_show',
1124 1124 repo_name=pull_request.target_repo.scm_instance().name,
1125 1125 pull_request_id=pull_request.pull_request_id))
1126 1126 response.mustcontain(no=['id="delete_pullrequest"'])
1127 1127 response.mustcontain(no=['Confirm to delete this pull request'])
1128 1128
1129 1129 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1130 1130 self, autologin_regular_user, user_regular, user_admin, pr_util,
1131 1131 user_util):
1132 1132
1133 1133 pull_request = pr_util.create_pull_request(
1134 1134 author=user_admin.username, enable_notifications=False)
1135 1135
1136 1136 user_util.grant_user_permission_to_repo(
1137 1137 pull_request.target_repo, user_regular,
1138 1138 'repository.write')
1139 1139
1140 1140 response = self.app.get(route_path(
1141 1141 'pullrequest_show',
1142 1142 repo_name=pull_request.target_repo.scm_instance().name,
1143 1143 pull_request_id=pull_request.pull_request_id))
1144 1144
1145 1145 response.mustcontain('id="open_edit_pullrequest"')
1146 1146 response.mustcontain('id="delete_pullrequest"')
1147 1147 response.mustcontain(no=['Confirm to delete this pull request'])
1148 1148
1149 1149 def test_delete_comment_returns_404_if_comment_does_not_exist(
1150 1150 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1151 1151
1152 1152 pull_request = pr_util.create_pull_request(
1153 1153 author=user_admin.username, enable_notifications=False)
1154 1154
1155 1155 self.app.post(
1156 1156 route_path(
1157 1157 'pullrequest_comment_delete',
1158 1158 repo_name=pull_request.target_repo.scm_instance().name,
1159 1159 pull_request_id=pull_request.pull_request_id,
1160 1160 comment_id=1024404),
1161 1161 extra_environ=xhr_header,
1162 1162 params={'csrf_token': csrf_token},
1163 1163 status=404
1164 1164 )
1165 1165
1166 1166 def test_delete_comment(
1167 1167 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1168 1168
1169 1169 pull_request = pr_util.create_pull_request(
1170 1170 author=user_admin.username, enable_notifications=False)
1171 1171 comment = pr_util.create_comment()
1172 1172 comment_id = comment.comment_id
1173 1173
1174 1174 response = self.app.post(
1175 1175 route_path(
1176 1176 'pullrequest_comment_delete',
1177 1177 repo_name=pull_request.target_repo.scm_instance().name,
1178 1178 pull_request_id=pull_request.pull_request_id,
1179 1179 comment_id=comment_id),
1180 1180 extra_environ=xhr_header,
1181 1181 params={'csrf_token': csrf_token},
1182 1182 status=200
1183 1183 )
1184 1184 assert response.body == 'true'
1185 1185
1186 1186 @pytest.mark.parametrize('url_type', [
1187 1187 'pullrequest_new',
1188 1188 'pullrequest_create',
1189 1189 'pullrequest_update',
1190 1190 'pullrequest_merge',
1191 1191 ])
1192 1192 def test_pull_request_is_forbidden_on_archived_repo(
1193 1193 self, autologin_user, backend, xhr_header, user_util, url_type):
1194 1194
1195 1195 # create a temporary repo
1196 1196 source = user_util.create_repo(repo_type=backend.alias)
1197 1197 repo_name = source.repo_name
1198 1198 repo = Repository.get_by_repo_name(repo_name)
1199 1199 repo.archived = True
1200 1200 Session().commit()
1201 1201
1202 1202 response = self.app.get(
1203 1203 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1204 1204
1205 1205 msg = 'Action not supported for archived repository.'
1206 1206 assert_session_flash(response, msg)
1207 1207
1208 1208
1209 1209 def assert_pull_request_status(pull_request, expected_status):
1210 1210 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1211 1211 assert status == expected_status
1212 1212
1213 1213
1214 1214 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1215 1215 @pytest.mark.usefixtures("autologin_user")
1216 1216 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1217 1217 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
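
The visible change in this hunk is in test_missing_target_reference: the merge status message is now asserted inside div[data-role="merge-message"] instead of span[data-role="merge-message"], matching the template change that gives merge-conflicting files nicer formatting. Below is a minimal sketch (not part of the commit) of how such a message is produced and checked, using only the MergeResponse API this test module already imports; the metadata values are illustrative:

    # Sketch: build the same kind of MergeResponse the tests construct and
    # read the message that ends up inside the merge-message element.
    from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason

    merge_resp = MergeResponse(
        True, True, '', MergeFailureReason.PUSH_FAILED,
        metadata={'target': 'shadow repo', 'merge_commit': 'xxx'})

    # The page renders this text inside the merge-message element, so a test
    # can assert on it with:
    #   response.assert_response().element_contains(
    #       'div[data-role="merge-message"]', merge_resp.merge_status_message)
    print(merge_resp.merge_status_message)
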
@@ -1,1017 +1,1017 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, path as vcspath
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference)
40 40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitDoesNotExistError, EmptyRepositoryError,
45 45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46 46
47 47
48 48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class GitRepository(BaseRepository):
54 54 """
55 55 Git repository backend.
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'master'
58 58
59 59 contact = BaseRepository.DEFAULT_CONTACT
60 60
61 61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 62 do_workspace_checkout=False, with_wire=None, bare=False):
63 63
64 64 self.path = safe_str(os.path.abspath(repo_path))
65 65 self.config = config if config else self.get_default_config()
66 66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67 67
68 68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69 69
70 70 # caches
71 71 self._commit_ids = {}
72 72
73 73 @LazyProperty
74 74 def _remote(self):
75 75 repo_id = self.path
76 76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 77
78 78 @LazyProperty
79 79 def bare(self):
80 80 return self._remote.bare()
81 81
82 82 @LazyProperty
83 83 def head(self):
84 84 return self._remote.head()
85 85
86 86 @CachedProperty
87 87 def commit_ids(self):
88 88 """
89 89 Returns a list of commit ids in ascending order. Being a lazy
90 90 attribute allows external tools to inject commit ids from a cache.
91 91 """
92 92 commit_ids = self._get_all_commit_ids()
93 93 self._rebuild_cache(commit_ids)
94 94 return commit_ids
95 95
96 96 def _rebuild_cache(self, commit_ids):
97 97 self._commit_ids = dict((commit_id, index)
98 98 for index, commit_id in enumerate(commit_ids))
99 99
100 100 def run_git_command(self, cmd, **opts):
101 101 """
102 102 Runs the given ``cmd`` as a git command and returns a tuple of
103 103 (stdout, stderr).
104 104
105 105 :param cmd: git command to be executed
106 106 :param opts: env options to pass into Subprocess command
107 107 """
108 108 if not isinstance(cmd, list):
109 109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110 110
111 111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 112 out, err = self._remote.run_git_command(cmd, **opts)
113 113 if err and not skip_stderr_log:
114 114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 115 return out, err
116 116
117 117 @staticmethod
118 118 def check_url(url, config):
119 119 """
120 120 Checks the given url and tries to verify that it is a valid
121 121 link. Sometimes it may happen that git issues a basic
122 122 auth request, which can cause the whole API to hang when used from
123 123 Python or other external calls.
124 124
125 125 On failure it raises urllib2.HTTPError; the exception is also raised
126 126 when the return code is not 200.
127 127 """
128 128 # check first if it's not a url
129 129 if os.path.isdir(url) or url.startswith('file:'):
130 130 return True
131 131
132 132 if '+' in url.split('://', 1)[0]:
133 133 url = url.split('+', 1)[1]
134 134
135 135 # Request the _remote to verify the url
136 136 return connection.Git.check_url(url, config.serialize())
137 137
138 138 @staticmethod
139 139 def is_valid_repository(path):
140 140 if os.path.isdir(os.path.join(path, '.git')):
141 141 return True
142 142 # check case of bare repository
143 143 try:
144 144 GitRepository(path)
145 145 return True
146 146 except VCSError:
147 147 pass
148 148 return False
149 149
150 150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 151 bare=False):
152 152 if create and os.path.exists(self.path):
153 153 raise RepositoryError(
154 154 "Cannot create repository at %s, location already exist"
155 155 % self.path)
156 156
157 157 if bare and do_workspace_checkout:
158 158 raise RepositoryError("Cannot update a bare repository")
159 159 try:
160 160
161 161 if src_url:
162 162 # check URL before any actions
163 163 GitRepository.check_url(src_url, self.config)
164 164
165 165 if create:
166 166 os.makedirs(self.path, mode=0o755)
167 167
168 168 if bare:
169 169 self._remote.init_bare()
170 170 else:
171 171 self._remote.init()
172 172
173 173 if src_url and bare:
174 174 # a bare repository only allows a fetch; a checkout is not allowed
175 175 self.fetch(src_url, commit_ids=None)
176 176 elif src_url:
177 177 self.pull(src_url, commit_ids=None,
178 178 update_after=do_workspace_checkout)
179 179
180 180 else:
181 181 if not self._remote.assert_correct_path():
182 182 raise RepositoryError(
183 183 'Path "%s" does not contain a Git repository' %
184 184 (self.path,))
185 185
186 186 # TODO: johbo: check if we have to translate the OSError here
187 187 except OSError as err:
188 188 raise RepositoryError(err)
189 189
190 190 def _get_all_commit_ids(self):
191 191 return self._remote.get_all_commit_ids()
192 192
193 193 def _get_commit_ids(self, filters=None):
194 194 # we must check if this repo is not empty, since the later command
195 195 # fails if it is. And it's cheaper to ask than to handle the subprocess
196 196 # errors
197 197
198 198 head = self._remote.head(show_exc=False)
199 199
200 200 if not head:
201 201 return []
202 202
203 203 rev_filter = ['--branches', '--tags']
204 204 extra_filter = []
205 205
206 206 if filters:
207 207 if filters.get('since'):
208 208 extra_filter.append('--since=%s' % (filters['since']))
209 209 if filters.get('until'):
210 210 extra_filter.append('--until=%s' % (filters['until']))
211 211 if filters.get('branch_name'):
212 212 rev_filter = []
213 213 extra_filter.append(filters['branch_name'])
214 214 rev_filter.extend(extra_filter)
215 215
216 216 # if filters.get('start') or filters.get('end'):
217 217 # # skip is offset, max-count is limit
218 218 # if filters.get('start'):
219 219 # extra_filter += ' --skip=%s' % filters['start']
220 220 # if filters.get('end'):
221 221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222 222
223 223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 224 try:
225 225 output, __ = self.run_git_command(cmd)
226 226 except RepositoryError:
227 227 # Can be raised for empty repositories
228 228 return []
229 229 return output.splitlines()
230 230
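# Illustrative sketch (not from the source files above): the rev-list command
# _get_commit_ids assembles when both a branch name and a 'since' filter are
# given -- a named branch replaces the default --branches/--tags refs.
filters = {'branch_name': 'feature-x', 'since': '01/01/19 00:00:00'}

rev_filter = ['--branches', '--tags']
extra_filter = []
if filters.get('since'):
    extra_filter.append('--since=%s' % (filters['since']))
if filters.get('branch_name'):
    rev_filter = []
    extra_filter.append(filters['branch_name'])
rev_filter.extend(extra_filter)

cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
# cmd == ['rev-list', '--reverse', '--date-order',
#         '--since=01/01/19 00:00:00', 'feature-x']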
231 231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
232 232 def is_null(value):
233 233 return len(value) == commit_id_or_idx.count('0')
234 234
235 235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
236 236 return self.commit_ids[-1]
237 237 commit_missing_err = "Commit {} does not exist for `{}`".format(
238 238 *map(safe_str, [commit_id_or_idx, self.name]))
239 239
240 240 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
241 241 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
242 242 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
243 243 try:
244 244 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
245 245 except Exception:
246 246 raise CommitDoesNotExistError(commit_missing_err)
247 247
248 248 elif is_bstr:
249 249 # Need to call remote to translate id for tagging scenario
250 250 try:
251 251 remote_data = self._remote.get_object(commit_id_or_idx)
252 252 commit_id_or_idx = remote_data["commit_id"]
253 253 except (CommitDoesNotExistError,):
254 254 raise CommitDoesNotExistError(commit_missing_err)
255 255
256 256 # Ensure we return full id
257 257 if not SHA_PATTERN.match(str(commit_id_or_idx)):
258 258 raise CommitDoesNotExistError(
259 259 "Given commit id %s not recognized" % commit_id_or_idx)
260 260 return commit_id_or_idx
261 261
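# Illustrative sketch (not from the source files above): the kinds of input
# _lookup_commit resolves to a full commit id. `repo` is a hypothetical,
# non-empty GitRepository instance and `full_sha` a hypothetical 40-char id.
#
#   repo._lookup_commit('tip')       # symbolic names resolve to the latest id
#   repo._lookup_commit(0)           # integer index into repo.commit_ids
#   repo._lookup_commit(full_sha)    # full ids are validated and returned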
262 262 def get_hook_location(self):
263 263 """
264 264 returns absolute path to location where hooks are stored
265 265 """
266 266 loc = os.path.join(self.path, 'hooks')
267 267 if not self.bare:
268 268 loc = os.path.join(self.path, '.git', 'hooks')
269 269 return loc
270 270
271 271 @LazyProperty
272 272 def last_change(self):
273 273 """
274 274 Returns last change made on this repository as
275 275 `datetime.datetime` object.
276 276 """
277 277 try:
278 278 return self.get_commit().date
279 279 except RepositoryError:
280 280 tzoffset = makedate()[1]
281 281 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
282 282
283 283 def _get_fs_mtime(self):
284 284 idx_loc = '' if self.bare else '.git'
285 285 # fallback to filesystem
286 286 in_path = os.path.join(self.path, idx_loc, "index")
287 287 he_path = os.path.join(self.path, idx_loc, "HEAD")
288 288 if os.path.exists(in_path):
289 289 return os.stat(in_path).st_mtime
290 290 else:
291 291 return os.stat(he_path).st_mtime
292 292
293 293 @LazyProperty
294 294 def description(self):
295 295 description = self._remote.get_description()
296 296 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
297 297
298 298 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
299 299 if self.is_empty():
300 300 return OrderedDict()
301 301
302 302 result = []
303 303 for ref, sha in self._refs.iteritems():
304 304 if ref.startswith(prefix):
305 305 ref_name = ref
306 306 if strip_prefix:
307 307 ref_name = ref[len(prefix):]
308 308 result.append((safe_unicode(ref_name), sha))
309 309
310 310 def get_name(entry):
311 311 return entry[0]
312 312
313 313 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
314 314
315 315 def _get_branches(self):
316 316 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
317 317
318 318 @CachedProperty
319 319 def branches(self):
320 320 return self._get_branches()
321 321
322 322 @CachedProperty
323 323 def branches_closed(self):
324 324 return {}
325 325
326 326 @CachedProperty
327 327 def bookmarks(self):
328 328 return {}
329 329
330 330 @CachedProperty
331 331 def branches_all(self):
332 332 all_branches = {}
333 333 all_branches.update(self.branches)
334 334 all_branches.update(self.branches_closed)
335 335 return all_branches
336 336
337 337 @CachedProperty
338 338 def tags(self):
339 339 return self._get_tags()
340 340
341 341 def _get_tags(self):
342 342 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
343 343
344 344 def tag(self, name, user, commit_id=None, message=None, date=None,
345 345 **kwargs):
346 346 # TODO: fix this method to apply annotated tags correct with message
347 347 """
348 348 Creates and returns a tag for the given ``commit_id``.
349 349
350 350 :param name: name for new tag
351 351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
352 352 :param commit_id: commit id for which new tag would be created
353 353 :param message: message of the tag's commit
354 354 :param date: date of tag's commit
355 355
356 356 :raises TagAlreadyExistError: if tag with same name already exists
357 357 """
358 358 if name in self.tags:
359 359 raise TagAlreadyExistError("Tag %s already exists" % name)
360 360 commit = self.get_commit(commit_id=commit_id)
361 361 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
362 362
363 363 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
364 364
365 365 self._invalidate_prop_cache('tags')
366 366 self._invalidate_prop_cache('_refs')
367 367
368 368 return commit
369 369
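# Illustrative sketch (not from the source files above): creating a tag on the
# current tip; the 'tags' and '_refs' caches are invalidated so the new tag is
# visible immediately. `repo` is a hypothetical GitRepository instance.
#
#   commit = repo.tag('v1.0.0', 'Joe Doe <joe.doe@example.com>',
#                     commit_id=repo.commit_ids[-1])
#   assert 'v1.0.0' in repo.tags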
370 370 def remove_tag(self, name, user, message=None, date=None):
371 371 """
372 372 Removes tag with the given ``name``.
373 373
374 374 :param name: name of the tag to be removed
375 375 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
376 376 :param message: message of the tag's removal commit
377 377 :param date: date of tag's removal commit
378 378
379 379 :raises TagDoesNotExistError: if tag with given name does not exist
380 380 """
381 381 if name not in self.tags:
382 382 raise TagDoesNotExistError("Tag %s does not exist" % name)
383 383
384 384 self._remote.tag_remove(name)
385 385 self._invalidate_prop_cache('tags')
386 386 self._invalidate_prop_cache('_refs')
387 387
388 388 def _get_refs(self):
389 389 return self._remote.get_refs()
390 390
391 391 @CachedProperty
392 392 def _refs(self):
393 393 return self._get_refs()
394 394
395 395 @property
396 396 def _ref_tree(self):
397 397 node = tree = {}
398 398 for ref, sha in self._refs.iteritems():
399 399 path = ref.split('/')
400 400 for bit in path[:-1]:
401 401 node = node.setdefault(bit, {})
402 402 node[path[-1]] = sha
403 403 node = tree
404 404 return tree
405 405
406 406 def get_remote_ref(self, ref_name):
407 407 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
408 408 try:
409 409 return self._refs[ref_key]
410 410 except Exception:
411 411 return
412 412
413 413 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
414 414 """
415 415 Returns `GitCommit` object representing commit from git repository
416 416 at the given `commit_id` or head (most recent commit) if None given.
417 417 """
418 418 if self.is_empty():
419 419 raise EmptyRepositoryError("There are no commits yet")
420 420
421 421 if commit_id is not None:
422 422 self._validate_commit_id(commit_id)
423 423 try:
424 424 # we have cached idx, use it without contacting the remote
425 425 idx = self._commit_ids[commit_id]
426 426 return GitCommit(self, commit_id, idx, pre_load=pre_load)
427 427 except KeyError:
428 428 pass
429 429
430 430 elif commit_idx is not None:
431 431 self._validate_commit_idx(commit_idx)
432 432 try:
433 433 _commit_id = self.commit_ids[commit_idx]
434 434 if commit_idx < 0:
435 435 commit_idx = self.commit_ids.index(_commit_id)
436 436 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
437 437 except IndexError:
438 438 commit_id = commit_idx
439 439 else:
440 440 commit_id = "tip"
441 441
442 442 if translate_tag:
443 443 commit_id = self._lookup_commit(commit_id)
444 444
445 445 try:
446 446 idx = self._commit_ids[commit_id]
447 447 except KeyError:
448 448 idx = -1
449 449
450 450 return GitCommit(self, commit_id, idx, pre_load=pre_load)
451 451
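# Illustrative sketch (not from the source files above): the lookup modes of
# get_commit. `repo` is a hypothetical, non-empty GitRepository instance.
#
#   tip = repo.get_commit()                          # head (most recent commit)
#   first = repo.get_commit(commit_idx=0)            # by index into commit_ids
#   again = repo.get_commit(commit_id=first.raw_id)  # by full commit id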
452 452 def get_commits(
453 453 self, start_id=None, end_id=None, start_date=None, end_date=None,
454 454 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
455 455 """
456 456 Returns generator of `GitCommit` objects from start to end (both
457 457 are inclusive), in ascending date order.
458 458
459 459 :param start_id: None, str(commit_id)
460 460 :param end_id: None, str(commit_id)
461 461 :param start_date: if specified, commits with commit date less than
462 462 ``start_date`` would be filtered out from returned set
463 463 :param end_date: if specified, commits with commit date greater than
464 464 ``end_date`` would be filtered out from returned set
465 465 :param branch_name: if specified, commits not reachable from given
466 466 branch would be filtered out from returned set
467 467 :param show_hidden: Show hidden commits such as obsolete or hidden from
468 468 Mercurial evolve
469 469 :raise BranchDoesNotExistError: If given `branch_name` does not
470 470 exist.
471 471 :raise CommitDoesNotExistError: If commits for given `start` or
472 472 `end` could not be found.
473 473
474 474 """
475 475 if self.is_empty():
476 476 raise EmptyRepositoryError("There are no commits yet")
477 477
478 478 self._validate_branch_name(branch_name)
479 479
480 480 if start_id is not None:
481 481 self._validate_commit_id(start_id)
482 482 if end_id is not None:
483 483 self._validate_commit_id(end_id)
484 484
485 485 start_raw_id = self._lookup_commit(start_id)
486 486 start_pos = self._commit_ids[start_raw_id] if start_id else None
487 487 end_raw_id = self._lookup_commit(end_id)
488 488 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
489 489
490 490 if None not in [start_id, end_id] and start_pos > end_pos:
491 491 raise RepositoryError(
492 492 "Start commit '%s' cannot be after end commit '%s'" %
493 493 (start_id, end_id))
494 494
495 495 if end_pos is not None:
496 496 end_pos += 1
497 497
498 498 filter_ = []
499 499 if branch_name:
500 500 filter_.append({'branch_name': branch_name})
501 501 if start_date and not end_date:
502 502 filter_.append({'since': start_date})
503 503 if end_date and not start_date:
504 504 filter_.append({'until': end_date})
505 505 if start_date and end_date:
506 506 filter_.append({'since': start_date})
507 507 filter_.append({'until': end_date})
508 508
509 509 # if start_pos or end_pos:
510 510 # filter_.append({'start': start_pos})
511 511 # filter_.append({'end': end_pos})
512 512
513 513 if filter_:
514 514 revfilters = {
515 515 'branch_name': branch_name,
516 516 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
517 517 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
518 518 'start': start_pos,
519 519 'end': end_pos,
520 520 }
521 521 commit_ids = self._get_commit_ids(filters=revfilters)
522 522
523 523 else:
524 524 commit_ids = self.commit_ids
525 525
526 526 if start_pos or end_pos:
527 527 commit_ids = commit_ids[start_pos: end_pos]
528 528
529 529 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
530 530 translate_tag=translate_tags)
531 531
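# Illustrative sketch (not from the source files above): iterating a filtered
# commit range; datetime filters are converted internally to
# '%m/%d/%y %H:%M:%S' strings for `git rev-list`. `repo` is hypothetical.
#
#   import datetime
#   since = datetime.datetime(2019, 1, 1)
#   for commit in repo.get_commits(start_date=since, branch_name='master'):
#       print commit.raw_id, commit.date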
532 532 def get_diff(
533 533 self, commit1, commit2, path='', ignore_whitespace=False,
534 534 context=3, path1=None):
535 535 """
536 536 Returns (git like) *diff*, as plain text. Shows changes introduced by
537 537 ``commit2`` since ``commit1``.
538 538
539 539 :param commit1: Entry point from which diff is shown. Can be
540 540 ``self.EMPTY_COMMIT`` - in this case, patch showing all
541 541 the changes since empty state of the repository until ``commit2``
542 542 :param commit2: Until which commit changes should be shown.
543 543 :param ignore_whitespace: If set to ``True``, would not show whitespace
544 544 changes. Defaults to ``False``.
545 545 :param context: How many lines before/after changed lines should be
546 546 shown. Defaults to ``3``.
547 547 """
548 548 self._validate_diff_commits(commit1, commit2)
549 549 if path1 is not None and path1 != path:
550 550 raise ValueError("Diff of two different paths not supported.")
551 551
552 552 if path:
553 553 file_filter = path
554 554 else:
555 555 file_filter = None
556 556
557 557 diff = self._remote.diff(
558 558 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
559 559 opt_ignorews=ignore_whitespace,
560 560 context=context)
561 561 return GitDiff(diff)
562 562
563 563 def strip(self, commit_id, branch_name):
564 564 commit = self.get_commit(commit_id=commit_id)
565 565 if commit.merge:
566 566 raise Exception('Cannot reset to merge commit')
567 567
568 568 # parent is going to be the new head now
569 569 commit = commit.parents[0]
570 570 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
571 571
572 572 # clear cached properties
573 573 self._invalidate_prop_cache('commit_ids')
574 574 self._invalidate_prop_cache('_refs')
575 575 self._invalidate_prop_cache('branches')
576 576
577 577 return len(self.commit_ids)
578 578
579 579 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
580 580 if commit_id1 == commit_id2:
581 581 return commit_id1
582 582
583 583 if self != repo2:
584 584 commits = self._remote.get_missing_revs(
585 585 commit_id1, commit_id2, repo2.path)
586 586 if commits:
587 587 commit = repo2.get_commit(commits[-1])
588 588 if commit.parents:
589 589 ancestor_id = commit.parents[0].raw_id
590 590 else:
591 591 ancestor_id = None
592 592 else:
593 593 # no commits from other repo, ancestor_id is the commit_id2
594 594 ancestor_id = commit_id2
595 595 else:
596 596 output, __ = self.run_git_command(
597 597 ['merge-base', commit_id1, commit_id2])
598 598 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
599 599
600 600 return ancestor_id
601 601
602 602 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
603 603 repo1 = self
604 604 ancestor_id = None
605 605
606 606 if commit_id1 == commit_id2:
607 607 commits = []
608 608 elif repo1 != repo2:
609 609 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
610 610 repo2.path)
611 611 commits = [
612 612 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
613 613 for commit_id in reversed(missing_ids)]
614 614 else:
615 615 output, __ = repo1.run_git_command(
616 616 ['log', '--reverse', '--pretty=format: %H', '-s',
617 617 '%s..%s' % (commit_id1, commit_id2)])
618 618 commits = [
619 619 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
620 620 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
621 621
622 622 return commits
623 623
624 624 @LazyProperty
625 625 def in_memory_commit(self):
626 626 """
627 627 Returns ``GitInMemoryCommit`` object for this repository.
628 628 """
629 629 return GitInMemoryCommit(self)
630 630
631 631 def pull(self, url, commit_ids=None, update_after=False):
632 632 """
633 633 Pull changes from external location. Pull is different in GIT
634 634 that fetch since it's doing a checkout
635 635
636 636 :param commit_ids: Optional. Can be set to a list of commit ids
637 637 which shall be pulled from the other repository.
638 638 """
639 639 refs = None
640 640 if commit_ids is not None:
641 641 remote_refs = self._remote.get_remote_refs(url)
642 642 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
643 643 self._remote.pull(url, refs=refs, update_after=update_after)
644 644 self._remote.invalidate_vcs_cache()
645 645
646 646 def fetch(self, url, commit_ids=None):
647 647 """
648 648 Fetch all git objects from external location.
649 649 """
650 650 self._remote.sync_fetch(url, refs=commit_ids)
651 651 self._remote.invalidate_vcs_cache()
652 652
653 653 def push(self, url):
654 654 refs = None
655 655 self._remote.sync_push(url, refs=refs)
656 656
657 657 def set_refs(self, ref_name, commit_id):
658 658 self._remote.set_refs(ref_name, commit_id)
659 659 self._invalidate_prop_cache('_refs')
660 660
661 661 def remove_ref(self, ref_name):
662 662 self._remote.remove_ref(ref_name)
663 663 self._invalidate_prop_cache('_refs')
664 664
665 665 def _update_server_info(self):
666 666 """
667 667 runs git's update-server-info command in this repo instance
668 668 """
669 669 self._remote.update_server_info()
670 670
671 671 def _current_branch(self):
672 672 """
673 673 Return the name of the current branch.
674 674
675 675 It only works for non-bare repositories (i.e. repositories with a
676 676 working copy)
677 677 """
678 678 if self.bare:
679 679 raise RepositoryError('Bare git repos do not have active branches')
680 680
681 681 if self.is_empty():
682 682 return None
683 683
684 684 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
685 685 return stdout.strip()
686 686
687 687 def _checkout(self, branch_name, create=False, force=False):
688 688 """
689 689 Checkout a branch in the working directory.
690 690
691 691 It tries to create the branch if create is True, failing if the branch
692 692 already exists.
693 693
694 694 It only works for non-bare repositories (i.e. repositories with a
695 695 working copy)
696 696 """
697 697 if self.bare:
698 698 raise RepositoryError('Cannot checkout branches in a bare git repo')
699 699
700 700 cmd = ['checkout']
701 701 if force:
702 702 cmd.append('-f')
703 703 if create:
704 704 cmd.append('-b')
705 705 cmd.append(branch_name)
706 706 self.run_git_command(cmd, fail_on_stderr=False)
707 707
708 708 def _create_branch(self, branch_name, commit_id):
709 709 """
710 710 creates a branch in a GIT repo
711 711 """
712 712 self._remote.create_branch(branch_name, commit_id)
713 713
714 714 def _identify(self):
715 715 """
716 716 Return the current state of the working directory.
717 717 """
718 718 if self.bare:
719 719 raise RepositoryError('Bare git repos do not have active branches')
720 720
721 721 if self.is_empty():
722 722 return None
723 723
724 724 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
725 725 return stdout.strip()
726 726
727 727 def _local_clone(self, clone_path, branch_name, source_branch=None):
728 728 """
729 729 Create a local clone of the current repo.
730 730 """
731 731 # N.B.(skreft): the --branch option is required as otherwise the shallow
732 732 # clone will only fetch the active branch.
733 733 cmd = ['clone', '--branch', branch_name,
734 734 self.path, os.path.abspath(clone_path)]
735 735
736 736 self.run_git_command(cmd, fail_on_stderr=False)
737 737
738 738 # if we get the different source branch, make sure we also fetch it for
739 739 # merge conditions
740 740 if source_branch and source_branch != branch_name:
741 741 # check if the ref exists.
742 742 shadow_repo = GitRepository(os.path.abspath(clone_path))
743 743 if shadow_repo.get_remote_ref(source_branch):
744 744 cmd = ['fetch', self.path, source_branch]
745 745 self.run_git_command(cmd, fail_on_stderr=False)
746 746
747 747 def _local_fetch(self, repository_path, branch_name, use_origin=False):
748 748 """
749 749 Fetch a branch from a local repository.
750 750 """
751 751 repository_path = os.path.abspath(repository_path)
752 752 if repository_path == self.path:
753 753 raise ValueError('Cannot fetch from the same repository')
754 754
755 755 if use_origin:
756 756 branch_name = '+{branch}:refs/heads/{branch}'.format(
757 757 branch=branch_name)
758 758
759 759 cmd = ['fetch', '--no-tags', '--update-head-ok',
760 760 repository_path, branch_name]
761 761 self.run_git_command(cmd, fail_on_stderr=False)
762 762
763 763 def _local_reset(self, branch_name):
764 764 branch_name = '{}'.format(branch_name)
765 765 cmd = ['reset', '--hard', branch_name, '--']
766 766 self.run_git_command(cmd, fail_on_stderr=False)
767 767
768 768 def _last_fetch_heads(self):
769 769 """
770 770 Return the last fetched heads that need merging.
771 771
772 772 The algorithm is defined at
773 773 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
774 774 """
775 775 if not self.bare:
776 776 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
777 777 else:
778 778 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
779 779
780 780 heads = []
781 781 with open(fetch_heads_path) as f:
782 782 for line in f:
783 783 if ' not-for-merge ' in line:
784 784 continue
785 785 line = re.sub('\t.*', '', line, flags=re.DOTALL)
786 786 heads.append(line)
787 787
788 788 return heads
789 789
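# Illustrative sketch (not from the source files above): how the FETCH_HEAD
# parsing above behaves on input shaped the way the parser expects --
# 'not-for-merge' entries are skipped and everything from the first tab on is
# stripped. The sample lines are made up for the demonstration.
import re

sample_fetch_head = (
    "3f786850e387550fdab836ed7e6dc881de23001b\t\tbranch 'master' of /src/repo\n"
    "89e6c98d92887913cadf06b2adb97f26cde4849b not-for-merge branch 'dev' of /src/repo\n"
)

heads = []
for line in sample_fetch_head.splitlines(True):
    if ' not-for-merge ' in line:
        continue
    line = re.sub('\t.*', '', line, flags=re.DOTALL)
    heads.append(line)

assert heads == ['3f786850e387550fdab836ed7e6dc881de23001b']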
790 790 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
791 791 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
792 792
793 793 def _local_pull(self, repository_path, branch_name, ff_only=True):
794 794 """
795 795 Pull a branch from a local repository.
796 796 """
797 797 if self.bare:
798 798 raise RepositoryError('Cannot pull into a bare git repository')
799 799 # N.B.(skreft): The --ff-only option is to make sure this is a
800 800 # fast-forward (i.e., we are only pulling new changes and there are no
801 801 # conflicts with our current branch)
802 802 # Additionally, that option needs to go before --no-tags, otherwise git
803 803 # pull complains about it being an unknown flag.
804 804 cmd = ['pull']
805 805 if ff_only:
806 806 cmd.append('--ff-only')
807 807 cmd.extend(['--no-tags', repository_path, branch_name])
808 808 self.run_git_command(cmd, fail_on_stderr=False)
809 809
810 810 def _local_merge(self, merge_message, user_name, user_email, heads):
811 811 """
812 812 Merge the given head into the checked out branch.
813 813
814 814 It will force a merge commit.
815 815
816 816 Currently it raises an error if the repo is empty, as it is not possible
817 817 to create a merge commit in an empty repo.
818 818
819 819 :param merge_message: The message to use for the merge commit.
820 820 :param heads: the heads to merge.
821 821 """
822 822 if self.bare:
823 823 raise RepositoryError('Cannot merge into a bare git repository')
824 824
825 825 if not heads:
826 826 return
827 827
828 828 if self.is_empty():
829 829 # TODO(skreft): do something more robust in this case.
830 830 raise RepositoryError(
831 831 'Do not know how to merge into empty repositories yet')
832 832 unresolved = None
833 833
834 834 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
835 835 # merge commit. We also specify the user who is doing the merge.
836 836 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
837 837 '-c', 'user.email=%s' % safe_str(user_email),
838 838 'merge', '--no-ff', '-m', safe_str(merge_message)]
839 839 cmd.extend(heads)
840 840 try:
841 841 output = self.run_git_command(cmd, fail_on_stderr=False)
842 842 except RepositoryError:
843 843 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
844 844 fail_on_stderr=False)[0].splitlines()
845 845 # NOTE(marcink): we add U notation for consistency with HG backend output
846 846 unresolved = ['U {}'.format(f) for f in files]
847 847
848 848 # Cleanup any merge leftovers
849 849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
850 850
851 851 if unresolved:
852 852 raise UnresolvedFilesInRepo(unresolved)
853 853 else:
854 854 raise
855 855
856 856 def _local_push(
857 857 self, source_branch, repository_path, target_branch,
858 858 enable_hooks=False, rc_scm_data=None):
859 859 """
860 860 Push the source_branch to the given repository and target_branch.
861 861
862 862 Currently, if the target_branch is not master and the target repo is
863 863 empty, the push will work, but then GitRepository won't be able to find
864 864 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
865 865 pointing to master, which does not exist).
866 866
867 867 It does not run the hooks in the target repo.
868 868 """
869 869 # TODO(skreft): deal with the case in which the target repo is empty,
870 870 # and the target_branch is not master.
871 871 target_repo = GitRepository(repository_path)
872 872 if (not target_repo.bare and
873 873 target_repo._current_branch() == target_branch):
874 874 # Git prevents pushing to the checked out branch, so simulate it by
875 875 # pulling into the target repository.
876 876 target_repo._local_pull(self.path, source_branch)
877 877 else:
878 878 cmd = ['push', os.path.abspath(repository_path),
879 879 '%s:%s' % (source_branch, target_branch)]
880 880 gitenv = {}
881 881 if rc_scm_data:
882 882 gitenv.update({'RC_SCM_DATA': rc_scm_data})
883 883
884 884 if not enable_hooks:
885 885 gitenv['RC_SKIP_HOOKS'] = '1'
886 886 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
887 887
888 888 def _get_new_pr_branch(self, source_branch, target_branch):
889 889 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
890 890 pr_branches = []
891 891 for branch in self.branches:
892 892 if branch.startswith(prefix):
893 893 pr_branches.append(int(branch[len(prefix):]))
894 894
895 895 if not pr_branches:
896 896 branch_id = 0
897 897 else:
898 898 branch_id = max(pr_branches) + 1
899 899
900 900 return '%s%d' % (prefix, branch_id)
901 901
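# Illustrative sketch (not from the source files above): the naming scheme of
# _get_new_pr_branch -- an incrementing numeric suffix on a
# 'pr_<source>-<target>_' prefix, computed from existing branch names.
source_branch, target_branch = 'feature-x', 'master'
existing = ['master', 'pr_feature-x-master_0', 'pr_feature-x-master_1']

prefix = 'pr_%s-%s_' % (source_branch, target_branch)
pr_branches = [int(b[len(prefix):]) for b in existing if b.startswith(prefix)]
branch_id = max(pr_branches) + 1 if pr_branches else 0

assert '%s%d' % (prefix, branch_id) == 'pr_feature-x-master_2'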
902 902 def _maybe_prepare_merge_workspace(
903 903 self, repo_id, workspace_id, target_ref, source_ref):
904 904 shadow_repository_path = self._get_shadow_repository_path(
905 905 self.path, repo_id, workspace_id)
906 906 if not os.path.exists(shadow_repository_path):
907 907 self._local_clone(
908 908 shadow_repository_path, target_ref.name, source_ref.name)
909 909 log.debug('Prepared %s shadow repository in %s',
910 910 self.alias, shadow_repository_path)
911 911
912 912 return shadow_repository_path
913 913
914 914 def _merge_repo(self, repo_id, workspace_id, target_ref,
915 915 source_repo, source_ref, merge_message,
916 916 merger_name, merger_email, dry_run=False,
917 917 use_rebase=False, close_branch=False):
918 918
919 919 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
920 920 'rebase' if use_rebase else 'merge', dry_run)
921 921 if target_ref.commit_id != self.branches[target_ref.name]:
922 922 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
923 923 target_ref.commit_id, self.branches[target_ref.name])
924 924 return MergeResponse(
925 925 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
926 926 metadata={'target_ref': target_ref})
927 927
928 928 shadow_repository_path = self._maybe_prepare_merge_workspace(
929 929 repo_id, workspace_id, target_ref, source_ref)
930 930 shadow_repo = self.get_shadow_instance(shadow_repository_path)
931 931
932 932 # checkout source, if it's different. Otherwise we could not
933 933 # fetch proper commits for merge testing
934 934 if source_ref.name != target_ref.name:
935 935 if shadow_repo.get_remote_ref(source_ref.name):
936 936 shadow_repo._checkout(source_ref.name, force=True)
937 937
938 938 # checkout target, and fetch changes
939 939 shadow_repo._checkout(target_ref.name, force=True)
940 940
941 941 # fetch/reset the target, in case it changed;
942 942 # this handles even forced changes
943 943 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
944 944 shadow_repo._local_reset(target_ref.name)
945 945
946 946 # Need to reload repo to invalidate the cache, or otherwise we cannot
947 947 # retrieve the last target commit.
948 948 shadow_repo = self.get_shadow_instance(shadow_repository_path)
949 949 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
950 950 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
951 951 target_ref, target_ref.commit_id,
952 952 shadow_repo.branches[target_ref.name])
953 953 return MergeResponse(
954 954 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
955 955 metadata={'target_ref': target_ref})
956 956
957 957 # calculate new branch
958 958 pr_branch = shadow_repo._get_new_pr_branch(
959 959 source_ref.name, target_ref.name)
960 960 log.debug('using pull-request merge branch: `%s`', pr_branch)
961 961 # checkout to temp branch, and fetch changes
962 962 shadow_repo._checkout(pr_branch, create=True)
963 963 try:
964 964 shadow_repo._local_fetch(source_repo.path, source_ref.name)
965 965 except RepositoryError:
966 966 log.exception('Failure when doing local fetch on '
967 967 'shadow repo: %s', shadow_repo)
968 968 return MergeResponse(
969 969 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
970 970 metadata={'source_ref': source_ref})
971 971
972 972 merge_ref = None
973 973 merge_failure_reason = MergeFailureReason.NONE
974 974 metadata = {}
975 975 try:
976 976 shadow_repo._local_merge(merge_message, merger_name, merger_email,
977 977 [source_ref.commit_id])
978 978 merge_possible = True
979 979
980 980 # Need to invalidate the cache, or otherwise we
981 981 # cannot retrieve the merge commit.
982 982 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
983 983 merge_commit_id = shadow_repo.branches[pr_branch]
984 984
985 985 # Set a reference pointing to the merge commit. This reference may
986 986 # be used to easily identify the last successful merge commit in
987 987 # the shadow repository.
988 988 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
989 989 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
990 990 except RepositoryError as e:
991 991 log.exception('Failure when doing local merge on git shadow repo')
992 992 if isinstance(e, UnresolvedFilesInRepo):
993 metadata['unresolved_files'] = 'file: ' + (', file: '.join(e.args[0]))
993 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
994 994
995 995 merge_possible = False
996 996 merge_failure_reason = MergeFailureReason.MERGE_FAILED
997 997
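# Illustrative sketch (not from the source files above): the effect of the
# reformatted 'unresolved_files' metadata from the change above, for two
# made-up conflicting files (the git backend prefixes each with 'U ').
unresolved = ['U setup.py', 'U docs/index.rst']

old_style = 'file: ' + (', file: '.join(unresolved))
new_style = '\n* conflict: ' + ('\n * conflict: '.join(unresolved))

assert old_style == 'file: U setup.py, file: U docs/index.rst'
assert new_style == '\n* conflict: U setup.py\n * conflict: U docs/index.rst'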
998 998 if merge_possible and not dry_run:
999 999 try:
1000 1000 shadow_repo._local_push(
1001 1001 pr_branch, self.path, target_ref.name, enable_hooks=True,
1002 1002 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1003 1003 merge_succeeded = True
1004 1004 except RepositoryError:
1005 1005 log.exception(
1006 1006 'Failure when doing local push from the shadow '
1007 1007 'repository to the target repository at %s.', self.path)
1008 1008 merge_succeeded = False
1009 1009 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1010 1010 metadata['target'] = 'git shadow repo'
1011 1011 metadata['merge_commit'] = pr_branch
1012 1012 else:
1013 1013 merge_succeeded = False
1014 1014
1015 1015 return MergeResponse(
1016 1016 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1017 1017 metadata=metadata)
@@ -1,972 +1,972 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, exceptions
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 46 from rhodecode.lib.vcs.compat import configparser
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62 """
63 63 Raises RepositoryError if repository could not be found at the given
64 64 ``repo_path``.
65 65
66 66 :param repo_path: local path of the repository
67 67 :param config: config object containing the repo configuration
68 68 :param create=False: if set to True, would try to create repository if
69 69 it does not exist rather than raising exception
70 70 :param src_url=None: would try to clone repository from given location
71 71 :param do_workspace_checkout=False: sets update of working copy after
72 72 making a clone
73 73 :param bare: not used, compatible with other VCS
74 74 """
75 75
76 76 self.path = safe_str(os.path.abspath(repo_path))
77 77 # mercurial since 4.4.X requires certain configuration to be present;
78 78 # because sometimes we init the repos with config, we need to meet
79 79 # special requirements
80 80 self.config = config if config else self.get_default_config(
81 81 default=[('extensions', 'largefiles', '1')])
82 82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83 83
84 84 self._init_repo(create, src_url, do_workspace_checkout)
85 85
86 86 # caches
87 87 self._commit_ids = {}
88 88
89 89 @LazyProperty
90 90 def _remote(self):
91 91 repo_id = self.path
92 92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
94 94 @CachedProperty
95 95 def commit_ids(self):
96 96 """
97 97 Returns list of commit ids, in ascending order. Being a lazy
98 98 attribute allows external tools to inject shas from cache.
99 99 """
100 100 commit_ids = self._get_all_commit_ids()
101 101 self._rebuild_cache(commit_ids)
102 102 return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = dict((commit_id, index)
106 106 for index, commit_id in enumerate(commit_ids))
107 107
108 108 @CachedProperty
109 109 def branches(self):
110 110 return self._get_branches()
111 111
112 112 @CachedProperty
113 113 def branches_closed(self):
114 114 return self._get_branches(active=False, closed=True)
115 115
116 116 @CachedProperty
117 117 def branches_all(self):
118 118 all_branches = {}
119 119 all_branches.update(self.branches)
120 120 all_branches.update(self.branches_closed)
121 121 return all_branches
122 122
123 123 def _get_branches(self, active=True, closed=False):
124 124 """
125 125 Gets branches for this repository
126 126 Returns only active, not-closed branches by default
127 127
128 128 :param active: return also active branches
129 129 :param closed: return also closed branches
130 130
131 131 """
132 132 if self.is_empty():
133 133 return {}
134 134
135 135 def get_name(ctx):
136 136 return ctx[0]
137 137
138 138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 139 self._remote.branches(active, closed).items()]
140 140
141 141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
143 143 @CachedProperty
144 144 def tags(self):
145 145 """
146 146 Gets tags for this repository
147 147 """
148 148 return self._get_tags()
149 149
150 150 def _get_tags(self):
151 151 if self.is_empty():
152 152 return {}
153 153
154 154 def get_name(ctx):
155 155 return ctx[0]
156 156
157 157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 158 self._remote.tags().items()]
159 159
160 160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
162 162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 163 """
164 164 Creates and returns a tag for the given ``commit_id``.
165 165
166 166 :param name: name for new tag
167 167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 168 :param commit_id: commit id for which new tag would be created
169 169 :param message: message of the tag's commit
170 170 :param date: date of tag's commit
171 171
172 172 :raises TagAlreadyExistError: if tag with same name already exists
173 173 """
174 174 if name in self.tags:
175 175 raise TagAlreadyExistError("Tag %s already exists" % name)
176 176
177 177 commit = self.get_commit(commit_id=commit_id)
178 178 local = kwargs.setdefault('local', False)
179 179
180 180 if message is None:
181 181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182 182
183 183 date, tz = date_to_timestamp_plus_offset(date)
184 184
185 185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 186 self._remote.invalidate_vcs_cache()
187 187
188 188 # Reinitialize tags
189 189 self._invalidate_prop_cache('tags')
190 190 tag_id = self.tags[name]
191 191
192 192 return self.get_commit(commit_id=tag_id)
193 193
194 194 def remove_tag(self, name, user, message=None, date=None):
195 195 """
196 196 Removes tag with the given `name`.
197 197
198 198 :param name: name of the tag to be removed
199 199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 200 :param message: message of the tag's removal commit
201 201 :param date: date of tag's removal commit
202 202
203 203 :raises TagDoesNotExistError: if tag with given name does not exist
204 204 """
205 205 if name not in self.tags:
206 206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207 207
208 208 if message is None:
209 209 message = "Removed tag %s" % name
210 210 local = False
211 211
212 212 date, tz = date_to_timestamp_plus_offset(date)
213 213
214 214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 215 self._remote.invalidate_vcs_cache()
216 216 self._invalidate_prop_cache('tags')
217 217
218 218 @LazyProperty
219 219 def bookmarks(self):
220 220 """
221 221 Gets bookmarks for this repository
222 222 """
223 223 return self._get_bookmarks()
224 224
225 225 def _get_bookmarks(self):
226 226 if self.is_empty():
227 227 return {}
228 228
229 229 def get_name(ctx):
230 230 return ctx[0]
231 231
232 232 _bookmarks = [
233 233 (safe_unicode(n), hexlify(h)) for n, h in
234 234 self._remote.bookmarks().items()]
235 235
236 236 return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
238 238 def _get_all_commit_ids(self):
239 239 return self._remote.get_all_commit_ids('visible')
240 240
241 241 def get_diff(
242 242 self, commit1, commit2, path='', ignore_whitespace=False,
243 243 context=3, path1=None):
244 244 """
245 245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 246 `commit2` since `commit1`.
247 247
248 248 :param commit1: Entry point from which diff is shown. Can be
249 249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 250 the changes since empty state of the repository until `commit2`
251 251 :param commit2: Until which commit changes should be shown.
252 252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 253 changes. Defaults to ``False``.
254 254 :param context: How many lines before/after changed lines should be
255 255 shown. Defaults to ``3``.
256 256 """
257 257 self._validate_diff_commits(commit1, commit2)
258 258 if path1 is not None and path1 != path:
259 259 raise ValueError("Diff of two different paths not supported.")
260 260
261 261 if path:
262 262 file_filter = [self.path, path]
263 263 else:
264 264 file_filter = None
265 265
266 266 diff = self._remote.diff(
267 267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 268 opt_git=True, opt_ignorews=ignore_whitespace,
269 269 context=context)
270 270 return MercurialDiff(diff)
271 271
272 272 def strip(self, commit_id, branch=None):
273 273 self._remote.strip(commit_id, update=False, backup="none")
274 274
275 275 self._remote.invalidate_vcs_cache()
276 276 # clear cache
277 277 self._invalidate_prop_cache('commit_ids')
278 278
279 279 return len(self.commit_ids)
280 280
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
287 287 def hg_update_cache(self):
288 288 update_cache = self._remote.hg_update_cache()
289 289
290 290 self._remote.invalidate_vcs_cache()
291 291 return update_cache
292 292
293 293 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
294 294 if commit_id1 == commit_id2:
295 295 return commit_id1
296 296
297 297 ancestors = self._remote.revs_from_revspec(
298 298 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
299 299 other_path=repo2.path)
300 300 return repo2[ancestors[0]].raw_id if ancestors else None
301 301
302 302 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
303 303 if commit_id1 == commit_id2:
304 304 commits = []
305 305 else:
306 306 if merge:
307 307 indexes = self._remote.revs_from_revspec(
308 308 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
309 309 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
310 310 else:
311 311 indexes = self._remote.revs_from_revspec(
312 312 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
313 313 commit_id1, other_path=repo2.path)
314 314
315 315 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
316 316 for idx in indexes]
317 317
318 318 return commits
319 319
320 320 @staticmethod
321 321 def check_url(url, config):
322 322 """
323 323 Function will check the given url and try to verify if it's a valid
324 324 link. Sometimes it may happen that mercurial will issue a basic
325 325 auth request that can cause the whole API to hang when used from python
326 326 or other external calls.
327 327
328 328 On failures it'll raise urllib2.HTTPError; the exception is also thrown
329 329 when the return code is not 200.
330 330 """
331 331 # check first if it's not a local url
332 332 if os.path.isdir(url) or url.startswith('file:'):
333 333 return True
334 334
335 335 # Request the _remote to verify the url
336 336 return connection.Hg.check_url(url, config.serialize())
337 337
338 338 @staticmethod
339 339 def is_valid_repository(path):
340 340 return os.path.isdir(os.path.join(path, '.hg'))
341 341
342 342 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
343 343 """
344 344 Function will check for a mercurial repository in the given path. If there
345 345 is no repository in that path it will raise an exception unless the
346 346 `create` parameter is set to True - in that case the repository would
347 347 be created.
348 348
349 349 If `src_url` is given, would try to clone repository from the
350 350 location at given clone_point. Additionally it'll update the
351 351 working copy according to the `do_workspace_checkout` flag.
352 352 """
353 353 if create and os.path.exists(self.path):
354 354 raise RepositoryError(
355 355 "Cannot create repository at %s, location already exist"
356 356 % self.path)
357 357
358 358 if src_url:
359 359 url = str(self._get_url(src_url))
360 360 MercurialRepository.check_url(url, self.config)
361 361
362 362 self._remote.clone(url, self.path, do_workspace_checkout)
363 363
364 364 # Don't try to create if we've already cloned repo
365 365 create = False
366 366
367 367 if create:
368 368 os.makedirs(self.path, mode=0o755)
369 369 self._remote.localrepository(create)
370 370
371 371 @LazyProperty
372 372 def in_memory_commit(self):
373 373 return MercurialInMemoryCommit(self)
374 374
375 375 @LazyProperty
376 376 def description(self):
377 377 description = self._remote.get_config_value(
378 378 'web', 'description', untrusted=True)
379 379 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
380 380
381 381 @LazyProperty
382 382 def contact(self):
383 383 contact = (
384 384 self._remote.get_config_value("web", "contact") or
385 385 self._remote.get_config_value("ui", "username"))
386 386 return safe_unicode(contact or self.DEFAULT_CONTACT)
387 387
388 388 @LazyProperty
389 389 def last_change(self):
390 390 """
391 391 Returns last change made on this repository as
392 392 `datetime.datetime` object.
393 393 """
394 394 try:
395 395 return self.get_commit().date
396 396 except RepositoryError:
397 397 tzoffset = makedate()[1]
398 398 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
399 399
400 400 def _get_fs_mtime(self):
401 401 # fallback to filesystem
402 402 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
403 403 st_path = os.path.join(self.path, '.hg', "store")
404 404 if os.path.exists(cl_path):
405 405 return os.stat(cl_path).st_mtime
406 406 else:
407 407 return os.stat(st_path).st_mtime
408 408
409 409 def _get_url(self, url):
410 410 """
411 411 Returns normalized url. If schema is not given, would fall
412 412 back to filesystem
413 413 (``file:///``) schema.
414 414 """
415 415 url = url.encode('utf8')
416 416 if url != 'default' and '://' not in url:
417 417 url = "file:" + urllib.pathname2url(url)
418 418 return url
419 419
420 420 def get_hook_location(self):
421 421 """
422 422 returns absolute path to location where hooks are stored
423 423 """
424 424 return os.path.join(self.path, '.hg', '.hgrc')
425 425
426 426 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
427 427 """
428 428 Returns ``MercurialCommit`` object representing repository's
429 429 commit at the given `commit_id` or `commit_idx`.
430 430 """
431 431 if self.is_empty():
432 432 raise EmptyRepositoryError("There are no commits yet")
433 433
434 434 if commit_id is not None:
435 435 self._validate_commit_id(commit_id)
436 436 try:
437 437 # we have cached idx, use it without contacting the remote
438 438 idx = self._commit_ids[commit_id]
439 439 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
440 440 except KeyError:
441 441 pass
442 442
443 443 elif commit_idx is not None:
444 444 self._validate_commit_idx(commit_idx)
445 445 try:
446 446 _commit_id = self.commit_ids[commit_idx]
447 447 if commit_idx < 0:
448 448 commit_idx = self.commit_ids.index(_commit_id)
449 449
450 450 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
451 451 except IndexError:
452 452 commit_id = commit_idx
453 453 else:
454 454 commit_id = "tip"
455 455
456 456 if isinstance(commit_id, unicode):
457 457 commit_id = safe_str(commit_id)
458 458
459 459 try:
460 460 raw_id, idx = self._remote.lookup(commit_id, both=True)
461 461 except CommitDoesNotExistError:
462 462 msg = "Commit {} does not exist for `{}`".format(
463 463 *map(safe_str, [commit_id, self.name]))
464 464 raise CommitDoesNotExistError(msg)
465 465
466 466 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
467 467
468 468 def get_commits(
469 469 self, start_id=None, end_id=None, start_date=None, end_date=None,
470 470 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
471 471 """
472 472 Returns generator of ``MercurialCommit`` objects from start to end
473 473 (both are inclusive)
474 474
475 475 :param start_id: None, str(commit_id)
476 476 :param end_id: None, str(commit_id)
477 477 :param start_date: if specified, commits with commit date less than
478 478 ``start_date`` would be filtered out from returned set
479 479 :param end_date: if specified, commits with commit date greater than
480 480 ``end_date`` would be filtered out from returned set
481 481 :param branch_name: if specified, commits not reachable from given
482 482 branch would be filtered out from returned set
483 483 :param show_hidden: Show hidden commits such as obsolete or hidden from
484 484 Mercurial evolve
485 485 :raise BranchDoesNotExistError: If given ``branch_name`` does not
486 486 exist.
487 487 :raise CommitDoesNotExistError: If commit for given ``start`` or
488 488 ``end`` could not be found.
489 489 """
490 490 # actually we should check now if it's not an empty repo
491 491 if self.is_empty():
492 492 raise EmptyRepositoryError("There are no commits yet")
493 493 self._validate_branch_name(branch_name)
494 494
495 495 branch_ancestors = False
496 496 if start_id is not None:
497 497 self._validate_commit_id(start_id)
498 498 c_start = self.get_commit(commit_id=start_id)
499 499 start_pos = self._commit_ids[c_start.raw_id]
500 500 else:
501 501 start_pos = None
502 502
503 503 if end_id is not None:
504 504 self._validate_commit_id(end_id)
505 505 c_end = self.get_commit(commit_id=end_id)
506 506 end_pos = max(0, self._commit_ids[c_end.raw_id])
507 507 else:
508 508 end_pos = None
509 509
510 510 if None not in [start_id, end_id] and start_pos > end_pos:
511 511 raise RepositoryError(
512 512 "Start commit '%s' cannot be after end commit '%s'" %
513 513 (start_id, end_id))
514 514
515 515 if end_pos is not None:
516 516 end_pos += 1
517 517
518 518 commit_filter = []
519 519
520 520 if branch_name and not branch_ancestors:
521 521 commit_filter.append('branch("%s")' % (branch_name,))
522 522 elif branch_name and branch_ancestors:
523 523 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
524 524
525 525 if start_date and not end_date:
526 526 commit_filter.append('date(">%s")' % (start_date,))
527 527 if end_date and not start_date:
528 528 commit_filter.append('date("<%s")' % (end_date,))
529 529 if start_date and end_date:
530 530 commit_filter.append(
531 531 'date(">%s") and date("<%s")' % (start_date, end_date))
532 532
533 533 if not show_hidden:
534 534 commit_filter.append('not obsolete()')
535 535 commit_filter.append('not hidden()')
536 536
537 537 # TODO: johbo: Figure out a simpler way for this solution
538 538 collection_generator = CollectionGenerator
539 539 if commit_filter:
540 540 commit_filter = ' and '.join(map(safe_str, commit_filter))
541 541 revisions = self._remote.rev_range([commit_filter])
542 542 collection_generator = MercurialIndexBasedCollectionGenerator
543 543 else:
544 544 revisions = self.commit_ids
545 545
546 546 if start_pos or end_pos:
547 547 revisions = revisions[start_pos:end_pos]
548 548
549 549 return collection_generator(self, revisions, pre_load=pre_load)
550 550
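# Illustrative sketch (not from the source files above): the revset expression
# get_commits assembles for a branch plus date-window query before handing it
# to rev_range(); the date strings here stand in for real datetime values.
commit_filter = []
branch_name, start_date, end_date = 'default', '2019-01-01', '2019-06-30'

if branch_name:
    commit_filter.append('branch("%s")' % (branch_name,))
if start_date and end_date:
    commit_filter.append('date(">%s") and date("<%s")' % (start_date, end_date))
commit_filter.append('not obsolete()')
commit_filter.append('not hidden()')

revset = ' and '.join(commit_filter)
# 'branch("default") and date(">2019-01-01") and date("<2019-06-30") and not obsolete() and not hidden()'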
551 551 def pull(self, url, commit_ids=None):
552 552 """
553 553 Pull changes from external location.
554 554
555 555 :param commit_ids: Optional. Can be set to a list of commit ids
556 556 which shall be pulled from the other repository.
557 557 """
558 558 url = self._get_url(url)
559 559 self._remote.pull(url, commit_ids=commit_ids)
560 560 self._remote.invalidate_vcs_cache()
561 561
562 562 def fetch(self, url, commit_ids=None):
563 563 """
564 564 Backward compatibility with GIT fetch==pull
565 565 """
566 566 return self.pull(url, commit_ids=commit_ids)
567 567
568 568 def push(self, url):
569 569 url = self._get_url(url)
570 570 self._remote.sync_push(url)
571 571
572 572 def _local_clone(self, clone_path):
573 573 """
574 574 Create a local clone of the current repo.
575 575 """
576 576 self._remote.clone(self.path, clone_path, update_after_clone=True,
577 577 hooks=False)
578 578
579 579 def _update(self, revision, clean=False):
580 580 """
581 581 Update the working copy to the specified revision.
582 582 """
583 583 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
584 584 self._remote.update(revision, clean=clean)
585 585
586 586 def _identify(self):
587 587 """
588 588 Return the current state of the working directory.
589 589 """
590 590 return self._remote.identify().strip().rstrip('+')
591 591
592 592 def _heads(self, branch=None):
593 593 """
594 594 Return the commit ids of the repository heads.
595 595 """
596 596 return self._remote.heads(branch=branch).strip().split(' ')
597 597
598 598 def _ancestor(self, revision1, revision2):
599 599 """
600 600 Return the common ancestor of the two revisions.
601 601 """
602 602 return self._remote.ancestor(revision1, revision2)
603 603
604 604 def _local_push(
605 605 self, revision, repository_path, push_branches=False,
606 606 enable_hooks=False):
607 607 """
608 608 Push the given revision to the specified repository.
609 609
610 610 :param push_branches: allow to create branches in the target repo.
611 611 """
612 612 self._remote.push(
613 613 [revision], repository_path, hooks=enable_hooks,
614 614 push_branches=push_branches)
615 615
616 616 def _local_merge(self, target_ref, merge_message, user_name, user_email,
617 617 source_ref, use_rebase=False, dry_run=False):
618 618 """
619 619 Merge the given source_revision into the checked out revision.
620 620
621 621 Returns the commit id of the merge and a boolean indicating if the
622 622 commit needs to be pushed.
623 623 """
624 624 self._update(target_ref.commit_id, clean=True)
625 625
626 626 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
627 627 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
628 628
629 629 if ancestor == source_ref.commit_id:
630 630 # Nothing to do, the changes were already integrated
631 631 return target_ref.commit_id, False
632 632
633 633 elif ancestor == target_ref.commit_id and is_the_same_branch:
634 634 # In this case we should force a commit message
635 635 return source_ref.commit_id, True
636 636
637 637 unresolved = None
638 638 if use_rebase:
639 639 try:
640 640 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
641 641 target_ref.commit_id)
642 642 self.bookmark(bookmark_name, revision=source_ref.commit_id)
643 643 self._remote.rebase(
644 644 source=source_ref.commit_id, dest=target_ref.commit_id)
645 645 self._remote.invalidate_vcs_cache()
646 646 self._update(bookmark_name, clean=True)
647 647 return self._identify(), True
648 648 except RepositoryError as e:
649 649 # The rebase-abort may raise another exception which 'hides'
650 650 # the original one, therefore we log it here.
651 651 log.exception('Error while rebasing shadow repo during merge.')
652 652 if 'unresolved conflicts' in e.message:
653 653 unresolved = self._remote.get_unresolved_files()
654 654 log.debug('unresolved files: %s', unresolved)
655 655
656 656 # Cleanup any rebase leftovers
657 657 self._remote.invalidate_vcs_cache()
658 658 self._remote.rebase(abort=True)
659 659 self._remote.invalidate_vcs_cache()
660 660 self._remote.update(clean=True)
661 661 if unresolved:
662 662 raise UnresolvedFilesInRepo(unresolved)
663 663 else:
664 664 raise
665 665 else:
666 666 try:
667 667 self._remote.merge(source_ref.commit_id)
668 668 self._remote.invalidate_vcs_cache()
669 669 self._remote.commit(
670 670 message=safe_str(merge_message),
671 671 username=safe_str('%s <%s>' % (user_name, user_email)))
672 672 self._remote.invalidate_vcs_cache()
673 673 return self._identify(), True
674 674 except RepositoryError as e:
675 675 # The merge-abort may raise another exception which 'hides'
676 676 # the original one, therefore we log it here.
677 677 log.exception('Error while merging shadow repo during merge.')
678 678 if 'unresolved merge conflicts' in e.message:
679 679 unresolved = self._remote.get_unresolved_files()
680 680 log.debug('unresolved files: %s', unresolved)
681 681
682 682 # Cleanup any merge leftovers
683 683 self._remote.update(clean=True)
684 684 if unresolved:
685 685 raise UnresolvedFilesInRepo(unresolved)
686 686 else:
687 687 raise
688 688
689 689 def _local_close(self, target_ref, user_name, user_email,
690 690 source_ref, close_message=''):
691 691 """
692 692 Close the branch of the given source_ref.
693 693
694 694 Returns the commit id of the closing commit and a boolean indicating if the
695 695 commit needs to be pushed.
696 696 """
697 697 self._update(source_ref.commit_id)
698 698 message = close_message or "Closing branch: `{}`".format(source_ref.name)
699 699 try:
700 700 self._remote.commit(
701 701 message=safe_str(message),
702 702 username=safe_str('%s <%s>' % (user_name, user_email)),
703 703 close_branch=True)
704 704 self._remote.invalidate_vcs_cache()
705 705 return self._identify(), True
706 706 except RepositoryError:
707 707 # Cleanup any commit leftovers
708 708 self._remote.update(clean=True)
709 709 raise
710 710
711 711 def _is_the_same_branch(self, target_ref, source_ref):
712 712 return (
713 713 self._get_branch_name(target_ref) ==
714 714 self._get_branch_name(source_ref))
715 715
716 716 def _get_branch_name(self, ref):
717 717 if ref.type == 'branch':
718 718 return ref.name
719 719 return self._remote.ctx_branch(ref.commit_id)
720 720
721 721 def _maybe_prepare_merge_workspace(
722 722 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
723 723 shadow_repository_path = self._get_shadow_repository_path(
724 724 self.path, repo_id, workspace_id)
725 725 if not os.path.exists(shadow_repository_path):
726 726 self._local_clone(shadow_repository_path)
727 727 log.debug(
728 728 'Prepared shadow repository in %s', shadow_repository_path)
729 729
730 730 return shadow_repository_path
731 731
732 732 def _merge_repo(self, repo_id, workspace_id, target_ref,
733 733 source_repo, source_ref, merge_message,
734 734 merger_name, merger_email, dry_run=False,
735 735 use_rebase=False, close_branch=False):
736 736
737 737 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
738 738 'rebase' if use_rebase else 'merge', dry_run)
739 739 if target_ref.commit_id not in self._heads():
740 740 return MergeResponse(
741 741 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
742 742 metadata={'target_ref': target_ref})
743 743
744 744 try:
745 745 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
746 746 heads = '\n,'.join(self._heads(target_ref.name))
747 747 metadata = {
748 748 'target_ref': target_ref,
749 749 'source_ref': source_ref,
750 750 'heads': heads
751 751 }
752 752 return MergeResponse(
753 753 False, False, None,
754 754 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
755 755 metadata=metadata)
756 756 except CommitDoesNotExistError:
757 757 log.exception('Failure when looking up branch heads on hg target')
758 758 return MergeResponse(
759 759 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
760 760 metadata={'target_ref': target_ref})
761 761
762 762 shadow_repository_path = self._maybe_prepare_merge_workspace(
763 763 repo_id, workspace_id, target_ref, source_ref)
764 764 shadow_repo = self.get_shadow_instance(shadow_repository_path)
765 765
766 766 log.debug('Pulling in target reference %s', target_ref)
767 767 self._validate_pull_reference(target_ref)
768 768 shadow_repo._local_pull(self.path, target_ref)
769 769
770 770 try:
771 771 log.debug('Pulling in source reference %s', source_ref)
772 772 source_repo._validate_pull_reference(source_ref)
773 773 shadow_repo._local_pull(source_repo.path, source_ref)
774 774 except CommitDoesNotExistError:
775 775 log.exception('Failure when doing local pull on hg shadow repo')
776 776 return MergeResponse(
777 777 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
778 778 metadata={'source_ref': source_ref})
779 779
780 780 merge_ref = None
781 781 merge_commit_id = None
782 782 close_commit_id = None
783 783 merge_failure_reason = MergeFailureReason.NONE
784 784 metadata = {}
785 785
786 786 # enforce that close_branch is only honoured when the source is
787 787 # an actual branch
788 788 close_branch = close_branch and source_ref.type == 'branch'
789 789
790 790 # don't allow closing the branch if source and target are the same
791 791 close_branch = close_branch and source_ref.name != target_ref.name
792 792
793 793 needs_push_on_close = False
794 794 if close_branch and not use_rebase and not dry_run:
795 795 try:
796 796 close_commit_id, needs_push_on_close = shadow_repo._local_close(
797 797 target_ref, merger_name, merger_email, source_ref)
798 798 merge_possible = True
799 799 except RepositoryError:
800 800 log.exception('Failure when doing close branch on '
801 801 'shadow repo: %s', shadow_repo)
802 802 merge_possible = False
803 803 merge_failure_reason = MergeFailureReason.MERGE_FAILED
804 804 else:
805 805 merge_possible = True
806 806
807 807 needs_push = False
808 808 if merge_possible:
809 809 try:
810 810 merge_commit_id, needs_push = shadow_repo._local_merge(
811 811 target_ref, merge_message, merger_name, merger_email,
812 812 source_ref, use_rebase=use_rebase, dry_run=dry_run)
813 813 merge_possible = True
814 814
815 815 # take the earlier close action into account, since it
816 816 # may have required a push
817 817 needs_push = needs_push or needs_push_on_close
818 818
819 819 # Set a bookmark pointing to the merge commit. This bookmark
820 820 # may be used to easily identify the last successful merge
821 821 # commit in the shadow repository.
822 822 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
823 823 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
824 824 except SubrepoMergeError:
825 825 log.exception(
826 826 'Subrepo merge error during local merge on hg shadow repo.')
827 827 merge_possible = False
828 828 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
829 829 needs_push = False
830 830 except RepositoryError as e:
831 831 log.exception('Failure when doing local merge on hg shadow repo')
832 832 if isinstance(e, UnresolvedFilesInRepo):
833 metadata['unresolved_files'] = 'file: ' + (', file: '.join(e.args[0]))
833 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
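# Formatting note for the line above: for a hypothetical
# e.args[0] == ['docs/a.txt', 'docs/b.txt'] the stored value becomes
# '\n* conflict: docs/a.txt\n * conflict: docs/b.txt', i.e. one
# "conflict:" entry per unresolved file, each on its own line.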
834 834
835 835 merge_possible = False
836 836 merge_failure_reason = MergeFailureReason.MERGE_FAILED
837 837 needs_push = False
838 838
839 839 if merge_possible and not dry_run:
840 840 if needs_push:
841 841 # In case the target is a bookmark, update it, so that after pushing
842 842 # the bookmark is also updated in the target.
843 843 if target_ref.type == 'book':
844 844 shadow_repo.bookmark(
845 845 target_ref.name, revision=merge_commit_id)
846 846 try:
847 847 shadow_repo_with_hooks = self.get_shadow_instance(
848 848 shadow_repository_path,
849 849 enable_hooks=True)
850 850 # This is the actual merge action, we push from shadow
851 851 # into origin.
852 852 # Note: the push_branches option will push any new branch
853 853 # defined in the source repository to the target. This may
854 854 # be dangerous as branches are permanent in Mercurial.
855 855 # This feature was requested in issue #441.
856 856 shadow_repo_with_hooks._local_push(
857 857 merge_commit_id, self.path, push_branches=True,
858 858 enable_hooks=True)
859 859
860 860 # maybe we also need to push the close_commit_id
861 861 if close_commit_id:
862 862 shadow_repo_with_hooks._local_push(
863 863 close_commit_id, self.path, push_branches=True,
864 864 enable_hooks=True)
865 865 merge_succeeded = True
866 866 except RepositoryError:
867 867 log.exception(
868 868 'Failure when doing local push from the shadow '
869 869 'repository to the target repository at %s.', self.path)
870 870 merge_succeeded = False
871 871 merge_failure_reason = MergeFailureReason.PUSH_FAILED
872 872 metadata['target'] = 'hg shadow repo'
873 873 metadata['merge_commit'] = merge_commit_id
874 874 else:
875 875 merge_succeeded = True
876 876 else:
877 877 merge_succeeded = False
878 878
879 879 return MergeResponse(
880 880 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
881 881 metadata=metadata)
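# A hedged sketch of how a caller might consume the MergeResponse built
# above; the attribute names (possible, executed, merge_ref,
# failure_reason, metadata) are assumed to mirror the positional
# arguments used here and are not confirmed by this file:
#
#   resp = repo._merge_repo(repo_id, workspace_id, target_ref, source_repo,
#                           source_ref, 'merge message', 'Jane Doe',
#                           'jane@example.com', dry_run=True)
#   if resp.possible and resp.executed:
#       log.debug('merge ref: %s', resp.merge_ref)
#   else:
#       log.debug('merge failed: %s %s', resp.failure_reason, resp.metadata)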
882 882
883 883 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
884 884 config = self.config.copy()
885 885 if not enable_hooks:
886 886 config.clear_section('hooks')
887 887 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
888 888
889 889 def _validate_pull_reference(self, reference):
890 890 if not (reference.name in self.bookmarks or
891 891 reference.name in self.branches or
892 892 self.get_commit(reference.commit_id)):
893 893 raise CommitDoesNotExistError(
894 894 'Unknown branch, bookmark or commit id')
895 895
896 896 def _local_pull(self, repository_path, reference):
897 897 """
898 898 Fetch a branch, bookmark or commit from a local repository.
899 899 """
900 900 repository_path = os.path.abspath(repository_path)
901 901 if repository_path == self.path:
902 902 raise ValueError('Cannot pull from the same repository')
903 903
904 904 reference_type_to_option_name = {
905 905 'book': 'bookmark',
906 906 'branch': 'branch',
907 907 }
908 908 option_name = reference_type_to_option_name.get(
909 909 reference.type, 'revision')
910 910
911 911 if option_name == 'revision':
912 912 ref = reference.commit_id
913 913 else:
914 914 ref = reference.name
915 915
916 916 options = {option_name: [ref]}
917 917 self._remote.pull_cmd(repository_path, hooks=False, **options)
918 918 self._remote.invalidate_vcs_cache()
919 919
920 920 def bookmark(self, bookmark, revision=None):
921 921 if isinstance(bookmark, unicode):
922 922 bookmark = safe_str(bookmark)
923 923 self._remote.bookmark(bookmark, revision=revision)
924 924 self._remote.invalidate_vcs_cache()
925 925
926 926 def get_path_permissions(self, username):
927 927 hgacl_file = os.path.join(self.path, '.hg/hgacl')
928 928
929 929 def read_patterns(suffix):
930 930 svalue = None
931 931 for section, option in [
932 932 ('narrowacl', username + suffix),
933 933 ('narrowacl', 'default' + suffix),
934 934 ('narrowhgacl', username + suffix),
935 935 ('narrowhgacl', 'default' + suffix)
936 936 ]:
937 937 try:
938 938 svalue = hgacl.get(section, option)
939 939 break # stop at the first value we find
940 940 except configparser.NoOptionError:
941 941 pass
942 942 if not svalue:
943 943 return None
944 944 result = ['/']
945 945 for pattern in svalue.split():
946 946 result.append(pattern)
947 947 if '*' not in pattern and '?' not in pattern:
948 948 result.append(pattern + '/*')
949 949 return result
950 950
951 951 if os.path.exists(hgacl_file):
952 952 try:
953 953 hgacl = configparser.RawConfigParser()
954 954 hgacl.read(hgacl_file)
955 955
956 956 includes = read_patterns('.includes')
957 957 excludes = read_patterns('.excludes')
958 958 return BasePathPermissionChecker.create_from_patterns(
959 959 includes, excludes)
960 960 except BaseException as e:
961 961 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
962 962 hgacl_file, self.name, e)
963 963 raise exceptions.RepositoryRequirementError(msg)
964 964 else:
965 965 return None
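# A hedged sketch of the `.hg/hgacl` layout read above; the section and
# option names follow the lookups in read_patterns(), while the concrete
# paths are hypothetical:
#
#   [narrowacl]
#   default.includes = docs src/module1
#   someuser.excludes = src/module1/private
#
# With that file, read_patterns('.includes') returns
# ['/', 'docs', 'docs/*', 'src/module1', 'src/module1/*'], because every
# pattern without wildcards also gets a trailing '/*' variant appended.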
966 966
967 967
968 968 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
969 969
970 970 def _commit_factory(self, commit_id):
971 971 return self.repo.get_commit(
972 972 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,84 +1,84 b''
1 1
2 2 <div class="pull-request-wrap">
3 3
4 4 % if c.pr_merge_possible:
5 5 <h2 class="merge-status">
6 6 <span class="merge-icon success"><i class="icon-ok"></i></span>
7 7 ${_('This pull request can be merged automatically.')}
8 8 </h2>
9 9 % else:
10 10 <h2 class="merge-status">
11 11 <span class="merge-icon warning"><i class="icon-false"></i></span>
12 12 ${_('Merge is not currently possible because of the failed checks below.')}
13 13 </h2>
14 14 % endif
15 15
16 16 % if c.pr_merge_errors.items():
17 17 <ul>
18 18 % for pr_check_key, pr_check_details in c.pr_merge_errors.items():
19 19 <% pr_check_type = pr_check_details['error_type'] %>
20 20 <li>
21 <span class="merge-message ${pr_check_type}" data-role="merge-message">
22 - ${pr_check_details['message']}
21 <div class="merge-message ${pr_check_type}" data-role="merge-message">
22 <span style="white-space: pre-line">- ${pr_check_details['message']}</span>
23 23 % if pr_check_key == 'todo':
24 24 % for co in pr_check_details['details']:
25 25 <a class="permalink" href="#comment-${co.comment_id}" onclick="Rhodecode.comments.scrollToComment($('#comment-${co.comment_id}'), 0, ${h.json.dumps(co.outdated)})"> #${co.comment_id}</a>${'' if loop.last else ','}
26 26 % endfor
27 27 % endif
28 </span>
28 </div>
29 29 </li>
30 30 % endfor
31 31 </ul>
32 32 % endif
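## A hedged sketch of the data this block consumes: c.pr_merge_errors is
## assumed to map a check key to a dict with 'error_type', 'message' and
## 'details' entries, e.g. (hypothetical values)
##   {'merge': {'error_type': 'error',
##              'message': 'This pull request cannot be merged automatically.\n* conflict: docs/a.txt',
##              'details': []}}
## The white-space: pre-line span above keeps the embedded newlines, so each
## "* conflict:" entry renders on its own line.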
33 33
34 34 <div class="pull-request-merge-actions">
35 35 % if c.allowed_to_merge:
36 36 ## Merge info, show only if all errors are taken care of
37 37 % if not c.pr_merge_errors and c.pr_merge_info:
38 38 <div class="pull-request-merge-info">
39 39 <ul>
40 40 % for pr_merge_key, pr_merge_details in c.pr_merge_info.items():
41 41 <li>
42 42 - ${pr_merge_details['message']}
43 43 </li>
44 44 % endfor
45 45 </ul>
46 46 </div>
47 47 % endif
48 48
49 49 <div>
50 50 ${h.secure_form(h.route_path('pullrequest_merge', repo_name=c.repo_name, pull_request_id=c.pull_request.pull_request_id), id='merge_pull_request_form', request=request)}
51 51 <% merge_disabled = ' disabled' if c.pr_merge_possible is False else '' %>
52 52
53 53 % if c.allowed_to_close:
54 54 ## close PR action, injected later next to COMMENT button
55 55 % if c.pull_request_review_status == c.REVIEW_STATUS_APPROVED:
56 56 <a id="close-pull-request-action" class="btn btn-approved-status" href="#close-as-approved" onclick="closePullRequest('${c.REVIEW_STATUS_APPROVED}'); return false;">
57 57 ${_('Close with status {}').format(h.commit_status_lbl(c.REVIEW_STATUS_APPROVED))}
58 58 </a>
59 59 % else:
60 60 <a id="close-pull-request-action" class="btn btn-rejected-status" href="#close-as-rejected" onclick="closePullRequest('${c.REVIEW_STATUS_REJECTED}'); return false;">
61 61 ${_('Close with status {}').format(h.commit_status_lbl(c.REVIEW_STATUS_REJECTED))}
62 62 </a>
63 63 % endif
64 64 % endif
65 65
66 66 <input type="submit" id="merge_pull_request" value="${_('Merge and close Pull Request')}" class="btn${merge_disabled}"${merge_disabled}>
67 67 ${h.end_form()}
68 68
69 69 <div class="pull-request-merge-refresh">
70 70 <a href="#refreshChecks" onclick="refreshMergeChecks(); return false;">${_('refresh checks')}</a>
71 71 /
72 72 <a class="tooltip" title="Force refresh of the merge workspace in case the current status seems wrong." href="${h.route_path('pullrequest_show', repo_name=c.repo_name, pull_request_id=c.pull_request.pull_request_id,_query={"force_refresh":1})}">forced recheck</a>
73 73 </div>
74 74
75 75 </div>
76 76 % elif c.rhodecode_user.username != h.DEFAULT_USER:
77 77 <a class="btn" href="#" onclick="refreshMergeChecks(); return false;">${_('refresh checks')}</a>
78 78 <input type="submit" value="${_('Merge and close Pull Request')}" class="btn disabled" disabled="disabled" title="${_('You are not allowed to merge this pull request.')}">
79 79 % else:
80 80 <input type="submit" value="${_('Login to Merge this Pull Request')}" class="btn disabled" disabled="disabled">
81 81 % endif
82 82 </div>
83 83
84 84 </div>