##// END OF EJS Templates
pull-requests: added nicer formatting for merge conflicting files
marcink -
r4087:697a75c3 default
parent child Browse files
Show More
@@ -1,1217 +1,1217 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib import helpers as h
26 from rhodecode.lib import helpers as h
27 from rhodecode.model.changeset_status import ChangesetStatusModel
27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.db import (
28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
31 from rhodecode.model.pull_request import PullRequestModel
31 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.user import UserModel
32 from rhodecode.model.user import UserModel
33 from rhodecode.tests import (
33 from rhodecode.tests import (
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35
35
36
36
37 def route_path(name, params=None, **kwargs):
37 def route_path(name, params=None, **kwargs):
38 import urllib
38 import urllib
39
39
40 base_url = {
40 base_url = {
41 'repo_changelog': '/{repo_name}/changelog',
41 'repo_changelog': '/{repo_name}/changelog',
42 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
42 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43 'repo_commits': '/{repo_name}/commits',
43 'repo_commits': '/{repo_name}/commits',
44 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
44 'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
45 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
46 'pullrequest_show_all': '/{repo_name}/pull-request',
46 'pullrequest_show_all': '/{repo_name}/pull-request',
47 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
48 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
49 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
49 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
50 'pullrequest_new': '/{repo_name}/pull-request/new',
50 'pullrequest_new': '/{repo_name}/pull-request/new',
51 'pullrequest_create': '/{repo_name}/pull-request/create',
51 'pullrequest_create': '/{repo_name}/pull-request/create',
52 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
53 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
54 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
55 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
56 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
57 }[name].format(**kwargs)
57 }[name].format(**kwargs)
58
58
59 if params:
59 if params:
60 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
61 return base_url
61 return base_url
62
62
63
63
64 @pytest.mark.usefixtures('app', 'autologin_user')
64 @pytest.mark.usefixtures('app', 'autologin_user')
65 @pytest.mark.backends("git", "hg")
65 @pytest.mark.backends("git", "hg")
66 class TestPullrequestsView(object):
66 class TestPullrequestsView(object):
67
67
68 def test_index(self, backend):
68 def test_index(self, backend):
69 self.app.get(route_path(
69 self.app.get(route_path(
70 'pullrequest_new',
70 'pullrequest_new',
71 repo_name=backend.repo_name))
71 repo_name=backend.repo_name))
72
72
73 def test_option_menu_create_pull_request_exists(self, backend):
73 def test_option_menu_create_pull_request_exists(self, backend):
74 repo_name = backend.repo_name
74 repo_name = backend.repo_name
75 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
76
76
77 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
78 'pullrequest_new', repo_name=repo_name)
78 'pullrequest_new', repo_name=repo_name)
79 response.mustcontain(create_pr_link)
79 response.mustcontain(create_pr_link)
80
80
81 def test_create_pr_form_with_raw_commit_id(self, backend):
81 def test_create_pr_form_with_raw_commit_id(self, backend):
82 repo = backend.repo
82 repo = backend.repo
83
83
84 self.app.get(
84 self.app.get(
85 route_path('pullrequest_new', repo_name=repo.repo_name,
85 route_path('pullrequest_new', repo_name=repo.repo_name,
86 commit=repo.get_commit().raw_id),
86 commit=repo.get_commit().raw_id),
87 status=200)
87 status=200)
88
88
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 @pytest.mark.parametrize('range_diff', ["0", "1"])
90 @pytest.mark.parametrize('range_diff', ["0", "1"])
91 def test_show(self, pr_util, pr_merge_enabled, range_diff):
91 def test_show(self, pr_util, pr_merge_enabled, range_diff):
92 pull_request = pr_util.create_pull_request(
92 pull_request = pr_util.create_pull_request(
93 mergeable=pr_merge_enabled, enable_notifications=False)
93 mergeable=pr_merge_enabled, enable_notifications=False)
94
94
95 response = self.app.get(route_path(
95 response = self.app.get(route_path(
96 'pullrequest_show',
96 'pullrequest_show',
97 repo_name=pull_request.target_repo.scm_instance().name,
97 repo_name=pull_request.target_repo.scm_instance().name,
98 pull_request_id=pull_request.pull_request_id,
98 pull_request_id=pull_request.pull_request_id,
99 params={'range-diff': range_diff}))
99 params={'range-diff': range_diff}))
100
100
101 for commit_id in pull_request.revisions:
101 for commit_id in pull_request.revisions:
102 response.mustcontain(commit_id)
102 response.mustcontain(commit_id)
103
103
104 assert pull_request.target_ref_parts.type in response
104 assert pull_request.target_ref_parts.type in response
105 assert pull_request.target_ref_parts.name in response
105 assert pull_request.target_ref_parts.name in response
106 target_clone_url = pull_request.target_repo.clone_url()
106 target_clone_url = pull_request.target_repo.clone_url()
107 assert target_clone_url in response
107 assert target_clone_url in response
108
108
109 assert 'class="pull-request-merge"' in response
109 assert 'class="pull-request-merge"' in response
110 if pr_merge_enabled:
110 if pr_merge_enabled:
111 response.mustcontain('Pull request reviewer approval is pending')
111 response.mustcontain('Pull request reviewer approval is pending')
112 else:
112 else:
113 response.mustcontain('Server-side pull request merging is disabled.')
113 response.mustcontain('Server-side pull request merging is disabled.')
114
114
115 if range_diff == "1":
115 if range_diff == "1":
116 response.mustcontain('Turn off: Show the diff as commit range')
116 response.mustcontain('Turn off: Show the diff as commit range')
117
117
118 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
118 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
119 # Logout
119 # Logout
120 response = self.app.post(
120 response = self.app.post(
121 h.route_path('logout'),
121 h.route_path('logout'),
122 params={'csrf_token': csrf_token})
122 params={'csrf_token': csrf_token})
123 # Login as regular user
123 # Login as regular user
124 response = self.app.post(h.route_path('login'),
124 response = self.app.post(h.route_path('login'),
125 {'username': TEST_USER_REGULAR_LOGIN,
125 {'username': TEST_USER_REGULAR_LOGIN,
126 'password': 'test12'})
126 'password': 'test12'})
127
127
128 pull_request = pr_util.create_pull_request(
128 pull_request = pr_util.create_pull_request(
129 author=TEST_USER_REGULAR_LOGIN)
129 author=TEST_USER_REGULAR_LOGIN)
130
130
131 response = self.app.get(route_path(
131 response = self.app.get(route_path(
132 'pullrequest_show',
132 'pullrequest_show',
133 repo_name=pull_request.target_repo.scm_instance().name,
133 repo_name=pull_request.target_repo.scm_instance().name,
134 pull_request_id=pull_request.pull_request_id))
134 pull_request_id=pull_request.pull_request_id))
135
135
136 response.mustcontain('Server-side pull request merging is disabled.')
136 response.mustcontain('Server-side pull request merging is disabled.')
137
137
138 assert_response = response.assert_response()
138 assert_response = response.assert_response()
139 # for regular user without a merge permissions, we don't see it
139 # for regular user without a merge permissions, we don't see it
140 assert_response.no_element_exists('#close-pull-request-action')
140 assert_response.no_element_exists('#close-pull-request-action')
141
141
142 user_util.grant_user_permission_to_repo(
142 user_util.grant_user_permission_to_repo(
143 pull_request.target_repo,
143 pull_request.target_repo,
144 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
144 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
145 'repository.write')
145 'repository.write')
146 response = self.app.get(route_path(
146 response = self.app.get(route_path(
147 'pullrequest_show',
147 'pullrequest_show',
148 repo_name=pull_request.target_repo.scm_instance().name,
148 repo_name=pull_request.target_repo.scm_instance().name,
149 pull_request_id=pull_request.pull_request_id))
149 pull_request_id=pull_request.pull_request_id))
150
150
151 response.mustcontain('Server-side pull request merging is disabled.')
151 response.mustcontain('Server-side pull request merging is disabled.')
152
152
153 assert_response = response.assert_response()
153 assert_response = response.assert_response()
154 # now regular user has a merge permissions, we have CLOSE button
154 # now regular user has a merge permissions, we have CLOSE button
155 assert_response.one_element_exists('#close-pull-request-action')
155 assert_response.one_element_exists('#close-pull-request-action')
156
156
157 def test_show_invalid_commit_id(self, pr_util):
157 def test_show_invalid_commit_id(self, pr_util):
158 # Simulating invalid revisions which will cause a lookup error
158 # Simulating invalid revisions which will cause a lookup error
159 pull_request = pr_util.create_pull_request()
159 pull_request = pr_util.create_pull_request()
160 pull_request.revisions = ['invalid']
160 pull_request.revisions = ['invalid']
161 Session().add(pull_request)
161 Session().add(pull_request)
162 Session().commit()
162 Session().commit()
163
163
164 response = self.app.get(route_path(
164 response = self.app.get(route_path(
165 'pullrequest_show',
165 'pullrequest_show',
166 repo_name=pull_request.target_repo.scm_instance().name,
166 repo_name=pull_request.target_repo.scm_instance().name,
167 pull_request_id=pull_request.pull_request_id))
167 pull_request_id=pull_request.pull_request_id))
168
168
169 for commit_id in pull_request.revisions:
169 for commit_id in pull_request.revisions:
170 response.mustcontain(commit_id)
170 response.mustcontain(commit_id)
171
171
172 def test_show_invalid_source_reference(self, pr_util):
172 def test_show_invalid_source_reference(self, pr_util):
173 pull_request = pr_util.create_pull_request()
173 pull_request = pr_util.create_pull_request()
174 pull_request.source_ref = 'branch:b:invalid'
174 pull_request.source_ref = 'branch:b:invalid'
175 Session().add(pull_request)
175 Session().add(pull_request)
176 Session().commit()
176 Session().commit()
177
177
178 self.app.get(route_path(
178 self.app.get(route_path(
179 'pullrequest_show',
179 'pullrequest_show',
180 repo_name=pull_request.target_repo.scm_instance().name,
180 repo_name=pull_request.target_repo.scm_instance().name,
181 pull_request_id=pull_request.pull_request_id))
181 pull_request_id=pull_request.pull_request_id))
182
182
183 def test_edit_title_description(self, pr_util, csrf_token):
183 def test_edit_title_description(self, pr_util, csrf_token):
184 pull_request = pr_util.create_pull_request()
184 pull_request = pr_util.create_pull_request()
185 pull_request_id = pull_request.pull_request_id
185 pull_request_id = pull_request.pull_request_id
186
186
187 response = self.app.post(
187 response = self.app.post(
188 route_path('pullrequest_update',
188 route_path('pullrequest_update',
189 repo_name=pull_request.target_repo.repo_name,
189 repo_name=pull_request.target_repo.repo_name,
190 pull_request_id=pull_request_id),
190 pull_request_id=pull_request_id),
191 params={
191 params={
192 'edit_pull_request': 'true',
192 'edit_pull_request': 'true',
193 'title': 'New title',
193 'title': 'New title',
194 'description': 'New description',
194 'description': 'New description',
195 'csrf_token': csrf_token})
195 'csrf_token': csrf_token})
196
196
197 assert_session_flash(
197 assert_session_flash(
198 response, u'Pull request title & description updated.',
198 response, u'Pull request title & description updated.',
199 category='success')
199 category='success')
200
200
201 pull_request = PullRequest.get(pull_request_id)
201 pull_request = PullRequest.get(pull_request_id)
202 assert pull_request.title == 'New title'
202 assert pull_request.title == 'New title'
203 assert pull_request.description == 'New description'
203 assert pull_request.description == 'New description'
204
204
205 def test_edit_title_description_closed(self, pr_util, csrf_token):
205 def test_edit_title_description_closed(self, pr_util, csrf_token):
206 pull_request = pr_util.create_pull_request()
206 pull_request = pr_util.create_pull_request()
207 pull_request_id = pull_request.pull_request_id
207 pull_request_id = pull_request.pull_request_id
208 repo_name = pull_request.target_repo.repo_name
208 repo_name = pull_request.target_repo.repo_name
209 pr_util.close()
209 pr_util.close()
210
210
211 response = self.app.post(
211 response = self.app.post(
212 route_path('pullrequest_update',
212 route_path('pullrequest_update',
213 repo_name=repo_name, pull_request_id=pull_request_id),
213 repo_name=repo_name, pull_request_id=pull_request_id),
214 params={
214 params={
215 'edit_pull_request': 'true',
215 'edit_pull_request': 'true',
216 'title': 'New title',
216 'title': 'New title',
217 'description': 'New description',
217 'description': 'New description',
218 'csrf_token': csrf_token}, status=200)
218 'csrf_token': csrf_token}, status=200)
219 assert_session_flash(
219 assert_session_flash(
220 response, u'Cannot update closed pull requests.',
220 response, u'Cannot update closed pull requests.',
221 category='error')
221 category='error')
222
222
223 def test_update_invalid_source_reference(self, pr_util, csrf_token):
223 def test_update_invalid_source_reference(self, pr_util, csrf_token):
224 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
224 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
225
225
226 pull_request = pr_util.create_pull_request()
226 pull_request = pr_util.create_pull_request()
227 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
227 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
228 Session().add(pull_request)
228 Session().add(pull_request)
229 Session().commit()
229 Session().commit()
230
230
231 pull_request_id = pull_request.pull_request_id
231 pull_request_id = pull_request.pull_request_id
232
232
233 response = self.app.post(
233 response = self.app.post(
234 route_path('pullrequest_update',
234 route_path('pullrequest_update',
235 repo_name=pull_request.target_repo.repo_name,
235 repo_name=pull_request.target_repo.repo_name,
236 pull_request_id=pull_request_id),
236 pull_request_id=pull_request_id),
237 params={'update_commits': 'true', 'csrf_token': csrf_token})
237 params={'update_commits': 'true', 'csrf_token': csrf_token})
238
238
239 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
239 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
240 UpdateFailureReason.MISSING_SOURCE_REF])
240 UpdateFailureReason.MISSING_SOURCE_REF])
241 assert_session_flash(response, expected_msg, category='error')
241 assert_session_flash(response, expected_msg, category='error')
242
242
243 def test_missing_target_reference(self, pr_util, csrf_token):
243 def test_missing_target_reference(self, pr_util, csrf_token):
244 from rhodecode.lib.vcs.backends.base import MergeFailureReason
244 from rhodecode.lib.vcs.backends.base import MergeFailureReason
245 pull_request = pr_util.create_pull_request(
245 pull_request = pr_util.create_pull_request(
246 approved=True, mergeable=True)
246 approved=True, mergeable=True)
247 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
247 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
248 pull_request.target_ref = unicode_reference
248 pull_request.target_ref = unicode_reference
249 Session().add(pull_request)
249 Session().add(pull_request)
250 Session().commit()
250 Session().commit()
251
251
252 pull_request_id = pull_request.pull_request_id
252 pull_request_id = pull_request.pull_request_id
253 pull_request_url = route_path(
253 pull_request_url = route_path(
254 'pullrequest_show',
254 'pullrequest_show',
255 repo_name=pull_request.target_repo.repo_name,
255 repo_name=pull_request.target_repo.repo_name,
256 pull_request_id=pull_request_id)
256 pull_request_id=pull_request_id)
257
257
258 response = self.app.get(pull_request_url)
258 response = self.app.get(pull_request_url)
259 target_ref_id = 'invalid-branch'
259 target_ref_id = 'invalid-branch'
260 merge_resp = MergeResponse(
260 merge_resp = MergeResponse(
261 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
261 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
262 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
262 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
263 response.assert_response().element_contains(
263 response.assert_response().element_contains(
264 'span[data-role="merge-message"]', merge_resp.merge_status_message)
264 'div[data-role="merge-message"]', merge_resp.merge_status_message)
265
265
266 def test_comment_and_close_pull_request_custom_message_approved(
266 def test_comment_and_close_pull_request_custom_message_approved(
267 self, pr_util, csrf_token, xhr_header):
267 self, pr_util, csrf_token, xhr_header):
268
268
269 pull_request = pr_util.create_pull_request(approved=True)
269 pull_request = pr_util.create_pull_request(approved=True)
270 pull_request_id = pull_request.pull_request_id
270 pull_request_id = pull_request.pull_request_id
271 author = pull_request.user_id
271 author = pull_request.user_id
272 repo = pull_request.target_repo.repo_id
272 repo = pull_request.target_repo.repo_id
273
273
274 self.app.post(
274 self.app.post(
275 route_path('pullrequest_comment_create',
275 route_path('pullrequest_comment_create',
276 repo_name=pull_request.target_repo.scm_instance().name,
276 repo_name=pull_request.target_repo.scm_instance().name,
277 pull_request_id=pull_request_id),
277 pull_request_id=pull_request_id),
278 params={
278 params={
279 'close_pull_request': '1',
279 'close_pull_request': '1',
280 'text': 'Closing a PR',
280 'text': 'Closing a PR',
281 'csrf_token': csrf_token},
281 'csrf_token': csrf_token},
282 extra_environ=xhr_header,)
282 extra_environ=xhr_header,)
283
283
284 journal = UserLog.query()\
284 journal = UserLog.query()\
285 .filter(UserLog.user_id == author)\
285 .filter(UserLog.user_id == author)\
286 .filter(UserLog.repository_id == repo) \
286 .filter(UserLog.repository_id == repo) \
287 .order_by(UserLog.user_log_id.asc()) \
287 .order_by(UserLog.user_log_id.asc()) \
288 .all()
288 .all()
289 assert journal[-1].action == 'repo.pull_request.close'
289 assert journal[-1].action == 'repo.pull_request.close'
290
290
291 pull_request = PullRequest.get(pull_request_id)
291 pull_request = PullRequest.get(pull_request_id)
292 assert pull_request.is_closed()
292 assert pull_request.is_closed()
293
293
294 status = ChangesetStatusModel().get_status(
294 status = ChangesetStatusModel().get_status(
295 pull_request.source_repo, pull_request=pull_request)
295 pull_request.source_repo, pull_request=pull_request)
296 assert status == ChangesetStatus.STATUS_APPROVED
296 assert status == ChangesetStatus.STATUS_APPROVED
297 comments = ChangesetComment().query() \
297 comments = ChangesetComment().query() \
298 .filter(ChangesetComment.pull_request == pull_request) \
298 .filter(ChangesetComment.pull_request == pull_request) \
299 .order_by(ChangesetComment.comment_id.asc())\
299 .order_by(ChangesetComment.comment_id.asc())\
300 .all()
300 .all()
301 assert comments[-1].text == 'Closing a PR'
301 assert comments[-1].text == 'Closing a PR'
302
302
303 def test_comment_force_close_pull_request_rejected(
303 def test_comment_force_close_pull_request_rejected(
304 self, pr_util, csrf_token, xhr_header):
304 self, pr_util, csrf_token, xhr_header):
305 pull_request = pr_util.create_pull_request()
305 pull_request = pr_util.create_pull_request()
306 pull_request_id = pull_request.pull_request_id
306 pull_request_id = pull_request.pull_request_id
307 PullRequestModel().update_reviewers(
307 PullRequestModel().update_reviewers(
308 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
308 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
309 pull_request.author)
309 pull_request.author)
310 author = pull_request.user_id
310 author = pull_request.user_id
311 repo = pull_request.target_repo.repo_id
311 repo = pull_request.target_repo.repo_id
312
312
313 self.app.post(
313 self.app.post(
314 route_path('pullrequest_comment_create',
314 route_path('pullrequest_comment_create',
315 repo_name=pull_request.target_repo.scm_instance().name,
315 repo_name=pull_request.target_repo.scm_instance().name,
316 pull_request_id=pull_request_id),
316 pull_request_id=pull_request_id),
317 params={
317 params={
318 'close_pull_request': '1',
318 'close_pull_request': '1',
319 'csrf_token': csrf_token},
319 'csrf_token': csrf_token},
320 extra_environ=xhr_header)
320 extra_environ=xhr_header)
321
321
322 pull_request = PullRequest.get(pull_request_id)
322 pull_request = PullRequest.get(pull_request_id)
323
323
324 journal = UserLog.query()\
324 journal = UserLog.query()\
325 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
325 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
326 .order_by(UserLog.user_log_id.asc()) \
326 .order_by(UserLog.user_log_id.asc()) \
327 .all()
327 .all()
328 assert journal[-1].action == 'repo.pull_request.close'
328 assert journal[-1].action == 'repo.pull_request.close'
329
329
330 # check only the latest status, not the review status
330 # check only the latest status, not the review status
331 status = ChangesetStatusModel().get_status(
331 status = ChangesetStatusModel().get_status(
332 pull_request.source_repo, pull_request=pull_request)
332 pull_request.source_repo, pull_request=pull_request)
333 assert status == ChangesetStatus.STATUS_REJECTED
333 assert status == ChangesetStatus.STATUS_REJECTED
334
334
335 def test_comment_and_close_pull_request(
335 def test_comment_and_close_pull_request(
336 self, pr_util, csrf_token, xhr_header):
336 self, pr_util, csrf_token, xhr_header):
337 pull_request = pr_util.create_pull_request()
337 pull_request = pr_util.create_pull_request()
338 pull_request_id = pull_request.pull_request_id
338 pull_request_id = pull_request.pull_request_id
339
339
340 response = self.app.post(
340 response = self.app.post(
341 route_path('pullrequest_comment_create',
341 route_path('pullrequest_comment_create',
342 repo_name=pull_request.target_repo.scm_instance().name,
342 repo_name=pull_request.target_repo.scm_instance().name,
343 pull_request_id=pull_request.pull_request_id),
343 pull_request_id=pull_request.pull_request_id),
344 params={
344 params={
345 'close_pull_request': 'true',
345 'close_pull_request': 'true',
346 'csrf_token': csrf_token},
346 'csrf_token': csrf_token},
347 extra_environ=xhr_header)
347 extra_environ=xhr_header)
348
348
349 assert response.json
349 assert response.json
350
350
351 pull_request = PullRequest.get(pull_request_id)
351 pull_request = PullRequest.get(pull_request_id)
352 assert pull_request.is_closed()
352 assert pull_request.is_closed()
353
353
354 # check only the latest status, not the review status
354 # check only the latest status, not the review status
355 status = ChangesetStatusModel().get_status(
355 status = ChangesetStatusModel().get_status(
356 pull_request.source_repo, pull_request=pull_request)
356 pull_request.source_repo, pull_request=pull_request)
357 assert status == ChangesetStatus.STATUS_REJECTED
357 assert status == ChangesetStatus.STATUS_REJECTED
358
358
359 def test_create_pull_request(self, backend, csrf_token):
359 def test_create_pull_request(self, backend, csrf_token):
360 commits = [
360 commits = [
361 {'message': 'ancestor'},
361 {'message': 'ancestor'},
362 {'message': 'change'},
362 {'message': 'change'},
363 {'message': 'change2'},
363 {'message': 'change2'},
364 ]
364 ]
365 commit_ids = backend.create_master_repo(commits)
365 commit_ids = backend.create_master_repo(commits)
366 target = backend.create_repo(heads=['ancestor'])
366 target = backend.create_repo(heads=['ancestor'])
367 source = backend.create_repo(heads=['change2'])
367 source = backend.create_repo(heads=['change2'])
368
368
369 response = self.app.post(
369 response = self.app.post(
370 route_path('pullrequest_create', repo_name=source.repo_name),
370 route_path('pullrequest_create', repo_name=source.repo_name),
371 [
371 [
372 ('source_repo', source.repo_name),
372 ('source_repo', source.repo_name),
373 ('source_ref', 'branch:default:' + commit_ids['change2']),
373 ('source_ref', 'branch:default:' + commit_ids['change2']),
374 ('target_repo', target.repo_name),
374 ('target_repo', target.repo_name),
375 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
375 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
376 ('common_ancestor', commit_ids['ancestor']),
376 ('common_ancestor', commit_ids['ancestor']),
377 ('pullrequest_title', 'Title'),
377 ('pullrequest_title', 'Title'),
378 ('pullrequest_desc', 'Description'),
378 ('pullrequest_desc', 'Description'),
379 ('description_renderer', 'markdown'),
379 ('description_renderer', 'markdown'),
380 ('__start__', 'review_members:sequence'),
380 ('__start__', 'review_members:sequence'),
381 ('__start__', 'reviewer:mapping'),
381 ('__start__', 'reviewer:mapping'),
382 ('user_id', '1'),
382 ('user_id', '1'),
383 ('__start__', 'reasons:sequence'),
383 ('__start__', 'reasons:sequence'),
384 ('reason', 'Some reason'),
384 ('reason', 'Some reason'),
385 ('__end__', 'reasons:sequence'),
385 ('__end__', 'reasons:sequence'),
386 ('__start__', 'rules:sequence'),
386 ('__start__', 'rules:sequence'),
387 ('__end__', 'rules:sequence'),
387 ('__end__', 'rules:sequence'),
388 ('mandatory', 'False'),
388 ('mandatory', 'False'),
389 ('__end__', 'reviewer:mapping'),
389 ('__end__', 'reviewer:mapping'),
390 ('__end__', 'review_members:sequence'),
390 ('__end__', 'review_members:sequence'),
391 ('__start__', 'revisions:sequence'),
391 ('__start__', 'revisions:sequence'),
392 ('revisions', commit_ids['change']),
392 ('revisions', commit_ids['change']),
393 ('revisions', commit_ids['change2']),
393 ('revisions', commit_ids['change2']),
394 ('__end__', 'revisions:sequence'),
394 ('__end__', 'revisions:sequence'),
395 ('user', ''),
395 ('user', ''),
396 ('csrf_token', csrf_token),
396 ('csrf_token', csrf_token),
397 ],
397 ],
398 status=302)
398 status=302)
399
399
400 location = response.headers['Location']
400 location = response.headers['Location']
401 pull_request_id = location.rsplit('/', 1)[1]
401 pull_request_id = location.rsplit('/', 1)[1]
402 assert pull_request_id != 'new'
402 assert pull_request_id != 'new'
403 pull_request = PullRequest.get(int(pull_request_id))
403 pull_request = PullRequest.get(int(pull_request_id))
404
404
405 # check that we have now both revisions
405 # check that we have now both revisions
406 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
406 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
407 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
407 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
408 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
408 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
409 assert pull_request.target_ref == expected_target_ref
409 assert pull_request.target_ref == expected_target_ref
410
410
411 def test_reviewer_notifications(self, backend, csrf_token):
411 def test_reviewer_notifications(self, backend, csrf_token):
412 # We have to use the app.post for this test so it will create the
412 # We have to use the app.post for this test so it will create the
413 # notifications properly with the new PR
413 # notifications properly with the new PR
414 commits = [
414 commits = [
415 {'message': 'ancestor',
415 {'message': 'ancestor',
416 'added': [FileNode('file_A', content='content_of_ancestor')]},
416 'added': [FileNode('file_A', content='content_of_ancestor')]},
417 {'message': 'change',
417 {'message': 'change',
418 'added': [FileNode('file_a', content='content_of_change')]},
418 'added': [FileNode('file_a', content='content_of_change')]},
419 {'message': 'change-child'},
419 {'message': 'change-child'},
420 {'message': 'ancestor-child', 'parents': ['ancestor'],
420 {'message': 'ancestor-child', 'parents': ['ancestor'],
421 'added': [
421 'added': [
422 FileNode('file_B', content='content_of_ancestor_child')]},
422 FileNode('file_B', content='content_of_ancestor_child')]},
423 {'message': 'ancestor-child-2'},
423 {'message': 'ancestor-child-2'},
424 ]
424 ]
425 commit_ids = backend.create_master_repo(commits)
425 commit_ids = backend.create_master_repo(commits)
426 target = backend.create_repo(heads=['ancestor-child'])
426 target = backend.create_repo(heads=['ancestor-child'])
427 source = backend.create_repo(heads=['change'])
427 source = backend.create_repo(heads=['change'])
428
428
429 response = self.app.post(
429 response = self.app.post(
430 route_path('pullrequest_create', repo_name=source.repo_name),
430 route_path('pullrequest_create', repo_name=source.repo_name),
431 [
431 [
432 ('source_repo', source.repo_name),
432 ('source_repo', source.repo_name),
433 ('source_ref', 'branch:default:' + commit_ids['change']),
433 ('source_ref', 'branch:default:' + commit_ids['change']),
434 ('target_repo', target.repo_name),
434 ('target_repo', target.repo_name),
435 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
435 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
436 ('common_ancestor', commit_ids['ancestor']),
436 ('common_ancestor', commit_ids['ancestor']),
437 ('pullrequest_title', 'Title'),
437 ('pullrequest_title', 'Title'),
438 ('pullrequest_desc', 'Description'),
438 ('pullrequest_desc', 'Description'),
439 ('description_renderer', 'markdown'),
439 ('description_renderer', 'markdown'),
440 ('__start__', 'review_members:sequence'),
440 ('__start__', 'review_members:sequence'),
441 ('__start__', 'reviewer:mapping'),
441 ('__start__', 'reviewer:mapping'),
442 ('user_id', '2'),
442 ('user_id', '2'),
443 ('__start__', 'reasons:sequence'),
443 ('__start__', 'reasons:sequence'),
444 ('reason', 'Some reason'),
444 ('reason', 'Some reason'),
445 ('__end__', 'reasons:sequence'),
445 ('__end__', 'reasons:sequence'),
446 ('__start__', 'rules:sequence'),
446 ('__start__', 'rules:sequence'),
447 ('__end__', 'rules:sequence'),
447 ('__end__', 'rules:sequence'),
448 ('mandatory', 'False'),
448 ('mandatory', 'False'),
449 ('__end__', 'reviewer:mapping'),
449 ('__end__', 'reviewer:mapping'),
450 ('__end__', 'review_members:sequence'),
450 ('__end__', 'review_members:sequence'),
451 ('__start__', 'revisions:sequence'),
451 ('__start__', 'revisions:sequence'),
452 ('revisions', commit_ids['change']),
452 ('revisions', commit_ids['change']),
453 ('__end__', 'revisions:sequence'),
453 ('__end__', 'revisions:sequence'),
454 ('user', ''),
454 ('user', ''),
455 ('csrf_token', csrf_token),
455 ('csrf_token', csrf_token),
456 ],
456 ],
457 status=302)
457 status=302)
458
458
459 location = response.headers['Location']
459 location = response.headers['Location']
460
460
461 pull_request_id = location.rsplit('/', 1)[1]
461 pull_request_id = location.rsplit('/', 1)[1]
462 assert pull_request_id != 'new'
462 assert pull_request_id != 'new'
463 pull_request = PullRequest.get(int(pull_request_id))
463 pull_request = PullRequest.get(int(pull_request_id))
464
464
465 # Check that a notification was made
465 # Check that a notification was made
466 notifications = Notification.query()\
466 notifications = Notification.query()\
467 .filter(Notification.created_by == pull_request.author.user_id,
467 .filter(Notification.created_by == pull_request.author.user_id,
468 Notification.type_ == Notification.TYPE_PULL_REQUEST,
468 Notification.type_ == Notification.TYPE_PULL_REQUEST,
469 Notification.subject.contains(
469 Notification.subject.contains(
470 "requested a pull request review. !%s" % pull_request_id))
470 "requested a pull request review. !%s" % pull_request_id))
471 assert len(notifications.all()) == 1
471 assert len(notifications.all()) == 1
472
472
473 # Change reviewers and check that a notification was made
473 # Change reviewers and check that a notification was made
474 PullRequestModel().update_reviewers(
474 PullRequestModel().update_reviewers(
475 pull_request.pull_request_id, [(1, [], False, [])],
475 pull_request.pull_request_id, [(1, [], False, [])],
476 pull_request.author)
476 pull_request.author)
477 assert len(notifications.all()) == 2
477 assert len(notifications.all()) == 2
478
478
479 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
479 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
480 csrf_token):
480 csrf_token):
481 commits = [
481 commits = [
482 {'message': 'ancestor',
482 {'message': 'ancestor',
483 'added': [FileNode('file_A', content='content_of_ancestor')]},
483 'added': [FileNode('file_A', content='content_of_ancestor')]},
484 {'message': 'change',
484 {'message': 'change',
485 'added': [FileNode('file_a', content='content_of_change')]},
485 'added': [FileNode('file_a', content='content_of_change')]},
486 {'message': 'change-child'},
486 {'message': 'change-child'},
487 {'message': 'ancestor-child', 'parents': ['ancestor'],
487 {'message': 'ancestor-child', 'parents': ['ancestor'],
488 'added': [
488 'added': [
489 FileNode('file_B', content='content_of_ancestor_child')]},
489 FileNode('file_B', content='content_of_ancestor_child')]},
490 {'message': 'ancestor-child-2'},
490 {'message': 'ancestor-child-2'},
491 ]
491 ]
492 commit_ids = backend.create_master_repo(commits)
492 commit_ids = backend.create_master_repo(commits)
493 target = backend.create_repo(heads=['ancestor-child'])
493 target = backend.create_repo(heads=['ancestor-child'])
494 source = backend.create_repo(heads=['change'])
494 source = backend.create_repo(heads=['change'])
495
495
496 response = self.app.post(
496 response = self.app.post(
497 route_path('pullrequest_create', repo_name=source.repo_name),
497 route_path('pullrequest_create', repo_name=source.repo_name),
498 [
498 [
499 ('source_repo', source.repo_name),
499 ('source_repo', source.repo_name),
500 ('source_ref', 'branch:default:' + commit_ids['change']),
500 ('source_ref', 'branch:default:' + commit_ids['change']),
501 ('target_repo', target.repo_name),
501 ('target_repo', target.repo_name),
502 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
502 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
503 ('common_ancestor', commit_ids['ancestor']),
503 ('common_ancestor', commit_ids['ancestor']),
504 ('pullrequest_title', 'Title'),
504 ('pullrequest_title', 'Title'),
505 ('pullrequest_desc', 'Description'),
505 ('pullrequest_desc', 'Description'),
506 ('description_renderer', 'markdown'),
506 ('description_renderer', 'markdown'),
507 ('__start__', 'review_members:sequence'),
507 ('__start__', 'review_members:sequence'),
508 ('__start__', 'reviewer:mapping'),
508 ('__start__', 'reviewer:mapping'),
509 ('user_id', '1'),
509 ('user_id', '1'),
510 ('__start__', 'reasons:sequence'),
510 ('__start__', 'reasons:sequence'),
511 ('reason', 'Some reason'),
511 ('reason', 'Some reason'),
512 ('__end__', 'reasons:sequence'),
512 ('__end__', 'reasons:sequence'),
513 ('__start__', 'rules:sequence'),
513 ('__start__', 'rules:sequence'),
514 ('__end__', 'rules:sequence'),
514 ('__end__', 'rules:sequence'),
515 ('mandatory', 'False'),
515 ('mandatory', 'False'),
516 ('__end__', 'reviewer:mapping'),
516 ('__end__', 'reviewer:mapping'),
517 ('__end__', 'review_members:sequence'),
517 ('__end__', 'review_members:sequence'),
518 ('__start__', 'revisions:sequence'),
518 ('__start__', 'revisions:sequence'),
519 ('revisions', commit_ids['change']),
519 ('revisions', commit_ids['change']),
520 ('__end__', 'revisions:sequence'),
520 ('__end__', 'revisions:sequence'),
521 ('user', ''),
521 ('user', ''),
522 ('csrf_token', csrf_token),
522 ('csrf_token', csrf_token),
523 ],
523 ],
524 status=302)
524 status=302)
525
525
526 location = response.headers['Location']
526 location = response.headers['Location']
527
527
528 pull_request_id = location.rsplit('/', 1)[1]
528 pull_request_id = location.rsplit('/', 1)[1]
529 assert pull_request_id != 'new'
529 assert pull_request_id != 'new'
530 pull_request = PullRequest.get(int(pull_request_id))
530 pull_request = PullRequest.get(int(pull_request_id))
531
531
532 # target_ref has to point to the ancestor's commit_id in order to
532 # target_ref has to point to the ancestor's commit_id in order to
533 # show the correct diff
533 # show the correct diff
534 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
534 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
535 assert pull_request.target_ref == expected_target_ref
535 assert pull_request.target_ref == expected_target_ref
536
536
537 # Check generated diff contents
537 # Check generated diff contents
538 response = response.follow()
538 response = response.follow()
539 assert 'content_of_ancestor' not in response.body
539 assert 'content_of_ancestor' not in response.body
540 assert 'content_of_ancestor-child' not in response.body
540 assert 'content_of_ancestor-child' not in response.body
541 assert 'content_of_change' in response.body
541 assert 'content_of_change' in response.body
542
542
543 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
543 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
544 # Clear any previous calls to rcextensions
544 # Clear any previous calls to rcextensions
545 rhodecode.EXTENSIONS.calls.clear()
545 rhodecode.EXTENSIONS.calls.clear()
546
546
547 pull_request = pr_util.create_pull_request(
547 pull_request = pr_util.create_pull_request(
548 approved=True, mergeable=True)
548 approved=True, mergeable=True)
549 pull_request_id = pull_request.pull_request_id
549 pull_request_id = pull_request.pull_request_id
550 repo_name = pull_request.target_repo.scm_instance().name,
550 repo_name = pull_request.target_repo.scm_instance().name,
551
551
552 url = route_path('pullrequest_merge',
552 url = route_path('pullrequest_merge',
553 repo_name=str(repo_name[0]),
553 repo_name=str(repo_name[0]),
554 pull_request_id=pull_request_id)
554 pull_request_id=pull_request_id)
555 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
555 response = self.app.post(url, params={'csrf_token': csrf_token}).follow()
556
556
557 pull_request = PullRequest.get(pull_request_id)
557 pull_request = PullRequest.get(pull_request_id)
558
558
559 assert response.status_int == 200
559 assert response.status_int == 200
560 assert pull_request.is_closed()
560 assert pull_request.is_closed()
561 assert_pull_request_status(
561 assert_pull_request_status(
562 pull_request, ChangesetStatus.STATUS_APPROVED)
562 pull_request, ChangesetStatus.STATUS_APPROVED)
563
563
564 # Check the relevant log entries were added
564 # Check the relevant log entries were added
565 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
565 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3)
566 actions = [log.action for log in user_logs]
566 actions = [log.action for log in user_logs]
567 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
567 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
568 expected_actions = [
568 expected_actions = [
569 u'repo.pull_request.close',
569 u'repo.pull_request.close',
570 u'repo.pull_request.merge',
570 u'repo.pull_request.merge',
571 u'repo.pull_request.comment.create'
571 u'repo.pull_request.comment.create'
572 ]
572 ]
573 assert actions == expected_actions
573 assert actions == expected_actions
574
574
575 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
575 user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4)
576 actions = [log for log in user_logs]
576 actions = [log for log in user_logs]
577 assert actions[-1].action == 'user.push'
577 assert actions[-1].action == 'user.push'
578 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
578 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
579
579
580 # Check post_push rcextension was really executed
580 # Check post_push rcextension was really executed
581 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
581 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
582 assert len(push_calls) == 1
582 assert len(push_calls) == 1
583 unused_last_call_args, last_call_kwargs = push_calls[0]
583 unused_last_call_args, last_call_kwargs = push_calls[0]
584 assert last_call_kwargs['action'] == 'push'
584 assert last_call_kwargs['action'] == 'push'
585 assert last_call_kwargs['commit_ids'] == pr_commit_ids
585 assert last_call_kwargs['commit_ids'] == pr_commit_ids
586
586
587 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
587 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
588 pull_request = pr_util.create_pull_request(mergeable=False)
588 pull_request = pr_util.create_pull_request(mergeable=False)
589 pull_request_id = pull_request.pull_request_id
589 pull_request_id = pull_request.pull_request_id
590 pull_request = PullRequest.get(pull_request_id)
590 pull_request = PullRequest.get(pull_request_id)
591
591
592 response = self.app.post(
592 response = self.app.post(
593 route_path('pullrequest_merge',
593 route_path('pullrequest_merge',
594 repo_name=pull_request.target_repo.scm_instance().name,
594 repo_name=pull_request.target_repo.scm_instance().name,
595 pull_request_id=pull_request.pull_request_id),
595 pull_request_id=pull_request.pull_request_id),
596 params={'csrf_token': csrf_token}).follow()
596 params={'csrf_token': csrf_token}).follow()
597
597
598 assert response.status_int == 200
598 assert response.status_int == 200
599 response.mustcontain(
599 response.mustcontain(
600 'Merge is not currently possible because of below failed checks.')
600 'Merge is not currently possible because of below failed checks.')
601 response.mustcontain('Server-side pull request merging is disabled.')
601 response.mustcontain('Server-side pull request merging is disabled.')
602
602
603 @pytest.mark.skip_backends('svn')
603 @pytest.mark.skip_backends('svn')
604 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
604 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
605 pull_request = pr_util.create_pull_request(mergeable=True)
605 pull_request = pr_util.create_pull_request(mergeable=True)
606 pull_request_id = pull_request.pull_request_id
606 pull_request_id = pull_request.pull_request_id
607 repo_name = pull_request.target_repo.scm_instance().name
607 repo_name = pull_request.target_repo.scm_instance().name
608
608
609 response = self.app.post(
609 response = self.app.post(
610 route_path('pullrequest_merge',
610 route_path('pullrequest_merge',
611 repo_name=repo_name, pull_request_id=pull_request_id),
611 repo_name=repo_name, pull_request_id=pull_request_id),
612 params={'csrf_token': csrf_token}).follow()
612 params={'csrf_token': csrf_token}).follow()
613
613
614 assert response.status_int == 200
614 assert response.status_int == 200
615
615
616 response.mustcontain(
616 response.mustcontain(
617 'Merge is not currently possible because of below failed checks.')
617 'Merge is not currently possible because of below failed checks.')
618 response.mustcontain('Pull request reviewer approval is pending.')
618 response.mustcontain('Pull request reviewer approval is pending.')
619
619
620 def test_merge_pull_request_renders_failure_reason(
620 def test_merge_pull_request_renders_failure_reason(
621 self, user_regular, csrf_token, pr_util):
621 self, user_regular, csrf_token, pr_util):
622 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
622 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
623 pull_request_id = pull_request.pull_request_id
623 pull_request_id = pull_request.pull_request_id
624 repo_name = pull_request.target_repo.scm_instance().name
624 repo_name = pull_request.target_repo.scm_instance().name
625
625
626 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
626 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
627 MergeFailureReason.PUSH_FAILED,
627 MergeFailureReason.PUSH_FAILED,
628 metadata={'target': 'shadow repo',
628 metadata={'target': 'shadow repo',
629 'merge_commit': 'xxx'})
629 'merge_commit': 'xxx'})
630 model_patcher = mock.patch.multiple(
630 model_patcher = mock.patch.multiple(
631 PullRequestModel,
631 PullRequestModel,
632 merge_repo=mock.Mock(return_value=merge_resp),
632 merge_repo=mock.Mock(return_value=merge_resp),
633 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
633 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
634
634
635 with model_patcher:
635 with model_patcher:
636 response = self.app.post(
636 response = self.app.post(
637 route_path('pullrequest_merge',
637 route_path('pullrequest_merge',
638 repo_name=repo_name,
638 repo_name=repo_name,
639 pull_request_id=pull_request_id),
639 pull_request_id=pull_request_id),
640 params={'csrf_token': csrf_token}, status=302)
640 params={'csrf_token': csrf_token}, status=302)
641
641
642 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
642 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
643 metadata={'target': 'shadow repo',
643 metadata={'target': 'shadow repo',
644 'merge_commit': 'xxx'})
644 'merge_commit': 'xxx'})
645 assert_session_flash(response, merge_resp.merge_status_message)
645 assert_session_flash(response, merge_resp.merge_status_message)
646
646
647 def test_update_source_revision(self, backend, csrf_token):
647 def test_update_source_revision(self, backend, csrf_token):
648 commits = [
648 commits = [
649 {'message': 'ancestor'},
649 {'message': 'ancestor'},
650 {'message': 'change'},
650 {'message': 'change'},
651 {'message': 'change-2'},
651 {'message': 'change-2'},
652 ]
652 ]
653 commit_ids = backend.create_master_repo(commits)
653 commit_ids = backend.create_master_repo(commits)
654 target = backend.create_repo(heads=['ancestor'])
654 target = backend.create_repo(heads=['ancestor'])
655 source = backend.create_repo(heads=['change'])
655 source = backend.create_repo(heads=['change'])
656
656
657 # create pr from a in source to A in target
657 # create pr from a in source to A in target
658 pull_request = PullRequest()
658 pull_request = PullRequest()
659
659
660 pull_request.source_repo = source
660 pull_request.source_repo = source
661 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
661 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
662 branch=backend.default_branch_name, commit_id=commit_ids['change'])
662 branch=backend.default_branch_name, commit_id=commit_ids['change'])
663
663
664 pull_request.target_repo = target
664 pull_request.target_repo = target
665 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
665 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
666 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
666 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
667
667
668 pull_request.revisions = [commit_ids['change']]
668 pull_request.revisions = [commit_ids['change']]
669 pull_request.title = u"Test"
669 pull_request.title = u"Test"
670 pull_request.description = u"Description"
670 pull_request.description = u"Description"
671 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
671 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
672 pull_request.pull_request_state = PullRequest.STATE_CREATED
672 pull_request.pull_request_state = PullRequest.STATE_CREATED
673 Session().add(pull_request)
673 Session().add(pull_request)
674 Session().commit()
674 Session().commit()
675 pull_request_id = pull_request.pull_request_id
675 pull_request_id = pull_request.pull_request_id
676
676
677 # source has ancestor - change - change-2
677 # source has ancestor - change - change-2
678 backend.pull_heads(source, heads=['change-2'])
678 backend.pull_heads(source, heads=['change-2'])
679
679
680 # update PR
680 # update PR
681 self.app.post(
681 self.app.post(
682 route_path('pullrequest_update',
682 route_path('pullrequest_update',
683 repo_name=target.repo_name, pull_request_id=pull_request_id),
683 repo_name=target.repo_name, pull_request_id=pull_request_id),
684 params={'update_commits': 'true', 'csrf_token': csrf_token})
684 params={'update_commits': 'true', 'csrf_token': csrf_token})
685
685
686 response = self.app.get(
686 response = self.app.get(
687 route_path('pullrequest_show',
687 route_path('pullrequest_show',
688 repo_name=target.repo_name,
688 repo_name=target.repo_name,
689 pull_request_id=pull_request.pull_request_id))
689 pull_request_id=pull_request.pull_request_id))
690
690
691 assert response.status_int == 200
691 assert response.status_int == 200
692 assert 'Pull request updated to' in response.body
692 assert 'Pull request updated to' in response.body
693 assert 'with 1 added, 0 removed commits.' in response.body
693 assert 'with 1 added, 0 removed commits.' in response.body
694
694
695 # check that we have now both revisions
695 # check that we have now both revisions
696 pull_request = PullRequest.get(pull_request_id)
696 pull_request = PullRequest.get(pull_request_id)
697 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
697 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
698
698
699 def test_update_target_revision(self, backend, csrf_token):
699 def test_update_target_revision(self, backend, csrf_token):
700 commits = [
700 commits = [
701 {'message': 'ancestor'},
701 {'message': 'ancestor'},
702 {'message': 'change'},
702 {'message': 'change'},
703 {'message': 'ancestor-new', 'parents': ['ancestor']},
703 {'message': 'ancestor-new', 'parents': ['ancestor']},
704 {'message': 'change-rebased'},
704 {'message': 'change-rebased'},
705 ]
705 ]
706 commit_ids = backend.create_master_repo(commits)
706 commit_ids = backend.create_master_repo(commits)
707 target = backend.create_repo(heads=['ancestor'])
707 target = backend.create_repo(heads=['ancestor'])
708 source = backend.create_repo(heads=['change'])
708 source = backend.create_repo(heads=['change'])
709
709
710 # create pr from a in source to A in target
710 # create pr from a in source to A in target
711 pull_request = PullRequest()
711 pull_request = PullRequest()
712
712
713 pull_request.source_repo = source
713 pull_request.source_repo = source
714 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
714 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
715 branch=backend.default_branch_name, commit_id=commit_ids['change'])
715 branch=backend.default_branch_name, commit_id=commit_ids['change'])
716
716
717 pull_request.target_repo = target
717 pull_request.target_repo = target
718 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
718 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
719 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
719 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
720
720
721 pull_request.revisions = [commit_ids['change']]
721 pull_request.revisions = [commit_ids['change']]
722 pull_request.title = u"Test"
722 pull_request.title = u"Test"
723 pull_request.description = u"Description"
723 pull_request.description = u"Description"
724 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
724 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
725 pull_request.pull_request_state = PullRequest.STATE_CREATED
725 pull_request.pull_request_state = PullRequest.STATE_CREATED
726
726
727 Session().add(pull_request)
727 Session().add(pull_request)
728 Session().commit()
728 Session().commit()
729 pull_request_id = pull_request.pull_request_id
729 pull_request_id = pull_request.pull_request_id
730
730
731 # target has ancestor - ancestor-new
731 # target has ancestor - ancestor-new
732 # source has ancestor - ancestor-new - change-rebased
732 # source has ancestor - ancestor-new - change-rebased
733 backend.pull_heads(target, heads=['ancestor-new'])
733 backend.pull_heads(target, heads=['ancestor-new'])
734 backend.pull_heads(source, heads=['change-rebased'])
734 backend.pull_heads(source, heads=['change-rebased'])
735
735
736 # update PR
736 # update PR
737 url = route_path('pullrequest_update',
737 url = route_path('pullrequest_update',
738 repo_name=target.repo_name,
738 repo_name=target.repo_name,
739 pull_request_id=pull_request_id)
739 pull_request_id=pull_request_id)
740 self.app.post(url,
740 self.app.post(url,
741 params={'update_commits': 'true', 'csrf_token': csrf_token},
741 params={'update_commits': 'true', 'csrf_token': csrf_token},
742 status=200)
742 status=200)
743
743
744 # check that the revisions and target reference were updated
744 # check that the revisions and target reference were updated
745 pull_request = PullRequest.get(pull_request_id)
745 pull_request = PullRequest.get(pull_request_id)
746 assert pull_request.revisions == [commit_ids['change-rebased']]
746 assert pull_request.revisions == [commit_ids['change-rebased']]
747 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
747 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
748 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
748 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
749
749
750 response = self.app.get(
750 response = self.app.get(
751 route_path('pullrequest_show',
751 route_path('pullrequest_show',
752 repo_name=target.repo_name,
752 repo_name=target.repo_name,
753 pull_request_id=pull_request.pull_request_id))
753 pull_request_id=pull_request.pull_request_id))
754 assert response.status_int == 200
754 assert response.status_int == 200
755 assert 'Pull request updated to' in response.body
755 assert 'Pull request updated to' in response.body
756 assert 'with 1 added, 1 removed commits.' in response.body
756 assert 'with 1 added, 1 removed commits.' in response.body
757
757
758 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
758 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
759 backend = backend_git
759 backend = backend_git
760 commits = [
760 commits = [
761 {'message': 'master-commit-1'},
761 {'message': 'master-commit-1'},
762 {'message': 'master-commit-2-change-1'},
762 {'message': 'master-commit-2-change-1'},
763 {'message': 'master-commit-3-change-2'},
763 {'message': 'master-commit-3-change-2'},
764
764
765 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
765 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
766 {'message': 'feat-commit-2'},
766 {'message': 'feat-commit-2'},
767 ]
767 ]
768 commit_ids = backend.create_master_repo(commits)
768 commit_ids = backend.create_master_repo(commits)
769 target = backend.create_repo(heads=['master-commit-3-change-2'])
769 target = backend.create_repo(heads=['master-commit-3-change-2'])
770 source = backend.create_repo(heads=['feat-commit-2'])
770 source = backend.create_repo(heads=['feat-commit-2'])
771
771
772 # create a pull request from the source repo head into the target repo head
772 # create a pull request from the source repo head into the target repo head
773 pull_request = PullRequest()
773 pull_request = PullRequest()
774 pull_request.source_repo = source
774 pull_request.source_repo = source
775
775
776 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
776 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
777 branch=backend.default_branch_name,
777 branch=backend.default_branch_name,
778 commit_id=commit_ids['master-commit-3-change-2'])
778 commit_id=commit_ids['master-commit-3-change-2'])
779
779
780 pull_request.target_repo = target
780 pull_request.target_repo = target
781 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
781 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
782 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
782 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
783
783
784 pull_request.revisions = [
784 pull_request.revisions = [
785 commit_ids['feat-commit-1'],
785 commit_ids['feat-commit-1'],
786 commit_ids['feat-commit-2']
786 commit_ids['feat-commit-2']
787 ]
787 ]
788 pull_request.title = u"Test"
788 pull_request.title = u"Test"
789 pull_request.description = u"Description"
789 pull_request.description = u"Description"
790 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
790 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
791 pull_request.pull_request_state = PullRequest.STATE_CREATED
791 pull_request.pull_request_state = PullRequest.STATE_CREATED
792 Session().add(pull_request)
792 Session().add(pull_request)
793 Session().commit()
793 Session().commit()
794 pull_request_id = pull_request.pull_request_id
794 pull_request_id = pull_request.pull_request_id
795
795
796 # PR is created, now we simulate a force-push into target
796 # PR is created, now we simulate a force-push into target
797 # that drops the last 2 commits
797 # that drops the last 2 commits
798 vcsrepo = target.scm_instance()
798 vcsrepo = target.scm_instance()
799 vcsrepo.config.clear_section('hooks')
799 vcsrepo.config.clear_section('hooks')
800 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
800 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
801
801
802 # update PR
802 # update PR
803 url = route_path('pullrequest_update',
803 url = route_path('pullrequest_update',
804 repo_name=target.repo_name,
804 repo_name=target.repo_name,
805 pull_request_id=pull_request_id)
805 pull_request_id=pull_request_id)
806 self.app.post(url,
806 self.app.post(url,
807 params={'update_commits': 'true', 'csrf_token': csrf_token},
807 params={'update_commits': 'true', 'csrf_token': csrf_token},
808 status=200)
808 status=200)
809
809
810 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
810 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
811 assert response.status_int == 200
811 assert response.status_int == 200
812 response.mustcontain('Pull request updated to')
812 response.mustcontain('Pull request updated to')
813 response.mustcontain('with 0 added, 0 removed commits.')
813 response.mustcontain('with 0 added, 0 removed commits.')
814
814
815 def test_update_of_ancestor_reference(self, backend, csrf_token):
815 def test_update_of_ancestor_reference(self, backend, csrf_token):
816 commits = [
816 commits = [
817 {'message': 'ancestor'},
817 {'message': 'ancestor'},
818 {'message': 'change'},
818 {'message': 'change'},
819 {'message': 'change-2'},
819 {'message': 'change-2'},
820 {'message': 'ancestor-new', 'parents': ['ancestor']},
820 {'message': 'ancestor-new', 'parents': ['ancestor']},
821 {'message': 'change-rebased'},
821 {'message': 'change-rebased'},
822 ]
822 ]
823 commit_ids = backend.create_master_repo(commits)
823 commit_ids = backend.create_master_repo(commits)
824 target = backend.create_repo(heads=['ancestor'])
824 target = backend.create_repo(heads=['ancestor'])
825 source = backend.create_repo(heads=['change'])
825 source = backend.create_repo(heads=['change'])
826
826
827 # create a pull request from the source repo head into the target repo head
827 # create a pull request from the source repo head into the target repo head
828 pull_request = PullRequest()
828 pull_request = PullRequest()
829 pull_request.source_repo = source
829 pull_request.source_repo = source
830
830
831 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
831 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
832 branch=backend.default_branch_name, commit_id=commit_ids['change'])
832 branch=backend.default_branch_name, commit_id=commit_ids['change'])
833 pull_request.target_repo = target
833 pull_request.target_repo = target
834 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
834 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
835 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
835 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
836 pull_request.revisions = [commit_ids['change']]
836 pull_request.revisions = [commit_ids['change']]
837 pull_request.title = u"Test"
837 pull_request.title = u"Test"
838 pull_request.description = u"Description"
838 pull_request.description = u"Description"
839 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
839 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
840 pull_request.pull_request_state = PullRequest.STATE_CREATED
840 pull_request.pull_request_state = PullRequest.STATE_CREATED
841 Session().add(pull_request)
841 Session().add(pull_request)
842 Session().commit()
842 Session().commit()
843 pull_request_id = pull_request.pull_request_id
843 pull_request_id = pull_request.pull_request_id
844
844
845 # target has ancestor - ancestor-new
845 # target has ancestor - ancestor-new
846 # source has ancestor - ancestor-new - change-rebased
846 # source has ancestor - ancestor-new - change-rebased
847 backend.pull_heads(target, heads=['ancestor-new'])
847 backend.pull_heads(target, heads=['ancestor-new'])
848 backend.pull_heads(source, heads=['change-rebased'])
848 backend.pull_heads(source, heads=['change-rebased'])
849
849
850 # update PR
850 # update PR
851 self.app.post(
851 self.app.post(
852 route_path('pullrequest_update',
852 route_path('pullrequest_update',
853 repo_name=target.repo_name, pull_request_id=pull_request_id),
853 repo_name=target.repo_name, pull_request_id=pull_request_id),
854 params={'update_commits': 'true', 'csrf_token': csrf_token},
854 params={'update_commits': 'true', 'csrf_token': csrf_token},
855 status=200)
855 status=200)
856
856
857 # Expect the target reference to be updated correctly
857 # Expect the target reference to be updated correctly
858 pull_request = PullRequest.get(pull_request_id)
858 pull_request = PullRequest.get(pull_request_id)
859 assert pull_request.revisions == [commit_ids['change-rebased']]
859 assert pull_request.revisions == [commit_ids['change-rebased']]
860 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
860 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
861 branch=backend.default_branch_name,
861 branch=backend.default_branch_name,
862 commit_id=commit_ids['ancestor-new'])
862 commit_id=commit_ids['ancestor-new'])
863 assert pull_request.target_ref == expected_target_ref
863 assert pull_request.target_ref == expected_target_ref
864
864
865 def test_remove_pull_request_branch(self, backend_git, csrf_token):
865 def test_remove_pull_request_branch(self, backend_git, csrf_token):
866 branch_name = 'development'
866 branch_name = 'development'
867 commits = [
867 commits = [
868 {'message': 'initial-commit'},
868 {'message': 'initial-commit'},
869 {'message': 'old-feature'},
869 {'message': 'old-feature'},
870 {'message': 'new-feature', 'branch': branch_name},
870 {'message': 'new-feature', 'branch': branch_name},
871 ]
871 ]
872 repo = backend_git.create_repo(commits)
872 repo = backend_git.create_repo(commits)
873 repo_name = repo.repo_name
873 repo_name = repo.repo_name
874 commit_ids = backend_git.commit_ids
874 commit_ids = backend_git.commit_ids
875
875
876 pull_request = PullRequest()
876 pull_request = PullRequest()
877 pull_request.source_repo = repo
877 pull_request.source_repo = repo
878 pull_request.target_repo = repo
878 pull_request.target_repo = repo
879 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
879 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
880 branch=branch_name, commit_id=commit_ids['new-feature'])
880 branch=branch_name, commit_id=commit_ids['new-feature'])
881 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
881 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
882 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
882 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
883 pull_request.revisions = [commit_ids['new-feature']]
883 pull_request.revisions = [commit_ids['new-feature']]
884 pull_request.title = u"Test"
884 pull_request.title = u"Test"
885 pull_request.description = u"Description"
885 pull_request.description = u"Description"
886 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
886 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
887 pull_request.pull_request_state = PullRequest.STATE_CREATED
887 pull_request.pull_request_state = PullRequest.STATE_CREATED
888 Session().add(pull_request)
888 Session().add(pull_request)
889 Session().commit()
889 Session().commit()
890
890
891 pull_request_id = pull_request.pull_request_id
891 pull_request_id = pull_request.pull_request_id
892
892
893 vcs = repo.scm_instance()
893 vcs = repo.scm_instance()
894 vcs.remove_ref('refs/heads/{}'.format(branch_name))
894 vcs.remove_ref('refs/heads/{}'.format(branch_name))
895
895
896 response = self.app.get(route_path(
896 response = self.app.get(route_path(
897 'pullrequest_show',
897 'pullrequest_show',
898 repo_name=repo_name,
898 repo_name=repo_name,
899 pull_request_id=pull_request_id))
899 pull_request_id=pull_request_id))
900
900
901 assert response.status_int == 200
901 assert response.status_int == 200
902
902
903 response.assert_response().element_contains(
903 response.assert_response().element_contains(
904 '#changeset_compare_view_content .alert strong',
904 '#changeset_compare_view_content .alert strong',
905 'Missing commits')
905 'Missing commits')
906 response.assert_response().element_contains(
906 response.assert_response().element_contains(
907 '#changeset_compare_view_content .alert',
907 '#changeset_compare_view_content .alert',
908 'This pull request cannot be displayed, because one or more'
908 'This pull request cannot be displayed, because one or more'
909 ' commits no longer exist in the source repository.')
909 ' commits no longer exist in the source repository.')
910
910
911 def test_strip_commits_from_pull_request(
911 def test_strip_commits_from_pull_request(
912 self, backend, pr_util, csrf_token):
912 self, backend, pr_util, csrf_token):
913 commits = [
913 commits = [
914 {'message': 'initial-commit'},
914 {'message': 'initial-commit'},
915 {'message': 'old-feature'},
915 {'message': 'old-feature'},
916 {'message': 'new-feature', 'parents': ['initial-commit']},
916 {'message': 'new-feature', 'parents': ['initial-commit']},
917 ]
917 ]
918 pull_request = pr_util.create_pull_request(
918 pull_request = pr_util.create_pull_request(
919 commits, target_head='initial-commit', source_head='new-feature',
919 commits, target_head='initial-commit', source_head='new-feature',
920 revisions=['new-feature'])
920 revisions=['new-feature'])
921
921
922 vcs = pr_util.source_repository.scm_instance()
922 vcs = pr_util.source_repository.scm_instance()
923 if backend.alias == 'git':
923 if backend.alias == 'git':
924 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
924 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
925 else:
925 else:
926 vcs.strip(pr_util.commit_ids['new-feature'])
926 vcs.strip(pr_util.commit_ids['new-feature'])
927
927
928 response = self.app.get(route_path(
928 response = self.app.get(route_path(
929 'pullrequest_show',
929 'pullrequest_show',
930 repo_name=pr_util.target_repository.repo_name,
930 repo_name=pr_util.target_repository.repo_name,
931 pull_request_id=pull_request.pull_request_id))
931 pull_request_id=pull_request.pull_request_id))
932
932
933 assert response.status_int == 200
933 assert response.status_int == 200
934
934
935 response.assert_response().element_contains(
935 response.assert_response().element_contains(
936 '#changeset_compare_view_content .alert strong',
936 '#changeset_compare_view_content .alert strong',
937 'Missing commits')
937 'Missing commits')
938 response.assert_response().element_contains(
938 response.assert_response().element_contains(
939 '#changeset_compare_view_content .alert',
939 '#changeset_compare_view_content .alert',
940 'This pull request cannot be displayed, because one or more'
940 'This pull request cannot be displayed, because one or more'
941 ' commits no longer exist in the source repository.')
941 ' commits no longer exist in the source repository.')
942 response.assert_response().element_contains(
942 response.assert_response().element_contains(
943 '#update_commits',
943 '#update_commits',
944 'Update commits')
944 'Update commits')
945
945
946 def test_strip_commits_and_update(
946 def test_strip_commits_and_update(
947 self, backend, pr_util, csrf_token):
947 self, backend, pr_util, csrf_token):
948 commits = [
948 commits = [
949 {'message': 'initial-commit'},
949 {'message': 'initial-commit'},
950 {'message': 'old-feature'},
950 {'message': 'old-feature'},
951 {'message': 'new-feature', 'parents': ['old-feature']},
951 {'message': 'new-feature', 'parents': ['old-feature']},
952 ]
952 ]
953 pull_request = pr_util.create_pull_request(
953 pull_request = pr_util.create_pull_request(
954 commits, target_head='old-feature', source_head='new-feature',
954 commits, target_head='old-feature', source_head='new-feature',
955 revisions=['new-feature'], mergeable=True)
955 revisions=['new-feature'], mergeable=True)
956
956
957 vcs = pr_util.source_repository.scm_instance()
957 vcs = pr_util.source_repository.scm_instance()
958 if backend.alias == 'git':
958 if backend.alias == 'git':
959 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
959 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
960 else:
960 else:
961 vcs.strip(pr_util.commit_ids['new-feature'])
961 vcs.strip(pr_util.commit_ids['new-feature'])
962
962
963 url = route_path('pullrequest_update',
963 url = route_path('pullrequest_update',
964 repo_name=pull_request.target_repo.repo_name,
964 repo_name=pull_request.target_repo.repo_name,
965 pull_request_id=pull_request.pull_request_id)
965 pull_request_id=pull_request.pull_request_id)
966 response = self.app.post(url,
966 response = self.app.post(url,
967 params={'update_commits': 'true',
967 params={'update_commits': 'true',
968 'csrf_token': csrf_token})
968 'csrf_token': csrf_token})
969
969
970 assert response.status_int == 200
970 assert response.status_int == 200
971 assert response.body == 'true'
971 assert response.body == 'true'
972
972
973 # Make sure that after update, it won't raise 500 errors
973 # Make sure that after update, it won't raise 500 errors
974 response = self.app.get(route_path(
974 response = self.app.get(route_path(
975 'pullrequest_show',
975 'pullrequest_show',
976 repo_name=pr_util.target_repository.repo_name,
976 repo_name=pr_util.target_repository.repo_name,
977 pull_request_id=pull_request.pull_request_id))
977 pull_request_id=pull_request.pull_request_id))
978
978
979 assert response.status_int == 200
979 assert response.status_int == 200
980 response.assert_response().element_contains(
980 response.assert_response().element_contains(
981 '#changeset_compare_view_content .alert strong',
981 '#changeset_compare_view_content .alert strong',
982 'Missing commits')
982 'Missing commits')
983
983
984 def test_branch_is_a_link(self, pr_util):
984 def test_branch_is_a_link(self, pr_util):
985 pull_request = pr_util.create_pull_request()
985 pull_request = pr_util.create_pull_request()
986 pull_request.source_ref = 'branch:origin:1234567890abcdef'
986 pull_request.source_ref = 'branch:origin:1234567890abcdef'
987 pull_request.target_ref = 'branch:target:abcdef1234567890'
987 pull_request.target_ref = 'branch:target:abcdef1234567890'
988 Session().add(pull_request)
988 Session().add(pull_request)
989 Session().commit()
989 Session().commit()
990
990
991 response = self.app.get(route_path(
991 response = self.app.get(route_path(
992 'pullrequest_show',
992 'pullrequest_show',
993 repo_name=pull_request.target_repo.scm_instance().name,
993 repo_name=pull_request.target_repo.scm_instance().name,
994 pull_request_id=pull_request.pull_request_id))
994 pull_request_id=pull_request.pull_request_id))
995 assert response.status_int == 200
995 assert response.status_int == 200
996
996
997 origin = response.assert_response().get_element('.pr-origininfo .tag')
997 origin = response.assert_response().get_element('.pr-origininfo .tag')
998 origin_children = origin.getchildren()
998 origin_children = origin.getchildren()
999 assert len(origin_children) == 1
999 assert len(origin_children) == 1
1000 target = response.assert_response().get_element('.pr-targetinfo .tag')
1000 target = response.assert_response().get_element('.pr-targetinfo .tag')
1001 target_children = target.getchildren()
1001 target_children = target.getchildren()
1002 assert len(target_children) == 1
1002 assert len(target_children) == 1
1003
1003
1004 expected_origin_link = route_path(
1004 expected_origin_link = route_path(
1005 'repo_commits',
1005 'repo_commits',
1006 repo_name=pull_request.source_repo.scm_instance().name,
1006 repo_name=pull_request.source_repo.scm_instance().name,
1007 params=dict(branch='origin'))
1007 params=dict(branch='origin'))
1008 expected_target_link = route_path(
1008 expected_target_link = route_path(
1009 'repo_commits',
1009 'repo_commits',
1010 repo_name=pull_request.target_repo.scm_instance().name,
1010 repo_name=pull_request.target_repo.scm_instance().name,
1011 params=dict(branch='target'))
1011 params=dict(branch='target'))
1012 assert origin_children[0].attrib['href'] == expected_origin_link
1012 assert origin_children[0].attrib['href'] == expected_origin_link
1013 assert origin_children[0].text == 'branch: origin'
1013 assert origin_children[0].text == 'branch: origin'
1014 assert target_children[0].attrib['href'] == expected_target_link
1014 assert target_children[0].attrib['href'] == expected_target_link
1015 assert target_children[0].text == 'branch: target'
1015 assert target_children[0].text == 'branch: target'
1016
1016
1017 def test_bookmark_is_not_a_link(self, pr_util):
1017 def test_bookmark_is_not_a_link(self, pr_util):
1018 pull_request = pr_util.create_pull_request()
1018 pull_request = pr_util.create_pull_request()
1019 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1019 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1020 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1020 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1021 Session().add(pull_request)
1021 Session().add(pull_request)
1022 Session().commit()
1022 Session().commit()
1023
1023
1024 response = self.app.get(route_path(
1024 response = self.app.get(route_path(
1025 'pullrequest_show',
1025 'pullrequest_show',
1026 repo_name=pull_request.target_repo.scm_instance().name,
1026 repo_name=pull_request.target_repo.scm_instance().name,
1027 pull_request_id=pull_request.pull_request_id))
1027 pull_request_id=pull_request.pull_request_id))
1028 assert response.status_int == 200
1028 assert response.status_int == 200
1029
1029
1030 origin = response.assert_response().get_element('.pr-origininfo .tag')
1030 origin = response.assert_response().get_element('.pr-origininfo .tag')
1031 assert origin.text.strip() == 'bookmark: origin'
1031 assert origin.text.strip() == 'bookmark: origin'
1032 assert origin.getchildren() == []
1032 assert origin.getchildren() == []
1033
1033
1034 target = response.assert_response().get_element('.pr-targetinfo .tag')
1034 target = response.assert_response().get_element('.pr-targetinfo .tag')
1035 assert target.text.strip() == 'bookmark: target'
1035 assert target.text.strip() == 'bookmark: target'
1036 assert target.getchildren() == []
1036 assert target.getchildren() == []
1037
1037
1038 def test_tag_is_not_a_link(self, pr_util):
1038 def test_tag_is_not_a_link(self, pr_util):
1039 pull_request = pr_util.create_pull_request()
1039 pull_request = pr_util.create_pull_request()
1040 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1040 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1041 pull_request.target_ref = 'tag:target:abcdef1234567890'
1041 pull_request.target_ref = 'tag:target:abcdef1234567890'
1042 Session().add(pull_request)
1042 Session().add(pull_request)
1043 Session().commit()
1043 Session().commit()
1044
1044
1045 response = self.app.get(route_path(
1045 response = self.app.get(route_path(
1046 'pullrequest_show',
1046 'pullrequest_show',
1047 repo_name=pull_request.target_repo.scm_instance().name,
1047 repo_name=pull_request.target_repo.scm_instance().name,
1048 pull_request_id=pull_request.pull_request_id))
1048 pull_request_id=pull_request.pull_request_id))
1049 assert response.status_int == 200
1049 assert response.status_int == 200
1050
1050
1051 origin = response.assert_response().get_element('.pr-origininfo .tag')
1051 origin = response.assert_response().get_element('.pr-origininfo .tag')
1052 assert origin.text.strip() == 'tag: origin'
1052 assert origin.text.strip() == 'tag: origin'
1053 assert origin.getchildren() == []
1053 assert origin.getchildren() == []
1054
1054
1055 target = response.assert_response().get_element('.pr-targetinfo .tag')
1055 target = response.assert_response().get_element('.pr-targetinfo .tag')
1056 assert target.text.strip() == 'tag: target'
1056 assert target.text.strip() == 'tag: target'
1057 assert target.getchildren() == []
1057 assert target.getchildren() == []
1058
1058
1059 @pytest.mark.parametrize('mergeable', [True, False])
1059 @pytest.mark.parametrize('mergeable', [True, False])
1060 def test_shadow_repository_link(
1060 def test_shadow_repository_link(
1061 self, mergeable, pr_util, http_host_only_stub):
1061 self, mergeable, pr_util, http_host_only_stub):
1062 """
1062 """
1063 Check that the pull request summary page displays a link to the shadow
1063 Check that the pull request summary page displays a link to the shadow
1064 repository if the pull request is mergeable. If it is not mergeable
1064 repository if the pull request is mergeable. If it is not mergeable
1065 the link should not be displayed.
1065 the link should not be displayed.
1066 """
1066 """
1067 pull_request = pr_util.create_pull_request(
1067 pull_request = pr_util.create_pull_request(
1068 mergeable=mergeable, enable_notifications=False)
1068 mergeable=mergeable, enable_notifications=False)
1069 target_repo = pull_request.target_repo.scm_instance()
1069 target_repo = pull_request.target_repo.scm_instance()
1070 pr_id = pull_request.pull_request_id
1070 pr_id = pull_request.pull_request_id
1071 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1071 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1072 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1072 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1073
1073
1074 response = self.app.get(route_path(
1074 response = self.app.get(route_path(
1075 'pullrequest_show',
1075 'pullrequest_show',
1076 repo_name=target_repo.name,
1076 repo_name=target_repo.name,
1077 pull_request_id=pr_id))
1077 pull_request_id=pr_id))
1078
1078
1079 if mergeable:
1079 if mergeable:
1080 response.assert_response().element_value_contains(
1080 response.assert_response().element_value_contains(
1081 'input.pr-mergeinfo', shadow_url)
1081 'input.pr-mergeinfo', shadow_url)
1082 response.assert_response().element_value_contains(
1082 response.assert_response().element_value_contains(
1083 'input.pr-mergeinfo ', 'pr-merge')
1083 'input.pr-mergeinfo ', 'pr-merge')
1084 else:
1084 else:
1085 response.assert_response().no_element_exists('.pr-mergeinfo')
1085 response.assert_response().no_element_exists('.pr-mergeinfo')
1086
1086
1087
1087
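# A minimal usage sketch of the shadow repository link verified above, assuming
# a hypothetical host and repository name; the pr-mergeinfo input exposes a
# clone URL of the transient shadow (merge preview) repository, e.g.:
#   git clone http://example.com/my-repo/pull-request/1/repository pr-merge-preview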
1088 @pytest.mark.usefixtures('app')
1088 @pytest.mark.usefixtures('app')
1089 @pytest.mark.backends("git", "hg")
1089 @pytest.mark.backends("git", "hg")
1090 class TestPullrequestsControllerDelete(object):
1090 class TestPullrequestsControllerDelete(object):
1091 def test_pull_request_delete_button_permissions_admin(
1091 def test_pull_request_delete_button_permissions_admin(
1092 self, autologin_user, user_admin, pr_util):
1092 self, autologin_user, user_admin, pr_util):
1093 pull_request = pr_util.create_pull_request(
1093 pull_request = pr_util.create_pull_request(
1094 author=user_admin.username, enable_notifications=False)
1094 author=user_admin.username, enable_notifications=False)
1095
1095
1096 response = self.app.get(route_path(
1096 response = self.app.get(route_path(
1097 'pullrequest_show',
1097 'pullrequest_show',
1098 repo_name=pull_request.target_repo.scm_instance().name,
1098 repo_name=pull_request.target_repo.scm_instance().name,
1099 pull_request_id=pull_request.pull_request_id))
1099 pull_request_id=pull_request.pull_request_id))
1100
1100
1101 response.mustcontain('id="delete_pullrequest"')
1101 response.mustcontain('id="delete_pullrequest"')
1102 response.mustcontain('Confirm to delete this pull request')
1102 response.mustcontain('Confirm to delete this pull request')
1103
1103
1104 def test_pull_request_delete_button_permissions_owner(
1104 def test_pull_request_delete_button_permissions_owner(
1105 self, autologin_regular_user, user_regular, pr_util):
1105 self, autologin_regular_user, user_regular, pr_util):
1106 pull_request = pr_util.create_pull_request(
1106 pull_request = pr_util.create_pull_request(
1107 author=user_regular.username, enable_notifications=False)
1107 author=user_regular.username, enable_notifications=False)
1108
1108
1109 response = self.app.get(route_path(
1109 response = self.app.get(route_path(
1110 'pullrequest_show',
1110 'pullrequest_show',
1111 repo_name=pull_request.target_repo.scm_instance().name,
1111 repo_name=pull_request.target_repo.scm_instance().name,
1112 pull_request_id=pull_request.pull_request_id))
1112 pull_request_id=pull_request.pull_request_id))
1113
1113
1114 response.mustcontain('id="delete_pullrequest"')
1114 response.mustcontain('id="delete_pullrequest"')
1115 response.mustcontain('Confirm to delete this pull request')
1115 response.mustcontain('Confirm to delete this pull request')
1116
1116
1117 def test_pull_request_delete_button_permissions_forbidden(
1117 def test_pull_request_delete_button_permissions_forbidden(
1118 self, autologin_regular_user, user_regular, user_admin, pr_util):
1118 self, autologin_regular_user, user_regular, user_admin, pr_util):
1119 pull_request = pr_util.create_pull_request(
1119 pull_request = pr_util.create_pull_request(
1120 author=user_admin.username, enable_notifications=False)
1120 author=user_admin.username, enable_notifications=False)
1121
1121
1122 response = self.app.get(route_path(
1122 response = self.app.get(route_path(
1123 'pullrequest_show',
1123 'pullrequest_show',
1124 repo_name=pull_request.target_repo.scm_instance().name,
1124 repo_name=pull_request.target_repo.scm_instance().name,
1125 pull_request_id=pull_request.pull_request_id))
1125 pull_request_id=pull_request.pull_request_id))
1126 response.mustcontain(no=['id="delete_pullrequest"'])
1126 response.mustcontain(no=['id="delete_pullrequest"'])
1127 response.mustcontain(no=['Confirm to delete this pull request'])
1127 response.mustcontain(no=['Confirm to delete this pull request'])
1128
1128
1129 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1129 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1130 self, autologin_regular_user, user_regular, user_admin, pr_util,
1130 self, autologin_regular_user, user_regular, user_admin, pr_util,
1131 user_util):
1131 user_util):
1132
1132
1133 pull_request = pr_util.create_pull_request(
1133 pull_request = pr_util.create_pull_request(
1134 author=user_admin.username, enable_notifications=False)
1134 author=user_admin.username, enable_notifications=False)
1135
1135
1136 user_util.grant_user_permission_to_repo(
1136 user_util.grant_user_permission_to_repo(
1137 pull_request.target_repo, user_regular,
1137 pull_request.target_repo, user_regular,
1138 'repository.write')
1138 'repository.write')
1139
1139
1140 response = self.app.get(route_path(
1140 response = self.app.get(route_path(
1141 'pullrequest_show',
1141 'pullrequest_show',
1142 repo_name=pull_request.target_repo.scm_instance().name,
1142 repo_name=pull_request.target_repo.scm_instance().name,
1143 pull_request_id=pull_request.pull_request_id))
1143 pull_request_id=pull_request.pull_request_id))
1144
1144
1145 response.mustcontain('id="open_edit_pullrequest"')
1145 response.mustcontain('id="open_edit_pullrequest"')
1146 response.mustcontain('id="delete_pullrequest"')
1146 response.mustcontain('id="delete_pullrequest"')
1147 response.mustcontain(no=['Confirm to delete this pull request'])
1147 response.mustcontain(no=['Confirm to delete this pull request'])
1148
1148
1149 def test_delete_comment_returns_404_if_comment_does_not_exist(
1149 def test_delete_comment_returns_404_if_comment_does_not_exist(
1150 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1150 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1151
1151
1152 pull_request = pr_util.create_pull_request(
1152 pull_request = pr_util.create_pull_request(
1153 author=user_admin.username, enable_notifications=False)
1153 author=user_admin.username, enable_notifications=False)
1154
1154
1155 self.app.post(
1155 self.app.post(
1156 route_path(
1156 route_path(
1157 'pullrequest_comment_delete',
1157 'pullrequest_comment_delete',
1158 repo_name=pull_request.target_repo.scm_instance().name,
1158 repo_name=pull_request.target_repo.scm_instance().name,
1159 pull_request_id=pull_request.pull_request_id,
1159 pull_request_id=pull_request.pull_request_id,
1160 comment_id=1024404),
1160 comment_id=1024404),
1161 extra_environ=xhr_header,
1161 extra_environ=xhr_header,
1162 params={'csrf_token': csrf_token},
1162 params={'csrf_token': csrf_token},
1163 status=404
1163 status=404
1164 )
1164 )
1165
1165
1166 def test_delete_comment(
1166 def test_delete_comment(
1167 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1167 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1168
1168
1169 pull_request = pr_util.create_pull_request(
1169 pull_request = pr_util.create_pull_request(
1170 author=user_admin.username, enable_notifications=False)
1170 author=user_admin.username, enable_notifications=False)
1171 comment = pr_util.create_comment()
1171 comment = pr_util.create_comment()
1172 comment_id = comment.comment_id
1172 comment_id = comment.comment_id
1173
1173
1174 response = self.app.post(
1174 response = self.app.post(
1175 route_path(
1175 route_path(
1176 'pullrequest_comment_delete',
1176 'pullrequest_comment_delete',
1177 repo_name=pull_request.target_repo.scm_instance().name,
1177 repo_name=pull_request.target_repo.scm_instance().name,
1178 pull_request_id=pull_request.pull_request_id,
1178 pull_request_id=pull_request.pull_request_id,
1179 comment_id=comment_id),
1179 comment_id=comment_id),
1180 extra_environ=xhr_header,
1180 extra_environ=xhr_header,
1181 params={'csrf_token': csrf_token},
1181 params={'csrf_token': csrf_token},
1182 status=200
1182 status=200
1183 )
1183 )
1184 assert response.body == 'true'
1184 assert response.body == 'true'
1185
1185
1186 @pytest.mark.parametrize('url_type', [
1186 @pytest.mark.parametrize('url_type', [
1187 'pullrequest_new',
1187 'pullrequest_new',
1188 'pullrequest_create',
1188 'pullrequest_create',
1189 'pullrequest_update',
1189 'pullrequest_update',
1190 'pullrequest_merge',
1190 'pullrequest_merge',
1191 ])
1191 ])
1192 def test_pull_request_is_forbidden_on_archived_repo(
1192 def test_pull_request_is_forbidden_on_archived_repo(
1193 self, autologin_user, backend, xhr_header, user_util, url_type):
1193 self, autologin_user, backend, xhr_header, user_util, url_type):
1194
1194
1195 # create a temporary repo
1195 # create a temporary repo
1196 source = user_util.create_repo(repo_type=backend.alias)
1196 source = user_util.create_repo(repo_type=backend.alias)
1197 repo_name = source.repo_name
1197 repo_name = source.repo_name
1198 repo = Repository.get_by_repo_name(repo_name)
1198 repo = Repository.get_by_repo_name(repo_name)
1199 repo.archived = True
1199 repo.archived = True
1200 Session().commit()
1200 Session().commit()
1201
1201
1202 response = self.app.get(
1202 response = self.app.get(
1203 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1203 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1204
1204
1205 msg = 'Action not supported for archived repository.'
1205 msg = 'Action not supported for archived repository.'
1206 assert_session_flash(response, msg)
1206 assert_session_flash(response, msg)
1207
1207
1208
1208
1209 def assert_pull_request_status(pull_request, expected_status):
1209 def assert_pull_request_status(pull_request, expected_status):
1210 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1210 status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request)
1211 assert status == expected_status
1211 assert status == expected_status
1212
1212
1213
1213
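# Hypothetical usage sketch for the helper above (the pull request and expected
# status are assumed, not taken from a specific test): once every reviewer has
# voted, the calculated review status should match the expectation, e.g.:
#   assert_pull_request_status(pull_request, ChangesetStatus.STATUS_APPROVED)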
1214 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1214 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1215 @pytest.mark.usefixtures("autologin_user")
1215 @pytest.mark.usefixtures("autologin_user")
1216 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1216 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1217 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
1217 app.get(route_path(route, repo_name=backend_svn.repo_name), status=404)
@@ -1,1017 +1,1017 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference)
39 MergeFailureReason, Reference)
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, EmptyRepositoryError,
44 CommitDoesNotExistError, EmptyRepositoryError,
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46
46
47
47
48 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
48 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class GitRepository(BaseRepository):
53 class GitRepository(BaseRepository):
54 """
54 """
55 Git repository backend.
55 Git repository backend.
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'master'
57 DEFAULT_BRANCH_NAME = 'master'
58
58
59 contact = BaseRepository.DEFAULT_CONTACT
59 contact = BaseRepository.DEFAULT_CONTACT
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 do_workspace_checkout=False, with_wire=None, bare=False):
62 do_workspace_checkout=False, with_wire=None, bare=False):
63
63
64 self.path = safe_str(os.path.abspath(repo_path))
64 self.path = safe_str(os.path.abspath(repo_path))
65 self.config = config if config else self.get_default_config()
65 self.config = config if config else self.get_default_config()
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67
67
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69
69
70 # caches
70 # caches
71 self._commit_ids = {}
71 self._commit_ids = {}
72
72
73 @LazyProperty
73 @LazyProperty
74 def _remote(self):
74 def _remote(self):
75 repo_id = self.path
75 repo_id = self.path
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77
77
78 @LazyProperty
78 @LazyProperty
79 def bare(self):
79 def bare(self):
80 return self._remote.bare()
80 return self._remote.bare()
81
81
82 @LazyProperty
82 @LazyProperty
83 def head(self):
83 def head(self):
84 return self._remote.head()
84 return self._remote.head()
85
85
86 @CachedProperty
86 @CachedProperty
87 def commit_ids(self):
87 def commit_ids(self):
88 """
88 """
89 Returns a list of commit ids, in ascending order. Being a lazy
89 Returns a list of commit ids, in ascending order. Being a lazy
90 attribute allows external tools to inject commit ids from a cache.
90 attribute allows external tools to inject commit ids from a cache.
91 """
91 """
92 commit_ids = self._get_all_commit_ids()
92 commit_ids = self._get_all_commit_ids()
93 self._rebuild_cache(commit_ids)
93 self._rebuild_cache(commit_ids)
94 return commit_ids
94 return commit_ids
95
95
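# Illustrative sketch, assuming an already constructed GitRepository instance
# named ``repo`` (the name is hypothetical): the property above is computed
# once and then cached, so e.g.
#   latest_commit_id = repo.commit_ids[-1]
# reuses the cached list instead of re-reading all commit ids.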
96 def _rebuild_cache(self, commit_ids):
96 def _rebuild_cache(self, commit_ids):
97 self._commit_ids = dict((commit_id, index)
97 self._commit_ids = dict((commit_id, index)
98 for index, commit_id in enumerate(commit_ids))
98 for index, commit_id in enumerate(commit_ids))
99
99
100 def run_git_command(self, cmd, **opts):
100 def run_git_command(self, cmd, **opts):
101 """
101 """
102 Runs the given ``cmd`` as a git command and returns a tuple
102 Runs the given ``cmd`` as a git command and returns a tuple
103 (stdout, stderr).
103 (stdout, stderr).
104
104
105 :param cmd: git command to be executed
105 :param cmd: git command to be executed
106 :param opts: env options to pass into Subprocess command
106 :param opts: env options to pass into Subprocess command
107 """
107 """
108 if not isinstance(cmd, list):
108 if not isinstance(cmd, list):
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110
110
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 out, err = self._remote.run_git_command(cmd, **opts)
112 out, err = self._remote.run_git_command(cmd, **opts)
113 if err and not skip_stderr_log:
113 if err and not skip_stderr_log:
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 return out, err
115 return out, err
116
116
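# Illustrative sketch, assuming a GitRepository instance ``repo`` (hypothetical
# name): this wrapper is how the helpers further down drive git, e.g.
#   stdout, stderr = repo.run_git_command(['rev-list', '--count', 'HEAD'])
# where stderr is logged at debug level unless ``skip_stderr_log=True`` is passed.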
117 @staticmethod
117 @staticmethod
118 def check_url(url, config):
118 def check_url(url, config):
119 """
119 """
120 Function will check the given url and try to verify that it's a valid
120 Function will check the given url and try to verify that it's a valid
121 link. Sometimes it may happen that git issues a basic
121 link. Sometimes it may happen that git issues a basic
122 auth request, which can cause the whole API to hang when used from python
122 auth request, which can cause the whole API to hang when used from python
123 or other external calls.
123 or other external calls.
124
124
125 On failures it'll raise urllib2.HTTPError; the exception is also thrown
125 On failures it'll raise urllib2.HTTPError; the exception is also thrown
126 when the return code is not 200
126 when the return code is not 200
127 """
127 """
128 # check first if it's a local path rather than a remote url
128 # check first if it's a local path rather than a remote url
129 if os.path.isdir(url) or url.startswith('file:'):
129 if os.path.isdir(url) or url.startswith('file:'):
130 return True
130 return True
131
131
132 if '+' in url.split('://', 1)[0]:
132 if '+' in url.split('://', 1)[0]:
133 url = url.split('+', 1)[1]
133 url = url.split('+', 1)[1]
134
134
135 # Request the _remote to verify the url
135 # Request the _remote to verify the url
136 return connection.Git.check_url(url, config.serialize())
136 return connection.Git.check_url(url, config.serialize())
137
137
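# Illustrative sketch (the url is hypothetical, ``config`` is assumed to be a
# valid Config instance):
#   GitRepository.check_url('http://example.com/upstream.git', config)
# returns True for local paths and reachable remote urls, and raises on
# failures such as non-200 responses.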
138 @staticmethod
138 @staticmethod
139 def is_valid_repository(path):
139 def is_valid_repository(path):
140 if os.path.isdir(os.path.join(path, '.git')):
140 if os.path.isdir(os.path.join(path, '.git')):
141 return True
141 return True
142 # check case of bare repository
142 # check case of bare repository
143 try:
143 try:
144 GitRepository(path)
144 GitRepository(path)
145 return True
145 return True
146 except VCSError:
146 except VCSError:
147 pass
147 pass
148 return False
148 return False
149
149
150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 bare=False):
151 bare=False):
152 if create and os.path.exists(self.path):
152 if create and os.path.exists(self.path):
153 raise RepositoryError(
153 raise RepositoryError(
154 "Cannot create repository at %s, location already exist"
154 "Cannot create repository at %s, location already exist"
155 % self.path)
155 % self.path)
156
156
157 if bare and do_workspace_checkout:
157 if bare and do_workspace_checkout:
158 raise RepositoryError("Cannot update a bare repository")
158 raise RepositoryError("Cannot update a bare repository")
159 try:
159 try:
160
160
161 if src_url:
161 if src_url:
162 # check URL before any actions
162 # check URL before any actions
163 GitRepository.check_url(src_url, self.config)
163 GitRepository.check_url(src_url, self.config)
164
164
165 if create:
165 if create:
166 os.makedirs(self.path, mode=0o755)
166 os.makedirs(self.path, mode=0o755)
167
167
168 if bare:
168 if bare:
169 self._remote.init_bare()
169 self._remote.init_bare()
170 else:
170 else:
171 self._remote.init()
171 self._remote.init()
172
172
173 if src_url and bare:
173 if src_url and bare:
174 # a bare repository only allows a fetch; a checkout is not allowed
174 # a bare repository only allows a fetch; a checkout is not allowed
175 self.fetch(src_url, commit_ids=None)
175 self.fetch(src_url, commit_ids=None)
176 elif src_url:
176 elif src_url:
177 self.pull(src_url, commit_ids=None,
177 self.pull(src_url, commit_ids=None,
178 update_after=do_workspace_checkout)
178 update_after=do_workspace_checkout)
179
179
180 else:
180 else:
181 if not self._remote.assert_correct_path():
181 if not self._remote.assert_correct_path():
182 raise RepositoryError(
182 raise RepositoryError(
183 'Path "%s" does not contain a Git repository' %
183 'Path "%s" does not contain a Git repository' %
184 (self.path,))
184 (self.path,))
185
185
186 # TODO: johbo: check if we have to translate the OSError here
186 # TODO: johbo: check if we have to translate the OSError here
187 except OSError as err:
187 except OSError as err:
188 raise RepositoryError(err)
188 raise RepositoryError(err)
189
189
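# Illustrative construction sketch (path and url are hypothetical):
#   GitRepository('/tmp/mirror.git', create=True, bare=True,
#                 src_url='http://example.com/upstream.git')
# takes the create + bare branch of _init_repo above: the directory is created,
# a bare repository is initialised and the source url is fetched.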
190 def _get_all_commit_ids(self):
190 def _get_all_commit_ids(self):
191 return self._remote.get_all_commit_ids()
191 return self._remote.get_all_commit_ids()
192
192
193 def _get_commit_ids(self, filters=None):
193 def _get_commit_ids(self, filters=None):
194 # we must check if this repo is not empty, since the later command
194 # we must check if this repo is not empty, since the later command
195 # fails if it is. And it's cheaper to ask than to catch the subprocess
195 # fails if it is. And it's cheaper to ask than to catch the subprocess
196 # errors
196 # errors
197
197
198 head = self._remote.head(show_exc=False)
198 head = self._remote.head(show_exc=False)
199
199
200 if not head:
200 if not head:
201 return []
201 return []
202
202
203 rev_filter = ['--branches', '--tags']
203 rev_filter = ['--branches', '--tags']
204 extra_filter = []
204 extra_filter = []
205
205
206 if filters:
206 if filters:
207 if filters.get('since'):
207 if filters.get('since'):
208 extra_filter.append('--since=%s' % (filters['since']))
208 extra_filter.append('--since=%s' % (filters['since']))
209 if filters.get('until'):
209 if filters.get('until'):
210 extra_filter.append('--until=%s' % (filters['until']))
210 extra_filter.append('--until=%s' % (filters['until']))
211 if filters.get('branch_name'):
211 if filters.get('branch_name'):
212 rev_filter = []
212 rev_filter = []
213 extra_filter.append(filters['branch_name'])
213 extra_filter.append(filters['branch_name'])
214 rev_filter.extend(extra_filter)
214 rev_filter.extend(extra_filter)
215
215
216 # if filters.get('start') or filters.get('end'):
216 # if filters.get('start') or filters.get('end'):
217 # # skip is offset, max-count is limit
217 # # skip is offset, max-count is limit
218 # if filters.get('start'):
218 # if filters.get('start'):
219 # extra_filter += ' --skip=%s' % filters['start']
219 # extra_filter += ' --skip=%s' % filters['start']
220 # if filters.get('end'):
220 # if filters.get('end'):
221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222
222
223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 try:
224 try:
225 output, __ = self.run_git_command(cmd)
225 output, __ = self.run_git_command(cmd)
226 except RepositoryError:
226 except RepositoryError:
227 # Can be raised for empty repositories
227 # Can be raised for empty repositories
228 return []
228 return []
229 return output.splitlines()
229 return output.splitlines()
230
230
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
232 def is_null(value):
232 def is_null(value):
233 return len(value) == commit_id_or_idx.count('0')
233 return len(value) == commit_id_or_idx.count('0')
234
234
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
236 return self.commit_ids[-1]
236 return self.commit_ids[-1]
237 commit_missing_err = "Commit {} does not exist for `{}`".format(
237 commit_missing_err = "Commit {} does not exist for `{}`".format(
238 *map(safe_str, [commit_id_or_idx, self.name]))
238 *map(safe_str, [commit_id_or_idx, self.name]))
239
239
240 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
240 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
241 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
241 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
242 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
242 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
243 try:
243 try:
244 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
244 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
245 except Exception:
245 except Exception:
246 raise CommitDoesNotExistError(commit_missing_err)
246 raise CommitDoesNotExistError(commit_missing_err)
247
247
248 elif is_bstr:
248 elif is_bstr:
249 # Need to call remote to translate id for tagging scenario
249 # Need to call remote to translate id for tagging scenario
250 try:
250 try:
251 remote_data = self._remote.get_object(commit_id_or_idx)
251 remote_data = self._remote.get_object(commit_id_or_idx)
252 commit_id_or_idx = remote_data["commit_id"]
252 commit_id_or_idx = remote_data["commit_id"]
253 except (CommitDoesNotExistError,):
253 except (CommitDoesNotExistError,):
254 raise CommitDoesNotExistError(commit_missing_err)
254 raise CommitDoesNotExistError(commit_missing_err)
255
255
256 # Ensure we return full id
256 # Ensure we return full id
257 if not SHA_PATTERN.match(str(commit_id_or_idx)):
257 if not SHA_PATTERN.match(str(commit_id_or_idx)):
258 raise CommitDoesNotExistError(
258 raise CommitDoesNotExistError(
259 "Given commit id %s not recognized" % commit_id_or_idx)
259 "Given commit id %s not recognized" % commit_id_or_idx)
260 return commit_id_or_idx
260 return commit_id_or_idx
261
261
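# Illustrative sketch of the lookups handled above, assuming a GitRepository
# instance ``repo`` (the values are hypothetical):
#   repo._lookup_commit('tip')       # None/''/'tip'/'HEAD' resolve to the newest commit id
#   repo._lookup_commit(0)           # integers and short digit strings index into commit_ids
#   repo._lookup_commit(some_sha)    # other strings are translated via the remote and validated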
262 def get_hook_location(self):
262 def get_hook_location(self):
263 """
263 """
264 Returns the absolute path to the location where hooks are stored
264 Returns the absolute path to the location where hooks are stored
265 """
265 """
266 loc = os.path.join(self.path, 'hooks')
266 loc = os.path.join(self.path, 'hooks')
267 if not self.bare:
267 if not self.bare:
268 loc = os.path.join(self.path, '.git', 'hooks')
268 loc = os.path.join(self.path, '.git', 'hooks')
269 return loc
269 return loc
270
270
271 @LazyProperty
271 @LazyProperty
272 def last_change(self):
272 def last_change(self):
273 """
273 """
274 Returns the last change made on this repository as a
274 Returns the last change made on this repository as a
275 `datetime.datetime` object.
275 `datetime.datetime` object.
276 """
276 """
277 try:
277 try:
278 return self.get_commit().date
278 return self.get_commit().date
279 except RepositoryError:
279 except RepositoryError:
280 tzoffset = makedate()[1]
280 tzoffset = makedate()[1]
281 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
281 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
282
282
283 def _get_fs_mtime(self):
283 def _get_fs_mtime(self):
284 idx_loc = '' if self.bare else '.git'
284 idx_loc = '' if self.bare else '.git'
285 # fallback to filesystem
285 # fallback to filesystem
286 in_path = os.path.join(self.path, idx_loc, "index")
286 in_path = os.path.join(self.path, idx_loc, "index")
287 he_path = os.path.join(self.path, idx_loc, "HEAD")
287 he_path = os.path.join(self.path, idx_loc, "HEAD")
288 if os.path.exists(in_path):
288 if os.path.exists(in_path):
289 return os.stat(in_path).st_mtime
289 return os.stat(in_path).st_mtime
290 else:
290 else:
291 return os.stat(he_path).st_mtime
291 return os.stat(he_path).st_mtime
292
292
293 @LazyProperty
293 @LazyProperty
294 def description(self):
294 def description(self):
295 description = self._remote.get_description()
295 description = self._remote.get_description()
296 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
296 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
297
297
298 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
298 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
299 if self.is_empty():
299 if self.is_empty():
300 return OrderedDict()
300 return OrderedDict()
301
301
302 result = []
302 result = []
303 for ref, sha in self._refs.iteritems():
303 for ref, sha in self._refs.iteritems():
304 if ref.startswith(prefix):
304 if ref.startswith(prefix):
305 ref_name = ref
305 ref_name = ref
306 if strip_prefix:
306 if strip_prefix:
307 ref_name = ref[len(prefix):]
307 ref_name = ref[len(prefix):]
308 result.append((safe_unicode(ref_name), sha))
308 result.append((safe_unicode(ref_name), sha))
309
309
310 def get_name(entry):
310 def get_name(entry):
311 return entry[0]
311 return entry[0]
312
312
313 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
313 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
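# Illustrative sketch, not part of this changeset: given refs such as
#   {'refs/heads/master': '<sha1>', 'refs/tags/v1.0.0': '<sha2>'}
# a call like _get_refs_entries(prefix='refs/heads/', strip_prefix=True)
# keeps only the refs under that prefix, strips the prefix from the name and
# returns OrderedDict([(u'master', '<sha1>')]) sorted by ref name.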
314
314
315 def _get_branches(self):
315 def _get_branches(self):
316 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
316 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
317
317
318 @CachedProperty
318 @CachedProperty
319 def branches(self):
319 def branches(self):
320 return self._get_branches()
320 return self._get_branches()
321
321
322 @CachedProperty
322 @CachedProperty
323 def branches_closed(self):
323 def branches_closed(self):
324 return {}
324 return {}
325
325
326 @CachedProperty
326 @CachedProperty
327 def bookmarks(self):
327 def bookmarks(self):
328 return {}
328 return {}
329
329
330 @CachedProperty
330 @CachedProperty
331 def branches_all(self):
331 def branches_all(self):
332 all_branches = {}
332 all_branches = {}
333 all_branches.update(self.branches)
333 all_branches.update(self.branches)
334 all_branches.update(self.branches_closed)
334 all_branches.update(self.branches_closed)
335 return all_branches
335 return all_branches
336
336
337 @CachedProperty
337 @CachedProperty
338 def tags(self):
338 def tags(self):
339 return self._get_tags()
339 return self._get_tags()
340
340
341 def _get_tags(self):
341 def _get_tags(self):
342 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
342 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
343
343
344 def tag(self, name, user, commit_id=None, message=None, date=None,
344 def tag(self, name, user, commit_id=None, message=None, date=None,
345 **kwargs):
345 **kwargs):
346 # TODO: fix this method to apply annotated tags correctly with a message
346 # TODO: fix this method to apply annotated tags correctly with a message
347 """
347 """
348 Creates and returns a tag for the given ``commit_id``.
348 Creates and returns a tag for the given ``commit_id``.
349
349
350 :param name: name for new tag
350 :param name: name for new tag
351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
351 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
352 :param commit_id: commit id for which new tag would be created
352 :param commit_id: commit id for which new tag would be created
353 :param message: message of the tag's commit
353 :param message: message of the tag's commit
354 :param date: date of tag's commit
354 :param date: date of tag's commit
355
355
356 :raises TagAlreadyExistError: if tag with same name already exists
356 :raises TagAlreadyExistError: if tag with same name already exists
357 """
357 """
358 if name in self.tags:
358 if name in self.tags:
359 raise TagAlreadyExistError("Tag %s already exists" % name)
359 raise TagAlreadyExistError("Tag %s already exists" % name)
360 commit = self.get_commit(commit_id=commit_id)
360 commit = self.get_commit(commit_id=commit_id)
361 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
361 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
362
362
363 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
363 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
364
364
365 self._invalidate_prop_cache('tags')
365 self._invalidate_prop_cache('tags')
366 self._invalidate_prop_cache('_refs')
366 self._invalidate_prop_cache('_refs')
367
367
368 return commit
368 return commit
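# Illustrative usage sketch, not part of this changeset. The repository path
# and the import location are assumptions made for this example only.
from rhodecode.lib.vcs.backends.git.repository import GitRepository

repo = GitRepository('/srv/repos/example.git')  # hypothetical repo path
# tag the newest commit; tag() creates refs/tags/v1.0.0 and invalidates the
# cached 'tags' and '_refs' properties so the new ref is visible immediately
tagged_commit = repo.tag(
    name='v1.0.0',
    user='Joe Doe <joe.doe@example.com>',
    commit_id=repo.commit_ids[-1])
assert 'v1.0.0' in repo.tags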
369
369
370 def remove_tag(self, name, user, message=None, date=None):
370 def remove_tag(self, name, user, message=None, date=None):
371 """
371 """
372 Removes tag with the given ``name``.
372 Removes tag with the given ``name``.
373
373
374 :param name: name of the tag to be removed
374 :param name: name of the tag to be removed
375 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
375 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
376 :param message: message of the tag's removal commit
376 :param message: message of the tag's removal commit
377 :param date: date of tag's removal commit
377 :param date: date of tag's removal commit
378
378
379 :raises TagDoesNotExistError: if tag with given name does not exist
379 :raises TagDoesNotExistError: if tag with given name does not exist
380 """
380 """
381 if name not in self.tags:
381 if name not in self.tags:
382 raise TagDoesNotExistError("Tag %s does not exist" % name)
382 raise TagDoesNotExistError("Tag %s does not exist" % name)
383
383
384 self._remote.tag_remove(name)
384 self._remote.tag_remove(name)
385 self._invalidate_prop_cache('tags')
385 self._invalidate_prop_cache('tags')
386 self._invalidate_prop_cache('_refs')
386 self._invalidate_prop_cache('_refs')
387
387
388 def _get_refs(self):
388 def _get_refs(self):
389 return self._remote.get_refs()
389 return self._remote.get_refs()
390
390
391 @CachedProperty
391 @CachedProperty
392 def _refs(self):
392 def _refs(self):
393 return self._get_refs()
393 return self._get_refs()
394
394
395 @property
395 @property
396 def _ref_tree(self):
396 def _ref_tree(self):
397 node = tree = {}
397 node = tree = {}
398 for ref, sha in self._refs.iteritems():
398 for ref, sha in self._refs.iteritems():
399 path = ref.split('/')
399 path = ref.split('/')
400 for bit in path[:-1]:
400 for bit in path[:-1]:
401 node = node.setdefault(bit, {})
401 node = node.setdefault(bit, {})
402 node[path[-1]] = sha
402 node[path[-1]] = sha
403 node = tree
403 node = tree
404 return tree
404 return tree
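# Illustrative sketch, not part of this changeset: _ref_tree nests the flat
# ref names by path segment, so refs
#   refs/heads/master  -> <sha1>
#   refs/tags/v1.0.0   -> <sha2>
# become
#   {'refs': {'heads': {'master': '<sha1>'}, 'tags': {'v1.0.0': '<sha2>'}}}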
405
405
406 def get_remote_ref(self, ref_name):
406 def get_remote_ref(self, ref_name):
407 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
407 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
408 try:
408 try:
409 return self._refs[ref_key]
409 return self._refs[ref_key]
410 except Exception:
410 except Exception:
411 return
411 return
412
412
413 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
413 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
414 """
414 """
415 Returns `GitCommit` object representing commit from git repository
415 Returns `GitCommit` object representing commit from git repository
416 at the given `commit_id` or head (most recent commit) if None given.
416 at the given `commit_id` or head (most recent commit) if None given.
417 """
417 """
418 if self.is_empty():
418 if self.is_empty():
419 raise EmptyRepositoryError("There are no commits yet")
419 raise EmptyRepositoryError("There are no commits yet")
420
420
421 if commit_id is not None:
421 if commit_id is not None:
422 self._validate_commit_id(commit_id)
422 self._validate_commit_id(commit_id)
423 try:
423 try:
424 # we have cached idx, use it without contacting the remote
424 # we have cached idx, use it without contacting the remote
425 idx = self._commit_ids[commit_id]
425 idx = self._commit_ids[commit_id]
426 return GitCommit(self, commit_id, idx, pre_load=pre_load)
426 return GitCommit(self, commit_id, idx, pre_load=pre_load)
427 except KeyError:
427 except KeyError:
428 pass
428 pass
429
429
430 elif commit_idx is not None:
430 elif commit_idx is not None:
431 self._validate_commit_idx(commit_idx)
431 self._validate_commit_idx(commit_idx)
432 try:
432 try:
433 _commit_id = self.commit_ids[commit_idx]
433 _commit_id = self.commit_ids[commit_idx]
434 if commit_idx < 0:
434 if commit_idx < 0:
435 commit_idx = self.commit_ids.index(_commit_id)
435 commit_idx = self.commit_ids.index(_commit_id)
436 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
436 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
437 except IndexError:
437 except IndexError:
438 commit_id = commit_idx
438 commit_id = commit_idx
439 else:
439 else:
440 commit_id = "tip"
440 commit_id = "tip"
441
441
442 if translate_tag:
442 if translate_tag:
443 commit_id = self._lookup_commit(commit_id)
443 commit_id = self._lookup_commit(commit_id)
444
444
445 try:
445 try:
446 idx = self._commit_ids[commit_id]
446 idx = self._commit_ids[commit_id]
447 except KeyError:
447 except KeyError:
448 idx = -1
448 idx = -1
449
449
450 return GitCommit(self, commit_id, idx, pre_load=pre_load)
450 return GitCommit(self, commit_id, idx, pre_load=pre_load)
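# Illustrative usage sketch, not part of this changeset; `repo` is a
# GitRepository instance constructed as in the tag() sketch further up.
tip = repo.get_commit()                # head commit, symbolic "tip"
first = repo.get_commit(commit_idx=0)  # oldest commit by index
same_tip = repo.get_commit(commit_id=tip.raw_id)
# ids that cannot be resolved raise CommitDoesNotExistError, as implemented
# above, instead of returning a partial GitCommit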
451
451
452 def get_commits(
452 def get_commits(
453 self, start_id=None, end_id=None, start_date=None, end_date=None,
453 self, start_id=None, end_id=None, start_date=None, end_date=None,
454 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
454 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
455 """
455 """
456 Returns generator of `GitCommit` objects from start to end (both
456 Returns generator of `GitCommit` objects from start to end (both
457 are inclusive), in ascending date order.
457 are inclusive), in ascending date order.
458
458
459 :param start_id: None, str(commit_id)
459 :param start_id: None, str(commit_id)
460 :param end_id: None, str(commit_id)
460 :param end_id: None, str(commit_id)
461 :param start_date: if specified, commits with commit date less than
461 :param start_date: if specified, commits with commit date less than
462 ``start_date`` would be filtered out from returned set
462 ``start_date`` would be filtered out from returned set
463 :param end_date: if specified, commits with commit date greater than
463 :param end_date: if specified, commits with commit date greater than
464 ``end_date`` would be filtered out from returned set
464 ``end_date`` would be filtered out from returned set
465 :param branch_name: if specified, commits not reachable from given
465 :param branch_name: if specified, commits not reachable from given
466 branch would be filtered out from returned set
466 branch would be filtered out from returned set
467 :param show_hidden: Show hidden commits such as obsolete or hidden from
467 :param show_hidden: Show hidden commits such as obsolete or hidden from
468 Mercurial evolve
468 Mercurial evolve
469 :raise BranchDoesNotExistError: If given `branch_name` does not
469 :raise BranchDoesNotExistError: If given `branch_name` does not
470 exist.
470 exist.
471 :raise CommitDoesNotExistError: If commits for given `start` or
471 :raise CommitDoesNotExistError: If commits for given `start` or
472 `end` could not be found.
472 `end` could not be found.
473
473
474 """
474 """
475 if self.is_empty():
475 if self.is_empty():
476 raise EmptyRepositoryError("There are no commits yet")
476 raise EmptyRepositoryError("There are no commits yet")
477
477
478 self._validate_branch_name(branch_name)
478 self._validate_branch_name(branch_name)
479
479
480 if start_id is not None:
480 if start_id is not None:
481 self._validate_commit_id(start_id)
481 self._validate_commit_id(start_id)
482 if end_id is not None:
482 if end_id is not None:
483 self._validate_commit_id(end_id)
483 self._validate_commit_id(end_id)
484
484
485 start_raw_id = self._lookup_commit(start_id)
485 start_raw_id = self._lookup_commit(start_id)
486 start_pos = self._commit_ids[start_raw_id] if start_id else None
486 start_pos = self._commit_ids[start_raw_id] if start_id else None
487 end_raw_id = self._lookup_commit(end_id)
487 end_raw_id = self._lookup_commit(end_id)
488 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
488 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
489
489
490 if None not in [start_id, end_id] and start_pos > end_pos:
490 if None not in [start_id, end_id] and start_pos > end_pos:
491 raise RepositoryError(
491 raise RepositoryError(
492 "Start commit '%s' cannot be after end commit '%s'" %
492 "Start commit '%s' cannot be after end commit '%s'" %
493 (start_id, end_id))
493 (start_id, end_id))
494
494
495 if end_pos is not None:
495 if end_pos is not None:
496 end_pos += 1
496 end_pos += 1
497
497
498 filter_ = []
498 filter_ = []
499 if branch_name:
499 if branch_name:
500 filter_.append({'branch_name': branch_name})
500 filter_.append({'branch_name': branch_name})
501 if start_date and not end_date:
501 if start_date and not end_date:
502 filter_.append({'since': start_date})
502 filter_.append({'since': start_date})
503 if end_date and not start_date:
503 if end_date and not start_date:
504 filter_.append({'until': end_date})
504 filter_.append({'until': end_date})
505 if start_date and end_date:
505 if start_date and end_date:
506 filter_.append({'since': start_date})
506 filter_.append({'since': start_date})
507 filter_.append({'until': end_date})
507 filter_.append({'until': end_date})
508
508
509 # if start_pos or end_pos:
509 # if start_pos or end_pos:
510 # filter_.append({'start': start_pos})
510 # filter_.append({'start': start_pos})
511 # filter_.append({'end': end_pos})
511 # filter_.append({'end': end_pos})
512
512
513 if filter_:
513 if filter_:
514 revfilters = {
514 revfilters = {
515 'branch_name': branch_name,
515 'branch_name': branch_name,
516 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
516 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
517 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
517 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
518 'start': start_pos,
518 'start': start_pos,
519 'end': end_pos,
519 'end': end_pos,
520 }
520 }
521 commit_ids = self._get_commit_ids(filters=revfilters)
521 commit_ids = self._get_commit_ids(filters=revfilters)
522
522
523 else:
523 else:
524 commit_ids = self.commit_ids
524 commit_ids = self.commit_ids
525
525
526 if start_pos or end_pos:
526 if start_pos or end_pos:
527 commit_ids = commit_ids[start_pos: end_pos]
527 commit_ids = commit_ids[start_pos: end_pos]
528
528
529 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
529 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
530 translate_tag=translate_tags)
530 translate_tag=translate_tags)
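# Illustrative usage sketch, not part of this changeset; the dates and branch
# name are assumptions, `repo` as in the earlier sketches.
import datetime

commits = repo.get_commits(
    branch_name='master',
    start_date=datetime.datetime(2019, 1, 1),
    end_date=datetime.datetime(2019, 6, 30))
changed = [(commit.raw_id, commit.date) for commit in commits]
# with any filter set, the ids come from _get_commit_ids(filters=...);
# otherwise the cached self.commit_ids list is sliced by start/end position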
531
531
532 def get_diff(
532 def get_diff(
533 self, commit1, commit2, path='', ignore_whitespace=False,
533 self, commit1, commit2, path='', ignore_whitespace=False,
534 context=3, path1=None):
534 context=3, path1=None):
535 """
535 """
536 Returns (git like) *diff*, as plain text. Shows changes introduced by
536 Returns (git like) *diff*, as plain text. Shows changes introduced by
537 ``commit2`` since ``commit1``.
537 ``commit2`` since ``commit1``.
538
538
539 :param commit1: Entry point from which diff is shown. Can be
539 :param commit1: Entry point from which diff is shown. Can be
540 ``self.EMPTY_COMMIT`` - in this case, patch showing all
540 ``self.EMPTY_COMMIT`` - in this case, patch showing all
541 the changes since empty state of the repository until ``commit2``
541 the changes since empty state of the repository until ``commit2``
542 :param commit2: Commit up to which the changes should be shown.
542 :param commit2: Commit up to which the changes should be shown.
543 :param ignore_whitespace: If set to ``True``, would not show whitespace
543 :param ignore_whitespace: If set to ``True``, would not show whitespace
544 changes. Defaults to ``False``.
544 changes. Defaults to ``False``.
545 :param context: How many lines before/after changed lines should be
545 :param context: How many lines before/after changed lines should be
546 shown. Defaults to ``3``.
546 shown. Defaults to ``3``.
547 """
547 """
548 self._validate_diff_commits(commit1, commit2)
548 self._validate_diff_commits(commit1, commit2)
549 if path1 is not None and path1 != path:
549 if path1 is not None and path1 != path:
550 raise ValueError("Diff of two different paths not supported.")
550 raise ValueError("Diff of two different paths not supported.")
551
551
552 if path:
552 if path:
553 file_filter = path
553 file_filter = path
554 else:
554 else:
555 file_filter = None
555 file_filter = None
556
556
557 diff = self._remote.diff(
557 diff = self._remote.diff(
558 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
558 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
559 opt_ignorews=ignore_whitespace,
559 opt_ignorews=ignore_whitespace,
560 context=context)
560 context=context)
561 return GitDiff(diff)
561 return GitDiff(diff)
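# Illustrative usage sketch, not part of this changeset; the file path is an
# assumption, `repo` as in the earlier sketches.
old = repo.get_commit(commit_idx=0)
new = repo.get_commit()  # head
diff = repo.get_diff(old, new, path='README.rst',
                     ignore_whitespace=True, context=5)
# GitDiff wraps the raw text returned by the remote diff call; an empty
# `path` (the default) diffs the whole repository between the two commits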
562
562
563 def strip(self, commit_id, branch_name):
563 def strip(self, commit_id, branch_name):
564 commit = self.get_commit(commit_id=commit_id)
564 commit = self.get_commit(commit_id=commit_id)
565 if commit.merge:
565 if commit.merge:
566 raise Exception('Cannot reset to merge commit')
566 raise Exception('Cannot reset to merge commit')
567
567
568 # parent is going to be the new head now
568 # parent is going to be the new head now
569 commit = commit.parents[0]
569 commit = commit.parents[0]
570 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
570 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
571
571
572 # clear cached properties
572 # clear cached properties
573 self._invalidate_prop_cache('commit_ids')
573 self._invalidate_prop_cache('commit_ids')
574 self._invalidate_prop_cache('_refs')
574 self._invalidate_prop_cache('_refs')
575 self._invalidate_prop_cache('branches')
575 self._invalidate_prop_cache('branches')
576
576
577 return len(self.commit_ids)
577 return len(self.commit_ids)
578
578
579 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
579 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
580 if commit_id1 == commit_id2:
580 if commit_id1 == commit_id2:
581 return commit_id1
581 return commit_id1
582
582
583 if self != repo2:
583 if self != repo2:
584 commits = self._remote.get_missing_revs(
584 commits = self._remote.get_missing_revs(
585 commit_id1, commit_id2, repo2.path)
585 commit_id1, commit_id2, repo2.path)
586 if commits:
586 if commits:
587 commit = repo2.get_commit(commits[-1])
587 commit = repo2.get_commit(commits[-1])
588 if commit.parents:
588 if commit.parents:
589 ancestor_id = commit.parents[0].raw_id
589 ancestor_id = commit.parents[0].raw_id
590 else:
590 else:
591 ancestor_id = None
591 ancestor_id = None
592 else:
592 else:
593 # no commits from other repo, ancestor_id is the commit_id2
593 # no commits from other repo, ancestor_id is the commit_id2
594 ancestor_id = commit_id2
594 ancestor_id = commit_id2
595 else:
595 else:
596 output, __ = self.run_git_command(
596 output, __ = self.run_git_command(
597 ['merge-base', commit_id1, commit_id2])
597 ['merge-base', commit_id1, commit_id2])
598 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
598 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
599
599
600 return ancestor_id
600 return ancestor_id
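# Illustrative sketch, not part of this changeset: within one repository the
# ancestor is simply the output of `git merge-base <id1> <id2>`; across two
# repositories it is the first parent of the last commit reported missing by
# get_missing_revs(), falling back to commit_id2 when nothing is missing
# (i.e. the source commits are already contained in the target).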
601
601
602 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
602 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
603 repo1 = self
603 repo1 = self
604 ancestor_id = None
604 ancestor_id = None
605
605
606 if commit_id1 == commit_id2:
606 if commit_id1 == commit_id2:
607 commits = []
607 commits = []
608 elif repo1 != repo2:
608 elif repo1 != repo2:
609 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
609 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
610 repo2.path)
610 repo2.path)
611 commits = [
611 commits = [
612 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
612 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
613 for commit_id in reversed(missing_ids)]
613 for commit_id in reversed(missing_ids)]
614 else:
614 else:
615 output, __ = repo1.run_git_command(
615 output, __ = repo1.run_git_command(
616 ['log', '--reverse', '--pretty=format: %H', '-s',
616 ['log', '--reverse', '--pretty=format: %H', '-s',
617 '%s..%s' % (commit_id1, commit_id2)])
617 '%s..%s' % (commit_id1, commit_id2)])
618 commits = [
618 commits = [
619 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
619 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
620 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
620 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
621
621
622 return commits
622 return commits
623
623
624 @LazyProperty
624 @LazyProperty
625 def in_memory_commit(self):
625 def in_memory_commit(self):
626 """
626 """
627 Returns ``GitInMemoryCommit`` object for this repository.
627 Returns ``GitInMemoryCommit`` object for this repository.
628 """
628 """
629 return GitInMemoryCommit(self)
629 return GitInMemoryCommit(self)
630
630
631 def pull(self, url, commit_ids=None, update_after=False):
631 def pull(self, url, commit_ids=None, update_after=False):
632 """
632 """
633 Pull changes from an external location. In GIT, pull is different
633 Pull changes from an external location. In GIT, pull is different
634 from fetch since it also does a checkout
634 from fetch since it also does a checkout
635
635
636 :param commit_ids: Optional. Can be set to a list of commit ids
636 :param commit_ids: Optional. Can be set to a list of commit ids
637 which shall be pulled from the other repository.
637 which shall be pulled from the other repository.
638 """
638 """
639 refs = None
639 refs = None
640 if commit_ids is not None:
640 if commit_ids is not None:
641 remote_refs = self._remote.get_remote_refs(url)
641 remote_refs = self._remote.get_remote_refs(url)
642 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
642 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
643 self._remote.pull(url, refs=refs, update_after=update_after)
643 self._remote.pull(url, refs=refs, update_after=update_after)
644 self._remote.invalidate_vcs_cache()
644 self._remote.invalidate_vcs_cache()
645
645
646 def fetch(self, url, commit_ids=None):
646 def fetch(self, url, commit_ids=None):
647 """
647 """
648 Fetch all git objects from external location.
648 Fetch all git objects from external location.
649 """
649 """
650 self._remote.sync_fetch(url, refs=commit_ids)
650 self._remote.sync_fetch(url, refs=commit_ids)
651 self._remote.invalidate_vcs_cache()
651 self._remote.invalidate_vcs_cache()
652
652
653 def push(self, url):
653 def push(self, url):
654 refs = None
654 refs = None
655 self._remote.sync_push(url, refs=refs)
655 self._remote.sync_push(url, refs=refs)
656
656
657 def set_refs(self, ref_name, commit_id):
657 def set_refs(self, ref_name, commit_id):
658 self._remote.set_refs(ref_name, commit_id)
658 self._remote.set_refs(ref_name, commit_id)
659 self._invalidate_prop_cache('_refs')
659 self._invalidate_prop_cache('_refs')
660
660
661 def remove_ref(self, ref_name):
661 def remove_ref(self, ref_name):
662 self._remote.remove_ref(ref_name)
662 self._remote.remove_ref(ref_name)
663 self._invalidate_prop_cache('_refs')
663 self._invalidate_prop_cache('_refs')
664
664
665 def _update_server_info(self):
665 def _update_server_info(self):
666 """
666 """
667 runs git's update-server-info command in this repo instance
667 runs git's update-server-info command in this repo instance
668 """
668 """
669 self._remote.update_server_info()
669 self._remote.update_server_info()
670
670
671 def _current_branch(self):
671 def _current_branch(self):
672 """
672 """
673 Return the name of the current branch.
673 Return the name of the current branch.
674
674
675 It only works for non bare repositories (i.e. repositories with a
675 It only works for non bare repositories (i.e. repositories with a
676 working copy)
676 working copy)
677 """
677 """
678 if self.bare:
678 if self.bare:
679 raise RepositoryError('Bare git repos do not have active branches')
679 raise RepositoryError('Bare git repos do not have active branches')
680
680
681 if self.is_empty():
681 if self.is_empty():
682 return None
682 return None
683
683
684 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
684 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
685 return stdout.strip()
685 return stdout.strip()
686
686
687 def _checkout(self, branch_name, create=False, force=False):
687 def _checkout(self, branch_name, create=False, force=False):
688 """
688 """
689 Checkout a branch in the working directory.
689 Checkout a branch in the working directory.
690
690
691 It tries to create the branch if create is True, failing if the branch
691 It tries to create the branch if create is True, failing if the branch
692 already exists.
692 already exists.
693
693
694 It only works for non bare repositories (i.e. repositories with a
694 It only works for non bare repositories (i.e. repositories with a
695 working copy)
695 working copy)
696 """
696 """
697 if self.bare:
697 if self.bare:
698 raise RepositoryError('Cannot checkout branches in a bare git repo')
698 raise RepositoryError('Cannot checkout branches in a bare git repo')
699
699
700 cmd = ['checkout']
700 cmd = ['checkout']
701 if force:
701 if force:
702 cmd.append('-f')
702 cmd.append('-f')
703 if create:
703 if create:
704 cmd.append('-b')
704 cmd.append('-b')
705 cmd.append(branch_name)
705 cmd.append(branch_name)
706 self.run_git_command(cmd, fail_on_stderr=False)
706 self.run_git_command(cmd, fail_on_stderr=False)
707
707
708 def _create_branch(self, branch_name, commit_id):
708 def _create_branch(self, branch_name, commit_id):
709 """
709 """
710 creates a branch in a GIT repo
710 creates a branch in a GIT repo
711 """
711 """
712 self._remote.create_branch(branch_name, commit_id)
712 self._remote.create_branch(branch_name, commit_id)
713
713
714 def _identify(self):
714 def _identify(self):
715 """
715 """
716 Return the current state of the working directory.
716 Return the current state of the working directory.
717 """
717 """
718 if self.bare:
718 if self.bare:
719 raise RepositoryError('Bare git repos do not have active branches')
719 raise RepositoryError('Bare git repos do not have active branches')
720
720
721 if self.is_empty():
721 if self.is_empty():
722 return None
722 return None
723
723
724 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
724 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
725 return stdout.strip()
725 return stdout.strip()
726
726
727 def _local_clone(self, clone_path, branch_name, source_branch=None):
727 def _local_clone(self, clone_path, branch_name, source_branch=None):
728 """
728 """
729 Create a local clone of the current repo.
729 Create a local clone of the current repo.
730 """
730 """
731 # N.B.(skreft): the --branch option is required as otherwise the shallow
731 # N.B.(skreft): the --branch option is required as otherwise the shallow
732 # clone will only fetch the active branch.
732 # clone will only fetch the active branch.
733 cmd = ['clone', '--branch', branch_name,
733 cmd = ['clone', '--branch', branch_name,
734 self.path, os.path.abspath(clone_path)]
734 self.path, os.path.abspath(clone_path)]
735
735
736 self.run_git_command(cmd, fail_on_stderr=False)
736 self.run_git_command(cmd, fail_on_stderr=False)
737
737
738 # if we get a different source branch, make sure we also fetch it for
738 # if we get a different source branch, make sure we also fetch it for
739 # merge conditions
739 # merge conditions
740 if source_branch and source_branch != branch_name:
740 if source_branch and source_branch != branch_name:
741 # check if the ref exists.
741 # check if the ref exists.
742 shadow_repo = GitRepository(os.path.abspath(clone_path))
742 shadow_repo = GitRepository(os.path.abspath(clone_path))
743 if shadow_repo.get_remote_ref(source_branch):
743 if shadow_repo.get_remote_ref(source_branch):
744 cmd = ['fetch', self.path, source_branch]
744 cmd = ['fetch', self.path, source_branch]
745 self.run_git_command(cmd, fail_on_stderr=False)
745 self.run_git_command(cmd, fail_on_stderr=False)
746
746
747 def _local_fetch(self, repository_path, branch_name, use_origin=False):
747 def _local_fetch(self, repository_path, branch_name, use_origin=False):
748 """
748 """
749 Fetch a branch from a local repository.
749 Fetch a branch from a local repository.
750 """
750 """
751 repository_path = os.path.abspath(repository_path)
751 repository_path = os.path.abspath(repository_path)
752 if repository_path == self.path:
752 if repository_path == self.path:
753 raise ValueError('Cannot fetch from the same repository')
753 raise ValueError('Cannot fetch from the same repository')
754
754
755 if use_origin:
755 if use_origin:
756 branch_name = '+{branch}:refs/heads/{branch}'.format(
756 branch_name = '+{branch}:refs/heads/{branch}'.format(
757 branch=branch_name)
757 branch=branch_name)
758
758
759 cmd = ['fetch', '--no-tags', '--update-head-ok',
759 cmd = ['fetch', '--no-tags', '--update-head-ok',
760 repository_path, branch_name]
760 repository_path, branch_name]
761 self.run_git_command(cmd, fail_on_stderr=False)
761 self.run_git_command(cmd, fail_on_stderr=False)
762
762
763 def _local_reset(self, branch_name):
763 def _local_reset(self, branch_name):
764 branch_name = '{}'.format(branch_name)
764 branch_name = '{}'.format(branch_name)
765 cmd = ['reset', '--hard', branch_name, '--']
765 cmd = ['reset', '--hard', branch_name, '--']
766 self.run_git_command(cmd, fail_on_stderr=False)
766 self.run_git_command(cmd, fail_on_stderr=False)
767
767
768 def _last_fetch_heads(self):
768 def _last_fetch_heads(self):
769 """
769 """
770 Return the last fetched heads that need merging.
770 Return the last fetched heads that need merging.
771
771
772 The algorithm is defined at
772 The algorithm is defined at
773 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
773 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
774 """
774 """
775 if not self.bare:
775 if not self.bare:
776 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
776 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
777 else:
777 else:
778 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
778 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
779
779
780 heads = []
780 heads = []
781 with open(fetch_heads_path) as f:
781 with open(fetch_heads_path) as f:
782 for line in f:
782 for line in f:
783 if ' not-for-merge ' in line:
783 if ' not-for-merge ' in line:
784 continue
784 continue
785 line = re.sub('\t.*', '', line, flags=re.DOTALL)
785 line = re.sub('\t.*', '', line, flags=re.DOTALL)
786 heads.append(line)
786 heads.append(line)
787
787
788 return heads
788 return heads
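# Illustrative sketch, not part of this changeset: FETCH_HEAD holds one line
# per fetched ref, each starting with a commit id and describing the ref it
# came from. The loop above drops the lines flagged as not-for-merge and cuts
# everything from the first tab onwards, so the result is just the list of
# commit ids that still have to be merged.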
789
789
790 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
790 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
791 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
791 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
792
792
793 def _local_pull(self, repository_path, branch_name, ff_only=True):
793 def _local_pull(self, repository_path, branch_name, ff_only=True):
794 """
794 """
795 Pull a branch from a local repository.
795 Pull a branch from a local repository.
796 """
796 """
797 if self.bare:
797 if self.bare:
798 raise RepositoryError('Cannot pull into a bare git repository')
798 raise RepositoryError('Cannot pull into a bare git repository')
799 # N.B.(skreft): The --ff-only option is to make sure this is a
799 # N.B.(skreft): The --ff-only option is to make sure this is a
800 # fast-forward (i.e., we are only pulling new changes and there are no
800 # fast-forward (i.e., we are only pulling new changes and there are no
801 # conflicts with our current branch)
801 # conflicts with our current branch)
802 # Additionally, that option needs to go before --no-tags, otherwise git
802 # Additionally, that option needs to go before --no-tags, otherwise git
803 # pull complains about it being an unknown flag.
803 # pull complains about it being an unknown flag.
804 cmd = ['pull']
804 cmd = ['pull']
805 if ff_only:
805 if ff_only:
806 cmd.append('--ff-only')
806 cmd.append('--ff-only')
807 cmd.extend(['--no-tags', repository_path, branch_name])
807 cmd.extend(['--no-tags', repository_path, branch_name])
808 self.run_git_command(cmd, fail_on_stderr=False)
808 self.run_git_command(cmd, fail_on_stderr=False)
809
809
810 def _local_merge(self, merge_message, user_name, user_email, heads):
810 def _local_merge(self, merge_message, user_name, user_email, heads):
811 """
811 """
812 Merge the given head into the checked out branch.
812 Merge the given head into the checked out branch.
813
813
814 It will force a merge commit.
814 It will force a merge commit.
815
815
816 Currently it raises an error if the repo is empty, as it is not possible
816 Currently it raises an error if the repo is empty, as it is not possible
817 to create a merge commit in an empty repo.
817 to create a merge commit in an empty repo.
818
818
819 :param merge_message: The message to use for the merge commit.
819 :param merge_message: The message to use for the merge commit.
820 :param heads: the heads to merge.
820 :param heads: the heads to merge.
821 """
821 """
822 if self.bare:
822 if self.bare:
823 raise RepositoryError('Cannot merge into a bare git repository')
823 raise RepositoryError('Cannot merge into a bare git repository')
824
824
825 if not heads:
825 if not heads:
826 return
826 return
827
827
828 if self.is_empty():
828 if self.is_empty():
829 # TODO(skreft): do something more robust in this case.
829 # TODO(skreft): do something more robust in this case.
830 raise RepositoryError(
830 raise RepositoryError(
831 'Do not know how to merge into empty repositories yet')
831 'Do not know how to merge into empty repositories yet')
832 unresolved = None
832 unresolved = None
833
833
834 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
834 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
835 # commit message. We also specify the user who is doing the merge.
835 # commit message. We also specify the user who is doing the merge.
836 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
836 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
837 '-c', 'user.email=%s' % safe_str(user_email),
837 '-c', 'user.email=%s' % safe_str(user_email),
838 'merge', '--no-ff', '-m', safe_str(merge_message)]
838 'merge', '--no-ff', '-m', safe_str(merge_message)]
839 cmd.extend(heads)
839 cmd.extend(heads)
840 try:
840 try:
841 output = self.run_git_command(cmd, fail_on_stderr=False)
841 output = self.run_git_command(cmd, fail_on_stderr=False)
842 except RepositoryError:
842 except RepositoryError:
843 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
843 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
844 fail_on_stderr=False)[0].splitlines()
844 fail_on_stderr=False)[0].splitlines()
845 # NOTE(marcink): we add U notation for consistency with HG backend output
845 # NOTE(marcink): we add U notation for consistency with HG backend output
846 unresolved = ['U {}'.format(f) for f in files]
846 unresolved = ['U {}'.format(f) for f in files]
847
847
848 # Cleanup any merge leftovers
848 # Cleanup any merge leftovers
849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
850
850
851 if unresolved:
851 if unresolved:
852 raise UnresolvedFilesInRepo(unresolved)
852 raise UnresolvedFilesInRepo(unresolved)
853 else:
853 else:
854 raise
854 raise
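# Illustrative sketch, not part of this changeset, with assumed file names:
# on a conflicted merge `git diff --name-only --diff-filter U` lists only the
# unmerged paths, e.g.
#   setup.py
#   docs/index.rst
# so UnresolvedFilesInRepo above would carry ['U setup.py', 'U docs/index.rst'],
# mirroring the "U <path>" status notation of the Mercurial backend.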
855
855
856 def _local_push(
856 def _local_push(
857 self, source_branch, repository_path, target_branch,
857 self, source_branch, repository_path, target_branch,
858 enable_hooks=False, rc_scm_data=None):
858 enable_hooks=False, rc_scm_data=None):
859 """
859 """
860 Push the source_branch to the given repository and target_branch.
860 Push the source_branch to the given repository and target_branch.
861
861
862 Currently, if the target_branch is not master and the target repo is
862 Currently, if the target_branch is not master and the target repo is
863 empty, the push will work, but then GitRepository won't be able to find
863 empty, the push will work, but then GitRepository won't be able to find
864 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
864 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
865 pointing to master, which does not exist).
865 pointing to master, which does not exist).
866
866
867 It does not run the hooks in the target repo.
867 It does not run the hooks in the target repo.
868 """
868 """
869 # TODO(skreft): deal with the case in which the target repo is empty,
869 # TODO(skreft): deal with the case in which the target repo is empty,
870 # and the target_branch is not master.
870 # and the target_branch is not master.
871 target_repo = GitRepository(repository_path)
871 target_repo = GitRepository(repository_path)
872 if (not target_repo.bare and
872 if (not target_repo.bare and
873 target_repo._current_branch() == target_branch):
873 target_repo._current_branch() == target_branch):
874 # Git prevents pushing to the checked out branch, so simulate it by
874 # Git prevents pushing to the checked out branch, so simulate it by
875 # pulling into the target repository.
875 # pulling into the target repository.
876 target_repo._local_pull(self.path, source_branch)
876 target_repo._local_pull(self.path, source_branch)
877 else:
877 else:
878 cmd = ['push', os.path.abspath(repository_path),
878 cmd = ['push', os.path.abspath(repository_path),
879 '%s:%s' % (source_branch, target_branch)]
879 '%s:%s' % (source_branch, target_branch)]
880 gitenv = {}
880 gitenv = {}
881 if rc_scm_data:
881 if rc_scm_data:
882 gitenv.update({'RC_SCM_DATA': rc_scm_data})
882 gitenv.update({'RC_SCM_DATA': rc_scm_data})
883
883
884 if not enable_hooks:
884 if not enable_hooks:
885 gitenv['RC_SKIP_HOOKS'] = '1'
885 gitenv['RC_SKIP_HOOKS'] = '1'
886 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
886 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
887
887
888 def _get_new_pr_branch(self, source_branch, target_branch):
888 def _get_new_pr_branch(self, source_branch, target_branch):
889 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
889 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
890 pr_branches = []
890 pr_branches = []
891 for branch in self.branches:
891 for branch in self.branches:
892 if branch.startswith(prefix):
892 if branch.startswith(prefix):
893 pr_branches.append(int(branch[len(prefix):]))
893 pr_branches.append(int(branch[len(prefix):]))
894
894
895 if not pr_branches:
895 if not pr_branches:
896 branch_id = 0
896 branch_id = 0
897 else:
897 else:
898 branch_id = max(pr_branches) + 1
898 branch_id = max(pr_branches) + 1
899
899
900 return '%s%d' % (prefix, branch_id)
900 return '%s%d' % (prefix, branch_id)
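# Illustrative sketch, not part of this changeset, with assumed branch names:
# for source_branch='feature-1' and target_branch='master' the prefix is
# 'pr_feature-1-master_'; with existing branches 'pr_feature-1-master_0' and
# 'pr_feature-1-master_1' the method returns 'pr_feature-1-master_2', and
# with no matching branches it returns 'pr_feature-1-master_0'.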
901
901
902 def _maybe_prepare_merge_workspace(
902 def _maybe_prepare_merge_workspace(
903 self, repo_id, workspace_id, target_ref, source_ref):
903 self, repo_id, workspace_id, target_ref, source_ref):
904 shadow_repository_path = self._get_shadow_repository_path(
904 shadow_repository_path = self._get_shadow_repository_path(
905 self.path, repo_id, workspace_id)
905 self.path, repo_id, workspace_id)
906 if not os.path.exists(shadow_repository_path):
906 if not os.path.exists(shadow_repository_path):
907 self._local_clone(
907 self._local_clone(
908 shadow_repository_path, target_ref.name, source_ref.name)
908 shadow_repository_path, target_ref.name, source_ref.name)
909 log.debug('Prepared %s shadow repository in %s',
909 log.debug('Prepared %s shadow repository in %s',
910 self.alias, shadow_repository_path)
910 self.alias, shadow_repository_path)
911
911
912 return shadow_repository_path
912 return shadow_repository_path
913
913
914 def _merge_repo(self, repo_id, workspace_id, target_ref,
914 def _merge_repo(self, repo_id, workspace_id, target_ref,
915 source_repo, source_ref, merge_message,
915 source_repo, source_ref, merge_message,
916 merger_name, merger_email, dry_run=False,
916 merger_name, merger_email, dry_run=False,
917 use_rebase=False, close_branch=False):
917 use_rebase=False, close_branch=False):
918
918
919 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
919 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
920 'rebase' if use_rebase else 'merge', dry_run)
920 'rebase' if use_rebase else 'merge', dry_run)
921 if target_ref.commit_id != self.branches[target_ref.name]:
921 if target_ref.commit_id != self.branches[target_ref.name]:
922 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
922 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
923 target_ref.commit_id, self.branches[target_ref.name])
923 target_ref.commit_id, self.branches[target_ref.name])
924 return MergeResponse(
924 return MergeResponse(
925 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
925 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
926 metadata={'target_ref': target_ref})
926 metadata={'target_ref': target_ref})
927
927
928 shadow_repository_path = self._maybe_prepare_merge_workspace(
928 shadow_repository_path = self._maybe_prepare_merge_workspace(
929 repo_id, workspace_id, target_ref, source_ref)
929 repo_id, workspace_id, target_ref, source_ref)
930 shadow_repo = self.get_shadow_instance(shadow_repository_path)
930 shadow_repo = self.get_shadow_instance(shadow_repository_path)
931
931
932 # checkout source, if it's different. Otherwise we could not
932 # checkout source, if it's different. Otherwise we could not
933 # fetch proper commits for merge testing
933 # fetch proper commits for merge testing
934 if source_ref.name != target_ref.name:
934 if source_ref.name != target_ref.name:
935 if shadow_repo.get_remote_ref(source_ref.name):
935 if shadow_repo.get_remote_ref(source_ref.name):
936 shadow_repo._checkout(source_ref.name, force=True)
936 shadow_repo._checkout(source_ref.name, force=True)
937
937
938 # checkout target, and fetch changes
938 # checkout target, and fetch changes
939 shadow_repo._checkout(target_ref.name, force=True)
939 shadow_repo._checkout(target_ref.name, force=True)
940
940
941 # fetch/reset pull the target, in case it is changed
941 # fetch/reset pull the target, in case it is changed
942 # this handles even force changes
942 # this handles even force changes
943 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
943 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
944 shadow_repo._local_reset(target_ref.name)
944 shadow_repo._local_reset(target_ref.name)
945
945
946 # Need to reload repo to invalidate the cache, or otherwise we cannot
946 # Need to reload repo to invalidate the cache, or otherwise we cannot
947 # retrieve the last target commit.
947 # retrieve the last target commit.
948 shadow_repo = self.get_shadow_instance(shadow_repository_path)
948 shadow_repo = self.get_shadow_instance(shadow_repository_path)
949 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
949 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
950 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
950 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
951 target_ref, target_ref.commit_id,
951 target_ref, target_ref.commit_id,
952 shadow_repo.branches[target_ref.name])
952 shadow_repo.branches[target_ref.name])
953 return MergeResponse(
953 return MergeResponse(
954 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
954 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
955 metadata={'target_ref': target_ref})
955 metadata={'target_ref': target_ref})
956
956
957 # calculate new branch
957 # calculate new branch
958 pr_branch = shadow_repo._get_new_pr_branch(
958 pr_branch = shadow_repo._get_new_pr_branch(
959 source_ref.name, target_ref.name)
959 source_ref.name, target_ref.name)
960 log.debug('using pull-request merge branch: `%s`', pr_branch)
960 log.debug('using pull-request merge branch: `%s`', pr_branch)
961 # checkout to temp branch, and fetch changes
961 # checkout to temp branch, and fetch changes
962 shadow_repo._checkout(pr_branch, create=True)
962 shadow_repo._checkout(pr_branch, create=True)
963 try:
963 try:
964 shadow_repo._local_fetch(source_repo.path, source_ref.name)
964 shadow_repo._local_fetch(source_repo.path, source_ref.name)
965 except RepositoryError:
965 except RepositoryError:
966 log.exception('Failure when doing local fetch on '
966 log.exception('Failure when doing local fetch on '
967 'shadow repo: %s', shadow_repo)
967 'shadow repo: %s', shadow_repo)
968 return MergeResponse(
968 return MergeResponse(
969 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
969 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
970 metadata={'source_ref': source_ref})
970 metadata={'source_ref': source_ref})
971
971
972 merge_ref = None
972 merge_ref = None
973 merge_failure_reason = MergeFailureReason.NONE
973 merge_failure_reason = MergeFailureReason.NONE
974 metadata = {}
974 metadata = {}
975 try:
975 try:
976 shadow_repo._local_merge(merge_message, merger_name, merger_email,
976 shadow_repo._local_merge(merge_message, merger_name, merger_email,
977 [source_ref.commit_id])
977 [source_ref.commit_id])
978 merge_possible = True
978 merge_possible = True
979
979
980 # Need to invalidate the cache, or otherwise we
980 # Need to invalidate the cache, or otherwise we
981 # cannot retrieve the merge commit.
981 # cannot retrieve the merge commit.
982 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
982 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
983 merge_commit_id = shadow_repo.branches[pr_branch]
983 merge_commit_id = shadow_repo.branches[pr_branch]
984
984
985 # Set a reference pointing to the merge commit. This reference may
985 # Set a reference pointing to the merge commit. This reference may
986 # be used to easily identify the last successful merge commit in
986 # be used to easily identify the last successful merge commit in
987 # the shadow repository.
987 # the shadow repository.
988 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
988 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
989 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
989 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
990 except RepositoryError as e:
990 except RepositoryError as e:
991 log.exception('Failure when doing local merge on git shadow repo')
991 log.exception('Failure when doing local merge on git shadow repo')
992 if isinstance(e, UnresolvedFilesInRepo):
992 if isinstance(e, UnresolvedFilesInRepo):
993 metadata['unresolved_files'] = 'file: ' + (', file: '.join(e.args[0]))
993 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
994
994
995 merge_possible = False
995 merge_possible = False
996 merge_failure_reason = MergeFailureReason.MERGE_FAILED
996 merge_failure_reason = MergeFailureReason.MERGE_FAILED
997
997
998 if merge_possible and not dry_run:
998 if merge_possible and not dry_run:
999 try:
999 try:
1000 shadow_repo._local_push(
1000 shadow_repo._local_push(
1001 pr_branch, self.path, target_ref.name, enable_hooks=True,
1001 pr_branch, self.path, target_ref.name, enable_hooks=True,
1002 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1002 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1003 merge_succeeded = True
1003 merge_succeeded = True
1004 except RepositoryError:
1004 except RepositoryError:
1005 log.exception(
1005 log.exception(
1006 'Failure when doing local push from the shadow '
1006 'Failure when doing local push from the shadow '
1007 'repository to the target repository at %s.', self.path)
1007 'repository to the target repository at %s.', self.path)
1008 merge_succeeded = False
1008 merge_succeeded = False
1009 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1009 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1010 metadata['target'] = 'git shadow repo'
1010 metadata['target'] = 'git shadow repo'
1011 metadata['merge_commit'] = pr_branch
1011 metadata['merge_commit'] = pr_branch
1012 else:
1012 else:
1013 merge_succeeded = False
1013 merge_succeeded = False
1014
1014
1015 return MergeResponse(
1015 return MergeResponse(
1016 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1016 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1017 metadata=metadata)
1017 metadata=metadata)
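# Illustrative sketch, not part of this changeset, with assumed conflicting
# paths: when the local merge fails with UnresolvedFilesInRepo carrying
# ['U setup.py', 'U docs/index.rst'], the formatting above produces
#   metadata['unresolved_files'] ==
#       '\n* conflict: U setup.py\n * conflict: U docs/index.rst'
# so each conflicting file ends up on its own bullet-style line instead of a
# single comma-separated string.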
@@ -1,972 +1,972 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference, BasePathPermissionChecker)
39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 from rhodecode.lib.vcs.compat import configparser
46 from rhodecode.lib.vcs.compat import configparser
47
47
48 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
49 nullid = "\0" * 20
49 nullid = "\0" * 20
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be found at the given
63 Raises RepositoryError if repository could not be found at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
78 # because sometimes we init the repos with config we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns list of commit ids, in ascending order. Being lazy
97 Returns list of commit ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = {commit_id: index
105 self._commit_ids = {commit_id: index
106 for index, commit_id in enumerate(commit_ids)}
106 for index, commit_id in enumerate(commit_ids)}
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository.
125 Gets branches for this repository.
126 Returns only active (not closed) branches by default.
126 Returns only active (not closed) branches by default.
127
127
128 :param active: if True, include active branches
128 :param active: if True, include active branches
129 :param closed: if True, include closed branches
129 :param closed: if True, include closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
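A minimal usage sketch of the branch properties above (not part of this changeset). It assumes MercurialRepository is importable from rhodecode.lib.vcs.backends.hg and that '/srv/repos/example-hg' is a hypothetical, already existing repository path.

from rhodecode.lib.vcs.backends.hg import MercurialRepository

repo = MercurialRepository('/srv/repos/example-hg')  # hypothetical path
print(repo.branches)         # open branches only, an OrderedDict of {name: commit_id}
print(repo.branches_closed)  # closed branches only
print(repo.branches_all)     # union of both, as assembled by branches_all above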
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if a tag with the same name already exists
172 :raises TagAlreadyExistError: if a tag with the same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if a tag with the given name does not exist
203 :raises TagDoesNotExistError: if a tag with the given name does not exist
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
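A hedged sketch of the tag()/remove_tag() API above, reusing the hypothetical repo instance from the earlier sketch; the tag name and user string are illustrative only.

user = 'Joe Doe <joe.doe@example.com>'
tip = repo.get_commit()                          # defaults to 'tip'
repo.tag('v1.0.0', user, commit_id=tip.raw_id)   # returns the tagged commit
print('v1.0.0' in repo.tags)                     # True - the tags cache was invalidated above
repo.remove_tag('v1.0.0', user)                  # raises TagDoesNotExistError if the tag is missing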
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (safe_unicode(n), hexlify(h)) for n, h in
233 (safe_unicode(n), hexlify(h)) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns a (git-like) *diff*, as plain text. Shows changes introduced by
245 Returns a (git-like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which the diff is shown. Can be
248 :param commit1: Entry point from which the diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, the patch shows all
249 ``self.EMPTY_COMMIT`` - in this case, the patch shows all
250 changes since the empty state of the repository up to `commit2`
250 changes since the empty state of the repository up to `commit2`
251 :param commit2: Commit up to which changes should be shown.
251 :param commit2: Commit up to which changes should be shown.
252 :param ignore_whitespace: If set to ``True``, whitespace changes are not
252 :param ignore_whitespace: If set to ``True``, whitespace changes are not
253 shown. Defaults to ``False``.
253 shown. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
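A short sketch of get_diff() between two commits of the hypothetical repo instance used in the earlier sketches; the commit indexes are illustrative.

commit_a = repo.get_commit(commit_idx=0)
commit_b = repo.get_commit(commit_idx=1)
diff = repo.get_diff(commit_a, commit_b, ignore_whitespace=True, context=5)
print(diff.raw)  # assuming MercurialDiff keeps the raw text on .raw, as the base Diff wrapper does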
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup="none")
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
293 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
294 if commit_id1 == commit_id2:
294 if commit_id1 == commit_id2:
295 return commit_id1
295 return commit_id1
296
296
297 ancestors = self._remote.revs_from_revspec(
297 ancestors = self._remote.revs_from_revspec(
298 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
298 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
299 other_path=repo2.path)
299 other_path=repo2.path)
300 return repo2[ancestors[0]].raw_id if ancestors else None
300 return repo2[ancestors[0]].raw_id if ancestors else None
301
301
302 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
302 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
303 if commit_id1 == commit_id2:
303 if commit_id1 == commit_id2:
304 commits = []
304 commits = []
305 else:
305 else:
306 if merge:
306 if merge:
307 indexes = self._remote.revs_from_revspec(
307 indexes = self._remote.revs_from_revspec(
308 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
308 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
309 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
309 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
310 else:
310 else:
311 indexes = self._remote.revs_from_revspec(
311 indexes = self._remote.revs_from_revspec(
312 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
312 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
313 commit_id1, other_path=repo2.path)
313 commit_id1, other_path=repo2.path)
314
314
315 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
315 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
316 for idx in indexes]
316 for idx in indexes]
317
317
318 return commits
318 return commits
319
319
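An illustrative sketch of get_common_ancestor() and compare(); fork stands for a second, hypothetical MercurialRepository instance.

fork = MercurialRepository('/srv/repos/example-hg-fork')  # hypothetical fork
commit_id1 = repo.branches['default']                     # target head
commit_id2 = fork.branches['default']                     # source head in the fork
ancestor_id = repo.get_common_ancestor(commit_id1, commit_id2, fork)
commits = repo.compare(commit_id1, commit_id2, fork, merge=True)
print(ancestor_id, [c.short_id for c in commits])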
320 @staticmethod
320 @staticmethod
321 def check_url(url, config):
321 def check_url(url, config):
322 """
322 """
323 Checks the given url and tries to verify that it is a valid
323 Checks the given url and tries to verify that it is a valid
324 link. Sometimes it may happen that mercurial issues a basic
324 link. Sometimes it may happen that mercurial issues a basic
325 auth request, which can cause the whole API to hang when used from
325 auth request, which can cause the whole API to hang when used from
326 python or other external calls.
326 python or other external calls.
327
327
328 On failure it raises urllib2.HTTPError; the exception is also thrown
328 On failure it raises urllib2.HTTPError; the exception is also thrown
329 when the return code is not 200
329 when the return code is not 200
330 """
330 """
331 # check first if it's not a local url
331 # check first if it's not a local url
332 if os.path.isdir(url) or url.startswith('file:'):
332 if os.path.isdir(url) or url.startswith('file:'):
333 return True
333 return True
334
334
335 # Request the _remote to verify the url
335 # Request the _remote to verify the url
336 return connection.Hg.check_url(url, config.serialize())
336 return connection.Hg.check_url(url, config.serialize())
337
337
338 @staticmethod
338 @staticmethod
339 def is_valid_repository(path):
339 def is_valid_repository(path):
340 return os.path.isdir(os.path.join(path, '.hg'))
340 return os.path.isdir(os.path.join(path, '.hg'))
341
341
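A hedged sketch of the two static helpers above; the url is illustrative, and repo.config is the configuration object already held by the instance.

if MercurialRepository.is_valid_repository('/srv/repos/example-hg'):
    print('looks like a mercurial repository (.hg directory present)')

# local paths and file: urls are accepted directly; anything else is
# verified through the remote (connection.Hg.check_url)
MercurialRepository.check_url('https://code.example.com/hg/project', repo.config)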
342 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
342 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
343 """
343 """
344 Checks for a mercurial repository in the given path. If there
344 Checks for a mercurial repository in the given path. If there
345 is no repository in that path it will raise an exception, unless
345 is no repository in that path it will raise an exception, unless
346 the `create` parameter is set to True - in that case the repository
346 the `create` parameter is set to True - in that case the repository
347 will be created.
347 will be created.
348
348
349 If `src_url` is given, it will try to clone the repository from that
349 If `src_url` is given, it will try to clone the repository from that
350 location. Additionally, the working copy is updated
350 location. Additionally, the working copy is updated
351 according to the `do_workspace_checkout` flag.
351 according to the `do_workspace_checkout` flag.
352 """
352 """
353 if create and os.path.exists(self.path):
353 if create and os.path.exists(self.path):
354 raise RepositoryError(
354 raise RepositoryError(
355 "Cannot create repository at %s, location already exist"
355 "Cannot create repository at %s, location already exist"
356 % self.path)
356 % self.path)
357
357
358 if src_url:
358 if src_url:
359 url = str(self._get_url(src_url))
359 url = str(self._get_url(src_url))
360 MercurialRepository.check_url(url, self.config)
360 MercurialRepository.check_url(url, self.config)
361
361
362 self._remote.clone(url, self.path, do_workspace_checkout)
362 self._remote.clone(url, self.path, do_workspace_checkout)
363
363
364 # Don't try to create if we've already cloned repo
364 # Don't try to create if we've already cloned repo
365 create = False
365 create = False
366
366
367 if create:
367 if create:
368 os.makedirs(self.path, mode=0o755)
368 os.makedirs(self.path, mode=0o755)
369 self._remote.localrepository(create)
369 self._remote.localrepository(create)
370
370
371 @LazyProperty
371 @LazyProperty
372 def in_memory_commit(self):
372 def in_memory_commit(self):
373 return MercurialInMemoryCommit(self)
373 return MercurialInMemoryCommit(self)
374
374
375 @LazyProperty
375 @LazyProperty
376 def description(self):
376 def description(self):
377 description = self._remote.get_config_value(
377 description = self._remote.get_config_value(
378 'web', 'description', untrusted=True)
378 'web', 'description', untrusted=True)
379 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
379 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
380
380
381 @LazyProperty
381 @LazyProperty
382 def contact(self):
382 def contact(self):
383 contact = (
383 contact = (
384 self._remote.get_config_value("web", "contact") or
384 self._remote.get_config_value("web", "contact") or
385 self._remote.get_config_value("ui", "username"))
385 self._remote.get_config_value("ui", "username"))
386 return safe_unicode(contact or self.DEFAULT_CONTACT)
386 return safe_unicode(contact or self.DEFAULT_CONTACT)
387
387
388 @LazyProperty
388 @LazyProperty
389 def last_change(self):
389 def last_change(self):
390 """
390 """
391 Returns last change made on this repository as
391 Returns last change made on this repository as
392 `datetime.datetime` object.
392 `datetime.datetime` object.
393 """
393 """
394 try:
394 try:
395 return self.get_commit().date
395 return self.get_commit().date
396 except RepositoryError:
396 except RepositoryError:
397 tzoffset = makedate()[1]
397 tzoffset = makedate()[1]
398 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
398 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
399
399
400 def _get_fs_mtime(self):
400 def _get_fs_mtime(self):
401 # fallback to filesystem
401 # fallback to filesystem
402 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
402 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
403 st_path = os.path.join(self.path, '.hg', "store")
403 st_path = os.path.join(self.path, '.hg', "store")
404 if os.path.exists(cl_path):
404 if os.path.exists(cl_path):
405 return os.stat(cl_path).st_mtime
405 return os.stat(cl_path).st_mtime
406 else:
406 else:
407 return os.stat(st_path).st_mtime
407 return os.stat(st_path).st_mtime
408
408
409 def _get_url(self, url):
409 def _get_url(self, url):
410 """
410 """
411 Returns the normalized url. If no scheme is given, it falls back
411 Returns the normalized url. If no scheme is given, it falls back
412 to the filesystem
412 to the filesystem
413 (``file:///``) scheme.
413 (``file:///``) scheme.
414 """
414 """
415 url = url.encode('utf8')
415 url = url.encode('utf8')
416 if url != 'default' and '://' not in url:
416 if url != 'default' and '://' not in url:
417 url = "file:" + urllib.pathname2url(url)
417 url = "file:" + urllib.pathname2url(url)
418 return url
418 return url
419
419
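An illustration of the normalization done by _get_url(): plain filesystem paths are turned into file: urls via urllib.pathname2url, while 'default' and scheme-qualified urls pass through unchanged. The paths are hypothetical.

repo._get_url('/srv/repos/other-hg')       # -> 'file:/srv/repos/other-hg'
repo._get_url('default')                   # -> 'default', left untouched
repo._get_url('https://example.com/repo')  # already has a scheme, returned as-is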
420 def get_hook_location(self):
420 def get_hook_location(self):
421 """
421 """
422 Returns the absolute path to the location where hooks are stored
422 Returns the absolute path to the location where hooks are stored
423 """
423 """
424 return os.path.join(self.path, '.hg', '.hgrc')
424 return os.path.join(self.path, '.hg', '.hgrc')
425
425
426 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
426 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
427 """
427 """
428 Returns ``MercurialCommit`` object representing repository's
428 Returns ``MercurialCommit`` object representing repository's
429 commit at the given `commit_id` or `commit_idx`.
429 commit at the given `commit_id` or `commit_idx`.
430 """
430 """
431 if self.is_empty():
431 if self.is_empty():
432 raise EmptyRepositoryError("There are no commits yet")
432 raise EmptyRepositoryError("There are no commits yet")
433
433
434 if commit_id is not None:
434 if commit_id is not None:
435 self._validate_commit_id(commit_id)
435 self._validate_commit_id(commit_id)
436 try:
436 try:
437 # we have cached idx, use it without contacting the remote
437 # we have cached idx, use it without contacting the remote
438 idx = self._commit_ids[commit_id]
438 idx = self._commit_ids[commit_id]
439 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
439 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
440 except KeyError:
440 except KeyError:
441 pass
441 pass
442
442
443 elif commit_idx is not None:
443 elif commit_idx is not None:
444 self._validate_commit_idx(commit_idx)
444 self._validate_commit_idx(commit_idx)
445 try:
445 try:
446 _commit_id = self.commit_ids[commit_idx]
446 _commit_id = self.commit_ids[commit_idx]
447 if commit_idx < 0:
447 if commit_idx < 0:
448 commit_idx = self.commit_ids.index(_commit_id)
448 commit_idx = self.commit_ids.index(_commit_id)
449
449
450 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
450 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
451 except IndexError:
451 except IndexError:
452 commit_id = commit_idx
452 commit_id = commit_idx
453 else:
453 else:
454 commit_id = "tip"
454 commit_id = "tip"
455
455
456 if isinstance(commit_id, unicode):
456 if isinstance(commit_id, unicode):
457 commit_id = safe_str(commit_id)
457 commit_id = safe_str(commit_id)
458
458
459 try:
459 try:
460 raw_id, idx = self._remote.lookup(commit_id, both=True)
460 raw_id, idx = self._remote.lookup(commit_id, both=True)
461 except CommitDoesNotExistError:
461 except CommitDoesNotExistError:
462 msg = "Commit {} does not exist for `{}`".format(
462 msg = "Commit {} does not exist for `{}`".format(
463 *map(safe_str, [commit_id, self.name]))
463 *map(safe_str, [commit_id, self.name]))
464 raise CommitDoesNotExistError(msg)
464 raise CommitDoesNotExistError(msg)
465
465
466 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
466 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
467
467
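A short sketch of the lookup paths in get_commit(): by full commit id (served from the cached index when possible), by numeric index, or 'tip' when neither is given.

head = repo.get_commit()                        # 'tip'
first = repo.get_commit(commit_idx=0)           # first entry of commit_ids
same = repo.get_commit(commit_id=first.raw_id)  # resolved via the _commit_ids cache
last = repo.get_commit(commit_idx=-1)           # negative indexes are normalized above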
468 def get_commits(
468 def get_commits(
469 self, start_id=None, end_id=None, start_date=None, end_date=None,
469 self, start_id=None, end_id=None, start_date=None, end_date=None,
470 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
470 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
471 """
471 """
472 Returns generator of ``MercurialCommit`` objects from start to end
472 Returns generator of ``MercurialCommit`` objects from start to end
473 (both are inclusive)
473 (both are inclusive)
474
474
475 :param start_id: None, str(commit_id)
475 :param start_id: None, str(commit_id)
476 :param end_id: None, str(commit_id)
476 :param end_id: None, str(commit_id)
477 :param start_date: if specified, commits with commit date less than
477 :param start_date: if specified, commits with commit date less than
478 ``start_date`` would be filtered out from returned set
478 ``start_date`` would be filtered out from returned set
479 :param end_date: if specified, commits with commit date greater than
479 :param end_date: if specified, commits with commit date greater than
480 ``end_date`` would be filtered out from returned set
480 ``end_date`` would be filtered out from returned set
481 :param branch_name: if specified, commits not reachable from given
481 :param branch_name: if specified, commits not reachable from given
482 branch would be filtered out from returned set
482 branch would be filtered out from returned set
483 :param show_hidden: Show hidden commits such as obsolete or hidden from
483 :param show_hidden: Show hidden commits such as obsolete or hidden from
484 Mercurial evolve
484 Mercurial evolve
485 :raise BranchDoesNotExistError: If given ``branch_name`` does not
485 :raise BranchDoesNotExistError: If given ``branch_name`` does not
486 exist.
486 exist.
487 :raise CommitDoesNotExistError: If commit for given ``start`` or
487 :raise CommitDoesNotExistError: If commit for given ``start`` or
488 ``end`` could not be found.
488 ``end`` could not be found.
489 """
489 """
490 # actually we should check now if it's not an empty repo
490 # actually we should check now if it's not an empty repo
491 if self.is_empty():
491 if self.is_empty():
492 raise EmptyRepositoryError("There are no commits yet")
492 raise EmptyRepositoryError("There are no commits yet")
493 self._validate_branch_name(branch_name)
493 self._validate_branch_name(branch_name)
494
494
495 branch_ancestors = False
495 branch_ancestors = False
496 if start_id is not None:
496 if start_id is not None:
497 self._validate_commit_id(start_id)
497 self._validate_commit_id(start_id)
498 c_start = self.get_commit(commit_id=start_id)
498 c_start = self.get_commit(commit_id=start_id)
499 start_pos = self._commit_ids[c_start.raw_id]
499 start_pos = self._commit_ids[c_start.raw_id]
500 else:
500 else:
501 start_pos = None
501 start_pos = None
502
502
503 if end_id is not None:
503 if end_id is not None:
504 self._validate_commit_id(end_id)
504 self._validate_commit_id(end_id)
505 c_end = self.get_commit(commit_id=end_id)
505 c_end = self.get_commit(commit_id=end_id)
506 end_pos = max(0, self._commit_ids[c_end.raw_id])
506 end_pos = max(0, self._commit_ids[c_end.raw_id])
507 else:
507 else:
508 end_pos = None
508 end_pos = None
509
509
510 if None not in [start_id, end_id] and start_pos > end_pos:
510 if None not in [start_id, end_id] and start_pos > end_pos:
511 raise RepositoryError(
511 raise RepositoryError(
512 "Start commit '%s' cannot be after end commit '%s'" %
512 "Start commit '%s' cannot be after end commit '%s'" %
513 (start_id, end_id))
513 (start_id, end_id))
514
514
515 if end_pos is not None:
515 if end_pos is not None:
516 end_pos += 1
516 end_pos += 1
517
517
518 commit_filter = []
518 commit_filter = []
519
519
520 if branch_name and not branch_ancestors:
520 if branch_name and not branch_ancestors:
521 commit_filter.append('branch("%s")' % (branch_name,))
521 commit_filter.append('branch("%s")' % (branch_name,))
522 elif branch_name and branch_ancestors:
522 elif branch_name and branch_ancestors:
523 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
523 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
524
524
525 if start_date and not end_date:
525 if start_date and not end_date:
526 commit_filter.append('date(">%s")' % (start_date,))
526 commit_filter.append('date(">%s")' % (start_date,))
527 if end_date and not start_date:
527 if end_date and not start_date:
528 commit_filter.append('date("<%s")' % (end_date,))
528 commit_filter.append('date("<%s")' % (end_date,))
529 if start_date and end_date:
529 if start_date and end_date:
530 commit_filter.append(
530 commit_filter.append(
531 'date(">%s") and date("<%s")' % (start_date, end_date))
531 'date(">%s") and date("<%s")' % (start_date, end_date))
532
532
533 if not show_hidden:
533 if not show_hidden:
534 commit_filter.append('not obsolete()')
534 commit_filter.append('not obsolete()')
535 commit_filter.append('not hidden()')
535 commit_filter.append('not hidden()')
536
536
537 # TODO: johbo: Figure out a simpler way for this solution
537 # TODO: johbo: Figure out a simpler way for this solution
538 collection_generator = CollectionGenerator
538 collection_generator = CollectionGenerator
539 if commit_filter:
539 if commit_filter:
540 commit_filter = ' and '.join(map(safe_str, commit_filter))
540 commit_filter = ' and '.join(map(safe_str, commit_filter))
541 revisions = self._remote.rev_range([commit_filter])
541 revisions = self._remote.rev_range([commit_filter])
542 collection_generator = MercurialIndexBasedCollectionGenerator
542 collection_generator = MercurialIndexBasedCollectionGenerator
543 else:
543 else:
544 revisions = self.commit_ids
544 revisions = self.commit_ids
545
545
546 if start_pos or end_pos:
546 if start_pos or end_pos:
547 revisions = revisions[start_pos:end_pos]
547 revisions = revisions[start_pos:end_pos]
548
548
549 return collection_generator(self, revisions, pre_load=pre_load)
549 return collection_generator(self, revisions, pre_load=pre_load)
550
550
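A sketch of get_commits() with a branch and a date window; the branch name, dates and pre_load keys are illustrative. The filters end up in the revset assembled above (branch(...) and date(...)).

import datetime

commits = repo.get_commits(
    branch_name='default',
    start_date=datetime.datetime(2019, 1, 1),
    end_date=datetime.datetime(2019, 12, 31),
    pre_load=['author', 'message'])
for commit in commits:
    print(commit.short_id, commit.author)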
551 def pull(self, url, commit_ids=None):
551 def pull(self, url, commit_ids=None):
552 """
552 """
553 Pull changes from external location.
553 Pull changes from external location.
554
554
555 :param commit_ids: Optional. Can be set to a list of commit ids
555 :param commit_ids: Optional. Can be set to a list of commit ids
556 which shall be pulled from the other repository.
556 which shall be pulled from the other repository.
557 """
557 """
558 url = self._get_url(url)
558 url = self._get_url(url)
559 self._remote.pull(url, commit_ids=commit_ids)
559 self._remote.pull(url, commit_ids=commit_ids)
560 self._remote.invalidate_vcs_cache()
560 self._remote.invalidate_vcs_cache()
561
561
562 def fetch(self, url, commit_ids=None):
562 def fetch(self, url, commit_ids=None):
563 """
563 """
564 Backward compatibility with GIT fetch==pull
564 Backward compatibility with GIT fetch==pull
565 """
565 """
566 return self.pull(url, commit_ids=commit_ids)
566 return self.pull(url, commit_ids=commit_ids)
567
567
568 def push(self, url):
568 def push(self, url):
569 url = self._get_url(url)
569 url = self._get_url(url)
570 self._remote.sync_push(url)
570 self._remote.sync_push(url)
571
571
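A hedged sketch of the sync helpers above; the urls and the commit id are made up, and every url is first normalized through _get_url().

repo.pull('https://code.example.com/hg/upstream')              # pull everything
repo.pull('/srv/repos/other-hg', commit_ids=['deadbeef' * 5])  # only the listed commits
repo.fetch('https://code.example.com/hg/upstream')             # alias of pull(), kept for git parity
repo.push('https://code.example.com/hg/upstream')              # sync_push on the remote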
572 def _local_clone(self, clone_path):
572 def _local_clone(self, clone_path):
573 """
573 """
574 Create a local clone of the current repo.
574 Create a local clone of the current repo.
575 """
575 """
576 self._remote.clone(self.path, clone_path, update_after_clone=True,
576 self._remote.clone(self.path, clone_path, update_after_clone=True,
577 hooks=False)
577 hooks=False)
578
578
579 def _update(self, revision, clean=False):
579 def _update(self, revision, clean=False):
580 """
580 """
581 Update the working copy to the specified revision.
581 Update the working copy to the specified revision.
582 """
582 """
583 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
583 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
584 self._remote.update(revision, clean=clean)
584 self._remote.update(revision, clean=clean)
585
585
586 def _identify(self):
586 def _identify(self):
587 """
587 """
588 Return the current state of the working directory.
588 Return the current state of the working directory.
589 """
589 """
590 return self._remote.identify().strip().rstrip('+')
590 return self._remote.identify().strip().rstrip('+')
591
591
592 def _heads(self, branch=None):
592 def _heads(self, branch=None):
593 """
593 """
594 Return the commit ids of the repository heads.
594 Return the commit ids of the repository heads.
595 """
595 """
596 return self._remote.heads(branch=branch).strip().split(' ')
596 return self._remote.heads(branch=branch).strip().split(' ')
597
597
598 def _ancestor(self, revision1, revision2):
598 def _ancestor(self, revision1, revision2):
599 """
599 """
600 Return the common ancestor of the two revisions.
600 Return the common ancestor of the two revisions.
601 """
601 """
602 return self._remote.ancestor(revision1, revision2)
602 return self._remote.ancestor(revision1, revision2)
603
603
604 def _local_push(
604 def _local_push(
605 self, revision, repository_path, push_branches=False,
605 self, revision, repository_path, push_branches=False,
606 enable_hooks=False):
606 enable_hooks=False):
607 """
607 """
608 Push the given revision to the specified repository.
608 Push the given revision to the specified repository.
609
609
610 :param push_branches: whether to allow creating branches in the target repo.
610 :param push_branches: whether to allow creating branches in the target repo.
611 """
611 """
612 self._remote.push(
612 self._remote.push(
613 [revision], repository_path, hooks=enable_hooks,
613 [revision], repository_path, hooks=enable_hooks,
614 push_branches=push_branches)
614 push_branches=push_branches)
615
615
616 def _local_merge(self, target_ref, merge_message, user_name, user_email,
616 def _local_merge(self, target_ref, merge_message, user_name, user_email,
617 source_ref, use_rebase=False, dry_run=False):
617 source_ref, use_rebase=False, dry_run=False):
618 """
618 """
619 Merge the given source_revision into the checked out revision.
619 Merge the given source_revision into the checked out revision.
620
620
621 Returns the commit id of the merge and a boolean indicating if the
621 Returns the commit id of the merge and a boolean indicating if the
622 commit needs to be pushed.
622 commit needs to be pushed.
623 """
623 """
624 self._update(target_ref.commit_id, clean=True)
624 self._update(target_ref.commit_id, clean=True)
625
625
626 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
626 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
627 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
627 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
628
628
629 if ancestor == source_ref.commit_id:
629 if ancestor == source_ref.commit_id:
630 # Nothing to do, the changes were already integrated
630 # Nothing to do, the changes were already integrated
631 return target_ref.commit_id, False
631 return target_ref.commit_id, False
632
632
633 elif ancestor == target_ref.commit_id and is_the_same_branch:
633 elif ancestor == target_ref.commit_id and is_the_same_branch:
634 # In this case we should force a commit message
634 # In this case we should force a commit message
635 return source_ref.commit_id, True
635 return source_ref.commit_id, True
636
636
637 unresolved = None
637 unresolved = None
638 if use_rebase:
638 if use_rebase:
639 try:
639 try:
640 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
640 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
641 target_ref.commit_id)
641 target_ref.commit_id)
642 self.bookmark(bookmark_name, revision=source_ref.commit_id)
642 self.bookmark(bookmark_name, revision=source_ref.commit_id)
643 self._remote.rebase(
643 self._remote.rebase(
644 source=source_ref.commit_id, dest=target_ref.commit_id)
644 source=source_ref.commit_id, dest=target_ref.commit_id)
645 self._remote.invalidate_vcs_cache()
645 self._remote.invalidate_vcs_cache()
646 self._update(bookmark_name, clean=True)
646 self._update(bookmark_name, clean=True)
647 return self._identify(), True
647 return self._identify(), True
648 except RepositoryError as e:
648 except RepositoryError as e:
649 # The rebase-abort may raise another exception which 'hides'
649 # The rebase-abort may raise another exception which 'hides'
650 # the original one, therefore we log it here.
650 # the original one, therefore we log it here.
651 log.exception('Error while rebasing shadow repo during merge.')
651 log.exception('Error while rebasing shadow repo during merge.')
652 if 'unresolved conflicts' in e.message:
652 if 'unresolved conflicts' in e.message:
653 unresolved = self._remote.get_unresolved_files()
653 unresolved = self._remote.get_unresolved_files()
654 log.debug('unresolved files: %s', unresolved)
654 log.debug('unresolved files: %s', unresolved)
655
655
656 # Cleanup any rebase leftovers
656 # Cleanup any rebase leftovers
657 self._remote.invalidate_vcs_cache()
657 self._remote.invalidate_vcs_cache()
658 self._remote.rebase(abort=True)
658 self._remote.rebase(abort=True)
659 self._remote.invalidate_vcs_cache()
659 self._remote.invalidate_vcs_cache()
660 self._remote.update(clean=True)
660 self._remote.update(clean=True)
661 if unresolved:
661 if unresolved:
662 raise UnresolvedFilesInRepo(unresolved)
662 raise UnresolvedFilesInRepo(unresolved)
663 else:
663 else:
664 raise
664 raise
665 else:
665 else:
666 try:
666 try:
667 self._remote.merge(source_ref.commit_id)
667 self._remote.merge(source_ref.commit_id)
668 self._remote.invalidate_vcs_cache()
668 self._remote.invalidate_vcs_cache()
669 self._remote.commit(
669 self._remote.commit(
670 message=safe_str(merge_message),
670 message=safe_str(merge_message),
671 username=safe_str('%s <%s>' % (user_name, user_email)))
671 username=safe_str('%s <%s>' % (user_name, user_email)))
672 self._remote.invalidate_vcs_cache()
672 self._remote.invalidate_vcs_cache()
673 return self._identify(), True
673 return self._identify(), True
674 except RepositoryError as e:
674 except RepositoryError as e:
675 # The merge-abort may raise another exception which 'hides'
675 # The merge-abort may raise another exception which 'hides'
676 # the original one, therefore we log it here.
676 # the original one, therefore we log it here.
677 log.exception('Error while merging shadow repo during merge.')
677 log.exception('Error while merging shadow repo during merge.')
678 if 'unresolved merge conflicts' in e.message:
678 if 'unresolved merge conflicts' in e.message:
679 unresolved = self._remote.get_unresolved_files()
679 unresolved = self._remote.get_unresolved_files()
680 log.debug('unresolved files: %s', unresolved)
680 log.debug('unresolved files: %s', unresolved)
681
681
682 # Cleanup any merge leftovers
682 # Cleanup any merge leftovers
683 self._remote.update(clean=True)
683 self._remote.update(clean=True)
684 if unresolved:
684 if unresolved:
685 raise UnresolvedFilesInRepo(unresolved)
685 raise UnresolvedFilesInRepo(unresolved)
686 else:
686 else:
687 raise
687 raise
688
688
689 def _local_close(self, target_ref, user_name, user_email,
689 def _local_close(self, target_ref, user_name, user_email,
690 source_ref, close_message=''):
690 source_ref, close_message=''):
691 """
691 """
692 Close the branch of the given source_revision
692 Close the branch of the given source_revision
693
693
694 Returns the commit id of the close and a boolean indicating if the
694 Returns the commit id of the close and a boolean indicating if the
695 commit needs to be pushed.
695 commit needs to be pushed.
696 """
696 """
697 self._update(source_ref.commit_id)
697 self._update(source_ref.commit_id)
698 message = close_message or "Closing branch: `{}`".format(source_ref.name)
698 message = close_message or "Closing branch: `{}`".format(source_ref.name)
699 try:
699 try:
700 self._remote.commit(
700 self._remote.commit(
701 message=safe_str(message),
701 message=safe_str(message),
702 username=safe_str('%s <%s>' % (user_name, user_email)),
702 username=safe_str('%s <%s>' % (user_name, user_email)),
703 close_branch=True)
703 close_branch=True)
704 self._remote.invalidate_vcs_cache()
704 self._remote.invalidate_vcs_cache()
705 return self._identify(), True
705 return self._identify(), True
706 except RepositoryError:
706 except RepositoryError:
707 # Cleanup any commit leftovers
707 # Cleanup any commit leftovers
708 self._remote.update(clean=True)
708 self._remote.update(clean=True)
709 raise
709 raise
710
710
711 def _is_the_same_branch(self, target_ref, source_ref):
711 def _is_the_same_branch(self, target_ref, source_ref):
712 return (
712 return (
713 self._get_branch_name(target_ref) ==
713 self._get_branch_name(target_ref) ==
714 self._get_branch_name(source_ref))
714 self._get_branch_name(source_ref))
715
715
716 def _get_branch_name(self, ref):
716 def _get_branch_name(self, ref):
717 if ref.type == 'branch':
717 if ref.type == 'branch':
718 return ref.name
718 return ref.name
719 return self._remote.ctx_branch(ref.commit_id)
719 return self._remote.ctx_branch(ref.commit_id)
720
720
721 def _maybe_prepare_merge_workspace(
721 def _maybe_prepare_merge_workspace(
722 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
722 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
723 shadow_repository_path = self._get_shadow_repository_path(
723 shadow_repository_path = self._get_shadow_repository_path(
724 self.path, repo_id, workspace_id)
724 self.path, repo_id, workspace_id)
725 if not os.path.exists(shadow_repository_path):
725 if not os.path.exists(shadow_repository_path):
726 self._local_clone(shadow_repository_path)
726 self._local_clone(shadow_repository_path)
727 log.debug(
727 log.debug(
728 'Prepared shadow repository in %s', shadow_repository_path)
728 'Prepared shadow repository in %s', shadow_repository_path)
729
729
730 return shadow_repository_path
730 return shadow_repository_path
731
731
732 def _merge_repo(self, repo_id, workspace_id, target_ref,
732 def _merge_repo(self, repo_id, workspace_id, target_ref,
733 source_repo, source_ref, merge_message,
733 source_repo, source_ref, merge_message,
734 merger_name, merger_email, dry_run=False,
734 merger_name, merger_email, dry_run=False,
735 use_rebase=False, close_branch=False):
735 use_rebase=False, close_branch=False):
736
736
737 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
737 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
738 'rebase' if use_rebase else 'merge', dry_run)
738 'rebase' if use_rebase else 'merge', dry_run)
739 if target_ref.commit_id not in self._heads():
739 if target_ref.commit_id not in self._heads():
740 return MergeResponse(
740 return MergeResponse(
741 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
741 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
742 metadata={'target_ref': target_ref})
742 metadata={'target_ref': target_ref})
743
743
744 try:
744 try:
745 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
745 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
746 heads = '\n,'.join(self._heads(target_ref.name))
746 heads = '\n,'.join(self._heads(target_ref.name))
747 metadata = {
747 metadata = {
748 'target_ref': target_ref,
748 'target_ref': target_ref,
749 'source_ref': source_ref,
749 'source_ref': source_ref,
750 'heads': heads
750 'heads': heads
751 }
751 }
752 return MergeResponse(
752 return MergeResponse(
753 False, False, None,
753 False, False, None,
754 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
754 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
755 metadata=metadata)
755 metadata=metadata)
756 except CommitDoesNotExistError:
756 except CommitDoesNotExistError:
757 log.exception('Failure when looking up branch heads on hg target')
757 log.exception('Failure when looking up branch heads on hg target')
758 return MergeResponse(
758 return MergeResponse(
759 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
759 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
760 metadata={'target_ref': target_ref})
760 metadata={'target_ref': target_ref})
761
761
762 shadow_repository_path = self._maybe_prepare_merge_workspace(
762 shadow_repository_path = self._maybe_prepare_merge_workspace(
763 repo_id, workspace_id, target_ref, source_ref)
763 repo_id, workspace_id, target_ref, source_ref)
764 shadow_repo = self.get_shadow_instance(shadow_repository_path)
764 shadow_repo = self.get_shadow_instance(shadow_repository_path)
765
765
766 log.debug('Pulling in target reference %s', target_ref)
766 log.debug('Pulling in target reference %s', target_ref)
767 self._validate_pull_reference(target_ref)
767 self._validate_pull_reference(target_ref)
768 shadow_repo._local_pull(self.path, target_ref)
768 shadow_repo._local_pull(self.path, target_ref)
769
769
770 try:
770 try:
771 log.debug('Pulling in source reference %s', source_ref)
771 log.debug('Pulling in source reference %s', source_ref)
772 source_repo._validate_pull_reference(source_ref)
772 source_repo._validate_pull_reference(source_ref)
773 shadow_repo._local_pull(source_repo.path, source_ref)
773 shadow_repo._local_pull(source_repo.path, source_ref)
774 except CommitDoesNotExistError:
774 except CommitDoesNotExistError:
775 log.exception('Failure when doing local pull on hg shadow repo')
775 log.exception('Failure when doing local pull on hg shadow repo')
776 return MergeResponse(
776 return MergeResponse(
777 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
777 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
778 metadata={'source_ref': source_ref})
778 metadata={'source_ref': source_ref})
779
779
780 merge_ref = None
780 merge_ref = None
781 merge_commit_id = None
781 merge_commit_id = None
782 close_commit_id = None
782 close_commit_id = None
783 merge_failure_reason = MergeFailureReason.NONE
783 merge_failure_reason = MergeFailureReason.NONE
784 metadata = {}
784 metadata = {}
785
785
786 # enforce that close branch is only used when the source is
786 # enforce that close branch is only used when the source is
787 # an actual branch
787 # an actual branch
788 close_branch = close_branch and source_ref.type == 'branch'
788 close_branch = close_branch and source_ref.type == 'branch'
789
789
790 # don't allow to close branch if source and target are the same
790 # don't allow to close branch if source and target are the same
791 close_branch = close_branch and source_ref.name != target_ref.name
791 close_branch = close_branch and source_ref.name != target_ref.name
792
792
793 needs_push_on_close = False
793 needs_push_on_close = False
794 if close_branch and not use_rebase and not dry_run:
794 if close_branch and not use_rebase and not dry_run:
795 try:
795 try:
796 close_commit_id, needs_push_on_close = shadow_repo._local_close(
796 close_commit_id, needs_push_on_close = shadow_repo._local_close(
797 target_ref, merger_name, merger_email, source_ref)
797 target_ref, merger_name, merger_email, source_ref)
798 merge_possible = True
798 merge_possible = True
799 except RepositoryError:
799 except RepositoryError:
800 log.exception('Failure when doing close branch on '
800 log.exception('Failure when doing close branch on '
801 'shadow repo: %s', shadow_repo)
801 'shadow repo: %s', shadow_repo)
802 merge_possible = False
802 merge_possible = False
803 merge_failure_reason = MergeFailureReason.MERGE_FAILED
803 merge_failure_reason = MergeFailureReason.MERGE_FAILED
804 else:
804 else:
805 merge_possible = True
805 merge_possible = True
806
806
807 needs_push = False
807 needs_push = False
808 if merge_possible:
808 if merge_possible:
809 try:
809 try:
810 merge_commit_id, needs_push = shadow_repo._local_merge(
810 merge_commit_id, needs_push = shadow_repo._local_merge(
811 target_ref, merge_message, merger_name, merger_email,
811 target_ref, merge_message, merger_name, merger_email,
812 source_ref, use_rebase=use_rebase, dry_run=dry_run)
812 source_ref, use_rebase=use_rebase, dry_run=dry_run)
813 merge_possible = True
813 merge_possible = True
814
814
815 # read the state of the close action, since it
815 # read the state of the close action, since it
816 # may have required a push
816 # may have required a push
817 needs_push = needs_push or needs_push_on_close
817 needs_push = needs_push or needs_push_on_close
818
818
819 # Set a bookmark pointing to the merge commit. This bookmark
819 # Set a bookmark pointing to the merge commit. This bookmark
820 # may be used to easily identify the last successful merge
820 # may be used to easily identify the last successful merge
821 # commit in the shadow repository.
821 # commit in the shadow repository.
822 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
822 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
823 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
823 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
824 except SubrepoMergeError:
824 except SubrepoMergeError:
825 log.exception(
825 log.exception(
826 'Subrepo merge error during local merge on hg shadow repo.')
826 'Subrepo merge error during local merge on hg shadow repo.')
827 merge_possible = False
827 merge_possible = False
828 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
828 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
829 needs_push = False
829 needs_push = False
830 except RepositoryError as e:
830 except RepositoryError as e:
831 log.exception('Failure when doing local merge on hg shadow repo')
831 log.exception('Failure when doing local merge on hg shadow repo')
832 if isinstance(e, UnresolvedFilesInRepo):
832 if isinstance(e, UnresolvedFilesInRepo):
833 metadata['unresolved_files'] = 'file: ' + (', file: '.join(e.args[0]))
833 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
834
834
835 merge_possible = False
835 merge_possible = False
836 merge_failure_reason = MergeFailureReason.MERGE_FAILED
836 merge_failure_reason = MergeFailureReason.MERGE_FAILED
837 needs_push = False
837 needs_push = False
838
838
839 if merge_possible and not dry_run:
839 if merge_possible and not dry_run:
840 if needs_push:
840 if needs_push:
841 # In case the target is a bookmark, update it, so after pushing
841 # In case the target is a bookmark, update it, so after pushing
842 # the bookmark is also updated in the target.
842 # the bookmark is also updated in the target.
843 if target_ref.type == 'book':
843 if target_ref.type == 'book':
844 shadow_repo.bookmark(
844 shadow_repo.bookmark(
845 target_ref.name, revision=merge_commit_id)
845 target_ref.name, revision=merge_commit_id)
846 try:
846 try:
847 shadow_repo_with_hooks = self.get_shadow_instance(
847 shadow_repo_with_hooks = self.get_shadow_instance(
848 shadow_repository_path,
848 shadow_repository_path,
849 enable_hooks=True)
849 enable_hooks=True)
850 # This is the actual merge action, we push from shadow
850 # This is the actual merge action, we push from shadow
851 # into origin.
851 # into origin.
852 # Note: the push_branches option will push any new branch
852 # Note: the push_branches option will push any new branch
853 # defined in the source repository to the target. This may
853 # defined in the source repository to the target. This may
854 # be dangerous as branches are permanent in Mercurial.
854 # be dangerous as branches are permanent in Mercurial.
855 # This feature was requested in issue #441.
855 # This feature was requested in issue #441.
856 shadow_repo_with_hooks._local_push(
856 shadow_repo_with_hooks._local_push(
857 merge_commit_id, self.path, push_branches=True,
857 merge_commit_id, self.path, push_branches=True,
858 enable_hooks=True)
858 enable_hooks=True)
859
859
860 # maybe we also need to push the close_commit_id
860 # maybe we also need to push the close_commit_id
861 if close_commit_id:
861 if close_commit_id:
862 shadow_repo_with_hooks._local_push(
862 shadow_repo_with_hooks._local_push(
863 close_commit_id, self.path, push_branches=True,
863 close_commit_id, self.path, push_branches=True,
864 enable_hooks=True)
864 enable_hooks=True)
865 merge_succeeded = True
865 merge_succeeded = True
866 except RepositoryError:
866 except RepositoryError:
867 log.exception(
867 log.exception(
868 'Failure when doing local push from the shadow '
868 'Failure when doing local push from the shadow '
869 'repository to the target repository at %s.', self.path)
869 'repository to the target repository at %s.', self.path)
870 merge_succeeded = False
870 merge_succeeded = False
871 merge_failure_reason = MergeFailureReason.PUSH_FAILED
871 merge_failure_reason = MergeFailureReason.PUSH_FAILED
872 metadata['target'] = 'hg shadow repo'
872 metadata['target'] = 'hg shadow repo'
873 metadata['merge_commit'] = merge_commit_id
873 metadata['merge_commit'] = merge_commit_id
874 else:
874 else:
875 merge_succeeded = True
875 merge_succeeded = True
876 else:
876 else:
877 merge_succeeded = False
877 merge_succeeded = False
878
878
879 return MergeResponse(
879 return MergeResponse(
880 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
880 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
881 metadata=metadata)
881 metadata=metadata)
882
882
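A sketch of how a caller might inspect the MergeResponse built above; in RhodeCode this path is normally driven through PullRequestModel rather than called directly, the ref/id variables are hypothetical, and the attribute names are assumed to follow the positional order used in the constructor calls above (possible, executed, merge_ref, failure_reason).

preview = repo._merge_repo(
    repo_id, workspace_id, target_ref, source_repo, source_ref,
    merge_message='merge preview', merger_name='Joe Doe',
    merger_email='joe.doe@example.com', dry_run=True)
if not preview.possible:
    # e.g. TARGET_IS_NOT_HEAD or HG_TARGET_HAS_MULTIPLE_HEADS, with details in metadata
    print('merge blocked:', preview.failure_reason, preview.metadata)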
883 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
883 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
884 config = self.config.copy()
884 config = self.config.copy()
885 if not enable_hooks:
885 if not enable_hooks:
886 config.clear_section('hooks')
886 config.clear_section('hooks')
887 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
887 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
888
888
889 def _validate_pull_reference(self, reference):
889 def _validate_pull_reference(self, reference):
890 if not (reference.name in self.bookmarks or
890 if not (reference.name in self.bookmarks or
891 reference.name in self.branches or
891 reference.name in self.branches or
892 self.get_commit(reference.commit_id)):
892 self.get_commit(reference.commit_id)):
893 raise CommitDoesNotExistError(
893 raise CommitDoesNotExistError(
894 'Unknown branch, bookmark or commit id')
894 'Unknown branch, bookmark or commit id')
895
895
896 def _local_pull(self, repository_path, reference):
896 def _local_pull(self, repository_path, reference):
897 """
897 """
898 Fetch a branch, bookmark or commit from a local repository.
898 Fetch a branch, bookmark or commit from a local repository.
899 """
899 """
900 repository_path = os.path.abspath(repository_path)
900 repository_path = os.path.abspath(repository_path)
901 if repository_path == self.path:
901 if repository_path == self.path:
902 raise ValueError('Cannot pull from the same repository')
903
904 reference_type_to_option_name = {
905 'book': 'bookmark',
906 'branch': 'branch',
907 }
908 option_name = reference_type_to_option_name.get(
909 reference.type, 'revision')
910
911 if option_name == 'revision':
912 ref = reference.commit_id
913 else:
914 ref = reference.name
915
916 options = {option_name: [ref]}
917 self._remote.pull_cmd(repository_path, hooks=False, **options)
918 self._remote.invalidate_vcs_cache()
919
920 def bookmark(self, bookmark, revision=None):
921 if isinstance(bookmark, unicode):
922 bookmark = safe_str(bookmark)
923 self._remote.bookmark(bookmark, revision=revision)
924 self._remote.invalidate_vcs_cache()
925
926 def get_path_permissions(self, username):
927 hgacl_file = os.path.join(self.path, '.hg/hgacl')
928
929 def read_patterns(suffix):
930 svalue = None
931 for section, option in [
932 ('narrowacl', username + suffix),
933 ('narrowacl', 'default' + suffix),
934 ('narrowhgacl', username + suffix),
935 ('narrowhgacl', 'default' + suffix)
936 ]:
937 try:
938 svalue = hgacl.get(section, option)
939 break # stop at the first value we find
940 except configparser.NoOptionError:
941 pass
942 if not svalue:
943 return None
944 result = ['/']
945 for pattern in svalue.split():
946 result.append(pattern)
947 if '*' not in pattern and '?' not in pattern:
948 result.append(pattern + '/*')
949 return result
950
951 if os.path.exists(hgacl_file):
952 try:
953 hgacl = configparser.RawConfigParser()
954 hgacl.read(hgacl_file)
955
956 includes = read_patterns('.includes')
957 excludes = read_patterns('.excludes')
958 return BasePathPermissionChecker.create_from_patterns(
959 includes, excludes)
960 except BaseException as e:
961 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
962 hgacl_file, self.name, e)
963 raise exceptions.RepositoryRequirementError(msg)
964 else:
965 return None
966
967
968 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
969
970 def _commit_factory(self, commit_id):
971 return self.repo.get_commit(
972 commit_idx=commit_id, pre_load=self.pre_load)
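The get_path_permissions() hunk above turns an optional .hg/hgacl file into include/exclude path lists that feed BasePathPermissionChecker.create_from_patterns(). Two details of read_patterns() are easy to miss: the lookup is first-match-wins, with a user-specific option beating 'default' and [narrowacl] consulted before [narrowhgacl], and any pattern without glob characters is duplicated as 'pattern/*' so a plain directory name also covers everything beneath it. The standalone sketch below is an illustration only, not RhodeCode code: the sample config contents are invented, it targets Python 3's configparser, and it also tolerates a missing section, which the code in this changeset does not need to.

# Standalone sketch of the hgacl pattern lookup and expansion shown above.
# Sample config contents are invented; only the section/option naming
# mirrors the code in this changeset.
import configparser
import io

SAMPLE_HGACL = u"""
[narrowacl]
default.includes = docs src/api
bob.excludes = src/secret/*
"""


def read_patterns(hgacl, username, suffix):
    # First match wins: a user-specific option beats 'default', and the
    # [narrowacl] section is consulted before [narrowhgacl].
    svalue = None
    for section, option in [
            ('narrowacl', username + suffix),
            ('narrowacl', 'default' + suffix),
            ('narrowhgacl', username + suffix),
            ('narrowhgacl', 'default' + suffix)]:
        try:
            svalue = hgacl.get(section, option)
            break
        except (configparser.NoSectionError, configparser.NoOptionError):
            # the original only catches NoOptionError; NoSectionError is
            # handled here so this sample runs with a partial config
            pass
    if not svalue:
        return None
    result = ['/']
    for pattern in svalue.split():
        result.append(pattern)
        # a plain directory name also matches everything underneath it
        if '*' not in pattern and '?' not in pattern:
            result.append(pattern + '/*')
    return result


hgacl = configparser.RawConfigParser()
hgacl.read_file(io.StringIO(SAMPLE_HGACL))
print(read_patterns(hgacl, 'bob', '.includes'))  # ['/', 'docs', 'docs/*', 'src/api', 'src/api/*']
print(read_patterns(hgacl, 'bob', '.excludes'))  # ['/', 'src/secret/*']

In the real method the two resulting lists are passed to BasePathPermissionChecker.create_from_patterns(includes, excludes); if the hgacl file is absent, no checker is created at all.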
@@ -1,84 +1,84 @@
1
2 <div class="pull-request-wrap">
3
4 % if c.pr_merge_possible:
5 <h2 class="merge-status">
6 <span class="merge-icon success"><i class="icon-ok"></i></span>
7 ${_('This pull request can be merged automatically.')}
8 </h2>
9 % else:
10 <h2 class="merge-status">
11 <span class="merge-icon warning"><i class="icon-false"></i></span>
12 ${_('Merge is not currently possible because of below failed checks.')}
13 </h2>
14 % endif
15
16 % if c.pr_merge_errors.items():
17 <ul>
18 % for pr_check_key, pr_check_details in c.pr_merge_errors.items():
19 <% pr_check_type = pr_check_details['error_type'] %>
20 <li>
- 21 <span class="merge-message ${pr_check_type}" data-role="merge-message">
+ 21 <div class="merge-message ${pr_check_type}" data-role="merge-message">
- 22 - ${pr_check_details['message']}
+ 22 <span style="white-space: pre-line">- ${pr_check_details['message']}</span>
23 % if pr_check_key == 'todo':
24 % for co in pr_check_details['details']:
25 <a class="permalink" href="#comment-${co.comment_id}" onclick="Rhodecode.comments.scrollToComment($('#comment-${co.comment_id}'), 0, ${h.json.dumps(co.outdated)})"> #${co.comment_id}</a>${'' if loop.last else ','}
26 % endfor
27 % endif
- 28 </span>
+ 28 </div>
29 </li>
30 % endfor
31 </ul>
32 % endif
33
34 <div class="pull-request-merge-actions">
35 % if c.allowed_to_merge:
36 ## Merge info, show only if all errors are taken care of
37 % if not c.pr_merge_errors and c.pr_merge_info:
38 <div class="pull-request-merge-info">
39 <ul>
40 % for pr_merge_key, pr_merge_details in c.pr_merge_info.items():
41 <li>
42 - ${pr_merge_details['message']}
43 </li>
44 % endfor
45 </ul>
46 </div>
47 % endif
48
49 <div>
50 ${h.secure_form(h.route_path('pullrequest_merge', repo_name=c.repo_name, pull_request_id=c.pull_request.pull_request_id), id='merge_pull_request_form', request=request)}
51 <% merge_disabled = ' disabled' if c.pr_merge_possible is False else '' %>
52
53 % if c.allowed_to_close:
54 ## close PR action, injected later next to COMMENT button
55 % if c.pull_request_review_status == c.REVIEW_STATUS_APPROVED:
56 <a id="close-pull-request-action" class="btn btn-approved-status" href="#close-as-approved" onclick="closePullRequest('${c.REVIEW_STATUS_APPROVED}'); return false;">
57 ${_('Close with status {}').format(h.commit_status_lbl(c.REVIEW_STATUS_APPROVED))}
58 </a>
59 % else:
60 <a id="close-pull-request-action" class="btn btn-rejected-status" href="#close-as-rejected" onclick="closePullRequest('${c.REVIEW_STATUS_REJECTED}'); return false;">
61 ${_('Close with status {}').format(h.commit_status_lbl(c.REVIEW_STATUS_REJECTED))}
62 </a>
63 % endif
64 % endif
65
66 <input type="submit" id="merge_pull_request" value="${_('Merge and close Pull Request')}" class="btn${merge_disabled}"${merge_disabled}>
67 ${h.end_form()}
68
69 <div class="pull-request-merge-refresh">
70 <a href="#refreshChecks" onclick="refreshMergeChecks(); return false;">${_('refresh checks')}</a>
71 /
72 <a class="tooltip" title="Force refresh of the merge workspace in case current status seems wrong." href="${h.route_path('pullrequest_show', repo_name=c.repo_name, pull_request_id=c.pull_request.pull_request_id,_query={"force_refresh":1})}">forced recheck</a>
73 </div>
74
75 </div>
76 % elif c.rhodecode_user.username != h.DEFAULT_USER:
77 <a class="btn" href="#" onclick="refreshMergeChecks(); return false;">${_('refresh checks')}</a>
78 <input type="submit" value="${_('Merge and close Pull Request')}" class="btn disabled" disabled="disabled" title="${_('You are not allowed to merge this pull request.')}">
79 % else:
80 <input type="submit" value="${_('Login to Merge this Pull Request')}" class="btn disabled" disabled="disabled">
81 % endif
82 </div>
83
84 </div>
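The behavioural change in this template hunk is confined to lines 21, 22 and 28: the merge-check message is now wrapped in a <div> and rendered inside a <span style="white-space: pre-line">, so a multi-line message, such as the list of conflicting files reported by a failed merge check, keeps its line breaks instead of collapsing into one run of text. The sketch below is a rough illustration of the data shape the template iterates over: the keys mirror what the template accesses (error_type, message, and details for the 'todo' check), while the concrete values are invented.

# Illustrative shape of c.pr_merge_errors as consumed by the template above.
# Keys follow the template's lookups; the sample values are made up.
from collections import OrderedDict

pr_merge_errors = OrderedDict([
    ('merge', {
        'error_type': 'error',
        # A multi-line message: with white-space: pre-line each conflicting
        # file is rendered on its own line in the merge-status box.
        'message': 'This pull request cannot be merged because of conflicts.\n'
                   'Conflicting files:\n'
                   'docs/index.rst\n'
                   'setup.py',
        'details': [],
    }),
    ('todo', {
        'error_type': 'warning',
        'message': '2 unresolved TODOs need to be resolved.',
        # in the real view these are comment objects exposing .comment_id and
        # .outdated, which the template links as permalinks
        'details': [],
    }),
])

for check_key, check in pr_merge_errors.items():
    print('[{}] {}'.format(check['error_type'], check['message']))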