tests: fixed further tests
marcink - r3777:aceb6b16 new-ui
@@ -1,1218 +1,1218 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
import mock
import pytest

import rhodecode
from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib import helpers as h
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.db import (
    PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
from rhodecode.model.meta import Session
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.model.user import UserModel
from rhodecode.tests import (
    assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)

def route_path(name, params=None, **kwargs):
    import urllib

    base_url = {
        'repo_changelog': '/{repo_name}/changelog',
        'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
-        'repo_commits': '/{repo_name}/changelog',
-        'repo_commits_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
+        'repo_commits': '/{repo_name}/commits',
+        'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
        'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
        'pullrequest_show_all': '/{repo_name}/pull-request',
        'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
        'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
        'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
        'pullrequest_new': '/{repo_name}/pull-request/new',
        'pullrequest_create': '/{repo_name}/pull-request/create',
        'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
        'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
        'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
        'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
        'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
    }[name].format(**kwargs)

    if params:
        base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
    return base_url

@pytest.mark.usefixtures('app', 'autologin_user')
@pytest.mark.backends("git", "hg")
class TestPullrequestsView(object):

    def test_index(self, backend):
        self.app.get(route_path(
            'pullrequest_new',
            repo_name=backend.repo_name))

    def test_option_menu_create_pull_request_exists(self, backend):
        repo_name = backend.repo_name
        response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))

        create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
            'pullrequest_new', repo_name=repo_name)
        response.mustcontain(create_pr_link)

    def test_create_pr_form_with_raw_commit_id(self, backend):
        repo = backend.repo

        self.app.get(
            route_path('pullrequest_new', repo_name=repo.repo_name,
                       commit=repo.get_commit().raw_id),
            status=200)

    @pytest.mark.parametrize('pr_merge_enabled', [True, False])
    @pytest.mark.parametrize('range_diff', ["0", "1"])
    def test_show(self, pr_util, pr_merge_enabled, range_diff):
        pull_request = pr_util.create_pull_request(
            mergeable=pr_merge_enabled, enable_notifications=False)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id,
            params={'range-diff': range_diff}))

        for commit_id in pull_request.revisions:
            response.mustcontain(commit_id)

        assert pull_request.target_ref_parts.type in response
        assert pull_request.target_ref_parts.name in response
        target_clone_url = pull_request.target_repo.clone_url()
        assert target_clone_url in response

        assert 'class="pull-request-merge"' in response
        if pr_merge_enabled:
            response.mustcontain('Pull request reviewer approval is pending')
        else:
            response.mustcontain('Server-side pull request merging is disabled.')

        if range_diff == "1":
            response.mustcontain('Turn off: Show the diff as commit range')

    def test_close_status_visibility(self, pr_util, user_util, csrf_token):
        # Logout
        response = self.app.post(
            h.route_path('logout'),
            params={'csrf_token': csrf_token})
        # Login as regular user
        response = self.app.post(h.route_path('login'),
                                 {'username': TEST_USER_REGULAR_LOGIN,
                                  'password': 'test12'})

        pull_request = pr_util.create_pull_request(
            author=TEST_USER_REGULAR_LOGIN)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('Server-side pull request merging is disabled.')

        assert_response = response.assert_response()
        # for a regular user without merge permissions, we don't see it
        assert_response.no_element_exists('#close-pull-request-action')

        user_util.grant_user_permission_to_repo(
            pull_request.target_repo,
            UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
            'repository.write')
        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('Server-side pull request merging is disabled.')

        assert_response = response.assert_response()
        # now the regular user has merge permissions, we have the CLOSE button
        assert_response.one_element_exists('#close-pull-request-action')

    def test_show_invalid_commit_id(self, pr_util):
        # Simulating invalid revisions which will cause a lookup error
        pull_request = pr_util.create_pull_request()
        pull_request.revisions = ['invalid']
        Session().add(pull_request)
        Session().commit()

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        for commit_id in pull_request.revisions:
            response.mustcontain(commit_id)

    def test_show_invalid_source_reference(self, pr_util):
        pull_request = pr_util.create_pull_request()
        pull_request.source_ref = 'branch:b:invalid'
        Session().add(pull_request)
        Session().commit()

        self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

    def test_edit_title_description(self, pr_util, csrf_token):
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id

        response = self.app.post(
            route_path('pullrequest_update',
                       repo_name=pull_request.target_repo.repo_name,
                       pull_request_id=pull_request_id),
            params={
                'edit_pull_request': 'true',
                'title': 'New title',
                'description': 'New description',
                'csrf_token': csrf_token})

        assert_session_flash(
            response, u'Pull request title & description updated.',
            category='success')

        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.title == 'New title'
        assert pull_request.description == 'New description'

    def test_edit_title_description_closed(self, pr_util, csrf_token):
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.repo_name
        pr_util.close()

        response = self.app.post(
            route_path('pullrequest_update',
                       repo_name=repo_name, pull_request_id=pull_request_id),
            params={
                'edit_pull_request': 'true',
                'title': 'New title',
                'description': 'New description',
                'csrf_token': csrf_token}, status=200)
        assert_session_flash(
            response, u'Cannot update closed pull requests.',
            category='error')

    def test_update_invalid_source_reference(self, pr_util, csrf_token):
        from rhodecode.lib.vcs.backends.base import UpdateFailureReason

        pull_request = pr_util.create_pull_request()
        pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
        Session().add(pull_request)
        Session().commit()

        pull_request_id = pull_request.pull_request_id

        response = self.app.post(
            route_path('pullrequest_update',
                       repo_name=pull_request.target_repo.repo_name,
                       pull_request_id=pull_request_id),
            params={'update_commits': 'true', 'csrf_token': csrf_token})

        expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
            UpdateFailureReason.MISSING_SOURCE_REF])
        assert_session_flash(response, expected_msg, category='error')

    def test_missing_target_reference(self, pr_util, csrf_token):
        from rhodecode.lib.vcs.backends.base import MergeFailureReason
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        unicode_reference = u'branch:invalid-branch:invalid-commit-id'
        pull_request.target_ref = unicode_reference
        Session().add(pull_request)
        Session().commit()

        pull_request_id = pull_request.pull_request_id
        pull_request_url = route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.repo_name,
            pull_request_id=pull_request_id)

        response = self.app.get(pull_request_url)
        target_ref_id = 'invalid-branch'
        merge_resp = MergeResponse(
            True, True, '', MergeFailureReason.MISSING_TARGET_REF,
            metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
        response.assert_response().element_contains(
            'span[data-role="merge-message"]', merge_resp.merge_status_message)

    def test_comment_and_close_pull_request_custom_message_approved(
            self, pr_util, csrf_token, xhr_header):

        pull_request = pr_util.create_pull_request(approved=True)
        pull_request_id = pull_request.pull_request_id
        author = pull_request.user_id
        repo = pull_request.target_repo.repo_id

        self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request_id),
            params={
                'close_pull_request': '1',
                'text': 'Closing a PR',
                'csrf_token': csrf_token},
            extra_environ=xhr_header,)

        journal = UserLog.query()\
            .filter(UserLog.user_id == author)\
            .filter(UserLog.repository_id == repo) \
            .order_by('user_log_id') \
            .all()
        assert journal[-1].action == 'repo.pull_request.close'

        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.is_closed()

        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_APPROVED
        comments = ChangesetComment().query() \
            .filter(ChangesetComment.pull_request == pull_request) \
            .order_by(ChangesetComment.comment_id.asc())\
            .all()
        assert comments[-1].text == 'Closing a PR'

    def test_comment_force_close_pull_request_rejected(
            self, pr_util, csrf_token, xhr_header):
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id
        PullRequestModel().update_reviewers(
            pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
            pull_request.author)
        author = pull_request.user_id
        repo = pull_request.target_repo.repo_id

        self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request_id),
            params={
                'close_pull_request': '1',
                'csrf_token': csrf_token},
            extra_environ=xhr_header)

        pull_request = PullRequest.get(pull_request_id)

        journal = UserLog.query()\
            .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
            .order_by('user_log_id') \
            .all()
        assert journal[-1].action == 'repo.pull_request.close'

        # check only the latest status, not the review status
        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_REJECTED

    def test_comment_and_close_pull_request(
            self, pr_util, csrf_token, xhr_header):
        pull_request = pr_util.create_pull_request()
        pull_request_id = pull_request.pull_request_id

        response = self.app.post(
            route_path('pullrequest_comment_create',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request.pull_request_id),
            params={
                'close_pull_request': 'true',
                'csrf_token': csrf_token},
            extra_environ=xhr_header)

        assert response.json

        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.is_closed()

        # check only the latest status, not the review status
        status = ChangesetStatusModel().get_status(
            pull_request.source_repo, pull_request=pull_request)
        assert status == ChangesetStatus.STATUS_REJECTED

    def test_create_pull_request(self, backend, csrf_token):
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change2'])

        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change2']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('revisions', commit_ids['change2']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']
        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # check that we have now both revisions
        assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
        assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref

    def test_reviewer_notifications(self, backend, csrf_token):
        # We have to use the app.post for this test so it will create the
        # notifications properly with the new PR
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '2'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # Check that a notification was made
        notifications = Notification.query()\
            .filter(Notification.created_by == pull_request.author.user_id,
                    Notification.type_ == Notification.TYPE_PULL_REQUEST,
                    Notification.subject.contains(
                        "wants you to review pull request #%s" % pull_request_id))
        assert len(notifications.all()) == 1

        # Change reviewers and check that a notification was made
        PullRequestModel().update_reviewers(
            pull_request.pull_request_id, [(1, [], False, [])],
            pull_request.author)
        assert len(notifications.all()) == 2

    def test_create_pull_request_stores_ancestor_commit_id(self, backend,
                                                           csrf_token):
        commits = [
            {'message': 'ancestor',
             'added': [FileNode('file_A', content='content_of_ancestor')]},
            {'message': 'change',
             'added': [FileNode('file_a', content='content_of_change')]},
            {'message': 'change-child'},
            {'message': 'ancestor-child', 'parents': ['ancestor'],
             'added': [
                FileNode('file_B', content='content_of_ancestor_child')]},
            {'message': 'ancestor-child-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor-child'])
        source = backend.create_repo(heads=['change'])

        response = self.app.post(
            route_path('pullrequest_create', repo_name=source.repo_name),
            [
                ('source_repo', source.repo_name),
                ('source_ref', 'branch:default:' + commit_ids['change']),
                ('target_repo', target.repo_name),
                ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
                ('common_ancestor', commit_ids['ancestor']),
                ('pullrequest_title', 'Title'),
                ('pullrequest_desc', 'Description'),
                ('description_renderer', 'markdown'),
                ('__start__', 'review_members:sequence'),
                ('__start__', 'reviewer:mapping'),
                ('user_id', '1'),
                ('__start__', 'reasons:sequence'),
                ('reason', 'Some reason'),
                ('__end__', 'reasons:sequence'),
                ('__start__', 'rules:sequence'),
                ('__end__', 'rules:sequence'),
                ('mandatory', 'False'),
                ('__end__', 'reviewer:mapping'),
                ('__end__', 'review_members:sequence'),
                ('__start__', 'revisions:sequence'),
                ('revisions', commit_ids['change']),
                ('__end__', 'revisions:sequence'),
                ('user', ''),
                ('csrf_token', csrf_token),
            ],
            status=302)

        location = response.headers['Location']

        pull_request_id = location.rsplit('/', 1)[1]
        assert pull_request_id != 'new'
        pull_request = PullRequest.get(int(pull_request_id))

        # target_ref has to point to the ancestor's commit_id in order to
        # show the correct diff
        expected_target_ref = 'branch:default:' + commit_ids['ancestor']
        assert pull_request.target_ref == expected_target_ref

        # Check generated diff contents
        response = response.follow()
        assert 'content_of_ancestor' not in response.body
        assert 'content_of_ancestor-child' not in response.body
        assert 'content_of_change' in response.body

    def test_merge_pull_request_enabled(self, pr_util, csrf_token):
        # Clear any previous calls to rcextensions
        rhodecode.EXTENSIONS.calls.clear()

        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name,

        response = self.app.post(
            route_path('pullrequest_merge',
                       repo_name=str(repo_name[0]),
                       pull_request_id=pull_request_id),
            params={'csrf_token': csrf_token}).follow()

        pull_request = PullRequest.get(pull_request_id)

        assert response.status_int == 200
        assert pull_request.is_closed()
        assert_pull_request_status(
            pull_request, ChangesetStatus.STATUS_APPROVED)

        # Check the relevant log entries were added
        user_logs = UserLog.query().order_by('-user_log_id').limit(3)
        actions = [log.action for log in user_logs]
        pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        expected_actions = [
            u'repo.pull_request.close',
            u'repo.pull_request.merge',
            u'repo.pull_request.comment.create'
        ]
        assert actions == expected_actions

        user_logs = UserLog.query().order_by('-user_log_id').limit(4)
        actions = [log for log in user_logs]
        assert actions[-1].action == 'user.push'
        assert actions[-1].action_data['commit_ids'] == pr_commit_ids

        # Check post_push rcextension was really executed
        push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
        assert len(push_calls) == 1
        unused_last_call_args, last_call_kwargs = push_calls[0]
        assert last_call_kwargs['action'] == 'push'
        assert last_call_kwargs['commit_ids'] == pr_commit_ids

    def test_merge_pull_request_disabled(self, pr_util, csrf_token):
        pull_request = pr_util.create_pull_request(mergeable=False)
        pull_request_id = pull_request.pull_request_id
        pull_request = PullRequest.get(pull_request_id)

        response = self.app.post(
            route_path('pullrequest_merge',
                       repo_name=pull_request.target_repo.scm_instance().name,
                       pull_request_id=pull_request.pull_request_id),
            params={'csrf_token': csrf_token}).follow()

        assert response.status_int == 200
        response.mustcontain(
            'Merge is not currently possible because of below failed checks.')
        response.mustcontain('Server-side pull request merging is disabled.')

    @pytest.mark.skip_backends('svn')
    def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
        pull_request = pr_util.create_pull_request(mergeable=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name

        response = self.app.post(
            route_path('pullrequest_merge',
                       repo_name=repo_name, pull_request_id=pull_request_id),
            params={'csrf_token': csrf_token}).follow()

        assert response.status_int == 200

        response.mustcontain(
            'Merge is not currently possible because of below failed checks.')
        response.mustcontain('Pull request reviewer approval is pending.')

    def test_merge_pull_request_renders_failure_reason(
            self, user_regular, csrf_token, pr_util):
        pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
        pull_request_id = pull_request.pull_request_id
        repo_name = pull_request.target_repo.scm_instance().name

        merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
                                   MergeFailureReason.PUSH_FAILED,
                                   metadata={'target': 'shadow repo',
                                             'merge_commit': 'xxx'})
        model_patcher = mock.patch.multiple(
            PullRequestModel,
            merge_repo=mock.Mock(return_value=merge_resp),
            merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))

        with model_patcher:
            response = self.app.post(
                route_path('pullrequest_merge',
                           repo_name=repo_name,
                           pull_request_id=pull_request_id),
                params={'csrf_token': csrf_token}, status=302)

            merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
                                       metadata={'target': 'shadow repo',
                                                 'merge_commit': 'xxx'})
            assert_session_flash(response, merge_resp.merge_status_message)

    def test_update_source_revision(self, backend, csrf_token):
        commits = [
            {'message': 'ancestor'},
            {'message': 'change'},
            {'message': 'change-2'},
        ]
        commit_ids = backend.create_master_repo(commits)
        target = backend.create_repo(heads=['ancestor'])
        source = backend.create_repo(heads=['change'])

        # create pr from a in source to A in target
        pull_request = PullRequest()

        pull_request.source_repo = source
        pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['change'])

        pull_request.target_repo = target
        pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
            branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])

        pull_request.revisions = [commit_ids['change']]
        pull_request.title = u"Test"
        pull_request.description = u"Description"
        pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        pull_request.pull_request_state = PullRequest.STATE_CREATED
        Session().add(pull_request)
        Session().commit()
        pull_request_id = pull_request.pull_request_id

        # source has ancestor - change - change-2
        backend.pull_heads(source, heads=['change-2'])

        # update PR
        self.app.post(
            route_path('pullrequest_update',
                       repo_name=target.repo_name, pull_request_id=pull_request_id),
            params={'update_commits': 'true', 'csrf_token': csrf_token})

        response = self.app.get(
            route_path('pullrequest_show',
                       repo_name=target.repo_name,
                       pull_request_id=pull_request.pull_request_id))

        assert response.status_int == 200
        assert 'Pull request updated to' in response.body
        assert 'with 1 added, 0 removed commits.' in response.body

        # check that we have now both revisions
        pull_request = PullRequest.get(pull_request_id)
        assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]

700 def test_update_target_revision(self, backend, csrf_token):
700 def test_update_target_revision(self, backend, csrf_token):
701 commits = [
701 commits = [
702 {'message': 'ancestor'},
702 {'message': 'ancestor'},
703 {'message': 'change'},
703 {'message': 'change'},
704 {'message': 'ancestor-new', 'parents': ['ancestor']},
704 {'message': 'ancestor-new', 'parents': ['ancestor']},
705 {'message': 'change-rebased'},
705 {'message': 'change-rebased'},
706 ]
706 ]
707 commit_ids = backend.create_master_repo(commits)
707 commit_ids = backend.create_master_repo(commits)
708 target = backend.create_repo(heads=['ancestor'])
708 target = backend.create_repo(heads=['ancestor'])
709 source = backend.create_repo(heads=['change'])
709 source = backend.create_repo(heads=['change'])
710
710
711 # create pr from a in source to A in target
711 # create pr from a in source to A in target
712 pull_request = PullRequest()
712 pull_request = PullRequest()
713
713
714 pull_request.source_repo = source
714 pull_request.source_repo = source
715 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
715 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
716 branch=backend.default_branch_name, commit_id=commit_ids['change'])
716 branch=backend.default_branch_name, commit_id=commit_ids['change'])
717
717
718 pull_request.target_repo = target
718 pull_request.target_repo = target
719 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
719 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
720 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
720 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
721
721
722 pull_request.revisions = [commit_ids['change']]
722 pull_request.revisions = [commit_ids['change']]
723 pull_request.title = u"Test"
723 pull_request.title = u"Test"
724 pull_request.description = u"Description"
724 pull_request.description = u"Description"
725 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
725 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
726 pull_request.pull_request_state = PullRequest.STATE_CREATED
726 pull_request.pull_request_state = PullRequest.STATE_CREATED
727
727
728 Session().add(pull_request)
728 Session().add(pull_request)
729 Session().commit()
729 Session().commit()
730 pull_request_id = pull_request.pull_request_id
730 pull_request_id = pull_request.pull_request_id
731
731
732 # target has ancestor - ancestor-new
732 # target has ancestor - ancestor-new
733 # source has ancestor - ancestor-new - change-rebased
733 # source has ancestor - ancestor-new - change-rebased
734 backend.pull_heads(target, heads=['ancestor-new'])
734 backend.pull_heads(target, heads=['ancestor-new'])
735 backend.pull_heads(source, heads=['change-rebased'])
735 backend.pull_heads(source, heads=['change-rebased'])
736
736
737 # update PR
737 # update PR
738 self.app.post(
738 self.app.post(
739 route_path('pullrequest_update',
739 route_path('pullrequest_update',
740 repo_name=target.repo_name,
740 repo_name=target.repo_name,
741 pull_request_id=pull_request_id),
741 pull_request_id=pull_request_id),
742 params={'update_commits': 'true', 'csrf_token': csrf_token},
742 params={'update_commits': 'true', 'csrf_token': csrf_token},
743 status=200)
743 status=200)
744
744
745 # check that only the rebased commit remains and the target ref was moved
745 # check that only the rebased commit remains and the target ref was moved
746 pull_request = PullRequest.get(pull_request_id)
746 pull_request = PullRequest.get(pull_request_id)
747 assert pull_request.revisions == [commit_ids['change-rebased']]
747 assert pull_request.revisions == [commit_ids['change-rebased']]
748 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
748 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
749 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
749 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
750
750
751 response = self.app.get(
751 response = self.app.get(
752 route_path('pullrequest_show',
752 route_path('pullrequest_show',
753 repo_name=target.repo_name,
753 repo_name=target.repo_name,
754 pull_request_id=pull_request.pull_request_id))
754 pull_request_id=pull_request.pull_request_id))
755 assert response.status_int == 200
755 assert response.status_int == 200
756 assert 'Pull request updated to' in response.body
756 assert 'Pull request updated to' in response.body
757 assert 'with 1 added, 1 removed commits.' in response.body
757 assert 'with 1 added, 1 removed commits.' in response.body
758
758
759 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
759 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
760 backend = backend_git
760 backend = backend_git
761 commits = [
761 commits = [
762 {'message': 'master-commit-1'},
762 {'message': 'master-commit-1'},
763 {'message': 'master-commit-2-change-1'},
763 {'message': 'master-commit-2-change-1'},
764 {'message': 'master-commit-3-change-2'},
764 {'message': 'master-commit-3-change-2'},
765
765
766 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
766 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
767 {'message': 'feat-commit-2'},
767 {'message': 'feat-commit-2'},
768 ]
768 ]
769 commit_ids = backend.create_master_repo(commits)
769 commit_ids = backend.create_master_repo(commits)
770 target = backend.create_repo(heads=['master-commit-3-change-2'])
770 target = backend.create_repo(heads=['master-commit-3-change-2'])
771 source = backend.create_repo(heads=['feat-commit-2'])
771 source = backend.create_repo(heads=['feat-commit-2'])
772
772
773 # create a PR from the source repo into the target repo
773 # create a PR from the source repo into the target repo
774 pull_request = PullRequest()
774 pull_request = PullRequest()
775 pull_request.source_repo = source
775 pull_request.source_repo = source
776
776
777 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
777 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
778 branch=backend.default_branch_name,
778 branch=backend.default_branch_name,
779 commit_id=commit_ids['master-commit-3-change-2'])
779 commit_id=commit_ids['master-commit-3-change-2'])
780
780
781 pull_request.target_repo = target
781 pull_request.target_repo = target
782 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
782 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
783 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
783 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
784
784
785 pull_request.revisions = [
785 pull_request.revisions = [
786 commit_ids['feat-commit-1'],
786 commit_ids['feat-commit-1'],
787 commit_ids['feat-commit-2']
787 commit_ids['feat-commit-2']
788 ]
788 ]
789 pull_request.title = u"Test"
789 pull_request.title = u"Test"
790 pull_request.description = u"Description"
790 pull_request.description = u"Description"
791 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
791 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
792 pull_request.pull_request_state = PullRequest.STATE_CREATED
792 pull_request.pull_request_state = PullRequest.STATE_CREATED
793 Session().add(pull_request)
793 Session().add(pull_request)
794 Session().commit()
794 Session().commit()
795 pull_request_id = pull_request.pull_request_id
795 pull_request_id = pull_request.pull_request_id
796
796
797 # PR is created, now we simulate a force-push into target
797 # PR is created, now we simulate a force-push into target
798 # that drops the last 2 commits
798 # that drops the last 2 commits
799 vcsrepo = target.scm_instance()
799 vcsrepo = target.scm_instance()
800 vcsrepo.config.clear_section('hooks')
800 vcsrepo.config.clear_section('hooks')
801 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
801 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
802
802
803 # update PR
803 # update PR
804 self.app.post(
804 self.app.post(
805 route_path('pullrequest_update',
805 route_path('pullrequest_update',
806 repo_name=target.repo_name,
806 repo_name=target.repo_name,
807 pull_request_id=pull_request_id),
807 pull_request_id=pull_request_id),
808 params={'update_commits': 'true', 'csrf_token': csrf_token},
808 params={'update_commits': 'true', 'csrf_token': csrf_token},
809 status=200)
809 status=200)
810
810
811 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
811 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
812 assert response.status_int == 200
812 assert response.status_int == 200
813 response.mustcontain('Pull request updated to')
813 response.mustcontain('Pull request updated to')
814 response.mustcontain('with 0 added, 0 removed commits.')
814 response.mustcontain('with 0 added, 0 removed commits.')
815
815
816 def test_update_of_ancestor_reference(self, backend, csrf_token):
816 def test_update_of_ancestor_reference(self, backend, csrf_token):
817 commits = [
817 commits = [
818 {'message': 'ancestor'},
818 {'message': 'ancestor'},
819 {'message': 'change'},
819 {'message': 'change'},
820 {'message': 'change-2'},
820 {'message': 'change-2'},
821 {'message': 'ancestor-new', 'parents': ['ancestor']},
821 {'message': 'ancestor-new', 'parents': ['ancestor']},
822 {'message': 'change-rebased'},
822 {'message': 'change-rebased'},
823 ]
823 ]
824 commit_ids = backend.create_master_repo(commits)
824 commit_ids = backend.create_master_repo(commits)
825 target = backend.create_repo(heads=['ancestor'])
825 target = backend.create_repo(heads=['ancestor'])
826 source = backend.create_repo(heads=['change'])
826 source = backend.create_repo(heads=['change'])
827
827
828 # create a PR from the 'change' commit in source into 'ancestor' in target
828 # create a PR from the 'change' commit in source into 'ancestor' in target
829 pull_request = PullRequest()
829 pull_request = PullRequest()
830 pull_request.source_repo = source
830 pull_request.source_repo = source
831
831
832 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
832 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
833 branch=backend.default_branch_name, commit_id=commit_ids['change'])
833 branch=backend.default_branch_name, commit_id=commit_ids['change'])
834 pull_request.target_repo = target
834 pull_request.target_repo = target
835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
836 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
836 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
837 pull_request.revisions = [commit_ids['change']]
837 pull_request.revisions = [commit_ids['change']]
838 pull_request.title = u"Test"
838 pull_request.title = u"Test"
839 pull_request.description = u"Description"
839 pull_request.description = u"Description"
840 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
840 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
841 pull_request.pull_request_state = PullRequest.STATE_CREATED
841 pull_request.pull_request_state = PullRequest.STATE_CREATED
842 Session().add(pull_request)
842 Session().add(pull_request)
843 Session().commit()
843 Session().commit()
844 pull_request_id = pull_request.pull_request_id
844 pull_request_id = pull_request.pull_request_id
845
845
846 # target has ancestor - ancestor-new
846 # target has ancestor - ancestor-new
847 # source has ancestor - ancestor-new - change-rebased
847 # source has ancestor - ancestor-new - change-rebased
848 backend.pull_heads(target, heads=['ancestor-new'])
848 backend.pull_heads(target, heads=['ancestor-new'])
849 backend.pull_heads(source, heads=['change-rebased'])
849 backend.pull_heads(source, heads=['change-rebased'])
850
850
851 # update PR
851 # update PR
852 self.app.post(
852 self.app.post(
853 route_path('pullrequest_update',
853 route_path('pullrequest_update',
854 repo_name=target.repo_name, pull_request_id=pull_request_id),
854 repo_name=target.repo_name, pull_request_id=pull_request_id),
855 params={'update_commits': 'true', 'csrf_token': csrf_token},
855 params={'update_commits': 'true', 'csrf_token': csrf_token},
856 status=200)
856 status=200)
857
857
858 # Expect the target reference to be updated correctly
858 # Expect the target reference to be updated correctly
859 pull_request = PullRequest.get(pull_request_id)
859 pull_request = PullRequest.get(pull_request_id)
860 assert pull_request.revisions == [commit_ids['change-rebased']]
860 assert pull_request.revisions == [commit_ids['change-rebased']]
861 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
861 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
862 branch=backend.default_branch_name,
862 branch=backend.default_branch_name,
863 commit_id=commit_ids['ancestor-new'])
863 commit_id=commit_ids['ancestor-new'])
864 assert pull_request.target_ref == expected_target_ref
864 assert pull_request.target_ref == expected_target_ref
865
865
866 def test_remove_pull_request_branch(self, backend_git, csrf_token):
866 def test_remove_pull_request_branch(self, backend_git, csrf_token):
867 branch_name = 'development'
867 branch_name = 'development'
868 commits = [
868 commits = [
869 {'message': 'initial-commit'},
869 {'message': 'initial-commit'},
870 {'message': 'old-feature'},
870 {'message': 'old-feature'},
871 {'message': 'new-feature', 'branch': branch_name},
871 {'message': 'new-feature', 'branch': branch_name},
872 ]
872 ]
873 repo = backend_git.create_repo(commits)
873 repo = backend_git.create_repo(commits)
874 commit_ids = backend_git.commit_ids
874 commit_ids = backend_git.commit_ids
875
875
876 pull_request = PullRequest()
876 pull_request = PullRequest()
877 pull_request.source_repo = repo
877 pull_request.source_repo = repo
878 pull_request.target_repo = repo
878 pull_request.target_repo = repo
879 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
879 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
880 branch=branch_name, commit_id=commit_ids['new-feature'])
880 branch=branch_name, commit_id=commit_ids['new-feature'])
881 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
881 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
882 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
882 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
883 pull_request.revisions = [commit_ids['new-feature']]
883 pull_request.revisions = [commit_ids['new-feature']]
884 pull_request.title = u"Test"
884 pull_request.title = u"Test"
885 pull_request.description = u"Description"
885 pull_request.description = u"Description"
886 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
886 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
887 pull_request.pull_request_state = PullRequest.STATE_CREATED
887 pull_request.pull_request_state = PullRequest.STATE_CREATED
888 Session().add(pull_request)
888 Session().add(pull_request)
889 Session().commit()
889 Session().commit()
890
890
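# delete the PR source branch so its head commit no longer exists in the repository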
891 vcs = repo.scm_instance()
891 vcs = repo.scm_instance()
892 vcs.remove_ref('refs/heads/{}'.format(branch_name))
892 vcs.remove_ref('refs/heads/{}'.format(branch_name))
893
893
894 response = self.app.get(route_path(
894 response = self.app.get(route_path(
895 'pullrequest_show',
895 'pullrequest_show',
896 repo_name=repo.repo_name,
896 repo_name=repo.repo_name,
897 pull_request_id=pull_request.pull_request_id))
897 pull_request_id=pull_request.pull_request_id))
898
898
899 assert response.status_int == 200
899 assert response.status_int == 200
900
900
901 response.assert_response().element_contains(
901 response.assert_response().element_contains(
902 '#changeset_compare_view_content .alert strong',
902 '#changeset_compare_view_content .alert strong',
903 'Missing commits')
903 'Missing commits')
904 response.assert_response().element_contains(
904 response.assert_response().element_contains(
905 '#changeset_compare_view_content .alert',
905 '#changeset_compare_view_content .alert',
906 'This pull request cannot be displayed, because one or more'
906 'This pull request cannot be displayed, because one or more'
907 ' commits no longer exist in the source repository.')
907 ' commits no longer exist in the source repository.')
908
908
909 def test_strip_commits_from_pull_request(
909 def test_strip_commits_from_pull_request(
910 self, backend, pr_util, csrf_token):
910 self, backend, pr_util, csrf_token):
911 commits = [
911 commits = [
912 {'message': 'initial-commit'},
912 {'message': 'initial-commit'},
913 {'message': 'old-feature'},
913 {'message': 'old-feature'},
914 {'message': 'new-feature', 'parents': ['initial-commit']},
914 {'message': 'new-feature', 'parents': ['initial-commit']},
915 ]
915 ]
916 pull_request = pr_util.create_pull_request(
916 pull_request = pr_util.create_pull_request(
917 commits, target_head='initial-commit', source_head='new-feature',
917 commits, target_head='initial-commit', source_head='new-feature',
918 revisions=['new-feature'])
918 revisions=['new-feature'])
919
919
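# strip the 'new-feature' commit from the source repo so the PR references a missing commit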
920 vcs = pr_util.source_repository.scm_instance()
920 vcs = pr_util.source_repository.scm_instance()
921 if backend.alias == 'git':
921 if backend.alias == 'git':
922 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
922 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
923 else:
923 else:
924 vcs.strip(pr_util.commit_ids['new-feature'])
924 vcs.strip(pr_util.commit_ids['new-feature'])
925
925
926 response = self.app.get(route_path(
926 response = self.app.get(route_path(
927 'pullrequest_show',
927 'pullrequest_show',
928 repo_name=pr_util.target_repository.repo_name,
928 repo_name=pr_util.target_repository.repo_name,
929 pull_request_id=pull_request.pull_request_id))
929 pull_request_id=pull_request.pull_request_id))
930
930
931 assert response.status_int == 200
931 assert response.status_int == 200
932
932
933 response.assert_response().element_contains(
933 response.assert_response().element_contains(
934 '#changeset_compare_view_content .alert strong',
934 '#changeset_compare_view_content .alert strong',
935 'Missing commits')
935 'Missing commits')
936 response.assert_response().element_contains(
936 response.assert_response().element_contains(
937 '#changeset_compare_view_content .alert',
937 '#changeset_compare_view_content .alert',
938 'This pull request cannot be displayed, because one or more'
938 'This pull request cannot be displayed, because one or more'
939 ' commits no longer exist in the source repository.')
939 ' commits no longer exist in the source repository.')
940 response.assert_response().element_contains(
940 response.assert_response().element_contains(
941 '#update_commits',
941 '#update_commits',
942 'Update commits')
942 'Update commits')
943
943
944 def test_strip_commits_and_update(
944 def test_strip_commits_and_update(
945 self, backend, pr_util, csrf_token):
945 self, backend, pr_util, csrf_token):
946 commits = [
946 commits = [
947 {'message': 'initial-commit'},
947 {'message': 'initial-commit'},
948 {'message': 'old-feature'},
948 {'message': 'old-feature'},
949 {'message': 'new-feature', 'parents': ['old-feature']},
949 {'message': 'new-feature', 'parents': ['old-feature']},
950 ]
950 ]
951 pull_request = pr_util.create_pull_request(
951 pull_request = pr_util.create_pull_request(
952 commits, target_head='old-feature', source_head='new-feature',
952 commits, target_head='old-feature', source_head='new-feature',
953 revisions=['new-feature'], mergeable=True)
953 revisions=['new-feature'], mergeable=True)
954
954
955 vcs = pr_util.source_repository.scm_instance()
955 vcs = pr_util.source_repository.scm_instance()
956 if backend.alias == 'git':
956 if backend.alias == 'git':
957 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
957 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
958 else:
958 else:
959 vcs.strip(pr_util.commit_ids['new-feature'])
959 vcs.strip(pr_util.commit_ids['new-feature'])
960
960
961 response = self.app.post(
961 response = self.app.post(
962 route_path('pullrequest_update',
962 route_path('pullrequest_update',
963 repo_name=pull_request.target_repo.repo_name,
963 repo_name=pull_request.target_repo.repo_name,
964 pull_request_id=pull_request.pull_request_id),
964 pull_request_id=pull_request.pull_request_id),
965 params={'update_commits': 'true',
965 params={'update_commits': 'true',
966 'csrf_token': csrf_token})
966 'csrf_token': csrf_token})
967
967
968 assert response.status_int == 200
968 assert response.status_int == 200
969 assert response.body == 'true'
969 assert response.body == 'true'
970
970
971 # Make sure that after update, it won't raise 500 errors
971 # Make sure that after update, it won't raise 500 errors
972 response = self.app.get(route_path(
972 response = self.app.get(route_path(
973 'pullrequest_show',
973 'pullrequest_show',
974 repo_name=pr_util.target_repository.repo_name,
974 repo_name=pr_util.target_repository.repo_name,
975 pull_request_id=pull_request.pull_request_id))
975 pull_request_id=pull_request.pull_request_id))
976
976
977 assert response.status_int == 200
977 assert response.status_int == 200
978 response.assert_response().element_contains(
978 response.assert_response().element_contains(
979 '#changeset_compare_view_content .alert strong',
979 '#changeset_compare_view_content .alert strong',
980 'Missing commits')
980 'Missing commits')
981
981
982 def test_branch_is_a_link(self, pr_util):
982 def test_branch_is_a_link(self, pr_util):
983 pull_request = pr_util.create_pull_request()
983 pull_request = pr_util.create_pull_request()
984 pull_request.source_ref = 'branch:origin:1234567890abcdef'
984 pull_request.source_ref = 'branch:origin:1234567890abcdef'
985 pull_request.target_ref = 'branch:target:abcdef1234567890'
985 pull_request.target_ref = 'branch:target:abcdef1234567890'
986 Session().add(pull_request)
986 Session().add(pull_request)
987 Session().commit()
987 Session().commit()
988
988
989 response = self.app.get(route_path(
989 response = self.app.get(route_path(
990 'pullrequest_show',
990 'pullrequest_show',
991 repo_name=pull_request.target_repo.scm_instance().name,
991 repo_name=pull_request.target_repo.scm_instance().name,
992 pull_request_id=pull_request.pull_request_id))
992 pull_request_id=pull_request.pull_request_id))
993 assert response.status_int == 200
993 assert response.status_int == 200
994
994
995 origin = response.assert_response().get_element('.pr-origininfo .tag')
995 origin = response.assert_response().get_element('.pr-origininfo .tag')
996 origin_children = origin.getchildren()
996 origin_children = origin.getchildren()
997 assert len(origin_children) == 1
997 assert len(origin_children) == 1
998 target = response.assert_response().get_element('.pr-targetinfo .tag')
998 target = response.assert_response().get_element('.pr-targetinfo .tag')
999 target_children = target.getchildren()
999 target_children = target.getchildren()
1000 assert len(target_children) == 1
1000 assert len(target_children) == 1
1001
1001
1002 expected_origin_link = route_path(
1002 expected_origin_link = route_path(
1003 'repo_commits',
1003 'repo_commits',
1004 repo_name=pull_request.source_repo.scm_instance().name,
1004 repo_name=pull_request.source_repo.scm_instance().name,
1005 params=dict(branch='origin'))
1005 params=dict(branch='origin'))
1006 expected_target_link = route_path(
1006 expected_target_link = route_path(
1007 'repo_commits',
1007 'repo_commits',
1008 repo_name=pull_request.target_repo.scm_instance().name,
1008 repo_name=pull_request.target_repo.scm_instance().name,
1009 params=dict(branch='target'))
1009 params=dict(branch='target'))
1010 assert origin_children[0].attrib['href'] == expected_origin_link
1010 assert origin_children[0].attrib['href'] == expected_origin_link
1011 assert origin_children[0].text == 'branch: origin'
1011 assert origin_children[0].text == 'branch: origin'
1012 assert target_children[0].attrib['href'] == expected_target_link
1012 assert target_children[0].attrib['href'] == expected_target_link
1013 assert target_children[0].text == 'branch: target'
1013 assert target_children[0].text == 'branch: target'
1014
1014
1015 def test_bookmark_is_not_a_link(self, pr_util):
1015 def test_bookmark_is_not_a_link(self, pr_util):
1016 pull_request = pr_util.create_pull_request()
1016 pull_request = pr_util.create_pull_request()
1017 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1017 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1018 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1018 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1019 Session().add(pull_request)
1019 Session().add(pull_request)
1020 Session().commit()
1020 Session().commit()
1021
1021
1022 response = self.app.get(route_path(
1022 response = self.app.get(route_path(
1023 'pullrequest_show',
1023 'pullrequest_show',
1024 repo_name=pull_request.target_repo.scm_instance().name,
1024 repo_name=pull_request.target_repo.scm_instance().name,
1025 pull_request_id=pull_request.pull_request_id))
1025 pull_request_id=pull_request.pull_request_id))
1026 assert response.status_int == 200
1026 assert response.status_int == 200
1027
1027
1028 origin = response.assert_response().get_element('.pr-origininfo .tag')
1028 origin = response.assert_response().get_element('.pr-origininfo .tag')
1029 assert origin.text.strip() == 'bookmark: origin'
1029 assert origin.text.strip() == 'bookmark: origin'
1030 assert origin.getchildren() == []
1030 assert origin.getchildren() == []
1031
1031
1032 target = response.assert_response().get_element('.pr-targetinfo .tag')
1032 target = response.assert_response().get_element('.pr-targetinfo .tag')
1033 assert target.text.strip() == 'bookmark: target'
1033 assert target.text.strip() == 'bookmark: target'
1034 assert target.getchildren() == []
1034 assert target.getchildren() == []
1035
1035
1036 def test_tag_is_not_a_link(self, pr_util):
1036 def test_tag_is_not_a_link(self, pr_util):
1037 pull_request = pr_util.create_pull_request()
1037 pull_request = pr_util.create_pull_request()
1038 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1038 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1039 pull_request.target_ref = 'tag:target:abcdef1234567890'
1039 pull_request.target_ref = 'tag:target:abcdef1234567890'
1040 Session().add(pull_request)
1040 Session().add(pull_request)
1041 Session().commit()
1041 Session().commit()
1042
1042
1043 response = self.app.get(route_path(
1043 response = self.app.get(route_path(
1044 'pullrequest_show',
1044 'pullrequest_show',
1045 repo_name=pull_request.target_repo.scm_instance().name,
1045 repo_name=pull_request.target_repo.scm_instance().name,
1046 pull_request_id=pull_request.pull_request_id))
1046 pull_request_id=pull_request.pull_request_id))
1047 assert response.status_int == 200
1047 assert response.status_int == 200
1048
1048
1049 origin = response.assert_response().get_element('.pr-origininfo .tag')
1049 origin = response.assert_response().get_element('.pr-origininfo .tag')
1050 assert origin.text.strip() == 'tag: origin'
1050 assert origin.text.strip() == 'tag: origin'
1051 assert origin.getchildren() == []
1051 assert origin.getchildren() == []
1052
1052
1053 target = response.assert_response().get_element('.pr-targetinfo .tag')
1053 target = response.assert_response().get_element('.pr-targetinfo .tag')
1054 assert target.text.strip() == 'tag: target'
1054 assert target.text.strip() == 'tag: target'
1055 assert target.getchildren() == []
1055 assert target.getchildren() == []
1056
1056
1057 @pytest.mark.parametrize('mergeable', [True, False])
1057 @pytest.mark.parametrize('mergeable', [True, False])
1058 def test_shadow_repository_link(
1058 def test_shadow_repository_link(
1059 self, mergeable, pr_util, http_host_only_stub):
1059 self, mergeable, pr_util, http_host_only_stub):
1060 """
1060 """
1061 Check that the pull request summary page displays a link to the shadow
1061 Check that the pull request summary page displays a link to the shadow
1062 repository if the pull request is mergeable. If it is not mergeable
1062 repository if the pull request is mergeable. If it is not mergeable
1063 the link should not be displayed.
1063 the link should not be displayed.
1064 """
1064 """
1065 pull_request = pr_util.create_pull_request(
1065 pull_request = pr_util.create_pull_request(
1066 mergeable=mergeable, enable_notifications=False)
1066 mergeable=mergeable, enable_notifications=False)
1067 target_repo = pull_request.target_repo.scm_instance()
1067 target_repo = pull_request.target_repo.scm_instance()
1068 pr_id = pull_request.pull_request_id
1068 pr_id = pull_request.pull_request_id
1069 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1069 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1070 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1070 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1071
1071
1072 response = self.app.get(route_path(
1072 response = self.app.get(route_path(
1073 'pullrequest_show',
1073 'pullrequest_show',
1074 repo_name=target_repo.name,
1074 repo_name=target_repo.name,
1075 pull_request_id=pr_id))
1075 pull_request_id=pr_id))
1076
1076
1077 if mergeable:
1077 if mergeable:
1078 response.assert_response().element_value_contains(
1078 response.assert_response().element_value_contains(
1079 'input.pr-mergeinfo', shadow_url)
1079 'input.pr-mergeinfo', shadow_url)
1080 response.assert_response().element_value_contains(
1080 response.assert_response().element_value_contains(
1081 'input.pr-mergeinfo ', 'pr-merge')
1081 'input.pr-mergeinfo ', 'pr-merge')
1082 else:
1082 else:
1083 response.assert_response().no_element_exists('.pr-mergeinfo')
1083 response.assert_response().no_element_exists('.pr-mergeinfo')
1084
1084
1085
1085
1086 @pytest.mark.usefixtures('app')
1086 @pytest.mark.usefixtures('app')
1087 @pytest.mark.backends("git", "hg")
1087 @pytest.mark.backends("git", "hg")
1088 class TestPullrequestsControllerDelete(object):
1088 class TestPullrequestsControllerDelete(object):
1089 def test_pull_request_delete_button_permissions_admin(
1089 def test_pull_request_delete_button_permissions_admin(
1090 self, autologin_user, user_admin, pr_util):
1090 self, autologin_user, user_admin, pr_util):
1091 pull_request = pr_util.create_pull_request(
1091 pull_request = pr_util.create_pull_request(
1092 author=user_admin.username, enable_notifications=False)
1092 author=user_admin.username, enable_notifications=False)
1093
1093
1094 response = self.app.get(route_path(
1094 response = self.app.get(route_path(
1095 'pullrequest_show',
1095 'pullrequest_show',
1096 repo_name=pull_request.target_repo.scm_instance().name,
1096 repo_name=pull_request.target_repo.scm_instance().name,
1097 pull_request_id=pull_request.pull_request_id))
1097 pull_request_id=pull_request.pull_request_id))
1098
1098
1099 response.mustcontain('id="delete_pullrequest"')
1099 response.mustcontain('id="delete_pullrequest"')
1100 response.mustcontain('Confirm to delete this pull request')
1100 response.mustcontain('Confirm to delete this pull request')
1101
1101
1102 def test_pull_request_delete_button_permissions_owner(
1102 def test_pull_request_delete_button_permissions_owner(
1103 self, autologin_regular_user, user_regular, pr_util):
1103 self, autologin_regular_user, user_regular, pr_util):
1104 pull_request = pr_util.create_pull_request(
1104 pull_request = pr_util.create_pull_request(
1105 author=user_regular.username, enable_notifications=False)
1105 author=user_regular.username, enable_notifications=False)
1106
1106
1107 response = self.app.get(route_path(
1107 response = self.app.get(route_path(
1108 'pullrequest_show',
1108 'pullrequest_show',
1109 repo_name=pull_request.target_repo.scm_instance().name,
1109 repo_name=pull_request.target_repo.scm_instance().name,
1110 pull_request_id=pull_request.pull_request_id))
1110 pull_request_id=pull_request.pull_request_id))
1111
1111
1112 response.mustcontain('id="delete_pullrequest"')
1112 response.mustcontain('id="delete_pullrequest"')
1113 response.mustcontain('Confirm to delete this pull request')
1113 response.mustcontain('Confirm to delete this pull request')
1114
1114
1115 def test_pull_request_delete_button_permissions_forbidden(
1115 def test_pull_request_delete_button_permissions_forbidden(
1116 self, autologin_regular_user, user_regular, user_admin, pr_util):
1116 self, autologin_regular_user, user_regular, user_admin, pr_util):
1117 pull_request = pr_util.create_pull_request(
1117 pull_request = pr_util.create_pull_request(
1118 author=user_admin.username, enable_notifications=False)
1118 author=user_admin.username, enable_notifications=False)
1119
1119
1120 response = self.app.get(route_path(
1120 response = self.app.get(route_path(
1121 'pullrequest_show',
1121 'pullrequest_show',
1122 repo_name=pull_request.target_repo.scm_instance().name,
1122 repo_name=pull_request.target_repo.scm_instance().name,
1123 pull_request_id=pull_request.pull_request_id))
1123 pull_request_id=pull_request.pull_request_id))
1124 response.mustcontain(no=['id="delete_pullrequest"'])
1124 response.mustcontain(no=['id="delete_pullrequest"'])
1125 response.mustcontain(no=['Confirm to delete this pull request'])
1125 response.mustcontain(no=['Confirm to delete this pull request'])
1126
1126
1127 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1127 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1128 self, autologin_regular_user, user_regular, user_admin, pr_util,
1128 self, autologin_regular_user, user_regular, user_admin, pr_util,
1129 user_util):
1129 user_util):
1130
1130
1131 pull_request = pr_util.create_pull_request(
1131 pull_request = pr_util.create_pull_request(
1132 author=user_admin.username, enable_notifications=False)
1132 author=user_admin.username, enable_notifications=False)
1133
1133
1134 user_util.grant_user_permission_to_repo(
1134 user_util.grant_user_permission_to_repo(
1135 pull_request.target_repo, user_regular,
1135 pull_request.target_repo, user_regular,
1136 'repository.write')
1136 'repository.write')
1137
1137
1138 response = self.app.get(route_path(
1138 response = self.app.get(route_path(
1139 'pullrequest_show',
1139 'pullrequest_show',
1140 repo_name=pull_request.target_repo.scm_instance().name,
1140 repo_name=pull_request.target_repo.scm_instance().name,
1141 pull_request_id=pull_request.pull_request_id))
1141 pull_request_id=pull_request.pull_request_id))
1142
1142
1143 response.mustcontain('id="open_edit_pullrequest"')
1143 response.mustcontain('id="open_edit_pullrequest"')
1144 response.mustcontain('id="delete_pullrequest"')
1144 response.mustcontain('id="delete_pullrequest"')
1145 response.mustcontain(no=['Confirm to delete this pull request'])
1145 response.mustcontain(no=['Confirm to delete this pull request'])
1146
1146
1147 def test_delete_comment_returns_404_if_comment_does_not_exist(
1147 def test_delete_comment_returns_404_if_comment_does_not_exist(
1148 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1148 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1149
1149
1150 pull_request = pr_util.create_pull_request(
1150 pull_request = pr_util.create_pull_request(
1151 author=user_admin.username, enable_notifications=False)
1151 author=user_admin.username, enable_notifications=False)
1152
1152
1153 self.app.post(
1153 self.app.post(
1154 route_path(
1154 route_path(
1155 'pullrequest_comment_delete',
1155 'pullrequest_comment_delete',
1156 repo_name=pull_request.target_repo.scm_instance().name,
1156 repo_name=pull_request.target_repo.scm_instance().name,
1157 pull_request_id=pull_request.pull_request_id,
1157 pull_request_id=pull_request.pull_request_id,
1158 comment_id=1024404),
1158 comment_id=1024404),
1159 extra_environ=xhr_header,
1159 extra_environ=xhr_header,
1160 params={'csrf_token': csrf_token},
1160 params={'csrf_token': csrf_token},
1161 status=404
1161 status=404
1162 )
1162 )
1163
1163
1164 def test_delete_comment(
1164 def test_delete_comment(
1165 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1165 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1166
1166
1167 pull_request = pr_util.create_pull_request(
1167 pull_request = pr_util.create_pull_request(
1168 author=user_admin.username, enable_notifications=False)
1168 author=user_admin.username, enable_notifications=False)
1169 comment = pr_util.create_comment()
1169 comment = pr_util.create_comment()
1170 comment_id = comment.comment_id
1170 comment_id = comment.comment_id
1171
1171
1172 response = self.app.post(
1172 response = self.app.post(
1173 route_path(
1173 route_path(
1174 'pullrequest_comment_delete',
1174 'pullrequest_comment_delete',
1175 repo_name=pull_request.target_repo.scm_instance().name,
1175 repo_name=pull_request.target_repo.scm_instance().name,
1176 pull_request_id=pull_request.pull_request_id,
1176 pull_request_id=pull_request.pull_request_id,
1177 comment_id=comment_id),
1177 comment_id=comment_id),
1178 extra_environ=xhr_header,
1178 extra_environ=xhr_header,
1179 params={'csrf_token': csrf_token},
1179 params={'csrf_token': csrf_token},
1180 status=200
1180 status=200
1181 )
1181 )
1182 assert response.body == 'true'
1182 assert response.body == 'true'
1183
1183
1184 @pytest.mark.parametrize('url_type', [
1184 @pytest.mark.parametrize('url_type', [
1185 'pullrequest_new',
1185 'pullrequest_new',
1186 'pullrequest_create',
1186 'pullrequest_create',
1187 'pullrequest_update',
1187 'pullrequest_update',
1188 'pullrequest_merge',
1188 'pullrequest_merge',
1189 ])
1189 ])
1190 def test_pull_request_is_forbidden_on_archived_repo(
1190 def test_pull_request_is_forbidden_on_archived_repo(
1191 self, autologin_user, backend, xhr_header, user_util, url_type):
1191 self, autologin_user, backend, xhr_header, user_util, url_type):
1192
1192
1193 # create a temporary repo
1193 # create a temporary repo
1194 source = user_util.create_repo(repo_type=backend.alias)
1194 source = user_util.create_repo(repo_type=backend.alias)
1195 repo_name = source.repo_name
1195 repo_name = source.repo_name
1196 repo = Repository.get_by_repo_name(repo_name)
1196 repo = Repository.get_by_repo_name(repo_name)
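# archive the repository so that pull request actions should be rejected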
1197 repo.archived = True
1197 repo.archived = True
1198 Session().commit()
1198 Session().commit()
1199
1199
1200 response = self.app.get(
1200 response = self.app.get(
1201 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1201 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1202
1202
1203 msg = 'Action not supported for archived repository.'
1203 msg = 'Action not supported for archived repository.'
1204 assert_session_flash(response, msg)
1204 assert_session_flash(response, msg)
1205
1205
1206
1206
1207 def assert_pull_request_status(pull_request, expected_status):
1207 def assert_pull_request_status(pull_request, expected_status):
1208 status = ChangesetStatusModel().calculated_review_status(
1208 status = ChangesetStatusModel().calculated_review_status(
1209 pull_request=pull_request)
1209 pull_request=pull_request)
1210 assert status == expected_status
1210 assert status == expected_status
1211
1211
1212
1212
1213 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1213 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1214 @pytest.mark.usefixtures("autologin_user")
1214 @pytest.mark.usefixtures("autologin_user")
1215 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1215 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1216 response = app.get(
1216 response = app.get(
1217 route_path(route, repo_name=backend_svn.repo_name), status=404)
1217 route_path(route, repo_name=backend_svn.repo_name), status=404)
1218
1218
@@ -1,523 +1,524 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import re
21 import re
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
26 from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
27 from rhodecode.lib import helpers as h
27 from rhodecode.lib import helpers as h
28 from rhodecode.lib.compat import OrderedDict
28 from rhodecode.lib.compat import OrderedDict
29 from rhodecode.lib.utils2 import AttributeDict, safe_str
29 from rhodecode.lib.utils2 import AttributeDict, safe_str
30 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
30 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
31 from rhodecode.model.db import Repository
31 from rhodecode.model.db import Repository
32 from rhodecode.model.meta import Session
32 from rhodecode.model.meta import Session
33 from rhodecode.model.repo import RepoModel
33 from rhodecode.model.repo import RepoModel
34 from rhodecode.model.scm import ScmModel
34 from rhodecode.model.scm import ScmModel
35 from rhodecode.tests import assert_session_flash
35 from rhodecode.tests import assert_session_flash
36 from rhodecode.tests.fixture import Fixture
36 from rhodecode.tests.fixture import Fixture
37 from rhodecode.tests.utils import AssertResponse, repo_on_filesystem
37 from rhodecode.tests.utils import AssertResponse, repo_on_filesystem
38
38
39
39
40 fixture = Fixture()
40 fixture = Fixture()
41
41
42
42
43 def route_path(name, params=None, **kwargs):
43 def route_path(name, params=None, **kwargs):
44 import urllib
44 import urllib
45
45
46 base_url = {
46 base_url = {
47 'repo_summary': '/{repo_name}',
47 'repo_summary': '/{repo_name}',
48 'repo_stats': '/{repo_name}/repo_stats/{commit_id}',
48 'repo_stats': '/{repo_name}/repo_stats/{commit_id}',
49 'repo_refs_data': '/{repo_name}/refs-data',
49 'repo_refs_data': '/{repo_name}/refs-data',
50 'repo_refs_changelog_data': '/{repo_name}/refs-data-changelog',
50 'repo_refs_changelog_data': '/{repo_name}/refs-data-changelog',
51 'repo_creating_check': '/{repo_name}/repo_creating_check',
51 'repo_creating_check': '/{repo_name}/repo_creating_check',
52 }[name].format(**kwargs)
52 }[name].format(**kwargs)
53
53
54 if params:
54 if params:
55 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
55 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
56 return base_url
56 return base_url
57
57
58
58
59 def assert_clone_url(response, server, repo, disabled=False):
59 def assert_clone_url(response, server, repo, disabled=False):
60
60
61 response.mustcontain(
61 response.mustcontain(
62 '<input type="text" class="input-monospace clone_url_input" '
62 '<input type="text" class="input-monospace clone_url_input" '
63 '{disabled}readonly="readonly" '
63 '{disabled}readonly="readonly" '
64 'value="http://test_admin@{server}/{repo}"/>'.format(
64 'value="http://test_admin@{server}/{repo}"/>'.format(
65 server=server, repo=repo, disabled='disabled ' if disabled else ' ')
65 server=server, repo=repo, disabled='disabled ' if disabled else ' ')
66 )
66 )
67
67
68
68
69 @pytest.mark.usefixtures('app')
69 @pytest.mark.usefixtures('app')
70 class TestSummaryView(object):
70 class TestSummaryView(object):
71 def test_index(self, autologin_user, backend, http_host_only_stub):
71 def test_index(self, autologin_user, backend, http_host_only_stub):
72 repo_id = backend.repo.repo_id
72 repo_id = backend.repo.repo_id
73 repo_name = backend.repo_name
73 repo_name = backend.repo_name
74 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
74 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
75 return_value=False):
75 return_value=False):
76 response = self.app.get(
76 response = self.app.get(
77 route_path('repo_summary', repo_name=repo_name))
77 route_path('repo_summary', repo_name=repo_name))
78
78
79 # repo type
79 # repo type
80 response.mustcontain(
80 response.mustcontain(
81 '<i class="icon-%s">' % (backend.alias, )
81 '<i class="icon-%s">' % (backend.alias, )
82 )
82 )
83 # public/private
83 # public/private
84 response.mustcontain(
84 response.mustcontain(
85 """<i class="icon-unlock-alt">"""
85 """<i class="icon-unlock-alt">"""
86 )
86 )
87
87
88 # clone url...
88 # clone url...
89 assert_clone_url(response, http_host_only_stub, repo_name)
89 assert_clone_url(response, http_host_only_stub, repo_name)
90 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
90 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
91
91
92 def test_index_svn_without_proxy(
92 def test_index_svn_without_proxy(
93 self, autologin_user, backend_svn, http_host_only_stub):
93 self, autologin_user, backend_svn, http_host_only_stub):
94 repo_id = backend_svn.repo.repo_id
94 repo_id = backend_svn.repo.repo_id
95 repo_name = backend_svn.repo_name
95 repo_name = backend_svn.repo_name
96 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
96 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
97 # clone url...
97 # clone url...
98
98
99 assert_clone_url(response, http_host_only_stub, repo_name, disabled=True)
99 assert_clone_url(response, http_host_only_stub, repo_name, disabled=True)
100 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id), disabled=True)
100 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id), disabled=True)
101
101
102 def test_index_with_trailing_slash(
102 def test_index_with_trailing_slash(
103 self, autologin_user, backend, http_host_only_stub):
103 self, autologin_user, backend, http_host_only_stub):
104
104
105 repo_id = backend.repo.repo_id
105 repo_id = backend.repo.repo_id
106 repo_name = backend.repo_name
106 repo_name = backend.repo_name
107 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
107 with mock.patch('rhodecode.lib.helpers.is_svn_without_proxy',
108 return_value=False):
108 return_value=False):
109 response = self.app.get(
109 response = self.app.get(
110 route_path('repo_summary', repo_name=repo_name) + '/',
110 route_path('repo_summary', repo_name=repo_name) + '/',
111 status=200)
111 status=200)
112
112
113 # clone url...
113 # clone url...
114 assert_clone_url(response, http_host_only_stub, repo_name)
114 assert_clone_url(response, http_host_only_stub, repo_name)
115 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
115 assert_clone_url(response, http_host_only_stub, '_{}'.format(repo_id))
116
116
117 def test_index_by_id(self, autologin_user, backend):
117 def test_index_by_id(self, autologin_user, backend):
118 repo_id = backend.repo.repo_id
118 repo_id = backend.repo.repo_id
119 response = self.app.get(
119 response = self.app.get(
120 route_path('repo_summary', repo_name='_%s' % (repo_id,)))
120 route_path('repo_summary', repo_name='_%s' % (repo_id,)))
121
121
122 # repo type
122 # repo type
123 response.mustcontain(
123 response.mustcontain(
124 '<i class="icon-%s">' % (backend.alias, )
124 '<i class="icon-%s">' % (backend.alias, )
125 )
125 )
126 # public/private
126 # public/private
127 response.mustcontain(
127 response.mustcontain(
128 """<i class="icon-unlock-alt">"""
128 """<i class="icon-unlock-alt">"""
129 )
129 )
130
130
131 def test_index_by_repo_having_id_path_in_name_hg(self, autologin_user):
131 def test_index_by_repo_having_id_path_in_name_hg(self, autologin_user):
132 fixture.create_repo(name='repo_1')
132 fixture.create_repo(name='repo_1')
133 response = self.app.get(route_path('repo_summary', repo_name='repo_1'))
133 response = self.app.get(route_path('repo_summary', repo_name='repo_1'))
134
134
135 try:
135 try:
136 response.mustcontain("repo_1")
136 response.mustcontain("repo_1")
137 finally:
137 finally:
138 RepoModel().delete(Repository.get_by_repo_name('repo_1'))
138 RepoModel().delete(Repository.get_by_repo_name('repo_1'))
139 Session().commit()
139 Session().commit()
140
140
141 def test_index_with_anonymous_access_disabled(
141 def test_index_with_anonymous_access_disabled(
142 self, backend, disable_anonymous_user):
142 self, backend, disable_anonymous_user):
143 response = self.app.get(
143 response = self.app.get(
144 route_path('repo_summary', repo_name=backend.repo_name), status=302)
144 route_path('repo_summary', repo_name=backend.repo_name), status=302)
145 assert 'login' in response.location
145 assert 'login' in response.location
146
146
147 def _enable_stats(self, repo):
147 def _enable_stats(self, repo):
148 r = Repository.get_by_repo_name(repo)
148 r = Repository.get_by_repo_name(repo)
149 r.enable_statistics = True
149 r.enable_statistics = True
150 Session().add(r)
150 Session().add(r)
151 Session().commit()
151 Session().commit()
152
152
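# expected trending source statistics per backend, compared against the code_stats response below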
153 expected_trending = {
153 expected_trending = {
154 'hg': {
154 'hg': {
155 "py": {"count": 68, "desc": ["Python"]},
155 "py": {"count": 68, "desc": ["Python"]},
156 "rst": {"count": 16, "desc": ["Rst"]},
156 "rst": {"count": 16, "desc": ["Rst"]},
157 "css": {"count": 2, "desc": ["Css"]},
157 "css": {"count": 2, "desc": ["Css"]},
158 "sh": {"count": 2, "desc": ["Bash"]},
158 "sh": {"count": 2, "desc": ["Bash"]},
159 "bat": {"count": 1, "desc": ["Batch"]},
159 "bat": {"count": 1, "desc": ["Batch"]},
160 "cfg": {"count": 1, "desc": ["Ini"]},
160 "cfg": {"count": 1, "desc": ["Ini"]},
161 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
161 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
162 "ini": {"count": 1, "desc": ["Ini"]},
162 "ini": {"count": 1, "desc": ["Ini"]},
163 "js": {"count": 1, "desc": ["Javascript"]},
163 "js": {"count": 1, "desc": ["Javascript"]},
164 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
164 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
165 },
165 },
166 'git': {
166 'git': {
167 "py": {"count": 68, "desc": ["Python"]},
167 "py": {"count": 68, "desc": ["Python"]},
168 "rst": {"count": 16, "desc": ["Rst"]},
168 "rst": {"count": 16, "desc": ["Rst"]},
169 "css": {"count": 2, "desc": ["Css"]},
169 "css": {"count": 2, "desc": ["Css"]},
170 "sh": {"count": 2, "desc": ["Bash"]},
170 "sh": {"count": 2, "desc": ["Bash"]},
171 "bat": {"count": 1, "desc": ["Batch"]},
171 "bat": {"count": 1, "desc": ["Batch"]},
172 "cfg": {"count": 1, "desc": ["Ini"]},
172 "cfg": {"count": 1, "desc": ["Ini"]},
173 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
173 "html": {"count": 1, "desc": ["EvoqueHtml", "Html"]},
174 "ini": {"count": 1, "desc": ["Ini"]},
174 "ini": {"count": 1, "desc": ["Ini"]},
175 "js": {"count": 1, "desc": ["Javascript"]},
175 "js": {"count": 1, "desc": ["Javascript"]},
176 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
176 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]}
177 },
177 },
178 'svn': {
178 'svn': {
179 "py": {"count": 75, "desc": ["Python"]},
179 "py": {"count": 75, "desc": ["Python"]},
180 "rst": {"count": 16, "desc": ["Rst"]},
180 "rst": {"count": 16, "desc": ["Rst"]},
181 "html": {"count": 11, "desc": ["EvoqueHtml", "Html"]},
181 "html": {"count": 11, "desc": ["EvoqueHtml", "Html"]},
182 "css": {"count": 2, "desc": ["Css"]},
182 "css": {"count": 2, "desc": ["Css"]},
183 "bat": {"count": 1, "desc": ["Batch"]},
183 "bat": {"count": 1, "desc": ["Batch"]},
184 "cfg": {"count": 1, "desc": ["Ini"]},
184 "cfg": {"count": 1, "desc": ["Ini"]},
185 "ini": {"count": 1, "desc": ["Ini"]},
185 "ini": {"count": 1, "desc": ["Ini"]},
186 "js": {"count": 1, "desc": ["Javascript"]},
186 "js": {"count": 1, "desc": ["Javascript"]},
187 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]},
187 "makefile": {"count": 1, "desc": ["Makefile", "Makefile"]},
188 "sh": {"count": 1, "desc": ["Bash"]}
188 "sh": {"count": 1, "desc": ["Bash"]}
189 },
189 },
190 }
190 }
191
191
192 def test_repo_stats(self, autologin_user, backend, xhr_header):
192 def test_repo_stats(self, autologin_user, backend, xhr_header):
193 response = self.app.get(
193 response = self.app.get(
194 route_path(
194 route_path(
195 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
195 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
196 extra_environ=xhr_header,
196 extra_environ=xhr_header,
197 status=200)
197 status=200)
198 assert re.match(r'6[\d\.]+ KiB', response.json['size'])
198 assert re.match(r'6[\d\.]+ KiB', response.json['size'])
199
199
200 def test_repo_stats_code_stats_enabled(self, autologin_user, backend, xhr_header):
200 def test_repo_stats_code_stats_enabled(self, autologin_user, backend, xhr_header):
201 repo_name = backend.repo_name
201 repo_name = backend.repo_name
202
202
203 # codes stats
203 # codes stats
204 self._enable_stats(repo_name)
204 self._enable_stats(repo_name)
205 ScmModel().mark_for_invalidation(repo_name)
205 ScmModel().mark_for_invalidation(repo_name)
206
206
207 response = self.app.get(
207 response = self.app.get(
208 route_path(
208 route_path(
209 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
209 'repo_stats', repo_name=backend.repo_name, commit_id='tip'),
210 extra_environ=xhr_header,
210 extra_environ=xhr_header,
211 status=200)
211 status=200)
212
212
213 expected_data = self.expected_trending[backend.alias]
213 expected_data = self.expected_trending[backend.alias]
214 returned_stats = response.json['code_stats']
214 returned_stats = response.json['code_stats']
215 for k, v in expected_data.items():
215 for k, v in expected_data.items():
216 assert v == returned_stats[k]
216 assert v == returned_stats[k]
217
217
218 def test_repo_refs_data(self, backend):
218 def test_repo_refs_data(self, backend):
219 response = self.app.get(
219 response = self.app.get(
220 route_path('repo_refs_data', repo_name=backend.repo_name),
220 route_path('repo_refs_data', repo_name=backend.repo_name),
221 status=200)
221 status=200)
222
222
223 # Ensure that there is the correct amount of items in the result
223 # Ensure that there is the correct amount of items in the result
224 repo = backend.repo.scm_instance()
224 repo = backend.repo.scm_instance()
225 data = response.json['results']
225 data = response.json['results']
226 items = sum(len(section['children']) for section in data)
226 items = sum(len(section['children']) for section in data)
227 repo_refs = len(repo.branches) + len(repo.tags) + len(repo.bookmarks)
227 repo_refs = len(repo.branches) + len(repo.tags) + len(repo.bookmarks)
228 assert items == repo_refs
228 assert items == repo_refs
229
229
230 def test_index_shows_missing_requirements_message(
230 def test_index_shows_missing_requirements_message(
231 self, backend, autologin_user):
231 self, backend, autologin_user):
232 repo_name = backend.repo_name
232 repo_name = backend.repo_name
233 scm_patcher = mock.patch.object(
233 scm_patcher = mock.patch.object(
234 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
234 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
235
235
236 with scm_patcher:
236 with scm_patcher:
237 response = self.app.get(
237 response = self.app.get(
238 route_path('repo_summary', repo_name=repo_name))
238 route_path('repo_summary', repo_name=repo_name))
239 assert_response = AssertResponse(response)
239 assert_response = AssertResponse(response)
240 assert_response.element_contains(
240 assert_response.element_contains(
241 '.main .alert-warning strong', 'Missing requirements')
241 '.main .alert-warning strong', 'Missing requirements')
242 assert_response.element_contains(
242 assert_response.element_contains(
243 '.main .alert-warning',
243 '.main .alert-warning',
244 'Commits cannot be displayed, because this repository '
244 'Commits cannot be displayed, because this repository '
245 'uses one or more extensions, which was not enabled.')
245 'uses one or more extensions, which was not enabled.')
246
246
247 def test_missing_requirements_page_does_not_contains_switch_to(
247 def test_missing_requirements_page_does_not_contains_switch_to(
248 self, autologin_user, backend):
248 self, autologin_user, backend):
249 repo_name = backend.repo_name
249 repo_name = backend.repo_name
250 scm_patcher = mock.patch.object(
250 scm_patcher = mock.patch.object(
251 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
251 Repository, 'scm_instance', side_effect=RepositoryRequirementError)
252
252
253 with scm_patcher:
253 with scm_patcher:
254 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
254 response = self.app.get(route_path('repo_summary', repo_name=repo_name))
255 response.mustcontain(no='Switch To')
255 response.mustcontain(no='Switch To')
256
256
257
257
258 @pytest.mark.usefixtures('app')
258 @pytest.mark.usefixtures('app')
259 class TestRepoLocation(object):
259 class TestRepoLocation(object):
260
260
261 @pytest.mark.parametrize("suffix", [u'', u'ąęł'], ids=['', 'non-ascii'])
261 @pytest.mark.parametrize("suffix", [u'', u'ąęł'], ids=['', 'non-ascii'])
262 def test_missing_filesystem_repo(
262 def test_missing_filesystem_repo(
263 self, autologin_user, backend, suffix, csrf_token):
263 self, autologin_user, backend, suffix, csrf_token):
264 repo = backend.create_repo(name_suffix=suffix)
264 repo = backend.create_repo(name_suffix=suffix)
265 repo_name = repo.repo_name
265 repo_name = repo.repo_name
266
266
267 # delete from file system
267 # delete from file system
268 RepoModel()._delete_filesystem_repo(repo)
268 RepoModel()._delete_filesystem_repo(repo)
269
269
270 # test if the repo is still in the database
270 # test if the repo is still in the database
271 new_repo = RepoModel().get_by_repo_name(repo_name)
271 new_repo = RepoModel().get_by_repo_name(repo_name)
272 assert new_repo.repo_name == repo_name
272 assert new_repo.repo_name == repo_name
273
273
274 # check if repo is not in the filesystem
274 # check if repo is not in the filesystem
275 assert not repo_on_filesystem(repo_name)
275 assert not repo_on_filesystem(repo_name)
276
276
277 response = self.app.get(
277 response = self.app.get(
278 route_path('repo_summary', repo_name=safe_str(repo_name)), status=302)
278 route_path('repo_summary', repo_name=safe_str(repo_name)), status=302)
279
279
280 msg = 'The repository `%s` cannot be loaded in filesystem. ' \
280 msg = 'The repository `%s` cannot be loaded in filesystem. ' \
281 'Please check if it exist, or is not damaged.' % repo_name
281 'Please check if it exist, or is not damaged.' % repo_name
282 assert_session_flash(response, msg)
282 assert_session_flash(response, msg)
283
283
284 @pytest.mark.parametrize("suffix", [u'', u'Δ…Δ™Ε‚'], ids=['', 'non-ascii'])
284 @pytest.mark.parametrize("suffix", [u'', u'Δ…Δ™Ε‚'], ids=['', 'non-ascii'])
285 def test_missing_filesystem_repo_on_repo_check(
285 def test_missing_filesystem_repo_on_repo_check(
286 self, autologin_user, backend, suffix, csrf_token):
286 self, autologin_user, backend, suffix, csrf_token):
287 repo = backend.create_repo(name_suffix=suffix)
287 repo = backend.create_repo(name_suffix=suffix)
288 repo_name = repo.repo_name
288 repo_name = repo.repo_name
289
289
290 # delete from file system
290 # delete from file system
291 RepoModel()._delete_filesystem_repo(repo)
291 RepoModel()._delete_filesystem_repo(repo)
292
292
293 # test if the repo is still in the database
293 # test if the repo is still in the database
294 new_repo = RepoModel().get_by_repo_name(repo_name)
294 new_repo = RepoModel().get_by_repo_name(repo_name)
295 assert new_repo.repo_name == repo_name
295 assert new_repo.repo_name == repo_name
296
296
297 # check if repo is not in the filesystem
297 # check if repo is not in the filesystem
298 assert not repo_on_filesystem(repo_name)
298 assert not repo_on_filesystem(repo_name)
299
299
300 # flush the session
300 # flush the session
301 self.app.get(
301 self.app.get(
302 route_path('repo_summary', repo_name=safe_str(repo_name)),
302 route_path('repo_summary', repo_name=safe_str(repo_name)),
303 status=302)
303 status=302)
304
304
305 response = self.app.get(
305 response = self.app.get(
306 route_path('repo_creating_check', repo_name=safe_str(repo_name)),
306 route_path('repo_creating_check', repo_name=safe_str(repo_name)),
307 status=200)
307 status=200)
308 msg = 'The repository `%s` cannot be loaded in filesystem. ' \
308 msg = 'The repository `%s` cannot be loaded in filesystem. ' \
309 'Please check if it exist, or is not damaged.' % repo_name
309 'Please check if it exist, or is not damaged.' % repo_name
310 assert_session_flash(response, msg)
310 assert_session_flash(response, msg)
311
311
312
312
313 @pytest.fixture()
313 @pytest.fixture()
314 def summary_view(context_stub, request_stub, user_util):
314 def summary_view(context_stub, request_stub, user_util):
315 """
315 """
316 Bootstrap view to test the view functions
316 Bootstrap view to test the view functions
317 """
317 """
318 request_stub.matched_route = AttributeDict(name='test_view')
318 request_stub.matched_route = AttributeDict(name='test_view')
319
319
320 request_stub.user = user_util.create_user().AuthUser()
320 request_stub.user = user_util.create_user().AuthUser()
321 request_stub.db_repo = user_util.create_repo()
321 request_stub.db_repo = user_util.create_repo()
322
322
323 view = RepoSummaryView(context=context_stub, request=request_stub)
323 view = RepoSummaryView(context=context_stub, request=request_stub)
324 return view
324 return view
325
325
326
326
327 @pytest.mark.usefixtures('app')
327 @pytest.mark.usefixtures('app')
328 class TestCreateReferenceData(object):
328 class TestCreateReferenceData(object):
329
329
330 @pytest.fixture
330 @pytest.fixture
331 def example_refs(self):
331 def example_refs(self):
332 section_1_refs = OrderedDict((('a', 'a_id'), ('b', 'b_id')))
332 section_1_refs = OrderedDict((('a', 'a_id'), ('b', 'b_id')))
333 example_refs = [
333 example_refs = [
334 ('section_1', section_1_refs, 't1'),
334 ('section_1', section_1_refs, 't1'),
335 ('section_2', {'c': 'c_id'}, 't2'),
335 ('section_2', {'c': 'c_id'}, 't2'),
336 ]
336 ]
337 return example_refs
337 return example_refs
338
338
339 def test_generates_refs_based_on_commit_ids(self, example_refs, summary_view):
339 def test_generates_refs_based_on_commit_ids(self, example_refs, summary_view):
340 repo = mock.Mock()
340 repo = mock.Mock()
341 repo.name = 'test-repo'
341 repo.name = 'test-repo'
342 repo.alias = 'git'
342 repo.alias = 'git'
343 full_repo_name = 'pytest-repo-group/' + repo.name
343 full_repo_name = 'pytest-repo-group/' + repo.name
344
344
345 result = summary_view._create_reference_data(
345 result = summary_view._create_reference_data(
346 repo, full_repo_name, example_refs)
346 repo, full_repo_name, example_refs)
347
347
348 expected_files_url = '/{}/files/'.format(full_repo_name)
348 expected_files_url = '/{}/files/'.format(full_repo_name)
349 expected_result = [
349 expected_result = [
350 {
350 {
351 'children': [
351 'children': [
352 {
352 {
353 'id': 'a', 'raw_id': 'a_id', 'text': 'a', 'type': 't1',
353 'id': 'a', 'idx': 0, 'raw_id': 'a_id', 'text': 'a', 'type': 't1',
354 'files_url': expected_files_url + 'a/?at=a',
354 'files_url': expected_files_url + 'a/?at=a',
355 },
355 },
356 {
356 {
357 'id': 'b', 'raw_id': 'b_id', 'text': 'b', 'type': 't1',
357 'id': 'b', 'idx': 0, 'raw_id': 'b_id', 'text': 'b', 'type': 't1',
358 'files_url': expected_files_url + 'b/?at=b',
358 'files_url': expected_files_url + 'b/?at=b',
359 }
359 }
360 ],
360 ],
361 'text': 'section_1'
361 'text': 'section_1'
362 },
362 },
363 {
363 {
364 'children': [
364 'children': [
365 {
365 {
366 'id': 'c', 'raw_id': 'c_id', 'text': 'c', 'type': 't2',
366 'id': 'c', 'idx': 0, 'raw_id': 'c_id', 'text': 'c', 'type': 't2',
367 'files_url': expected_files_url + 'c/?at=c',
367 'files_url': expected_files_url + 'c/?at=c',
368 }
368 }
369 ],
369 ],
370 'text': 'section_2'
370 'text': 'section_2'
371 }]
371 }]
372 assert result == expected_result
372 assert result == expected_result
373
373
374 def test_generates_refs_with_path_for_svn(self, example_refs, summary_view):
374 def test_generates_refs_with_path_for_svn(self, example_refs, summary_view):
375 repo = mock.Mock()
375 repo = mock.Mock()
376 repo.name = 'test-repo'
376 repo.name = 'test-repo'
377 repo.alias = 'svn'
377 repo.alias = 'svn'
378 full_repo_name = 'pytest-repo-group/' + repo.name
378 full_repo_name = 'pytest-repo-group/' + repo.name
379
379
380 result = summary_view._create_reference_data(
380 result = summary_view._create_reference_data(
381 repo, full_repo_name, example_refs)
381 repo, full_repo_name, example_refs)
382
382
383 expected_files_url = '/{}/files/'.format(full_repo_name)
383 expected_files_url = '/{}/files/'.format(full_repo_name)
384 expected_result = [
384 expected_result = [
385 {
385 {
386 'children': [
386 'children': [
387 {
387 {
388 'id': 'a@a_id', 'raw_id': 'a_id',
388 'id': 'a@a_id', 'idx': 0, 'raw_id': 'a_id',
389 'text': 'a', 'type': 't1',
389 'text': 'a', 'type': 't1',
390 'files_url': expected_files_url + 'a_id/a?at=a',
390 'files_url': expected_files_url + 'a_id/a?at=a',
391 },
391 },
392 {
392 {
393 'id': 'b@b_id', 'raw_id': 'b_id',
393 'id': 'b@b_id', 'idx': 0, 'raw_id': 'b_id',
394 'text': 'b', 'type': 't1',
394 'text': 'b', 'type': 't1',
395 'files_url': expected_files_url + 'b_id/b?at=b',
395 'files_url': expected_files_url + 'b_id/b?at=b',
396 }
396 }
397 ],
397 ],
398 'text': 'section_1'
398 'text': 'section_1'
399 },
399 },
400 {
400 {
401 'children': [
401 'children': [
402 {
402 {
403 'id': 'c@c_id', 'raw_id': 'c_id',
403 'id': 'c@c_id', 'idx': 0, 'raw_id': 'c_id',
404 'text': 'c', 'type': 't2',
404 'text': 'c', 'type': 't2',
405 'files_url': expected_files_url + 'c_id/c?at=c',
405 'files_url': expected_files_url + 'c_id/c?at=c',
406 }
406 }
407 ],
407 ],
408 'text': 'section_2'
408 'text': 'section_2'
409 }
409 }
410 ]
410 ]
411 assert result == expected_result
411 assert result == expected_result
412
412
413
413
414 class TestCreateFilesUrl(object):
414 class TestCreateFilesUrl(object):
415
415
416 def test_creates_non_svn_url(self, app, summary_view):
416 def test_creates_non_svn_url(self, app, summary_view):
417 repo = mock.Mock()
417 repo = mock.Mock()
418 repo.name = 'abcde'
418 repo.name = 'abcde'
419 full_repo_name = 'test-repo-group/' + repo.name
419 full_repo_name = 'test-repo-group/' + repo.name
420 ref_name = 'branch1'
420 ref_name = 'branch1'
421 raw_id = 'deadbeef0123456789'
421 raw_id = 'deadbeef0123456789'
422 is_svn = False
422 is_svn = False
423
423
424 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
424 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
425 result = summary_view._create_files_url(
425 result = summary_view._create_files_url(
426 repo, full_repo_name, ref_name, raw_id, is_svn)
426 repo, full_repo_name, ref_name, raw_id, is_svn)
427 url_mock.assert_called_once_with(
427 url_mock.assert_called_once_with(
428 'repo_files', repo_name=full_repo_name, commit_id=ref_name,
428 'repo_files', repo_name=full_repo_name, commit_id=ref_name,
429 f_path='', _query=dict(at=ref_name))
429 f_path='', _query=dict(at=ref_name))
430 assert result == url_mock.return_value
430 assert result == url_mock.return_value
431
431
432 def test_creates_svn_url(self, app, summary_view):
432 def test_creates_svn_url(self, app, summary_view):
433 repo = mock.Mock()
433 repo = mock.Mock()
434 repo.name = 'abcde'
434 repo.name = 'abcde'
435 full_repo_name = 'test-repo-group/' + repo.name
435 full_repo_name = 'test-repo-group/' + repo.name
436 ref_name = 'branch1'
436 ref_name = 'branch1'
437 raw_id = 'deadbeef0123456789'
437 raw_id = 'deadbeef0123456789'
438 is_svn = True
438 is_svn = True
439
439
440 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
440 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
441 result = summary_view._create_files_url(
441 result = summary_view._create_files_url(
442 repo, full_repo_name, ref_name, raw_id, is_svn)
442 repo, full_repo_name, ref_name, raw_id, is_svn)
443 url_mock.assert_called_once_with(
443 url_mock.assert_called_once_with(
444 'repo_files', repo_name=full_repo_name, f_path=ref_name,
444 'repo_files', repo_name=full_repo_name, f_path=ref_name,
445 commit_id=raw_id, _query=dict(at=ref_name))
445 commit_id=raw_id, _query=dict(at=ref_name))
446 assert result == url_mock.return_value
446 assert result == url_mock.return_value
447
447
448 def test_name_has_slashes(self, app, summary_view):
448 def test_name_has_slashes(self, app, summary_view):
449 repo = mock.Mock()
449 repo = mock.Mock()
450 repo.name = 'abcde'
450 repo.name = 'abcde'
451 full_repo_name = 'test-repo-group/' + repo.name
451 full_repo_name = 'test-repo-group/' + repo.name
452 ref_name = 'branch1/branch2'
452 ref_name = 'branch1/branch2'
453 raw_id = 'deadbeef0123456789'
453 raw_id = 'deadbeef0123456789'
454 is_svn = False
454 is_svn = False
455
455
456 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
456 with mock.patch('rhodecode.lib.helpers.route_path') as url_mock:
457 result = summary_view._create_files_url(
457 result = summary_view._create_files_url(
458 repo, full_repo_name, ref_name, raw_id, is_svn)
458 repo, full_repo_name, ref_name, raw_id, is_svn)
459 url_mock.assert_called_once_with(
459 url_mock.assert_called_once_with(
460 'repo_files', repo_name=full_repo_name, commit_id=raw_id,
460 'repo_files', repo_name=full_repo_name, commit_id=raw_id,
461 f_path='', _query=dict(at=ref_name))
461 f_path='', _query=dict(at=ref_name))
462 assert result == url_mock.return_value
462 assert result == url_mock.return_value
463
463
464
464
465 class TestReferenceItems(object):
465 class TestReferenceItems(object):
466 repo = mock.Mock()
466 repo = mock.Mock()
467 repo.name = 'pytest-repo'
467 repo.name = 'pytest-repo'
468 repo_full_name = 'pytest-repo-group/' + repo.name
468 repo_full_name = 'pytest-repo-group/' + repo.name
469 ref_type = 'branch'
469 ref_type = 'branch'
470 fake_url = '/abcde/'
470 fake_url = '/abcde/'
471
471
472 @staticmethod
472 @staticmethod
473 def _format_function(name, id_):
473 def _format_function(name, id_):
474 return 'format_function_{}_{}'.format(name, id_)
474 return 'format_function_{}_{}'.format(name, id_)
475
475
476 def test_creates_required_amount_of_items(self, summary_view):
476 def test_creates_required_amount_of_items(self, summary_view):
477 amount = 100
477 amount = 100
478 refs = {
478 refs = {
479 'ref{}'.format(i): '{0:040d}'.format(i)
479 'ref{}'.format(i): '{0:040d}'.format(i)
480 for i in range(amount)
480 for i in range(amount)
481 }
481 }
482
482
483 url_patcher = mock.patch.object(summary_view, '_create_files_url')
483 url_patcher = mock.patch.object(summary_view, '_create_files_url')
484 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
484 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
485 return_value=False)
485 return_value=False)
486
486
487 with url_patcher as url_mock, svn_patcher:
487 with url_patcher as url_mock, svn_patcher:
488 result = summary_view._create_reference_items(
488 result = summary_view._create_reference_items(
489 self.repo, self.repo_full_name, refs, self.ref_type,
489 self.repo, self.repo_full_name, refs, self.ref_type,
490 self._format_function)
490 self._format_function)
491 assert len(result) == amount
491 assert len(result) == amount
492 assert url_mock.call_count == amount
492 assert url_mock.call_count == amount
493
493
494 def test_single_item_details(self, summary_view):
494 def test_single_item_details(self, summary_view):
495 ref_name = 'ref1'
495 ref_name = 'ref1'
496 ref_id = 'deadbeef'
496 ref_id = 'deadbeef'
497 refs = {
497 refs = {
498 ref_name: ref_id
498 ref_name: ref_id
499 }
499 }
500
500
501 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
501 svn_patcher = mock.patch('rhodecode.lib.helpers.is_svn',
502 return_value=False)
502 return_value=False)
503
503
504 url_patcher = mock.patch.object(
504 url_patcher = mock.patch.object(
505 summary_view, '_create_files_url', return_value=self.fake_url)
505 summary_view, '_create_files_url', return_value=self.fake_url)
506
506
507 with url_patcher as url_mock, svn_patcher:
507 with url_patcher as url_mock, svn_patcher:
508 result = summary_view._create_reference_items(
508 result = summary_view._create_reference_items(
509 self.repo, self.repo_full_name, refs, self.ref_type,
509 self.repo, self.repo_full_name, refs, self.ref_type,
510 self._format_function)
510 self._format_function)
511
511
512 url_mock.assert_called_once_with(
512 url_mock.assert_called_once_with(
513 self.repo, self.repo_full_name, ref_name, ref_id, False)
513 self.repo, self.repo_full_name, ref_name, ref_id, False)
514 expected_result = [
514 expected_result = [
515 {
515 {
516 'text': ref_name,
516 'text': ref_name,
517 'id': self._format_function(ref_name, ref_id),
517 'id': self._format_function(ref_name, ref_id),
518 'raw_id': ref_id,
518 'raw_id': ref_id,
519 'idx': 0,
519 'type': self.ref_type,
520 'type': self.ref_type,
520 'files_url': self.fake_url
521 'files_url': self.fake_url
521 }
522 }
522 ]
523 ]
523 assert result == expected_result
524 assert result == expected_result
@@ -1,1288 +1,1293 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import mock
22 import mock
23 import os
23 import os
24 import sys
24 import sys
25 import shutil
25 import shutil
26
26
27 import pytest
27 import pytest
28
28
29 from rhodecode.lib.utils import make_db_config
29 from rhodecode.lib.utils import make_db_config
30 from rhodecode.lib.vcs.backends.base import Reference
30 from rhodecode.lib.vcs.backends.base import Reference
31 from rhodecode.lib.vcs.backends.git import (
31 from rhodecode.lib.vcs.backends.git import (
32 GitRepository, GitCommit, discover_git_version)
32 GitRepository, GitCommit, discover_git_version)
33 from rhodecode.lib.vcs.exceptions import (
33 from rhodecode.lib.vcs.exceptions import (
34 RepositoryError, VCSError, NodeDoesNotExistError)
34 RepositoryError, VCSError, NodeDoesNotExistError)
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39
39
40
40
41 pytestmark = pytest.mark.backends("git")
41 pytestmark = pytest.mark.backends("git")
42
42
43
43
44 def repo_path_generator():
44 def repo_path_generator():
45 """
45 """
46 Yield a different path each time, to be used for cloning repos.
46 Yield a different path each time, to be used for cloning repos.
47 """
47 """
48 i = 0
48 i = 0
49 while True:
49 while True:
50 i += 1
50 i += 1
51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
52
52
53
53
54 REPO_PATH_GENERATOR = repo_path_generator()
54 REPO_PATH_GENERATOR = repo_path_generator()
55
55
56
56
57 class TestGitRepository:
57 class TestGitRepository:
58
58
59 # pylint: disable=protected-access
59 # pylint: disable=protected-access
60
60
61 def __check_for_existing_repo(self):
61 def __check_for_existing_repo(self):
62 if os.path.exists(TEST_GIT_REPO_CLONE):
62 if os.path.exists(TEST_GIT_REPO_CLONE):
63 self.fail('Cannot test git clone repo as location %s already '
63 self.fail('Cannot test git clone repo as location %s already '
64 'exists. You should manually remove it first.'
64 'exists. You should manually remove it first.'
65 % TEST_GIT_REPO_CLONE)
65 % TEST_GIT_REPO_CLONE)
66
66
67 @pytest.fixture(autouse=True)
67 @pytest.fixture(autouse=True)
68 def prepare(self, request, baseapp):
68 def prepare(self, request, baseapp):
69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
70
70
71 def get_clone_repo(self):
71 def get_clone_repo(self):
72 """
72 """
73 Return a non bare clone of the base repo.
73 Return a non bare clone of the base repo.
74 """
74 """
75 clone_path = next(REPO_PATH_GENERATOR)
75 clone_path = next(REPO_PATH_GENERATOR)
76 repo_clone = GitRepository(
76 repo_clone = GitRepository(
77 clone_path, create=True, src_url=self.repo.path, bare=False)
77 clone_path, create=True, src_url=self.repo.path, bare=False)
78
78
79 return repo_clone
79 return repo_clone
80
80
81 def get_empty_repo(self, bare=False):
81 def get_empty_repo(self, bare=False):
82 """
82 """
83 Return a non bare empty repo.
83 Return a non bare empty repo.
84 """
84 """
85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
86
86
87 def test_wrong_repo_path(self):
87 def test_wrong_repo_path(self):
88 wrong_repo_path = '/tmp/errorrepo_git'
88 wrong_repo_path = '/tmp/errorrepo_git'
89 with pytest.raises(RepositoryError):
89 with pytest.raises(RepositoryError):
90 GitRepository(wrong_repo_path)
90 GitRepository(wrong_repo_path)
91
91
92 def test_repo_clone(self):
92 def test_repo_clone(self):
93 self.__check_for_existing_repo()
93 self.__check_for_existing_repo()
94 repo = GitRepository(TEST_GIT_REPO)
94 repo = GitRepository(TEST_GIT_REPO)
95 repo_clone = GitRepository(
95 repo_clone = GitRepository(
96 TEST_GIT_REPO_CLONE,
96 TEST_GIT_REPO_CLONE,
97 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
97 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
99 # Checking hashes of commits should be enough
99 # Checking hashes of commits should be enough
100 for commit in repo.get_commits():
100 for commit in repo.get_commits():
101 raw_id = commit.raw_id
101 raw_id = commit.raw_id
102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
103
103
104 def test_repo_clone_without_create(self):
104 def test_repo_clone_without_create(self):
105 with pytest.raises(RepositoryError):
105 with pytest.raises(RepositoryError):
106 GitRepository(
106 GitRepository(
107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
108
108
109 def test_repo_clone_with_update(self):
109 def test_repo_clone_with_update(self):
110 repo = GitRepository(TEST_GIT_REPO)
110 repo = GitRepository(TEST_GIT_REPO)
111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
112 repo_clone = GitRepository(
112 repo_clone = GitRepository(
113 clone_path,
113 clone_path,
114 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
114 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116
116
117 # check if current workdir was updated
117 # check if current workdir was updated
118 fpath = os.path.join(clone_path, 'MANIFEST.in')
118 fpath = os.path.join(clone_path, 'MANIFEST.in')
119 assert os.path.isfile(fpath)
119 assert os.path.isfile(fpath)
120
120
121 def test_repo_clone_without_update(self):
121 def test_repo_clone_without_update(self):
122 repo = GitRepository(TEST_GIT_REPO)
122 repo = GitRepository(TEST_GIT_REPO)
123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
124 repo_clone = GitRepository(
124 repo_clone = GitRepository(
125 clone_path,
125 clone_path,
126 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
126 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
128 # check if current workdir was *NOT* updated
128 # check if current workdir was *NOT* updated
129 fpath = os.path.join(clone_path, 'MANIFEST.in')
129 fpath = os.path.join(clone_path, 'MANIFEST.in')
130 # Make sure it's not a bare repo
130 # Make sure it's not a bare repo
131 assert not repo_clone.bare
131 assert not repo_clone.bare
132 assert not os.path.isfile(fpath)
132 assert not os.path.isfile(fpath)
133
133
134 def test_repo_clone_into_bare_repo(self):
134 def test_repo_clone_into_bare_repo(self):
135 repo = GitRepository(TEST_GIT_REPO)
135 repo = GitRepository(TEST_GIT_REPO)
136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
137 repo_clone = GitRepository(
137 repo_clone = GitRepository(
138 clone_path, create=True, src_url=repo.path, bare=True)
138 clone_path, create=True, src_url=repo.path, bare=True)
139 assert repo_clone.bare
139 assert repo_clone.bare
140
140
141 def test_create_repo_is_not_bare_by_default(self):
141 def test_create_repo_is_not_bare_by_default(self):
142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
143 assert not repo.bare
143 assert not repo.bare
144
144
145 def test_create_bare_repo(self):
145 def test_create_bare_repo(self):
146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
147 assert repo.bare
147 assert repo.bare
148
148
149 def test_update_server_info(self):
149 def test_update_server_info(self):
150 self.repo._update_server_info()
150 self.repo._update_server_info()
151
151
152 def test_fetch(self, vcsbackend_git):
152 def test_fetch(self, vcsbackend_git):
153 # Note: This is a git specific part of the API, it's only implemented
153 # Note: This is a git specific part of the API, it's only implemented
154 # by the git backend.
154 # by the git backend.
155 source_repo = vcsbackend_git.repo
155 source_repo = vcsbackend_git.repo
156 target_repo = vcsbackend_git.create_repo(bare=True)
156 target_repo = vcsbackend_git.create_repo(bare=True)
157 target_repo.fetch(source_repo.path)
157 target_repo.fetch(source_repo.path)
158 # Note: Get a fresh instance, avoids caching trouble
158 # Note: Get a fresh instance, avoids caching trouble
159 target_repo = vcsbackend_git.backend(target_repo.path)
159 target_repo = vcsbackend_git.backend(target_repo.path)
160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
161
161
162 def test_commit_ids(self):
162 def test_commit_ids(self):
163 # there are 112 commits (by now)
163 # there are 112 commits (by now)
164 # so we can assume they will be available from now on
164 # so we can assume they will be available from now on
165 subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
165 subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
166 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
166 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
167 'fa6600f6848800641328adbf7811fd2372c02ab2',
167 'fa6600f6848800641328adbf7811fd2372c02ab2',
168 '102607b09cdd60e2793929c4f90478be29f85a17',
168 '102607b09cdd60e2793929c4f90478be29f85a17',
169 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
169 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
170 '2d1028c054665b962fa3d307adfc923ddd528038',
170 '2d1028c054665b962fa3d307adfc923ddd528038',
171 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
171 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
172 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
172 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
173 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
173 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
174 '8430a588b43b5d6da365400117c89400326e7992',
174 '8430a588b43b5d6da365400117c89400326e7992',
175 'd955cd312c17b02143c04fa1099a352b04368118',
175 'd955cd312c17b02143c04fa1099a352b04368118',
176 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
176 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
177 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
177 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
178 'f298fe1189f1b69779a4423f40b48edf92a703fc',
178 'f298fe1189f1b69779a4423f40b48edf92a703fc',
179 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
179 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
180 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
180 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
181 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
181 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
182 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
182 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
183 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
183 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
184 '45223f8f114c64bf4d6f853e3c35a369a6305520',
184 '45223f8f114c64bf4d6f853e3c35a369a6305520',
185 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
185 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
186 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
186 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
187 '27d48942240f5b91dfda77accd2caac94708cc7d',
187 '27d48942240f5b91dfda77accd2caac94708cc7d',
188 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
188 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
189 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
189 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
190 assert subset.issubset(set(self.repo.commit_ids))
190 assert subset.issubset(set(self.repo.commit_ids))
191
191
192 def test_slicing(self):
192 def test_slicing(self):
193 # 4 1 5 10 95
193 # 4 1 5 10 95
194 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
194 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
195 (10, 20, 10), (5, 100, 95)]:
195 (10, 20, 10), (5, 100, 95)]:
196 commit_ids = list(self.repo[sfrom:sto])
196 commit_ids = list(self.repo[sfrom:sto])
197 assert len(commit_ids) == size
197 assert len(commit_ids) == size
198 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
198 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
199 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
199 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
200
200
201 def test_branches(self):
201 def test_branches(self):
202 # TODO: Need more tests here
202 # TODO: Need more tests here
203 # Removed (those are 'remotes' branches for cloned repo)
203 # Removed (those are 'remotes' branches for cloned repo)
204 # assert 'master' in self.repo.branches
204 # assert 'master' in self.repo.branches
205 # assert 'gittree' in self.repo.branches
205 # assert 'gittree' in self.repo.branches
206 # assert 'web-branch' in self.repo.branches
206 # assert 'web-branch' in self.repo.branches
207 for __, commit_id in self.repo.branches.items():
207 for __, commit_id in self.repo.branches.items():
208 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
208 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
209
209
210 def test_tags(self):
210 def test_tags(self):
211 # TODO: Need more tests here
211 # TODO: Need more tests here
212 assert 'v0.1.1' in self.repo.tags
212 assert 'v0.1.1' in self.repo.tags
213 assert 'v0.1.2' in self.repo.tags
213 assert 'v0.1.2' in self.repo.tags
214 for __, commit_id in self.repo.tags.items():
214 for __, commit_id in self.repo.tags.items():
215 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
215 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
216
216
217 def _test_single_commit_cache(self, commit_id):
217 def _test_single_commit_cache(self, commit_id):
218 commit = self.repo.get_commit(commit_id)
218 commit = self.repo.get_commit(commit_id)
219 assert commit_id in self.repo.commits
219 assert commit_id in self.repo.commits
220 assert commit is self.repo.commits[commit_id]
220 assert commit is self.repo.commits[commit_id]
221
221
222 def test_initial_commit(self):
222 def test_initial_commit(self):
223 commit_id = self.repo.commit_ids[0]
223 commit_id = self.repo.commit_ids[0]
224 init_commit = self.repo.get_commit(commit_id)
224 init_commit = self.repo.get_commit(commit_id)
225 init_author = init_commit.author
225 init_author = init_commit.author
226
226
227 assert init_commit.message == 'initial import\n'
227 assert init_commit.message == 'initial import\n'
228 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
228 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
229 assert init_author == init_commit.committer
229 assert init_author == init_commit.committer
230 for path in ('vcs/__init__.py',
230 for path in ('vcs/__init__.py',
231 'vcs/backends/BaseRepository.py',
231 'vcs/backends/BaseRepository.py',
232 'vcs/backends/__init__.py'):
232 'vcs/backends/__init__.py'):
233 assert isinstance(init_commit.get_node(path), FileNode)
233 assert isinstance(init_commit.get_node(path), FileNode)
234 for path in ('', 'vcs', 'vcs/backends'):
234 for path in ('', 'vcs', 'vcs/backends'):
235 assert isinstance(init_commit.get_node(path), DirNode)
235 assert isinstance(init_commit.get_node(path), DirNode)
236
236
237 with pytest.raises(NodeDoesNotExistError):
237 with pytest.raises(NodeDoesNotExistError):
238 init_commit.get_node(path='foobar')
238 init_commit.get_node(path='foobar')
239
239
240 node = init_commit.get_node('vcs/')
240 node = init_commit.get_node('vcs/')
241 assert hasattr(node, 'kind')
241 assert hasattr(node, 'kind')
242 assert node.kind == NodeKind.DIR
242 assert node.kind == NodeKind.DIR
243
243
244 node = init_commit.get_node('vcs')
244 node = init_commit.get_node('vcs')
245 assert hasattr(node, 'kind')
245 assert hasattr(node, 'kind')
246 assert node.kind == NodeKind.DIR
246 assert node.kind == NodeKind.DIR
247
247
248 node = init_commit.get_node('vcs/__init__.py')
248 node = init_commit.get_node('vcs/__init__.py')
249 assert hasattr(node, 'kind')
249 assert hasattr(node, 'kind')
250 assert node.kind == NodeKind.FILE
250 assert node.kind == NodeKind.FILE
251
251
252 def test_not_existing_commit(self):
252 def test_not_existing_commit(self):
253 with pytest.raises(RepositoryError):
253 with pytest.raises(RepositoryError):
254 self.repo.get_commit('f' * 40)
254 self.repo.get_commit('f' * 40)
255
255
256 def test_commit10(self):
256 def test_commit10(self):
257
257
258 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
258 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
259 README = """===
259 README = """===
260 VCS
260 VCS
261 ===
261 ===
262
262
263 Various Version Control System management abstraction layer for Python.
263 Various Version Control System management abstraction layer for Python.
264
264
265 Introduction
265 Introduction
266 ------------
266 ------------
267
267
268 TODO: To be written...
268 TODO: To be written...
269
269
270 """
270 """
271 node = commit10.get_node('README.rst')
271 node = commit10.get_node('README.rst')
272 assert node.kind == NodeKind.FILE
272 assert node.kind == NodeKind.FILE
273 assert node.content == README
273 assert node.content == README
274
274
275 def test_head(self):
275 def test_head(self):
276 assert self.repo.head == self.repo.get_commit().raw_id
276 assert self.repo.head == self.repo.get_commit().raw_id
277
277
278 def test_checkout_with_create(self):
278 def test_checkout_with_create(self):
279 repo_clone = self.get_clone_repo()
279 repo_clone = self.get_clone_repo()
280
280
281 new_branch = 'new_branch'
281 new_branch = 'new_branch'
282 assert repo_clone._current_branch() == 'master'
282 assert repo_clone._current_branch() == 'master'
283 assert set(repo_clone.branches) == {'master'}
283 assert set(repo_clone.branches) == {'master'}
284 repo_clone._checkout(new_branch, create=True)
284 repo_clone._checkout(new_branch, create=True)
285
285
286 # Branches is a lazy property, so we need to recreate the Repo object.
286 # Branches is a lazy property, so we need to recreate the Repo object.
287 repo_clone = GitRepository(repo_clone.path)
287 repo_clone = GitRepository(repo_clone.path)
288 assert set(repo_clone.branches) == {'master', new_branch}
288 assert set(repo_clone.branches) == {'master', new_branch}
289 assert repo_clone._current_branch() == new_branch
289 assert repo_clone._current_branch() == new_branch
290
290
291 def test_checkout(self):
291 def test_checkout(self):
292 repo_clone = self.get_clone_repo()
292 repo_clone = self.get_clone_repo()
293
293
294 repo_clone._checkout('new_branch', create=True)
294 repo_clone._checkout('new_branch', create=True)
295 repo_clone._checkout('master')
295 repo_clone._checkout('master')
296
296
297 assert repo_clone._current_branch() == 'master'
297 assert repo_clone._current_branch() == 'master'
298
298
299 def test_checkout_same_branch(self):
299 def test_checkout_same_branch(self):
300 repo_clone = self.get_clone_repo()
300 repo_clone = self.get_clone_repo()
301
301
302 repo_clone._checkout('master')
302 repo_clone._checkout('master')
303 assert repo_clone._current_branch() == 'master'
303 assert repo_clone._current_branch() == 'master'
304
304
305 def test_checkout_branch_already_exists(self):
305 def test_checkout_branch_already_exists(self):
306 repo_clone = self.get_clone_repo()
306 repo_clone = self.get_clone_repo()
307
307
308 with pytest.raises(RepositoryError):
308 with pytest.raises(RepositoryError):
309 repo_clone._checkout('master', create=True)
309 repo_clone._checkout('master', create=True)
310
310
311 def test_checkout_bare_repo(self):
311 def test_checkout_bare_repo(self):
312 with pytest.raises(RepositoryError):
312 with pytest.raises(RepositoryError):
313 self.repo._checkout('master')
313 self.repo._checkout('master')
314
314
315 def test_current_branch_bare_repo(self):
315 def test_current_branch_bare_repo(self):
316 with pytest.raises(RepositoryError):
316 with pytest.raises(RepositoryError):
317 self.repo._current_branch()
317 self.repo._current_branch()
318
318
319 def test_current_branch_empty_repo(self):
319 def test_current_branch_empty_repo(self):
320 repo = self.get_empty_repo()
320 repo = self.get_empty_repo()
321 assert repo._current_branch() is None
321 assert repo._current_branch() is None
322
322
323 def test_local_clone(self):
323 def test_local_clone(self):
324 clone_path = next(REPO_PATH_GENERATOR)
324 clone_path = next(REPO_PATH_GENERATOR)
325 self.repo._local_clone(clone_path, 'master')
325 self.repo._local_clone(clone_path, 'master')
326 repo_clone = GitRepository(clone_path)
326 repo_clone = GitRepository(clone_path)
327
327
328 assert self.repo.commit_ids == repo_clone.commit_ids
328 assert self.repo.commit_ids == repo_clone.commit_ids
329
329
330 def test_local_clone_with_specific_branch(self):
330 def test_local_clone_with_specific_branch(self):
331 source_repo = self.get_clone_repo()
331 source_repo = self.get_clone_repo()
332
332
333 # Create a new branch in source repo
333 # Create a new branch in source repo
334 new_branch_commit = source_repo.commit_ids[-3]
334 new_branch_commit = source_repo.commit_ids[-3]
335 source_repo._checkout(new_branch_commit)
335 source_repo._checkout(new_branch_commit)
336 source_repo._checkout('new_branch', create=True)
336 source_repo._checkout('new_branch', create=True)
337
337
338 clone_path = next(REPO_PATH_GENERATOR)
338 clone_path = next(REPO_PATH_GENERATOR)
339 source_repo._local_clone(clone_path, 'new_branch')
339 source_repo._local_clone(clone_path, 'new_branch')
340 repo_clone = GitRepository(clone_path)
340 repo_clone = GitRepository(clone_path)
341
341
342 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
342 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
343
343
344 clone_path = next(REPO_PATH_GENERATOR)
344 clone_path = next(REPO_PATH_GENERATOR)
345 source_repo._local_clone(clone_path, 'master')
345 source_repo._local_clone(clone_path, 'master')
346 repo_clone = GitRepository(clone_path)
346 repo_clone = GitRepository(clone_path)
347
347
348 assert source_repo.commit_ids == repo_clone.commit_ids
348 assert source_repo.commit_ids == repo_clone.commit_ids
349
349
350 def test_local_clone_fails_if_target_exists(self):
350 def test_local_clone_fails_if_target_exists(self):
351 with pytest.raises(RepositoryError):
351 with pytest.raises(RepositoryError):
352 self.repo._local_clone(self.repo.path, 'master')
352 self.repo._local_clone(self.repo.path, 'master')
353
353
354 def test_local_fetch(self):
354 def test_local_fetch(self):
355 target_repo = self.get_empty_repo()
355 target_repo = self.get_empty_repo()
356 source_repo = self.get_clone_repo()
356 source_repo = self.get_clone_repo()
357
357
358 # Create a new branch in source repo
358 # Create a new branch in source repo
359 master_commit = source_repo.commit_ids[-1]
359 master_commit = source_repo.commit_ids[-1]
360 new_branch_commit = source_repo.commit_ids[-3]
360 new_branch_commit = source_repo.commit_ids[-3]
361 source_repo._checkout(new_branch_commit)
361 source_repo._checkout(new_branch_commit)
362 source_repo._checkout('new_branch', create=True)
362 source_repo._checkout('new_branch', create=True)
363
363
364 target_repo._local_fetch(source_repo.path, 'new_branch')
364 target_repo._local_fetch(source_repo.path, 'new_branch')
365 assert target_repo._last_fetch_heads() == [new_branch_commit]
365 assert target_repo._last_fetch_heads() == [new_branch_commit]
366
366
367 target_repo._local_fetch(source_repo.path, 'master')
367 target_repo._local_fetch(source_repo.path, 'master')
368 assert target_repo._last_fetch_heads() == [master_commit]
368 assert target_repo._last_fetch_heads() == [master_commit]
369
369
370 def test_local_fetch_from_bare_repo(self):
370 def test_local_fetch_from_bare_repo(self):
371 target_repo = self.get_empty_repo()
371 target_repo = self.get_empty_repo()
372 target_repo._local_fetch(self.repo.path, 'master')
372 target_repo._local_fetch(self.repo.path, 'master')
373
373
374 master_commit = self.repo.commit_ids[-1]
374 master_commit = self.repo.commit_ids[-1]
375 assert target_repo._last_fetch_heads() == [master_commit]
375 assert target_repo._last_fetch_heads() == [master_commit]
376
376
377 def test_local_fetch_from_same_repo(self):
377 def test_local_fetch_from_same_repo(self):
378 with pytest.raises(ValueError):
378 with pytest.raises(ValueError):
379 self.repo._local_fetch(self.repo.path, 'master')
379 self.repo._local_fetch(self.repo.path, 'master')
380
380
381 def test_local_fetch_branch_does_not_exist(self):
381 def test_local_fetch_branch_does_not_exist(self):
382 target_repo = self.get_empty_repo()
382 target_repo = self.get_empty_repo()
383
383
384 with pytest.raises(RepositoryError):
384 with pytest.raises(RepositoryError):
385 target_repo._local_fetch(self.repo.path, 'new_branch')
385 target_repo._local_fetch(self.repo.path, 'new_branch')
386
386
387 def test_local_pull(self):
387 def test_local_pull(self):
388 target_repo = self.get_empty_repo()
388 target_repo = self.get_empty_repo()
389 source_repo = self.get_clone_repo()
389 source_repo = self.get_clone_repo()
390
390
391 # Create a new branch in source repo
391 # Create a new branch in source repo
392 master_commit = source_repo.commit_ids[-1]
392 master_commit = source_repo.commit_ids[-1]
393 new_branch_commit = source_repo.commit_ids[-3]
393 new_branch_commit = source_repo.commit_ids[-3]
394 source_repo._checkout(new_branch_commit)
394 source_repo._checkout(new_branch_commit)
395 source_repo._checkout('new_branch', create=True)
395 source_repo._checkout('new_branch', create=True)
396
396
397 target_repo._local_pull(source_repo.path, 'new_branch')
397 target_repo._local_pull(source_repo.path, 'new_branch')
398 target_repo = GitRepository(target_repo.path)
398 target_repo = GitRepository(target_repo.path)
399 assert target_repo.head == new_branch_commit
399 assert target_repo.head == new_branch_commit
400
400
401 target_repo._local_pull(source_repo.path, 'master')
401 target_repo._local_pull(source_repo.path, 'master')
402 target_repo = GitRepository(target_repo.path)
402 target_repo = GitRepository(target_repo.path)
403 assert target_repo.head == master_commit
403 assert target_repo.head == master_commit
404
404
405 def test_local_pull_in_bare_repo(self):
405 def test_local_pull_in_bare_repo(self):
406 with pytest.raises(RepositoryError):
406 with pytest.raises(RepositoryError):
407 self.repo._local_pull(self.repo.path, 'master')
407 self.repo._local_pull(self.repo.path, 'master')
408
408
409 def test_local_merge(self):
409 def test_local_merge(self):
410 target_repo = self.get_empty_repo()
410 target_repo = self.get_empty_repo()
411 source_repo = self.get_clone_repo()
411 source_repo = self.get_clone_repo()
412
412
413 # Create a new branch in source repo
413 # Create a new branch in source repo
414 master_commit = source_repo.commit_ids[-1]
414 master_commit = source_repo.commit_ids[-1]
415 new_branch_commit = source_repo.commit_ids[-3]
415 new_branch_commit = source_repo.commit_ids[-3]
416 source_repo._checkout(new_branch_commit)
416 source_repo._checkout(new_branch_commit)
417 source_repo._checkout('new_branch', create=True)
417 source_repo._checkout('new_branch', create=True)
418
418
419 # This is required, as one cannot do a --ff-only merge in an empty repo.
419 # This is required, as one cannot do a --ff-only merge in an empty repo.
420 target_repo._local_pull(source_repo.path, 'new_branch')
420 target_repo._local_pull(source_repo.path, 'new_branch')
421
421
422 target_repo._local_fetch(source_repo.path, 'master')
422 target_repo._local_fetch(source_repo.path, 'master')
423 merge_message = 'Merge message\n\nDescription:...'
423 merge_message = 'Merge message\n\nDescription:...'
424 user_name = 'Albert Einstein'
424 user_name = 'Albert Einstein'
425 user_email = 'albert@einstein.com'
425 user_email = 'albert@einstein.com'
426 target_repo._local_merge(merge_message, user_name, user_email,
426 target_repo._local_merge(merge_message, user_name, user_email,
427 target_repo._last_fetch_heads())
427 target_repo._last_fetch_heads())
428
428
429 target_repo = GitRepository(target_repo.path)
429 target_repo = GitRepository(target_repo.path)
430 assert target_repo.commit_ids[-2] == master_commit
430 assert target_repo.commit_ids[-2] == master_commit
431 last_commit = target_repo.get_commit(target_repo.head)
431 last_commit = target_repo.get_commit(target_repo.head)
432 assert last_commit.message.strip() == merge_message
432 assert last_commit.message.strip() == merge_message
433 assert last_commit.author == '%s <%s>' % (user_name, user_email)
433 assert last_commit.author == '%s <%s>' % (user_name, user_email)
434
434
435 assert not os.path.exists(
435 assert not os.path.exists(
436 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
436 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
437
437
438 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
438 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
439 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
439 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
440 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
440 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
441
441
442 target_repo._local_fetch(self.repo.path, 'master')
442 target_repo._local_fetch(self.repo.path, 'master')
443 with pytest.raises(RepositoryError):
443 with pytest.raises(RepositoryError):
444 target_repo._local_merge(
444 target_repo._local_merge(
445 'merge_message', 'user name', 'user@name.com',
445 'merge_message', 'user name', 'user@name.com',
446 target_repo._last_fetch_heads())
446 target_repo._last_fetch_heads())
447
447
448 # Check we are not left in an intermediate merge state
448 # Check we are not left in an intermediate merge state
449 assert not os.path.exists(
449 assert not os.path.exists(
450 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
450 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
451
451
452 def test_local_merge_into_empty_repo(self):
452 def test_local_merge_into_empty_repo(self):
453 target_repo = self.get_empty_repo()
453 target_repo = self.get_empty_repo()
454
454
455 # This is required, as one cannot do a --ff-only merge in an empty repo.
455 # This is required, as one cannot do a --ff-only merge in an empty repo.
456 target_repo._local_fetch(self.repo.path, 'master')
456 target_repo._local_fetch(self.repo.path, 'master')
457 with pytest.raises(RepositoryError):
457 with pytest.raises(RepositoryError):
458 target_repo._local_merge(
458 target_repo._local_merge(
459 'merge_message', 'user name', 'user@name.com',
459 'merge_message', 'user name', 'user@name.com',
460 target_repo._last_fetch_heads())
460 target_repo._last_fetch_heads())
461
461
462 def test_local_merge_in_bare_repo(self):
462 def test_local_merge_in_bare_repo(self):
463 with pytest.raises(RepositoryError):
463 with pytest.raises(RepositoryError):
464 self.repo._local_merge(
464 self.repo._local_merge(
465 'merge_message', 'user name', 'user@name.com', None)
465 'merge_message', 'user name', 'user@name.com', None)
466
466
467 def test_local_push_non_bare(self):
467 def test_local_push_non_bare(self):
468 target_repo = self.get_empty_repo()
468 target_repo = self.get_empty_repo()
469
469
470 pushed_branch = 'pushed_branch'
470 pushed_branch = 'pushed_branch'
471 self.repo._local_push('master', target_repo.path, pushed_branch)
471 self.repo._local_push('master', target_repo.path, pushed_branch)
472 # Fix the HEAD of the target repo, otherwise GitRepository won't
472 # Fix the HEAD of the target repo, otherwise GitRepository won't
473 # report any branches.
473 # report any branches.
474 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
474 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
475 f.write('ref: refs/heads/%s' % pushed_branch)
475 f.write('ref: refs/heads/%s' % pushed_branch)
476
476
477 target_repo = GitRepository(target_repo.path)
477 target_repo = GitRepository(target_repo.path)
478
478
479 assert (target_repo.branches[pushed_branch] ==
479 assert (target_repo.branches[pushed_branch] ==
480 self.repo.branches['master'])
480 self.repo.branches['master'])
481
481
482 def test_local_push_bare(self):
482 def test_local_push_bare(self):
483 target_repo = self.get_empty_repo(bare=True)
483 target_repo = self.get_empty_repo(bare=True)
484
484
485 pushed_branch = 'pushed_branch'
485 pushed_branch = 'pushed_branch'
486 self.repo._local_push('master', target_repo.path, pushed_branch)
486 self.repo._local_push('master', target_repo.path, pushed_branch)
487 # Fix the HEAD of the target repo, otherwise GitRepository won't
487 # Fix the HEAD of the target repo, otherwise GitRepository won't
488 # report any branches.
488 # report any branches.
489 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
489 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
490 f.write('ref: refs/heads/%s' % pushed_branch)
490 f.write('ref: refs/heads/%s' % pushed_branch)
491
491
492 target_repo = GitRepository(target_repo.path)
492 target_repo = GitRepository(target_repo.path)
493
493
494 assert (target_repo.branches[pushed_branch] ==
494 assert (target_repo.branches[pushed_branch] ==
495 self.repo.branches['master'])
495 self.repo.branches['master'])
496
496
497 def test_local_push_non_bare_target_branch_is_checked_out(self):
497 def test_local_push_non_bare_target_branch_is_checked_out(self):
498 target_repo = self.get_clone_repo()
498 target_repo = self.get_clone_repo()
499
499
500 pushed_branch = 'pushed_branch'
500 pushed_branch = 'pushed_branch'
501 # Create a new branch in source repo
501 # Create a new branch in source repo
502 new_branch_commit = target_repo.commit_ids[-3]
502 new_branch_commit = target_repo.commit_ids[-3]
503 target_repo._checkout(new_branch_commit)
503 target_repo._checkout(new_branch_commit)
504 target_repo._checkout(pushed_branch, create=True)
504 target_repo._checkout(pushed_branch, create=True)
505
505
506 self.repo._local_push('master', target_repo.path, pushed_branch)
506 self.repo._local_push('master', target_repo.path, pushed_branch)
507
507
508 target_repo = GitRepository(target_repo.path)
508 target_repo = GitRepository(target_repo.path)
509
509
510 assert (target_repo.branches[pushed_branch] ==
510 assert (target_repo.branches[pushed_branch] ==
511 self.repo.branches['master'])
511 self.repo.branches['master'])
512
512
513 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
513 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
514 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
514 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
515 with pytest.raises(RepositoryError):
515 with pytest.raises(RepositoryError):
516 self.repo._local_push('master', target_repo.path, 'master')
516 self.repo._local_push('master', target_repo.path, 'master')
517
517
518 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
518 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
519 target_repo = self.get_empty_repo(bare=True)
519 target_repo = self.get_empty_repo(bare=True)
520
520
521 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
521 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
522 self.repo._local_push(
522 self.repo._local_push(
523 'master', target_repo.path, 'master', enable_hooks=True)
523 'master', target_repo.path, 'master', enable_hooks=True)
524 env = run_mock.call_args[1]['extra_env']
524 env = run_mock.call_args[1]['extra_env']
525 assert 'RC_SKIP_HOOKS' not in env
525 assert 'RC_SKIP_HOOKS' not in env
526
526
527 def _add_failing_hook(self, repo_path, hook_name, bare=False):
527 def _add_failing_hook(self, repo_path, hook_name, bare=False):
528 path_components = (
528 path_components = (
529 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
529 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
530 hook_path = os.path.join(repo_path, *path_components)
530 hook_path = os.path.join(repo_path, *path_components)
531 with open(hook_path, 'w') as f:
531 with open(hook_path, 'w') as f:
532 script_lines = [
532 script_lines = [
533 '#!%s' % sys.executable,
533 '#!%s' % sys.executable,
534 'import os',
534 'import os',
535 'import sys',
535 'import sys',
536 'if os.environ.get("RC_SKIP_HOOKS"):',
536 'if os.environ.get("RC_SKIP_HOOKS"):',
537 ' sys.exit(0)',
537 ' sys.exit(0)',
538 'sys.exit(1)',
538 'sys.exit(1)',
539 ]
539 ]
540 f.write('\n'.join(script_lines))
540 f.write('\n'.join(script_lines))
541 os.chmod(hook_path, 0o755)
541 os.chmod(hook_path, 0o755)
542
542
543 def test_local_push_does_not_execute_hook(self):
543 def test_local_push_does_not_execute_hook(self):
544 target_repo = self.get_empty_repo()
544 target_repo = self.get_empty_repo()
545
545
546 pushed_branch = 'pushed_branch'
546 pushed_branch = 'pushed_branch'
547 self._add_failing_hook(target_repo.path, 'pre-receive')
547 self._add_failing_hook(target_repo.path, 'pre-receive')
548 self.repo._local_push('master', target_repo.path, pushed_branch)
548 self.repo._local_push('master', target_repo.path, pushed_branch)
549 # Fix the HEAD of the target repo, otherwise GitRepository won't
549 # Fix the HEAD of the target repo, otherwise GitRepository won't
550 # report any branches.
550 # report any branches.
551 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
551 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
552 f.write('ref: refs/heads/%s' % pushed_branch)
552 f.write('ref: refs/heads/%s' % pushed_branch)
553
553
554 target_repo = GitRepository(target_repo.path)
554 target_repo = GitRepository(target_repo.path)
555
555
556 assert (target_repo.branches[pushed_branch] ==
556 assert (target_repo.branches[pushed_branch] ==
557 self.repo.branches['master'])
557 self.repo.branches['master'])
558
558
559 def test_local_push_executes_hook(self):
559 def test_local_push_executes_hook(self):
560 target_repo = self.get_empty_repo(bare=True)
560 target_repo = self.get_empty_repo(bare=True)
561 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
561 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
562 with pytest.raises(RepositoryError):
562 with pytest.raises(RepositoryError):
563 self.repo._local_push(
563 self.repo._local_push(
564 'master', target_repo.path, 'master', enable_hooks=True)
564 'master', target_repo.path, 'master', enable_hooks=True)
565
565
566 def test_maybe_prepare_merge_workspace(self):
566 def test_maybe_prepare_merge_workspace(self):
567 workspace = self.repo._maybe_prepare_merge_workspace(
567 workspace = self.repo._maybe_prepare_merge_workspace(
568 2, 'pr2', Reference('branch', 'master', 'unused'),
568 2, 'pr2', Reference('branch', 'master', 'unused'),
569 Reference('branch', 'master', 'unused'))
569 Reference('branch', 'master', 'unused'))
570
570
571 assert os.path.isdir(workspace)
571 assert os.path.isdir(workspace)
572 workspace_repo = GitRepository(workspace)
572 workspace_repo = GitRepository(workspace)
573 assert workspace_repo.branches == self.repo.branches
573 assert workspace_repo.branches == self.repo.branches
574
574
575 # Calling it a second time should also succeed
575 # Calling it a second time should also succeed
576 workspace = self.repo._maybe_prepare_merge_workspace(
576 workspace = self.repo._maybe_prepare_merge_workspace(
577 2, 'pr2', Reference('branch', 'master', 'unused'),
577 2, 'pr2', Reference('branch', 'master', 'unused'),
578 Reference('branch', 'master', 'unused'))
578 Reference('branch', 'master', 'unused'))
579 assert os.path.isdir(workspace)
579 assert os.path.isdir(workspace)
580
580
581 def test_maybe_prepare_merge_workspace_different_refs(self):
581 def test_maybe_prepare_merge_workspace_different_refs(self):
582 workspace = self.repo._maybe_prepare_merge_workspace(
582 workspace = self.repo._maybe_prepare_merge_workspace(
583 2, 'pr2', Reference('branch', 'master', 'unused'),
583 2, 'pr2', Reference('branch', 'master', 'unused'),
584 Reference('branch', 'develop', 'unused'))
584 Reference('branch', 'develop', 'unused'))
585
585
586 assert os.path.isdir(workspace)
586 assert os.path.isdir(workspace)
587 workspace_repo = GitRepository(workspace)
587 workspace_repo = GitRepository(workspace)
588 assert workspace_repo.branches == self.repo.branches
588 assert workspace_repo.branches == self.repo.branches
589
589
590 # Calling it a second time should also succeed
590 # Calling it a second time should also succeed
591 workspace = self.repo._maybe_prepare_merge_workspace(
591 workspace = self.repo._maybe_prepare_merge_workspace(
592 2, 'pr2', Reference('branch', 'master', 'unused'),
592 2, 'pr2', Reference('branch', 'master', 'unused'),
593 Reference('branch', 'develop', 'unused'))
593 Reference('branch', 'develop', 'unused'))
594 assert os.path.isdir(workspace)
594 assert os.path.isdir(workspace)
595
595
596 def test_cleanup_merge_workspace(self):
596 def test_cleanup_merge_workspace(self):
597 workspace = self.repo._maybe_prepare_merge_workspace(
597 workspace = self.repo._maybe_prepare_merge_workspace(
598 2, 'pr3', Reference('branch', 'master', 'unused'),
598 2, 'pr3', Reference('branch', 'master', 'unused'),
599 Reference('branch', 'master', 'unused'))
599 Reference('branch', 'master', 'unused'))
600 self.repo.cleanup_merge_workspace(2, 'pr3')
600 self.repo.cleanup_merge_workspace(2, 'pr3')
601
601
602 assert not os.path.exists(workspace)
602 assert not os.path.exists(workspace)
603
603
604 def test_cleanup_merge_workspace_invalid_workspace_id(self):
604 def test_cleanup_merge_workspace_invalid_workspace_id(self):
605 # No assert needed: in case of a nonexistent workspace this function
605 # No assert needed: in case of a nonexistent workspace this function
606 # should still succeed.
606 # should still succeed.
607 self.repo.cleanup_merge_workspace(1, 'pr4')
607 self.repo.cleanup_merge_workspace(1, 'pr4')
608
608
609 def test_set_refs(self):
609 def test_set_refs(self):
610 test_ref = 'refs/test-refs/abcde'
610 test_ref = 'refs/test-refs/abcde'
611 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
611 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
612
612
613 self.repo.set_refs(test_ref, test_commit_id)
613 self.repo.set_refs(test_ref, test_commit_id)
614 stdout, _ = self.repo.run_git_command(['show-ref'])
614 stdout, _ = self.repo.run_git_command(['show-ref'])
615 assert test_ref in stdout
615 assert test_ref in stdout
616 assert test_commit_id in stdout
616 assert test_commit_id in stdout
617
617
618 def test_remove_ref(self):
618 def test_remove_ref(self):
619 test_ref = 'refs/test-refs/abcde'
619 test_ref = 'refs/test-refs/abcde'
620 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
620 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
621 self.repo.set_refs(test_ref, test_commit_id)
621 self.repo.set_refs(test_ref, test_commit_id)
622 stdout, _ = self.repo.run_git_command(['show-ref'])
622 stdout, _ = self.repo.run_git_command(['show-ref'])
623 assert test_ref in stdout
623 assert test_ref in stdout
624 assert test_commit_id in stdout
624 assert test_commit_id in stdout
625
625
626 self.repo.remove_ref(test_ref)
626 self.repo.remove_ref(test_ref)
627 stdout, _ = self.repo.run_git_command(['show-ref'])
627 stdout, _ = self.repo.run_git_command(['show-ref'])
628 assert test_ref not in stdout
628 assert test_ref not in stdout
629 assert test_commit_id not in stdout
629 assert test_commit_id not in stdout
630
630
631
631
632 class TestGitCommit(object):
632 class TestGitCommit(object):
633
633
634 @pytest.fixture(autouse=True)
634 @pytest.fixture(autouse=True)
635 def prepare(self):
635 def prepare(self):
636 self.repo = GitRepository(TEST_GIT_REPO)
636 self.repo = GitRepository(TEST_GIT_REPO)
637
637
638 def test_default_commit(self):
638 def test_default_commit(self):
639 tip = self.repo.get_commit()
639 tip = self.repo.get_commit()
640 assert tip == self.repo.get_commit(None)
640 assert tip == self.repo.get_commit(None)
641 assert tip == self.repo.get_commit('tip')
641 assert tip == self.repo.get_commit('tip')
642
642
643 def test_root_node(self):
643 def test_root_node(self):
644 tip = self.repo.get_commit()
644 tip = self.repo.get_commit()
645 assert tip.root is tip.get_node('')
645 assert tip.root is tip.get_node('')
646
646
647 def test_lazy_fetch(self):
647 def test_lazy_fetch(self):
648 """
648 """
649 Test that the commit's nodes expand and are cached as we walk through
649 Test that the commit's nodes expand and are cached as we walk through
650 the commit. This test is somewhat hard to write, as the order of the
650 the commit. This test is somewhat hard to write, as the order of the
651 checks is key here. Written by running command after command in a shell.
651 checks is key here. Written by running command after command in a shell.
652 """
652 """
653 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
653 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
654 assert commit_id in self.repo.commit_ids
654 assert commit_id in self.repo.commit_ids
655 commit = self.repo.get_commit(commit_id)
655 commit = self.repo.get_commit(commit_id)
656 assert len(commit.nodes) == 0
656 assert len(commit.nodes) == 0
657 root = commit.root
657 root = commit.root
658 assert len(commit.nodes) == 1
658 assert len(commit.nodes) == 1
659 assert len(root.nodes) == 8
659 assert len(root.nodes) == 8
660 # accessing root.nodes updates commit.nodes
660 # accessing root.nodes updates commit.nodes
661 assert len(commit.nodes) == 9
661 assert len(commit.nodes) == 9
662
662
663 docs = root.get_node('docs')
663 docs = root.get_node('docs')
664 # we haven't yet accessed anything new, as the docs dir was already cached
664 # we haven't yet accessed anything new, as the docs dir was already cached
665 assert len(commit.nodes) == 9
665 assert len(commit.nodes) == 9
666 assert len(docs.nodes) == 8
666 assert len(docs.nodes) == 8
667 # accessing docs.nodes updates commit.nodes
667 # accessing docs.nodes updates commit.nodes
668 assert len(commit.nodes) == 17
668 assert len(commit.nodes) == 17
669
669
670 assert docs is commit.get_node('docs')
670 assert docs is commit.get_node('docs')
671 assert docs is root.nodes[0]
671 assert docs is root.nodes[0]
672 assert docs is root.dirs[0]
672 assert docs is root.dirs[0]
673 assert docs is commit.get_node('docs')
673 assert docs is commit.get_node('docs')
674
674
675 def test_nodes_with_commit(self):
675 def test_nodes_with_commit(self):
676 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
676 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
677 commit = self.repo.get_commit(commit_id)
677 commit = self.repo.get_commit(commit_id)
678 root = commit.root
678 root = commit.root
679 docs = root.get_node('docs')
679 docs = root.get_node('docs')
680 assert docs is commit.get_node('docs')
680 assert docs is commit.get_node('docs')
681 api = docs.get_node('api')
681 api = docs.get_node('api')
682 assert api is commit.get_node('docs/api')
682 assert api is commit.get_node('docs/api')
683 index = api.get_node('index.rst')
683 index = api.get_node('index.rst')
684 assert index is commit.get_node('docs/api/index.rst')
684 assert index is commit.get_node('docs/api/index.rst')
685 assert index is commit.get_node('docs')\
685 assert index is commit.get_node('docs')\
686 .get_node('api')\
686 .get_node('api')\
687 .get_node('index.rst')
687 .get_node('index.rst')
688
688
689 def test_branch_and_tags(self):
689 def test_branch_and_tags(self):
690 """
690 """
691 rev0 = self.repo.commit_ids[0]
691 rev0 = self.repo.commit_ids[0]
692 commit0 = self.repo.get_commit(rev0)
692 commit0 = self.repo.get_commit(rev0)
693 assert commit0.branch == 'master'
693 assert commit0.branch == 'master'
694 assert commit0.tags == []
694 assert commit0.tags == []
695
695
696 rev10 = self.repo.commit_ids[10]
696 rev10 = self.repo.commit_ids[10]
697 commit10 = self.repo.get_commit(rev10)
697 commit10 = self.repo.get_commit(rev10)
698 assert commit10.branch == 'master'
698 assert commit10.branch == 'master'
699 assert commit10.tags == []
699 assert commit10.tags == []
700
700
701 rev44 = self.repo.commit_ids[44]
701 rev44 = self.repo.commit_ids[44]
702 commit44 = self.repo.get_commit(rev44)
702 commit44 = self.repo.get_commit(rev44)
703 assert commit44.branch == 'web-branch'
703 assert commit44.branch == 'web-branch'
704
704
705 tip = self.repo.get_commit('tip')
705 tip = self.repo.get_commit('tip')
706 assert 'tip' in tip.tags
706 assert 'tip' in tip.tags
707 """
707 """
708 # These tests would fail - branches are going
708 # These tests would fail - branches are going
709 # to be changed in the main API in order to support the git backend
709 # to be changed in the main API in order to support the git backend
710 pass
710 pass
711
711
712 def test_file_size(self):
712 def test_file_size(self):
713 to_check = (
713 to_check = (
714 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
714 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
715 'vcs/backends/BaseRepository.py', 502),
715 'vcs/backends/BaseRepository.py', 502),
716 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
716 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
717 'vcs/backends/hg.py', 854),
717 'vcs/backends/hg.py', 854),
718 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
718 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
719 'setup.py', 1068),
719 'setup.py', 1068),
720
720
721 ('d955cd312c17b02143c04fa1099a352b04368118',
721 ('d955cd312c17b02143c04fa1099a352b04368118',
722 'vcs/backends/base.py', 2921),
722 'vcs/backends/base.py', 2921),
723 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
723 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
724 'vcs/backends/base.py', 3936),
724 'vcs/backends/base.py', 3936),
725 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
725 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
726 'vcs/backends/base.py', 6189),
726 'vcs/backends/base.py', 6189),
727 )
727 )
728 for commit_id, path, size in to_check:
728 for commit_id, path, size in to_check:
729 node = self.repo.get_commit(commit_id).get_node(path)
729 node = self.repo.get_commit(commit_id).get_node(path)
730 assert node.is_file()
730 assert node.is_file()
731 assert node.size == size
731 assert node.size == size
732
732
733 def test_file_history_from_commits(self):
733 def test_file_history_from_commits(self):
734 node = self.repo[10].get_node('setup.py')
734 node = self.repo[10].get_node('setup.py')
735 commit_ids = [commit.raw_id for commit in node.history]
735 commit_ids = [commit.raw_id for commit in node.history]
736 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
736 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
737
737
738 node = self.repo[20].get_node('setup.py')
738 node = self.repo[20].get_node('setup.py')
739 node_ids = [commit.raw_id for commit in node.history]
739 node_ids = [commit.raw_id for commit in node.history]
740 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
740 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
741 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
741 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
742
742
743 # special case: we check history from a commit that has this particular
743 # special case: we check history from a commit that has this particular
744 # file changed; this means we check that it's included as well
744 # file changed; this means we check that it's included as well
745 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
745 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
746 .get_node('setup.py')
746 .get_node('setup.py')
747 node_ids = [commit.raw_id for commit in node.history]
747 node_ids = [commit.raw_id for commit in node.history]
748 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
748 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
749 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
749 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
750
750
751 def test_file_history(self):
751 def test_file_history(self):
752 # we can only check if those commits are present in the history
752 # we can only check if those commits are present in the history
753 # as we cannot update this test every time the file is changed
753 # as we cannot update this test every time the file is changed
754 files = {
754 files = {
755 'setup.py': [
755 'setup.py': [
756 '54386793436c938cff89326944d4c2702340037d',
756 '54386793436c938cff89326944d4c2702340037d',
757 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
757 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
758 '998ed409c795fec2012b1c0ca054d99888b22090',
758 '998ed409c795fec2012b1c0ca054d99888b22090',
759 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
759 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
760 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
760 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
761 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
761 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
762 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
762 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
763 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
763 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
764 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
764 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
765 ],
765 ],
766 'vcs/nodes.py': [
766 'vcs/nodes.py': [
767 '33fa3223355104431402a888fa77a4e9956feb3e',
767 '33fa3223355104431402a888fa77a4e9956feb3e',
768 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
768 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
769 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
769 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
770 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
770 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
771 'c877b68d18e792a66b7f4c529ea02c8f80801542',
771 'c877b68d18e792a66b7f4c529ea02c8f80801542',
772 '4313566d2e417cb382948f8d9d7c765330356054',
772 '4313566d2e417cb382948f8d9d7c765330356054',
773 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
773 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
774 '54386793436c938cff89326944d4c2702340037d',
774 '54386793436c938cff89326944d4c2702340037d',
775 '54000345d2e78b03a99d561399e8e548de3f3203',
775 '54000345d2e78b03a99d561399e8e548de3f3203',
776 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
776 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
777 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
777 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
778 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
778 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
779 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
779 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
780 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
780 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
781 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
781 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
782 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
782 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
783 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
783 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
784 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
784 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
785 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
785 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
786 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
786 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
787 'f15c21f97864b4f071cddfbf2750ec2e23859414',
787 'f15c21f97864b4f071cddfbf2750ec2e23859414',
788 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
788 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
789 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
789 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
790 '84dec09632a4458f79f50ddbbd155506c460b4f9',
790 '84dec09632a4458f79f50ddbbd155506c460b4f9',
791 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
791 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
792 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
792 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
793 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
793 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
794 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
794 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
795 '6970b057cffe4aab0a792aa634c89f4bebf01441',
795 '6970b057cffe4aab0a792aa634c89f4bebf01441',
796 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
796 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
797 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
797 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
798 ],
798 ],
799 'vcs/backends/git.py': [
799 'vcs/backends/git.py': [
800 '4cf116ad5a457530381135e2f4c453e68a1b0105',
800 '4cf116ad5a457530381135e2f4c453e68a1b0105',
801 '9a751d84d8e9408e736329767387f41b36935153',
801 '9a751d84d8e9408e736329767387f41b36935153',
802 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
802 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
803 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
803 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
804 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
804 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
805 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
805 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
806 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
806 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
807 '54000345d2e78b03a99d561399e8e548de3f3203',
807 '54000345d2e78b03a99d561399e8e548de3f3203',
808 ],
808 ],
809 }
809 }
810 for path, commit_ids in files.items():
810 for path, commit_ids in files.items():
811 node = self.repo.get_commit(commit_ids[0]).get_node(path)
811 node = self.repo.get_commit(commit_ids[0]).get_node(path)
812 node_ids = [commit.raw_id for commit in node.history]
812 node_ids = [commit.raw_id for commit in node.history]
813 assert set(commit_ids).issubset(set(node_ids)), (
813 assert set(commit_ids).issubset(set(node_ids)), (
814 "We assumed that %s is subset of commit_ids for which file %s "
814 "We assumed that %s is subset of commit_ids for which file %s "
815 "has been changed, and history of that node returned: %s"
815 "has been changed, and history of that node returned: %s"
816 % (commit_ids, path, node_ids))
816 % (commit_ids, path, node_ids))
817
817
818 def test_file_annotate(self):
818 def test_file_annotate(self):
819 files = {
819 files = {
820 'vcs/backends/__init__.py': {
820 'vcs/backends/__init__.py': {
821 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
821 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
822 'lines_no': 1,
822 'lines_no': 1,
823 'commits': [
823 'commits': [
824 'c1214f7e79e02fc37156ff215cd71275450cffc3',
824 'c1214f7e79e02fc37156ff215cd71275450cffc3',
825 ],
825 ],
826 },
826 },
827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
828 'lines_no': 21,
828 'lines_no': 21,
829 'commits': [
829 'commits': [
830 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
830 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
835 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
835 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
836 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
836 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
837 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
837 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
850 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
850 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
851 ],
851 ],
852 },
852 },
853 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
853 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
854 'lines_no': 32,
854 'lines_no': 32,
855 'commits': [
855 'commits': [
856 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
856 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
857 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
857 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
858 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
858 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
859 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
859 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
861 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 '54000345d2e78b03a99d561399e8e548de3f3203',
863 '54000345d2e78b03a99d561399e8e548de3f3203',
864 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
864 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
866 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
866 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
868 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
868 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
869 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
869 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
870 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
870 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
871 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
871 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
872 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
872 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
873 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
873 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
874 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
874 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
875 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
875 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
876 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
876 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
877 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
877 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
878 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
878 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
879 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
879 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
880 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
880 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
881 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
881 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
882 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
882 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
883 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
883 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
884 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
884 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
885 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
885 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
886 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
886 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
887 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
887 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
888 ],
888 ],
889 },
889 },
890 },
890 },
891 }
891 }
892
892
893 for fname, commit_dict in files.items():
893 for fname, commit_dict in files.items():
894 for commit_id, __ in commit_dict.items():
894 for commit_id, __ in commit_dict.items():
895 commit = self.repo.get_commit(commit_id)
895 commit = self.repo.get_commit(commit_id)
896
896
897 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
897 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
898 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
898 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
899 assert l1_1 == l1_2
899 assert l1_1 == l1_2
900 l1 = l1_1
900 l1 = l1_1
901 l2 = files[fname][commit_id]['commits']
901 l2 = files[fname][commit_id]['commits']
902 assert l1 == l2, (
902 assert l1 == l2, (
903 "The lists of commit_ids for %s@commit_id %s"
903 "The lists of commit_ids for %s@commit_id %s"
904 "from annotation list should match each other, "
904 "from annotation list should match each other, "
905 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
905 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
906
906
907 def test_files_state(self):
907 def test_files_state(self):
908 """
908 """
909 Tests state of FileNodes.
909 Tests state of FileNodes.
910 """
910 """
911 node = self.repo\
911 node = self.repo\
912 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
912 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
913 .get_node('vcs/utils/diffs.py')
913 .get_node('vcs/utils/diffs.py')
914 assert node.state == NodeState.ADDED
914 assert node.state == NodeState.ADDED
915 assert node.added
915 assert node.added
916 assert not node.changed
916 assert not node.changed
917 assert not node.not_changed
917 assert not node.not_changed
918 assert not node.removed
918 assert not node.removed
919
919
920 node = self.repo\
920 node = self.repo\
921 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
921 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
922 .get_node('.hgignore')
922 .get_node('.hgignore')
923 assert node.state == NodeState.CHANGED
923 assert node.state == NodeState.CHANGED
924 assert not node.added
924 assert not node.added
925 assert node.changed
925 assert node.changed
926 assert not node.not_changed
926 assert not node.not_changed
927 assert not node.removed
927 assert not node.removed
928
928
929 node = self.repo\
929 node = self.repo\
930 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
930 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
931 .get_node('setup.py')
931 .get_node('setup.py')
932 assert node.state == NodeState.NOT_CHANGED
932 assert node.state == NodeState.NOT_CHANGED
933 assert not node.added
933 assert not node.added
934 assert not node.changed
934 assert not node.changed
935 assert node.not_changed
935 assert node.not_changed
936 assert not node.removed
936 assert not node.removed
937
937
938 # If a node has the REMOVED state then trying to fetch it raises a
938 # If a node has the REMOVED state then trying to fetch it raises a
939 # NodeDoesNotExistError exception
939 # NodeDoesNotExistError exception
940 commit = self.repo.get_commit(
940 commit = self.repo.get_commit(
941 'fa6600f6848800641328adbf7811fd2372c02ab2')
941 'fa6600f6848800641328adbf7811fd2372c02ab2')
942 path = 'vcs/backends/BaseRepository.py'
942 path = 'vcs/backends/BaseRepository.py'
943 with pytest.raises(NodeDoesNotExistError):
943 with pytest.raises(NodeDoesNotExistError):
944 commit.get_node(path)
944 commit.get_node(path)
945 # but it is listed in the commit's ``removed`` attribute
945 # but it is listed in the commit's ``removed`` attribute
946 assert path in [rf.path for rf in commit.removed]
946 assert path in [rf.path for rf in commit.removed]
947
947
948 commit = self.repo.get_commit(
948 commit = self.repo.get_commit(
949 '54386793436c938cff89326944d4c2702340037d')
949 '54386793436c938cff89326944d4c2702340037d')
950 changed = [
950 changed = [
951 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
951 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
952 'vcs/nodes.py']
952 'vcs/nodes.py']
953 assert set(changed) == set([f.path for f in commit.changed])
953 assert set(changed) == set([f.path for f in commit.changed])
954
954
955 def test_unicode_branch_refs(self):
955 def test_unicode_branch_refs(self):
956 unicode_branches = {
956 unicode_branches = {
957 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
957 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
958 u'refs/heads/uniçö∂e': 'ürl',
958 u'refs/heads/uniçö∂e': 'ürl',
959 }
959 }
960 with mock.patch(
960 with mock.patch(
961 ("rhodecode.lib.vcs.backends.git.repository"
961 ("rhodecode.lib.vcs.backends.git.repository"
962 ".GitRepository._refs"),
962 ".GitRepository._refs"),
963 unicode_branches):
963 unicode_branches):
964 branches = self.repo.branches
964 branches = self.repo.branches
965
965
966 assert 'unicode' in branches
966 assert 'unicode' in branches
967 assert u'uniçö∂e' in branches
967 assert u'uniçö∂e' in branches
968
968
969 def test_unicode_tag_refs(self):
969 def test_unicode_tag_refs(self):
970 unicode_tags = {
970 unicode_tags = {
971 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
971 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
972 u'refs/tags/uniçö∂e': '6c0ce52b229aa978889e91b38777f800e85f330b',
972 u'refs/tags/uniçö∂e': '6c0ce52b229aa978889e91b38777f800e85f330b',
973 }
973 }
974 with mock.patch(
974 with mock.patch(
975 ("rhodecode.lib.vcs.backends.git.repository"
975 ("rhodecode.lib.vcs.backends.git.repository"
976 ".GitRepository._refs"),
976 ".GitRepository._refs"),
977 unicode_tags):
977 unicode_tags):
978 tags = self.repo.tags
978 tags = self.repo.tags
979
979
980 assert 'unicode' in tags
980 assert 'unicode' in tags
981 assert u'uniçö∂e' in tags
981 assert u'uniçö∂e' in tags
982
982
983 def test_commit_message_is_unicode(self):
983 def test_commit_message_is_unicode(self):
984 for commit in self.repo:
984 for commit in self.repo:
985 assert type(commit.message) == unicode
985 assert type(commit.message) == unicode
986
986
987 def test_commit_author_is_unicode(self):
987 def test_commit_author_is_unicode(self):
988 for commit in self.repo:
988 for commit in self.repo:
989 assert type(commit.author) == unicode
989 assert type(commit.author) == unicode
990
990
991 def test_repo_files_content_is_unicode(self):
991 def test_repo_files_content_is_unicode(self):
992 commit = self.repo.get_commit()
992 commit = self.repo.get_commit()
993 for node in commit.get_node('/'):
993 for node in commit.get_node('/'):
994 if node.is_file():
994 if node.is_file():
995 assert type(node.content) == unicode
995 assert type(node.content) == unicode
996
996
997 def test_wrong_path(self):
997 def test_wrong_path(self):
998 # There is a 'setup.py' in the root dir, but not at this path:
998 # There is a 'setup.py' in the root dir, but not at this path:
999 path = 'foo/bar/setup.py'
999 path = 'foo/bar/setup.py'
1000 tip = self.repo.get_commit()
1000 tip = self.repo.get_commit()
1001 with pytest.raises(VCSError):
1001 with pytest.raises(VCSError):
1002 tip.get_node(path)
1002 tip.get_node(path)
1003
1003
1004 @pytest.mark.parametrize("author_email, commit_id", [
1004 @pytest.mark.parametrize("author_email, commit_id", [
1005 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1005 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1006 ('lukasz.balcerzak@python-center.pl',
1006 ('lukasz.balcerzak@python-center.pl',
1007 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1007 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1008 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1008 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1009 ])
1009 ])
1010 def test_author_email(self, author_email, commit_id):
1010 def test_author_email(self, author_email, commit_id):
1011 commit = self.repo.get_commit(commit_id)
1011 commit = self.repo.get_commit(commit_id)
1012 assert author_email == commit.author_email
1012 assert author_email == commit.author_email
1013
1013
1014 @pytest.mark.parametrize("author, commit_id", [
1014 @pytest.mark.parametrize("author, commit_id", [
1015 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1015 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1016 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1016 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1017 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1017 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1018 ])
1018 ])
1019 def test_author_username(self, author, commit_id):
1019 def test_author_username(self, author, commit_id):
1020 commit = self.repo.get_commit(commit_id)
1020 commit = self.repo.get_commit(commit_id)
1021 assert author == commit.author_name
1021 assert author == commit.author_name
1022
1022
1023
1023
1024 class TestLargeFileRepo(object):
1024 class TestLargeFileRepo(object):
1025
1025
1026 def test_large_file(self, backend_git):
1026 def test_large_file(self, backend_git):
1027 conf = make_db_config()
1027 conf = make_db_config()
1028 repo = backend_git.create_test_repo('largefiles', conf)
1028 repo = backend_git.create_test_repo('largefiles', conf)
1029
1029
1030 tip = repo.scm_instance().get_commit()
1030 tip = repo.scm_instance().get_commit()
1031
1031
1032 # extract stored LF node into the origin cache
1032 # extract stored LF node into the origin cache
1033 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1033 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1034
1034
1035 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1035 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1036 oid_path = os.path.join(lfs_store, oid)
1036 oid_path = os.path.join(lfs_store, oid)
1037 oid_destination = os.path.join(
1037 oid_destination = os.path.join(
1038 conf.get('vcs_git_lfs', 'store_location'), oid)
1038 conf.get('vcs_git_lfs', 'store_location'), oid)
1039 shutil.copy(oid_path, oid_destination)
1039 shutil.copy(oid_path, oid_destination)
1040
1040
1041 node = tip.get_node('1MB.zip')
1041 node = tip.get_node('1MB.zip')
1042
1042
1043 lf_node = node.get_largefile_node()
1043 lf_node = node.get_largefile_node()
1044
1044
1045 assert lf_node.is_largefile() is True
1045 assert lf_node.is_largefile() is True
1046 assert lf_node.size == 1024000
1046 assert lf_node.size == 1024000
1047 assert lf_node.name == '1MB.zip'
1047 assert lf_node.name == '1MB.zip'
1048
1048
1049
1049
1050 @pytest.mark.usefixtures("vcs_repository_support")
1050 @pytest.mark.usefixtures("vcs_repository_support")
1051 class TestGitSpecificWithRepo(BackendTestMixin):
1051 class TestGitSpecificWithRepo(BackendTestMixin):
1052
1052
1053 @classmethod
1053 @classmethod
1054 def _get_commits(cls):
1054 def _get_commits(cls):
1055 return [
1055 return [
1056 {
1056 {
1057 'message': 'Initial',
1057 'message': 'Initial',
1058 'author': 'Joe Doe <joe.doe@example.com>',
1058 'author': 'Joe Doe <joe.doe@example.com>',
1059 'date': datetime.datetime(2010, 1, 1, 20),
1059 'date': datetime.datetime(2010, 1, 1, 20),
1060 'added': [
1060 'added': [
1061 FileNode('foobar/static/js/admin/base.js', content='base'),
1061 FileNode('foobar/static/js/admin/base.js', content='base'),
1062 FileNode(
1062 FileNode(
1063 'foobar/static/admin', content='admin',
1063 'foobar/static/admin', content='admin',
1064 mode=0o120000), # this is a link
1064 mode=0o120000), # this is a link
1065 FileNode('foo', content='foo'),
1065 FileNode('foo', content='foo'),
1066 ],
1066 ],
1067 },
1067 },
1068 {
1068 {
1069 'message': 'Second',
1069 'message': 'Second',
1070 'author': 'Joe Doe <joe.doe@example.com>',
1070 'author': 'Joe Doe <joe.doe@example.com>',
1071 'date': datetime.datetime(2010, 1, 1, 22),
1071 'date': datetime.datetime(2010, 1, 1, 22),
1072 'added': [
1072 'added': [
1073 FileNode('foo2', content='foo2'),
1073 FileNode('foo2', content='foo2'),
1074 ],
1074 ],
1075 },
1075 },
1076 ]
1076 ]
1077
1077
1078 def test_paths_slow_traversing(self):
1078 def test_paths_slow_traversing(self):
1079 commit = self.repo.get_commit()
1079 commit = self.repo.get_commit()
1080 assert commit.get_node('foobar').get_node('static').get_node('js')\
1080 assert commit.get_node('foobar').get_node('static').get_node('js')\
1081 .get_node('admin').get_node('base.js').content == 'base'
1081 .get_node('admin').get_node('base.js').content == 'base'
1082
1082
1083 def test_paths_fast_traversing(self):
1083 def test_paths_fast_traversing(self):
1084 commit = self.repo.get_commit()
1084 commit = self.repo.get_commit()
1085 assert (
1085 assert (
1086 commit.get_node('foobar/static/js/admin/base.js').content ==
1086 commit.get_node('foobar/static/js/admin/base.js').content ==
1087 'base')
1087 'base')
1088
1088
1089 def test_get_diff_runs_git_command_with_hashes(self):
--- old body (removed in this commit, lines 1090-1112) ---
1090 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1091 self.repo.get_diff(self.repo[0], self.repo[1])
1092 self.repo.run_git_command.assert_called_once_with(
1093 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1094 '--abbrev=40', self.repo._lookup_commit(0),
1095 self.repo._lookup_commit(1)])
1096
1097 def test_get_diff_runs_git_command_with_str_hashes(self):
1098 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1099 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1100 self.repo.run_git_command.assert_called_once_with(
1101 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1102 '--abbrev=40', self.repo._lookup_commit(1)])
1103
1104 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1105 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1106 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1107 self.repo.run_git_command.assert_called_once_with(
1108 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1109 '--abbrev=40', self.repo._lookup_commit(0),
1110 self.repo._lookup_commit(1), '--', 'foo'])
1111
1112
--- new body (added in this commit, lines 1090-1117) ---
1090 comm1 = self.repo[0]
1091 comm2 = self.repo[1]
1092 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1093 self.repo.get_diff(comm1, comm2)
1094
1095 self.repo.run_git_command.assert_called_once_with(
1096 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1097 '--abbrev=40', comm1.raw_id, comm2.raw_id])
1098
1099 def test_get_diff_runs_git_command_with_str_hashes(self):
1100 comm2 = self.repo[1]
1101 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1102 self.repo.get_diff(self.repo.EMPTY_COMMIT, comm2)
1103 self.repo.run_git_command.assert_called_once_with(
1104 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1105 '--abbrev=40', comm2.raw_id])
1106
1107 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1108 comm1 = self.repo[0]
1109 comm2 = self.repo[1]
1110 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1111 self.repo.get_diff(comm1, comm2, 'foo')
1112 self.repo.run_git_command.assert_called_once_with(
1113 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1114 '--abbrev=40', self.repo._lookup_commit(0),
1115 comm2.raw_id, '--', 'foo'])
1116
1117
1117
1113 @pytest.mark.usefixtures("vcs_repository_support")
1118 @pytest.mark.usefixtures("vcs_repository_support")
1114 class TestGitRegression(BackendTestMixin):
1119 class TestGitRegression(BackendTestMixin):
1115
1120
1116 @classmethod
1121 @classmethod
1117 def _get_commits(cls):
1122 def _get_commits(cls):
1118 return [
1123 return [
1119 {
1124 {
1120 'message': 'Initial',
1125 'message': 'Initial',
1121 'author': 'Joe Doe <joe.doe@example.com>',
1126 'author': 'Joe Doe <joe.doe@example.com>',
1122 'date': datetime.datetime(2010, 1, 1, 20),
1127 'date': datetime.datetime(2010, 1, 1, 20),
1123 'added': [
1128 'added': [
1124 FileNode('bot/__init__.py', content='base'),
1129 FileNode('bot/__init__.py', content='base'),
1125 FileNode('bot/templates/404.html', content='base'),
1130 FileNode('bot/templates/404.html', content='base'),
1126 FileNode('bot/templates/500.html', content='base'),
1131 FileNode('bot/templates/500.html', content='base'),
1127 ],
1132 ],
1128 },
1133 },
1129 {
1134 {
1130 'message': 'Second',
1135 'message': 'Second',
1131 'author': 'Joe Doe <joe.doe@example.com>',
1136 'author': 'Joe Doe <joe.doe@example.com>',
1132 'date': datetime.datetime(2010, 1, 1, 22),
1137 'date': datetime.datetime(2010, 1, 1, 22),
1133 'added': [
1138 'added': [
1134 FileNode('bot/build/migrations/1.py', content='foo2'),
1139 FileNode('bot/build/migrations/1.py', content='foo2'),
1135 FileNode('bot/build/migrations/2.py', content='foo2'),
1140 FileNode('bot/build/migrations/2.py', content='foo2'),
1136 FileNode(
1141 FileNode(
1137 'bot/build/static/templates/f.html', content='foo2'),
1142 'bot/build/static/templates/f.html', content='foo2'),
1138 FileNode(
1143 FileNode(
1139 'bot/build/static/templates/f1.html', content='foo2'),
1144 'bot/build/static/templates/f1.html', content='foo2'),
1140 FileNode('bot/build/templates/err.html', content='foo2'),
1145 FileNode('bot/build/templates/err.html', content='foo2'),
1141 FileNode('bot/build/templates/err2.html', content='foo2'),
1146 FileNode('bot/build/templates/err2.html', content='foo2'),
1142 ],
1147 ],
1143 },
1148 },
1144 ]
1149 ]
1145
1150
1146 @pytest.mark.parametrize("path, expected_paths", [
1151 @pytest.mark.parametrize("path, expected_paths", [
1147 ('bot', [
1152 ('bot', [
1148 'bot/build',
1153 'bot/build',
1149 'bot/templates',
1154 'bot/templates',
1150 'bot/__init__.py']),
1155 'bot/__init__.py']),
1151 ('bot/build', [
1156 ('bot/build', [
1152 'bot/build/migrations',
1157 'bot/build/migrations',
1153 'bot/build/static',
1158 'bot/build/static',
1154 'bot/build/templates']),
1159 'bot/build/templates']),
1155 ('bot/build/static', [
1160 ('bot/build/static', [
1156 'bot/build/static/templates']),
1161 'bot/build/static/templates']),
1157 ('bot/build/static/templates', [
1162 ('bot/build/static/templates', [
1158 'bot/build/static/templates/f.html',
1163 'bot/build/static/templates/f.html',
1159 'bot/build/static/templates/f1.html']),
1164 'bot/build/static/templates/f1.html']),
1160 ('bot/build/templates', [
1165 ('bot/build/templates', [
1161 'bot/build/templates/err.html',
1166 'bot/build/templates/err.html',
1162 'bot/build/templates/err2.html']),
1167 'bot/build/templates/err2.html']),
1163 ('bot/templates/', [
1168 ('bot/templates/', [
1164 'bot/templates/404.html',
1169 'bot/templates/404.html',
1165 'bot/templates/500.html']),
1170 'bot/templates/500.html']),
1166 ])
1171 ])
1167 def test_similar_paths(self, path, expected_paths):
1172 def test_similar_paths(self, path, expected_paths):
1168 commit = self.repo.get_commit()
1173 commit = self.repo.get_commit()
1169 paths = [n.path for n in commit.get_nodes(path)]
1174 paths = [n.path for n in commit.get_nodes(path)]
1170 assert paths == expected_paths
1175 assert paths == expected_paths
1171
1176
1172
1177
1173 class TestDiscoverGitVersion(object):
1178 class TestDiscoverGitVersion(object):
1174
1179
1175 def test_returns_git_version(self, baseapp):
1180 def test_returns_git_version(self, baseapp):
1176 version = discover_git_version()
1181 version = discover_git_version()
1177 assert version
1182 assert version
1178
1183
1179 def test_returns_empty_string_without_vcsserver(self):
1184 def test_returns_empty_string_without_vcsserver(self):
1180 mock_connection = mock.Mock()
1185 mock_connection = mock.Mock()
1181 mock_connection.discover_git_version = mock.Mock(
1186 mock_connection.discover_git_version = mock.Mock(
1182 side_effect=Exception)
1187 side_effect=Exception)
1183 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1188 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1184 version = discover_git_version()
1189 version = discover_git_version()
1185 assert version == ''
1190 assert version == ''
1186
1191
1187
1192
1188 class TestGetSubmoduleUrl(object):
1193 class TestGetSubmoduleUrl(object):
1189 def test_submodules_file_found(self):
1194 def test_submodules_file_found(self):
1190 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1195 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1191 node = mock.Mock()
1196 node = mock.Mock()
1192 with mock.patch.object(
1197 with mock.patch.object(
1193 commit, 'get_node', return_value=node) as get_node_mock:
1198 commit, 'get_node', return_value=node) as get_node_mock:
1194 node.content = (
1199 node.content = (
1195 '[submodule "subrepo1"]\n'
1200 '[submodule "subrepo1"]\n'
1196 '\tpath = subrepo1\n'
1201 '\tpath = subrepo1\n'
1197 '\turl = https://code.rhodecode.com/dulwich\n'
1202 '\turl = https://code.rhodecode.com/dulwich\n'
1198 )
1203 )
1199 result = commit._get_submodule_url('subrepo1')
1204 result = commit._get_submodule_url('subrepo1')
1200 get_node_mock.assert_called_once_with('.gitmodules')
1205 get_node_mock.assert_called_once_with('.gitmodules')
1201 assert result == 'https://code.rhodecode.com/dulwich'
1206 assert result == 'https://code.rhodecode.com/dulwich'
1202
1207
1203 def test_complex_submodule_path(self):
1208 def test_complex_submodule_path(self):
1204 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1209 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1205 node = mock.Mock()
1210 node = mock.Mock()
1206 with mock.patch.object(
1211 with mock.patch.object(
1207 commit, 'get_node', return_value=node) as get_node_mock:
1212 commit, 'get_node', return_value=node) as get_node_mock:
1208 node.content = (
1213 node.content = (
1209 '[submodule "complex/subrepo/path"]\n'
1214 '[submodule "complex/subrepo/path"]\n'
1210 '\tpath = complex/subrepo/path\n'
1215 '\tpath = complex/subrepo/path\n'
1211 '\turl = https://code.rhodecode.com/dulwich\n'
1216 '\turl = https://code.rhodecode.com/dulwich\n'
1212 )
1217 )
1213 result = commit._get_submodule_url('complex/subrepo/path')
1218 result = commit._get_submodule_url('complex/subrepo/path')
1214 get_node_mock.assert_called_once_with('.gitmodules')
1219 get_node_mock.assert_called_once_with('.gitmodules')
1215 assert result == 'https://code.rhodecode.com/dulwich'
1220 assert result == 'https://code.rhodecode.com/dulwich'
1216
1221
1217 def test_submodules_file_not_found(self):
1222 def test_submodules_file_not_found(self):
1218 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1223 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1219 with mock.patch.object(
1224 with mock.patch.object(
1220 commit, 'get_node', side_effect=NodeDoesNotExistError):
1225 commit, 'get_node', side_effect=NodeDoesNotExistError):
1221 result = commit._get_submodule_url('complex/subrepo/path')
1226 result = commit._get_submodule_url('complex/subrepo/path')
1222 assert result is None
1227 assert result is None
1223
1228
1224 def test_path_not_found(self):
1229 def test_path_not_found(self):
1225 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1230 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1226 node = mock.Mock()
1231 node = mock.Mock()
1227 with mock.patch.object(
1232 with mock.patch.object(
1228 commit, 'get_node', return_value=node) as get_node_mock:
1233 commit, 'get_node', return_value=node) as get_node_mock:
1229 node.content = (
1234 node.content = (
1230 '[submodule "subrepo1"]\n'
1235 '[submodule "subrepo1"]\n'
1231 '\tpath = subrepo1\n'
1236 '\tpath = subrepo1\n'
1232 '\turl = https://code.rhodecode.com/dulwich\n'
1237 '\turl = https://code.rhodecode.com/dulwich\n'
1233 )
1238 )
1234 result = commit._get_submodule_url('subrepo2')
1239 result = commit._get_submodule_url('subrepo2')
1235 get_node_mock.assert_called_once_with('.gitmodules')
1240 get_node_mock.assert_called_once_with('.gitmodules')
1236 assert result is None
1241 assert result is None
1237
1242
1238 def test_returns_cached_values(self):
1243 def test_returns_cached_values(self):
1239 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1244 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1240 node = mock.Mock()
1245 node = mock.Mock()
1241 with mock.patch.object(
1246 with mock.patch.object(
1242 commit, 'get_node', return_value=node) as get_node_mock:
1247 commit, 'get_node', return_value=node) as get_node_mock:
1243 node.content = (
1248 node.content = (
1244 '[submodule "subrepo1"]\n'
1249 '[submodule "subrepo1"]\n'
1245 '\tpath = subrepo1\n'
1250 '\tpath = subrepo1\n'
1246 '\turl = https://code.rhodecode.com/dulwich\n'
1251 '\turl = https://code.rhodecode.com/dulwich\n'
1247 )
1252 )
1248 for _ in range(3):
1253 for _ in range(3):
1249 commit._get_submodule_url('subrepo1')
1254 commit._get_submodule_url('subrepo1')
1250 get_node_mock.assert_called_once_with('.gitmodules')
1255 get_node_mock.assert_called_once_with('.gitmodules')
1251
1256
1252 def test_get_node_returns_a_link(self):
1257 def test_get_node_returns_a_link(self):
1253 repository = mock.Mock()
1258 repository = mock.Mock()
1254 repository.alias = 'git'
1259 repository.alias = 'git'
1255 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1260 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1256 submodule_url = 'https://code.rhodecode.com/dulwich'
1261 submodule_url = 'https://code.rhodecode.com/dulwich'
1257 get_id_patch = mock.patch.object(
1262 get_id_patch = mock.patch.object(
1258 commit, '_get_id_for_path', return_value=(1, 'link'))
1263 commit, '_get_id_for_path', return_value=(1, 'link'))
1259 get_submodule_patch = mock.patch.object(
1264 get_submodule_patch = mock.patch.object(
1260 commit, '_get_submodule_url', return_value=submodule_url)
1265 commit, '_get_submodule_url', return_value=submodule_url)
1261
1266
1262 with get_id_patch, get_submodule_patch as submodule_mock:
1267 with get_id_patch, get_submodule_patch as submodule_mock:
1263 node = commit.get_node('/abcde')
1268 node = commit.get_node('/abcde')
1264
1269
1265 submodule_mock.assert_called_once_with('/abcde')
1270 submodule_mock.assert_called_once_with('/abcde')
1266 assert type(node) == SubModuleNode
1271 assert type(node) == SubModuleNode
1267 assert node.url == submodule_url
1272 assert node.url == submodule_url
1268
1273
1269 def test_get_nodes_returns_links(self):
1274 def test_get_nodes_returns_links(self):
1270 repository = mock.MagicMock()
1275 repository = mock.MagicMock()
1271 repository.alias = 'git'
1276 repository.alias = 'git'
1272 repository._remote.tree_items.return_value = [
1277 repository._remote.tree_items.return_value = [
1273 ('subrepo', 'stat', 1, 'link')
1278 ('subrepo', 'stat', 1, 'link')
1274 ]
1279 ]
1275 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1280 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1276 submodule_url = 'https://code.rhodecode.com/dulwich'
1281 submodule_url = 'https://code.rhodecode.com/dulwich'
1277 get_id_patch = mock.patch.object(
1282 get_id_patch = mock.patch.object(
1278 commit, '_get_id_for_path', return_value=(1, 'tree'))
1283 commit, '_get_id_for_path', return_value=(1, 'tree'))
1279 get_submodule_patch = mock.patch.object(
1284 get_submodule_patch = mock.patch.object(
1280 commit, '_get_submodule_url', return_value=submodule_url)
1285 commit, '_get_submodule_url', return_value=submodule_url)
1281
1286
1282 with get_id_patch, get_submodule_patch as submodule_mock:
1287 with get_id_patch, get_submodule_patch as submodule_mock:
1283 nodes = commit.get_nodes('/abcde')
1288 nodes = commit.get_nodes('/abcde')
1284
1289
1285 submodule_mock.assert_called_once_with('/abcde/subrepo')
1290 submodule_mock.assert_called_once_with('/abcde/subrepo')
1286 assert len(nodes) == 1
1291 assert len(nodes) == 1
1287 assert type(nodes[0]) == SubModuleNode
1292 assert type(nodes[0]) == SubModuleNode
1288 assert nodes[0].url == submodule_url
1293 assert nodes[0].url == submodule_url
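A minimal, self-contained sketch (not part of the file above) of the mocking pattern the diff tests rely on: replace run_git_command with a mock.Mock and assert on the exact argument list handed to git. FakeRepo and the commit ids below are hypothetical, for illustration only.

import mock


class FakeRepo(object):
    def run_git_command(self, cmd):
        # the real implementation would shell out to git and return its output
        return '', ''

    def get_diff(self, commit_id1, commit_id2):
        # build the git CLI invocation for a diff between two commits
        return self.run_git_command(
            ['diff', '-U3', '--full-index', commit_id1, commit_id2])


repo = FakeRepo()
# swap the command runner for a mock so no real git process is started
repo.run_git_command = mock.Mock(return_value=('', ''))
repo.get_diff('abc123', 'def456')
# the test then asserts on the exact command line that was constructed
repo.run_git_command.assert_called_once_with(
    ['diff', '-U3', '--full-index', 'abc123', 'def456'])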